From 486d398c7d68ce1b0784a540feb7be5fb2c680c1 Mon Sep 17 00:00:00 2001 From: Julio Castillo Date: Fri, 11 Nov 2022 19:05:39 +0100 Subject: [PATCH 1/7] Update logging sink to tf1.3 in resman modules --- fast/stages/00-bootstrap/organization.tf | 10 +- modules/bigquery-dataset/outputs.tf | 15 ++ modules/folder/README.md | 40 ++--- modules/folder/logging.tf | 38 +++-- modules/folder/variables.tf | 35 +++-- modules/gcs/outputs.tf | 25 ++- modules/logging-bucket/outputs.tf | 13 ++ modules/organization/README.md | 40 ++--- modules/organization/logging.tf | 30 ++-- modules/organization/variables.tf | 40 +++-- modules/project/README.md | 104 ++++++------ modules/project/logging.tf | 38 +++-- modules/project/variables.tf | 37 +++-- modules/pubsub/outputs.tf | 11 ++ .../folder/fixture/test.logging-sinks.tfvars | 37 +++++ tests/modules/folder/test_plan_logging.py | 148 +++++++----------- .../fixture/test.logging-sinks.tfvars | 37 +++++ .../modules/organization/test_plan_logging.py | 84 +++------- .../project/fixture/test.logging-sinks.tfvars | 37 +++++ tests/modules/project/fixture/variables.tf | 9 +- tests/modules/project/test_plan_logging.py | 74 ++------- 21 files changed, 484 insertions(+), 418 deletions(-) create mode 100644 tests/modules/folder/fixture/test.logging-sinks.tfvars create mode 100644 tests/modules/organization/fixture/test.logging-sinks.tfvars create mode 100644 tests/modules/project/fixture/test.logging-sinks.tfvars diff --git a/fast/stages/00-bootstrap/organization.tf b/fast/stages/00-bootstrap/organization.tf index 39845012..7a12ea32 100644 --- a/fast/stages/00-bootstrap/organization.tf +++ b/fast/stages/00-bootstrap/organization.tf @@ -192,13 +192,9 @@ module "organization" { } logging_sinks = { for name, attrs in var.log_sinks : name => { - bq_partitioned_table = attrs.type == "bigquery" - destination = local.log_sink_destinations[name].id - exclusions = {} - filter = attrs.filter - iam = true - include_children = true - type = attrs.type + destination 
= local.log_sink_destinations[name].as_logging_destination + filter = attrs.filter + bigquery_use_partitioned_table = attrs.type == "bigquery" } } } diff --git a/modules/bigquery-dataset/outputs.tf b/modules/bigquery-dataset/outputs.tf index dd2da22c..11d7f3bb 100644 --- a/modules/bigquery-dataset/outputs.tf +++ b/modules/bigquery-dataset/outputs.tf @@ -14,6 +14,21 @@ * limitations under the License. */ +output "as_logging_destination" { + description = "Parameters to use this dataset as a log sink destination." + value = { + type = "bigquery" + target = google_bigquery_dataset.default.id + } + depends_on = [ + google_bigquery_dataset_access.domain, + google_bigquery_dataset_access.group_by_email, + google_bigquery_dataset_access.special_group, + google_bigquery_dataset_access.user_by_email, + google_bigquery_dataset_access.views + ] +} + output "dataset" { description = "Dataset resource." value = google_bigquery_dataset.default diff --git a/modules/folder/README.md b/modules/folder/README.md index 2190eaac..4e5e752f 100644 --- a/modules/folder/README.md +++ b/modules/folder/README.md @@ -166,37 +166,27 @@ module "bucket" { id = "bucket" } + module "folder-sink" { source = "./fabric/modules/folder" parent = "folders/657104291943" name = "my-folder" logging_sinks = { warnings = { - type = "storage" - destination = module.gcs.id - filter = "severity=WARNING" - include_children = true - exclusions = {} + destination = module.gcs.as_logging_destination + filter = "severity=WARNING" } info = { - type = "bigquery" - destination = module.dataset.id - filter = "severity=INFO" - include_children = true - exclusions = {} + destination = module.dataset.as_logging_destination + filter = "severity=INFO" } notice = { - type = "pubsub" - destination = module.pubsub.id - filter = "severity=NOTICE" - include_children = true - exclusions = {} + destination = module.pubsub.as_logging_destination + filter = "severity=NOTICE" } debug = { - type = "logging" - destination = 
module.bucket.id - filter = "severity=DEBUG" - include_children = true + destination = module.bucket.as_logging_destination + filter = "severity=DEBUG" exclusions = { no-compute = "logName:compute" } @@ -312,12 +302,12 @@ module "folder" { | [iam_additive_members](variables.tf#L85) | IAM additive bindings in {MEMBERS => [ROLE]} format. This might break if members are dynamic values. | map(list(string)) | | {} | | [id](variables.tf#L92) | Folder ID in case you use folder_create=false. | string | | null | | [logging_exclusions](variables.tf#L98) | Logging exclusions for this folder in the form {NAME -> FILTER}. | map(string) | | {} | -| [logging_sinks](variables.tf#L105) | Logging sinks to create for this folder. | map(object({…})) | | {} | -| [name](variables.tf#L126) | Folder name. | string | | null | -| [org_policies](variables.tf#L132) | Organization policies applied to this folder keyed by policy name. | map(object({…})) | | {} | -| [org_policies_data_path](variables.tf#L172) | Path containing org policies in YAML format. | string | | null | -| [parent](variables.tf#L178) | Parent in folders/folder_id or organizations/org_id format. | string | | null | -| [tag_bindings](variables.tf#L188) | Tag bindings for this folder, in key => tag value id format. | map(string) | | null | +| [logging_sinks](variables.tf#L105) | Logging sinks to create for this folder. | map(object({…})) | | {} | +| [name](variables.tf#L137) | Folder name. | string | | null | +| [org_policies](variables.tf#L143) | Organization policies applied to this folder keyed by policy name. | map(object({…})) | | {} | +| [org_policies_data_path](variables.tf#L183) | Path containing org policies in YAML format. | string | | null | +| [parent](variables.tf#L189) | Parent in folders/folder_id or organizations/org_id format. | string | | null | +| [tag_bindings](variables.tf#L199) | Tag bindings for this folder, in key => tag value id format. 
| map(string) | | null | ## Outputs diff --git a/modules/folder/logging.tf b/modules/folder/logging.tf index d6a195e1..517b74e1 100644 --- a/modules/folder/logging.tf +++ b/modules/folder/logging.tf @@ -22,19 +22,27 @@ locals { type => { for name, sink in var.logging_sinks : name => sink - if sink.type == type + if sink.destination.type == type } } } resource "google_logging_folder_sink" "sink" { - for_each = var.logging_sinks - name = each.key - #description = "${each.key} (Terraform-managed)." + for_each = var.logging_sinks + name = each.key + description = coalesce(each.value.description, "${each.key} (Terraform-managed).") folder = local.folder.name - destination = "${each.value.type}.googleapis.com/${each.value.destination}" + destination = "${each.value.destination.type}.googleapis.com/${each.value.destination.target}" filter = each.value.filter include_children = each.value.include_children + disabled = each.value.disabled + + dynamic "bigquery_options" { + for_each = each.value.bigquery_use_partitioned_table != null ? 
[""] : [] + content { + use_partitioned_tables = each.value.bigquery_use_partitioned_table + } + } dynamic "exclusions" { for_each = each.value.exclusions @@ -52,34 +60,38 @@ resource "google_logging_folder_sink" "sink" { resource "google_storage_bucket_iam_member" "gcs-sinks-binding" { for_each = local.sink_bindings["storage"] - bucket = each.value.destination + bucket = each.value.destination.target role = "roles/storage.objectCreator" member = google_logging_folder_sink.sink[each.key].writer_identity } resource "google_bigquery_dataset_iam_member" "bq-sinks-binding" { for_each = local.sink_bindings["bigquery"] - project = split("/", each.value.destination)[1] - dataset_id = split("/", each.value.destination)[3] + project = split("/", each.value.destination.target)[1] + dataset_id = split("/", each.value.destination.target)[3] role = "roles/bigquery.dataEditor" member = google_logging_folder_sink.sink[each.key].writer_identity } resource "google_pubsub_topic_iam_member" "pubsub-sinks-binding" { for_each = local.sink_bindings["pubsub"] - project = split("/", each.value.destination)[1] - topic = split("/", each.value.destination)[3] + project = split("/", each.value.destination.target)[1] + topic = split("/", each.value.destination.target)[3] role = "roles/pubsub.publisher" member = google_logging_folder_sink.sink[each.key].writer_identity } resource "google_project_iam_member" "bucket-sinks-binding" { for_each = local.sink_bindings["logging"] - project = split("/", each.value.destination)[1] + project = split("/", each.value.destination.target)[1] role = "roles/logging.bucketWriter" member = google_logging_folder_sink.sink[each.key].writer_identity - # TODO(jccb): use a condition to limit writer-identity only to this - # bucket + + condition { + title = "${each.key} bucket writer" + description = "Grants bucketWriter to ${google_logging_folder_sink.sink[each.key].writer_identity} used by log sink ${each.key} on ${local.folder.id}" + expression = 
"resource.name.endsWith('${each.value.destination.target}')" + } } resource "google_logging_folder_exclusion" "logging-exclusion" { diff --git a/modules/folder/variables.tf b/modules/folder/variables.tf index 359531b7..b5824884 100644 --- a/modules/folder/variables.tf +++ b/modules/folder/variables.tf @@ -105,22 +105,33 @@ variable "logging_exclusions" { variable "logging_sinks" { description = "Logging sinks to create for this folder." type = map(object({ - destination = string - type = string + bigquery_use_partitioned_table = optional(bool) + description = optional(string) + destination = object({ + type = string + target = string + }) + disabled = optional(bool, false) + exclusions = optional(map(string), {}) filter = string - include_children = bool - # TODO exclusions also support description and disabled - exclusions = map(string) + include_children = optional(bool, true) })) - validation { - condition = alltrue([ - for k, v in(var.logging_sinks == null ? {} : var.logging_sinks) : - contains(["bigquery", "logging", "pubsub", "storage"], v.type) - ]) - error_message = "Type must be one of 'bigquery', 'logging', 'pubsub', 'storage'." - } default = {} nullable = false + validation { + condition = alltrue([ + for k, v in var.logging_sinks : + contains(["bigquery", "logging", "pubsub", "storage"], v.destination.type) + ]) + error_message = "Destination type must be one of 'bigquery', 'logging', 'pubsub', 'storage'." + } + validation { + condition = alltrue([ + for k, v in var.logging_sinks : + v.bigquery_use_partitioned_table != true || v.destination.type == "bigquery" + ]) + error_message = "Can only set bigquery_use_partitioned_table when destination type is `bigquery`." + } } variable "name" { diff --git a/modules/gcs/outputs.tf b/modules/gcs/outputs.tf index a00c04cf..a92118da 100644 --- a/modules/gcs/outputs.tf +++ b/modules/gcs/outputs.tf @@ -14,26 +14,23 @@ * limitations under the License. */ -output "bucket" { - description = "Bucket resource." 
- value = google_storage_bucket.bucket -} - -# We add `id` as an alias to `name` to simplify log sink handling. -# Since all other log destinations (pubsub, logging-bucket, bigquery) -# have an id output, it is convenient to have in this module too to -# handle all log destination as homogeneous objects (i.e. you can -# assume any valid log destination has an `id` output). - -output "id" { - description = "Bucket ID (same as name)." - value = "${local.prefix}${lower(var.name)}" +output "as_logging_destination" { + description = "Parameters to use this bucket as a log sink destination." + value = { + type = "storage" + target = "${local.prefix}${lower(var.name)}" + } depends_on = [ google_storage_bucket.bucket, google_storage_bucket_iam_binding.bindings ] } +output "bucket" { + description = "Bucket resource." + value = google_storage_bucket.bucket +} + output "name" { description = "Bucket name." value = "${local.prefix}${lower(var.name)}" diff --git a/modules/logging-bucket/outputs.tf b/modules/logging-bucket/outputs.tf index 7100237e..945c56f9 100644 --- a/modules/logging-bucket/outputs.tf +++ b/modules/logging-bucket/outputs.tf @@ -14,6 +14,19 @@ * limitations under the License. */ +output "as_logging_destination" { + description = "Parameters to use this bucket as a log sink destination." + value = { + type = "logging" + target = try( + google_logging_project_bucket_config.bucket.0.id, + google_logging_folder_bucket_config.bucket.0.id, + google_logging_organization_bucket_config.bucket.0.id, + google_logging_billing_account_bucket_config.bucket.0.id, + ) + } +} + output "id" { description = "ID of the created bucket." 
value = try( diff --git a/modules/organization/README.md b/modules/organization/README.md index 95c4e2e8..3adbff3d 100644 --- a/modules/organization/README.md +++ b/modules/organization/README.md @@ -311,35 +311,21 @@ module "org" { logging_sinks = { warnings = { - type = "storage" - destination = module.gcs.id + destination = module.gcs.as_logging_destination filter = "severity=WARNING" - include_children = true - bq_partitioned_table = null - exclusions = {} } info = { - type = "bigquery" - destination = module.dataset.id + destination = module.dataset.as_logging_destination filter = "severity=INFO" - include_children = true - bq_partitioned_table = true - exclusions = {} + bigquery_use_partitioned_table = true } notice = { - type = "pubsub" - destination = module.pubsub.id + destination = module.pubsub.as_logging_destination filter = "severity=NOTICE" - include_children = true - bq_partitioned_table = null - exclusions = {} } debug = { - type = "logging" - destination = module.bucket.id + destination = module.bucket.as_logging_destination filter = "severity=DEBUG" - include_children = false - bq_partitioned_table = null exclusions = { no-compute = "logName:compute" } @@ -425,7 +411,7 @@ module "org" { | name | description | type | required | default | |---|---|:---:|:---:|:---:| -| [organization_id](variables.tf#L217) | Organization id in organizations/nnnnnn format. | string | ✓ | | +| [organization_id](variables.tf#L227) | Organization id in organizations/nnnnnn format. | string | ✓ | | | [contacts](variables.tf#L17) | List of essential contacts for this resource. Must be in the form EMAIL -> [NOTIFICATION_TYPES]. Valid notification types are ALL, SUSPENSION, SECURITY, TECHNICAL, BILLING, LEGAL, PRODUCT_UPDATES. | map(list(string)) | | {} | | [custom_roles](variables.tf#L24) | Map of role name => list of permissions to create in this project. 
| map(list(string)) | | {} | | [firewall_policies](variables.tf#L31) | Hierarchical firewall policy rules created in the organization. | map(map(object({…}))) | | {} | @@ -439,13 +425,13 @@ module "org" { | [iam_audit_config_authoritative](variables.tf#L105) | IAM Authoritative service audit logging configuration. Service as key, map of log permission (eg DATA_READ) and excluded members as value for each service. Audit config should also be authoritative when using authoritative bindings. Use with caution. | map(map(list(string))) | | null | | [iam_bindings_authoritative](variables.tf#L116) | IAM authoritative bindings, in {ROLE => [MEMBERS]} format. Roles and members not explicitly listed will be cleared. Bindings should also be authoritative when using authoritative audit config. Use with caution. | map(list(string)) | | null | | [logging_exclusions](variables.tf#L122) | Logging exclusions for this organization in the form {NAME -> FILTER}. | map(string) | | {} | -| [logging_sinks](variables.tf#L129) | Logging sinks to create for this organization. | map(object({…})) | | {} | -| [org_policies](variables.tf#L151) | Organization policies applied to this organization keyed by policy name. | map(object({…})) | | {} | -| [org_policies_data_path](variables.tf#L191) | Path containing org policies in YAML format. | string | | null | -| [org_policy_custom_constraints](variables.tf#L197) | Organization policiy custom constraints keyed by constraint name. | map(object({…})) | | {} | -| [org_policy_custom_constraints_data_path](variables.tf#L211) | Path containing org policy custom constraints in YAML format. | string | | null | -| [tag_bindings](variables.tf#L227) | Tag bindings for this organization, in key => tag value id format. | map(string) | | null | -| [tags](variables.tf#L233) | Tags by key name. The `iam` attribute behaves like the similarly named one at module level. 
| map(object({…})) | | null | +| [logging_sinks](variables.tf#L129) | Logging sinks to create for the organization. | map(object({…})) | | {} | +| [org_policies](variables.tf#L161) | Organization policies applied to this organization keyed by policy name. | map(object({…})) | | {} | +| [org_policies_data_path](variables.tf#L201) | Path containing org policies in YAML format. | string | | null | +| [org_policy_custom_constraints](variables.tf#L207) | Organization policiy custom constraints keyed by constraint name. | map(object({…})) | | {} | +| [org_policy_custom_constraints_data_path](variables.tf#L221) | Path containing org policy custom constraints in YAML format. | string | | null | +| [tag_bindings](variables.tf#L237) | Tag bindings for this organization, in key => tag value id format. | map(string) | | null | +| [tags](variables.tf#L243) | Tags by key name. The `iam` attribute behaves like the similarly named one at module level. | map(object({…})) | | null | ## Outputs diff --git a/modules/organization/logging.tf b/modules/organization/logging.tf index 0beeb0f8..57f2da1f 100644 --- a/modules/organization/logging.tf +++ b/modules/organization/logging.tf @@ -21,7 +21,7 @@ locals { for type in ["bigquery", "logging", "pubsub", "storage"] : type => { for name, sink in var.logging_sinks : - name => sink if sink.type == type + name => sink if sink.destination.type == type } } } @@ -29,15 +29,17 @@ locals { resource "google_logging_organization_sink" "sink" { for_each = var.logging_sinks name = each.key + description = coalesce(each.value.description, "${each.key} (Terraform-managed).") org_id = local.organization_id_numeric - destination = "${each.value.type}.googleapis.com/${each.value.destination}" + destination = "${each.value.destination.type}.googleapis.com/${each.value.destination.target}" filter = each.value.filter include_children = each.value.include_children + disabled = each.value.disabled dynamic "bigquery_options" { - for_each = 
each.value.bq_partitioned_table == true ? [""] : [] + for_each = each.value.bigquery_use_partitioned_table != null ? [""] : [] content { - use_partitioned_tables = each.value.bq_partitioned_table + use_partitioned_tables = each.value.bigquery_use_partitioned_table } } @@ -49,6 +51,7 @@ resource "google_logging_organization_sink" "sink" { filter = exclusion.value } } + depends_on = [ google_organization_iam_binding.authoritative, google_organization_iam_member.additive, @@ -58,33 +61,38 @@ resource "google_logging_organization_sink" "sink" { resource "google_storage_bucket_iam_member" "storage-sinks-binding" { for_each = local.sink_bindings["storage"] - bucket = each.value.destination + bucket = each.value.destination.target role = "roles/storage.objectCreator" member = google_logging_organization_sink.sink[each.key].writer_identity } resource "google_bigquery_dataset_iam_member" "bq-sinks-binding" { for_each = local.sink_bindings["bigquery"] - project = split("/", each.value.destination)[1] - dataset_id = split("/", each.value.destination)[3] + project = split("/", each.value.destination.target)[1] + dataset_id = split("/", each.value.destination.target)[3] role = "roles/bigquery.dataEditor" member = google_logging_organization_sink.sink[each.key].writer_identity } resource "google_pubsub_topic_iam_member" "pubsub-sinks-binding" { for_each = local.sink_bindings["pubsub"] - project = split("/", each.value.destination)[1] - topic = split("/", each.value.destination)[3] + project = split("/", each.value.destination.target)[1] + topic = split("/", each.value.destination.target)[3] role = "roles/pubsub.publisher" member = google_logging_organization_sink.sink[each.key].writer_identity } resource "google_project_iam_member" "bucket-sinks-binding" { for_each = local.sink_bindings["logging"] - project = split("/", each.value.destination)[1] + project = split("/", each.value.destination.target)[1] role = "roles/logging.bucketWriter" member = 
google_logging_organization_sink.sink[each.key].writer_identity - # TODO(jccb): use a condition to limit writer-identity only to this bucket + + condition { + title = "${each.key} bucket writer" + description = "Grants bucketWriter to ${google_logging_organization_sink.sink[each.key].writer_identity} used by log sink ${each.key} on ${var.organization_id}" + expression = "resource.name.endsWith('${each.value.destination.target}')" + } } resource "google_logging_organization_exclusion" "logging-exclusion" { diff --git a/modules/organization/variables.tf b/modules/organization/variables.tf index 5b98a9e1..3fcbefea 100644 --- a/modules/organization/variables.tf +++ b/modules/organization/variables.tf @@ -127,25 +127,35 @@ variable "logging_exclusions" { } variable "logging_sinks" { - description = "Logging sinks to create for this organization." + description = "Logging sinks to create for the organization." type = map(object({ - destination = string - type = string - filter = string - include_children = bool - bq_partitioned_table = bool - # TODO exclusions also support description and disabled - exclusions = map(string) + bigquery_use_partitioned_table = optional(bool) + description = optional(string) + destination = object({ + type = string + target = string + }) + disabled = optional(bool, false) + exclusions = optional(map(string), {}) + filter = string + include_children = optional(bool, true) })) - validation { - condition = alltrue([ - for k, v in(var.logging_sinks == null ? {} : var.logging_sinks) : - contains(["bigquery", "logging", "pubsub", "storage"], v.type) - ]) - error_message = "Type must be one of 'bigquery', 'logging', 'pubsub', 'storage'." - } default = {} nullable = false + validation { + condition = alltrue([ + for k, v in var.logging_sinks : + contains(["bigquery", "logging", "pubsub", "storage"], v.destination.type) + ]) + error_message = "Destination type must be one of 'bigquery', 'logging', 'pubsub', 'storage'." 
+ } + validation { + condition = alltrue([ + for k, v in var.logging_sinks : + v.bigquery_use_partitioned_table != true || v.destination.type == "bigquery" + ]) + error_message = "Can only set bigquery_use_partitioned_table when destination type is `bigquery`." + } } variable "org_policies" { diff --git a/modules/project/README.md b/modules/project/README.md index 215c782d..3ee8e312 100644 --- a/modules/project/README.md +++ b/modules/project/README.md @@ -276,7 +276,7 @@ compute.restrictLoadBalancerCreationForTypes: ``` -## Logging Sinks +## Logging Sinks (in same project) ```hcl module "gcs" { @@ -312,35 +312,20 @@ module "project-host" { parent = "folders/1234567890" logging_sinks = { warnings = { - type = "storage" - destination = module.gcs.id - filter = "severity=WARNING" - iam = false - unique_writer = false - exclusions = {} + destination = module.gcs.as_logging_destination + filter = "severity=WARNING" } info = { - type = "bigquery" - destination = module.dataset.id - filter = "severity=INFO" - iam = false - unique_writer = false - exclusions = {} + destination = module.dataset.as_logging_destination + filter = "severity=INFO" } notice = { - type = "pubsub" - destination = module.pubsub.id - filter = "severity=NOTICE" - iam = true - unique_writer = false - exclusions = {} + destination = module.pubsub.as_logging_destination + filter = "severity=NOTICE" } debug = { - type = "logging" - destination = module.bucket.id - filter = "severity=DEBUG" - iam = true - unique_writer = false + destination = module.bucket.as_logging_destination + filter = "severity=DEBUG" exclusions = { no-compute = "logName:compute" } @@ -350,9 +335,38 @@ module "project-host" { no-gce-instances = "resource.type=gce_instance" } } -# tftest modules=5 resources=12 +# tftest modules=5 resources=14 ``` +## Logging Sinks (in different project) + +When writing to destinations in a different project, set `unique_writer` to `true`. 
+ +```hcl +module "gcs" { + source = "./fabric/modules/gcs" + project_id = "project-1" + name = "gcs_sink" + force_destroy = true +} + +module "project-host" { + source = "./fabric/modules/project" + name = "project-2" + billing_account = "123456-123456-123456" + parent = "folders/1234567890" + logging_sinks = { + warnings = { + destination = module.gcs.as_logging_destination + filter = "severity=WARNING" + unique_writer = true + } + } +} +# tftest modules=2 resources=4 +``` + + ## Cloud KMS encryption keys The module offers a simple, centralized way to assign `roles/cloudkms.cryptoKeyEncrypterDecrypter` to service identities. @@ -455,7 +469,7 @@ output "compute_robot" { | name | description | type | required | default | |---|---|:---:|:---:|:---:| -| [name](variables.tf#L131) | Project name and id suffix. | string | ✓ | | +| [name](variables.tf#L142) | Project name and id suffix. | string | ✓ | | | [auto_create_network](variables.tf#L17) | Whether to create the default network for the project. | bool | | false | | [billing_account](variables.tf#L23) | Billing account id. | string | | null | | [contacts](variables.tf#L29) | List of essential contacts for this resource. Must be in the form EMAIL -> [NOTIFICATION_TYPES]. Valid notification types are ALL, SUSPENSION, SECURITY, TECHNICAL, BILLING, LEGAL, PRODUCT_UPDATES. | map(list(string)) | | {} | @@ -469,25 +483,25 @@ output "compute_robot" { | [labels](variables.tf#L82) | Resource labels. | map(string) | | {} | | [lien_reason](variables.tf#L89) | If non-empty, creates a project lien with this description. | string | | "" | | [logging_exclusions](variables.tf#L95) | Logging exclusions for this project in the form {NAME -> FILTER}. | map(string) | | {} | -| [logging_sinks](variables.tf#L102) | Logging sinks to create for this project. | map(object({…})) | | {} | -| [metric_scopes](variables.tf#L124) | List of projects that will act as metric scopes for this project. 
| list(string) | | [] | -| [org_policies](variables.tf#L136) | Organization policies applied to this project keyed by policy name. | map(object({…})) | | {} | -| [org_policies_data_path](variables.tf#L176) | Path containing org policies in YAML format. | string | | null | -| [oslogin](variables.tf#L182) | Enable OS Login. | bool | | false | -| [oslogin_admins](variables.tf#L188) | List of IAM-style identities that will be granted roles necessary for OS Login administrators. | list(string) | | [] | -| [oslogin_users](variables.tf#L196) | List of IAM-style identities that will be granted roles necessary for OS Login users. | list(string) | | [] | -| [parent](variables.tf#L203) | Parent folder or organization in 'folders/folder_id' or 'organizations/org_id' format. | string | | null | -| [prefix](variables.tf#L213) | Optional prefix used to generate project id and name. | string | | null | -| [project_create](variables.tf#L223) | Create project. When set to false, uses a data source to reference existing project. | bool | | true | -| [service_config](variables.tf#L229) | Configure service API activation. | object({…}) | | {…} | -| [service_encryption_key_ids](variables.tf#L241) | Cloud KMS encryption key in {SERVICE => [KEY_URL]} format. | map(list(string)) | | {} | -| [service_perimeter_bridges](variables.tf#L248) | Name of VPC-SC Bridge perimeters to add project into. See comment in the variables file for format. | list(string) | | null | -| [service_perimeter_standard](variables.tf#L255) | Name of VPC-SC Standard perimeter to add project into. See comment in the variables file for format. | string | | null | -| [services](variables.tf#L261) | Service APIs to enable. | list(string) | | [] | -| [shared_vpc_host_config](variables.tf#L267) | Configures this project as a Shared VPC host project (mutually exclusive with shared_vpc_service_project). 
| object({…}) | | null | -| [shared_vpc_service_config](variables.tf#L276) | Configures this project as a Shared VPC service project (mutually exclusive with shared_vpc_host_config). | object({…}) | | null | -| [skip_delete](variables.tf#L286) | Allows the underlying resources to be destroyed without destroying the project itself. | bool | | false | -| [tag_bindings](variables.tf#L292) | Tag bindings for this project, in key => tag value id format. | map(string) | | null | +| [logging_sinks](variables.tf#L102) | Logging sinks to create for this project. | map(object({…})) | | {} | +| [metric_scopes](variables.tf#L135) | List of projects that will act as metric scopes for this project. | list(string) | | [] | +| [org_policies](variables.tf#L147) | Organization policies applied to this project keyed by policy name. | map(object({…})) | | {} | +| [org_policies_data_path](variables.tf#L187) | Path containing org policies in YAML format. | string | | null | +| [oslogin](variables.tf#L193) | Enable OS Login. | bool | | false | +| [oslogin_admins](variables.tf#L199) | List of IAM-style identities that will be granted roles necessary for OS Login administrators. | list(string) | | [] | +| [oslogin_users](variables.tf#L207) | List of IAM-style identities that will be granted roles necessary for OS Login users. | list(string) | | [] | +| [parent](variables.tf#L214) | Parent folder or organization in 'folders/folder_id' or 'organizations/org_id' format. | string | | null | +| [prefix](variables.tf#L224) | Optional prefix used to generate project id and name. | string | | null | +| [project_create](variables.tf#L234) | Create project. When set to false, uses a data source to reference existing project. | bool | | true | +| [service_config](variables.tf#L240) | Configure service API activation. | object({…}) | | {…} | +| [service_encryption_key_ids](variables.tf#L252) | Cloud KMS encryption key in {SERVICE => [KEY_URL]} format. 
| map(list(string)) | | {} | +| [service_perimeter_bridges](variables.tf#L259) | Name of VPC-SC Bridge perimeters to add project into. See comment in the variables file for format. | list(string) | | null | +| [service_perimeter_standard](variables.tf#L266) | Name of VPC-SC Standard perimeter to add project into. See comment in the variables file for format. | string | | null | +| [services](variables.tf#L272) | Service APIs to enable. | list(string) | | [] | +| [shared_vpc_host_config](variables.tf#L278) | Configures this project as a Shared VPC host project (mutually exclusive with shared_vpc_service_project). | object({…}) | | null | +| [shared_vpc_service_config](variables.tf#L287) | Configures this project as a Shared VPC service project (mutually exclusive with shared_vpc_host_config). | object({…}) | | null | +| [skip_delete](variables.tf#L297) | Allows the underlying resources to be destroyed without destroying the project itself. | bool | | false | +| [tag_bindings](variables.tf#L303) | Tag bindings for this project, in key => tag value id format. | map(string) | | null | ## Outputs diff --git a/modules/project/logging.tf b/modules/project/logging.tf index 04d7abf1..6293ac32 100644 --- a/modules/project/logging.tf +++ b/modules/project/logging.tf @@ -21,19 +21,27 @@ locals { for type in ["bigquery", "pubsub", "logging", "storage"] : type => { for name, sink in var.logging_sinks : - name => sink if sink.iam && sink.type == type + name => sink if sink.iam && sink.destination.type == type } } } resource "google_logging_project_sink" "sink" { - for_each = var.logging_sinks - name = each.key - #description = "${each.key} (Terraform-managed)." 
+ for_each = var.logging_sinks + name = each.key + description = coalesce(each.value.description, "${each.key} (Terraform-managed).") project = local.project.project_id - destination = "${each.value.type}.googleapis.com/${each.value.destination}" + destination = "${each.value.destination.type}.googleapis.com/${each.value.destination.target}" filter = each.value.filter unique_writer_identity = each.value.unique_writer + disabled = each.value.disabled + + dynamic "bigquery_options" { + for_each = each.value.bigquery_use_partitioned_table != null ? [""] : [] + content { + use_partitioned_tables = each.value.bigquery_use_partitioned_table + } + } dynamic "exclusions" { for_each = each.value.exclusions @@ -52,34 +60,38 @@ resource "google_logging_project_sink" "sink" { resource "google_storage_bucket_iam_member" "gcs-sinks-binding" { for_each = local.sink_bindings["storage"] - bucket = each.value.destination + bucket = each.value.destination.target role = "roles/storage.objectCreator" member = google_logging_project_sink.sink[each.key].writer_identity } resource "google_bigquery_dataset_iam_member" "bq-sinks-binding" { for_each = local.sink_bindings["bigquery"] - project = split("/", each.value.destination)[1] - dataset_id = split("/", each.value.destination)[3] + project = split("/", each.value.destination.target)[1] + dataset_id = split("/", each.value.destination.target)[3] role = "roles/bigquery.dataEditor" member = google_logging_project_sink.sink[each.key].writer_identity } resource "google_pubsub_topic_iam_member" "pubsub-sinks-binding" { for_each = local.sink_bindings["pubsub"] - project = split("/", each.value.destination)[1] - topic = split("/", each.value.destination)[3] + project = split("/", each.value.destination.target)[1] + topic = split("/", each.value.destination.target)[3] role = "roles/pubsub.publisher" member = google_logging_project_sink.sink[each.key].writer_identity } resource "google_project_iam_member" "bucket-sinks-binding" { for_each = 
local.sink_bindings["logging"] - project = split("/", each.value.destination)[1] + project = split("/", each.value.destination.target)[1] role = "roles/logging.bucketWriter" member = google_logging_project_sink.sink[each.key].writer_identity - # TODO(jccb): use a condition to limit writer-identity only to this - # bucket + + condition { + title = "${each.key} bucket writer" + description = "Grants bucketWriter to ${google_logging_project_sink.sink[each.key].writer_identity} used by log sink ${each.key} on ${local.project.project_id}" + expression = "resource.name.endsWith('${each.value.destination.target}')" + } } resource "google_logging_project_exclusion" "logging-exclusion" { diff --git a/modules/project/variables.tf b/modules/project/variables.tf index be388d60..b3f8fa52 100644 --- a/modules/project/variables.tf +++ b/modules/project/variables.tf @@ -102,23 +102,34 @@ variable "logging_exclusions" { variable "logging_sinks" { description = "Logging sinks to create for this project." type = map(object({ - destination = string - type = string + bigquery_use_partitioned_table = optional(bool) + description = optional(string) + destination = object({ + type = string + target = string + }) + disabled = optional(bool, false) + exclusions = optional(map(string), {}) filter = string - iam = bool - unique_writer = bool - # TODO exclusions also support description and disabled - exclusions = map(string) + iam = optional(bool, true) + unique_writer = optional(bool) })) - validation { - condition = alltrue([ - for k, v in(var.logging_sinks == null ? {} : var.logging_sinks) : - contains(["bigquery", "logging", "pubsub", "storage"], v.type) - ]) - error_message = "Type must be one of 'bigquery', 'logging', 'pubsub', 'storage'." 
- } default = {} nullable = false + validation { + condition = alltrue([ + for k, v in var.logging_sinks : + contains(["bigquery", "logging", "pubsub", "storage"], v.destination.type) + ]) + error_message = "Destination type must be one of 'bigquery', 'logging', 'pubsub', 'storage'." + } + validation { + condition = alltrue([ + for k, v in var.logging_sinks : + v.bigquery_use_partitioned_table != true || v.destination.type == "bigquery" + ]) + error_message = "Can only set bigquery_use_partitioned_table when destination type is `bigquery`." + } } variable "metric_scopes" { diff --git a/modules/pubsub/outputs.tf b/modules/pubsub/outputs.tf index c26eb4d9..9a2be657 100644 --- a/modules/pubsub/outputs.tf +++ b/modules/pubsub/outputs.tf @@ -14,6 +14,17 @@ * limitations under the License. */ +output "as_logging_destination" { + description = "Parameters to use this topic as a log sink destination." + value = { + type = "pubsub" + target = google_pubsub_topic.default.id + } + depends_on = [ + google_pubsub_topic_iam_binding.default + ] +} + output "id" { description = "Topic id." 
value = google_pubsub_topic.default.id diff --git a/tests/modules/folder/fixture/test.logging-sinks.tfvars b/tests/modules/folder/fixture/test.logging-sinks.tfvars new file mode 100644 index 00000000..b7dc6687 --- /dev/null +++ b/tests/modules/folder/fixture/test.logging-sinks.tfvars @@ -0,0 +1,37 @@ +logging_sinks = { + warning = { + destination = { + type = "storage" + target = "mybucket" + } + filter = "severity=WARNING" + } + info = { + destination = { + type = "bigquery" + target = "projects/myproject/datasets/mydataset" + } + filter = "severity=INFO" + disabled = true + } + notice = { + destination = { + type = "pubsub" + target = "projects/myproject/topics/mytopic" + } + filter = "severity=NOTICE" + include_children = false + } + debug = { + destination = { + type = "logging" + target = "projects/myproject/locations/global/buckets/mybucket" + } + filter = "severity=DEBUG" + include_children = false + exclusions = { + no-compute = "logName:compute" + no-container = "logName:container" + } + } +} diff --git a/tests/modules/folder/test_plan_logging.py b/tests/modules/folder/test_plan_logging.py index 9f3da533..be627155 100644 --- a/tests/modules/folder/test_plan_logging.py +++ b/tests/modules/folder/test_plan_logging.py @@ -14,47 +14,13 @@ from collections import Counter +from icecream import ic + + def test_sinks(plan_runner): "Test folder-level sinks." 
- logging_sinks = """ { - warning = { - type = "storage" - destination = "mybucket" - filter = "severity=WARNING" - iam = true - include_children = true - exclusions = {} - } - info = { - type = "bigquery" - destination = "projects/myproject/datasets/mydataset" - filter = "severity=INFO" - iam = true - include_children = true - exclusions = {} - } - notice = { - type = "pubsub" - destination = "projects/myproject/topics/mytopic" - filter = "severity=NOTICE" - iam = true - include_children = false - exclusions = {} - } - debug = { - type = "logging" - destination = "projects/myproject/locations/global/buckets/mybucket" - filter = "severity=DEBUG" - iam = true - include_children = false - exclusions = { - no-compute = "logName:compute" - no-container = "logName:container" - } - } - } - """ - _, resources = plan_runner(logging_sinks=logging_sinks) + tfvars = 'test.logging-sinks.tfvars' + _, resources = plan_runner(tf_var_file=tfvars) assert len(resources) == 9 resource_types = Counter([r["type"] for r in resources]) @@ -74,65 +40,59 @@ def test_sinks(plan_runner): "notice", "warning", ] - values = [ - ( - r["index"], - r["values"]["filter"], - r["values"]["destination"], - r["values"]["include_children"], - ) - for r in sinks - ] + values = [( + r["index"], + r["values"]["filter"], + r["values"]["destination"], + r["values"]["description"], + r["values"]["include_children"], + r["values"]["disabled"], + ) for r in sinks] assert sorted(values) == [ - ( - "debug", - "severity=DEBUG", - "logging.googleapis.com/projects/myproject/locations/global/buckets/mybucket", - False, - ), - ( - "info", - "severity=INFO", - "bigquery.googleapis.com/projects/myproject/datasets/mydataset", - True, - ), - ( - "notice", - "severity=NOTICE", - "pubsub.googleapis.com/projects/myproject/topics/mytopic", - False, - ), - ("warning", "severity=WARNING", "storage.googleapis.com/mybucket", True), + ("debug", "severity=DEBUG", + 
"logging.googleapis.com/projects/myproject/locations/global/buckets/mybucket", + "debug (Terraform-managed).", False, False), + ("info", "severity=INFO", + "bigquery.googleapis.com/projects/myproject/datasets/mydataset", + "info (Terraform-managed).", True, True), + ("notice", "severity=NOTICE", + "pubsub.googleapis.com/projects/myproject/topics/mytopic", + "notice (Terraform-managed).", False, False), + ("warning", "severity=WARNING", "storage.googleapis.com/mybucket", + "warning (Terraform-managed).", True, False), ] bindings = [r for r in resources if "member" in r["type"]] - values = [(r["index"], r["type"], r["values"]["role"]) for r in bindings] + values = [(r["index"], r["type"], r["values"]["role"], + r["values"]["condition"]) for r in bindings] assert sorted(values) == [ - ("debug", "google_project_iam_member", "roles/logging.bucketWriter"), - ("info", "google_bigquery_dataset_iam_member", "roles/bigquery.dataEditor"), - ("notice", "google_pubsub_topic_iam_member", "roles/pubsub.publisher"), - ("warning", "google_storage_bucket_iam_member", "roles/storage.objectCreator"), + ("debug", "google_project_iam_member", "roles/logging.bucketWriter", [{ + 'expression': + "resource.name.endsWith('projects/myproject/locations/global/buckets/mybucket')", + 'title': + 'debug bucket writer' + }]), + ("info", "google_bigquery_dataset_iam_member", + "roles/bigquery.dataEditor", []), + ("notice", "google_pubsub_topic_iam_member", "roles/pubsub.publisher", + []), + ("warning", "google_storage_bucket_iam_member", + "roles/storage.objectCreator", []), ] exclusions = [(r["index"], r["values"]["exclusions"]) for r in sinks] assert sorted(exclusions) == [ - ( - "debug", - [ - { - "description": None, - "disabled": False, - "filter": "logName:compute", - "name": "no-compute", - }, - { - "description": None, - "disabled": False, - "filter": "logName:container", - "name": "no-container", - }, - ], - ), + ("debug", [{ + "description": None, + "disabled": False, + "filter": 
"logName:compute", + "name": "no-compute" + }, { + "description": None, + "disabled": False, + "filter": "logName:container", + "name": "no-container" + }]), ("info", []), ("notice", []), ("warning", []), @@ -141,12 +101,10 @@ def test_sinks(plan_runner): def test_exclusions(plan_runner): "Test folder-level logging exclusions." - logging_exclusions = ( - "{" - 'exclusion1 = "resource.type=gce_instance", ' - 'exclusion2 = "severity=NOTICE", ' - "}" - ) + logging_exclusions = ("{" + 'exclusion1 = "resource.type=gce_instance", ' + 'exclusion2 = "severity=NOTICE", ' + "}") _, resources = plan_runner(logging_exclusions=logging_exclusions) assert len(resources) == 3 exclusions = [ diff --git a/tests/modules/organization/fixture/test.logging-sinks.tfvars b/tests/modules/organization/fixture/test.logging-sinks.tfvars new file mode 100644 index 00000000..b7dc6687 --- /dev/null +++ b/tests/modules/organization/fixture/test.logging-sinks.tfvars @@ -0,0 +1,37 @@ +logging_sinks = { + warning = { + destination = { + type = "storage" + target = "mybucket" + } + filter = "severity=WARNING" + } + info = { + destination = { + type = "bigquery" + target = "projects/myproject/datasets/mydataset" + } + filter = "severity=INFO" + disabled = true + } + notice = { + destination = { + type = "pubsub" + target = "projects/myproject/topics/mytopic" + } + filter = "severity=NOTICE" + include_children = false + } + debug = { + destination = { + type = "logging" + target = "projects/myproject/locations/global/buckets/mybucket" + } + filter = "severity=DEBUG" + include_children = false + exclusions = { + no-compute = "logName:compute" + no-container = "logName:container" + } + } +} diff --git a/tests/modules/organization/test_plan_logging.py b/tests/modules/organization/test_plan_logging.py index 5d3b862f..287a5a48 100644 --- a/tests/modules/organization/test_plan_logging.py +++ b/tests/modules/organization/test_plan_logging.py @@ -17,49 +17,8 @@ from collections import Counter def 
test_sinks(plan_runner): "Test folder-level sinks." - logging_sinks = """ { - warning = { - type = "storage" - destination = "mybucket" - filter = "severity=WARNING" - iam = true - include_children = true - bq_partitioned_table = null - exclusions = {} - } - info = { - type = "bigquery" - destination = "projects/myproject/datasets/mydataset" - filter = "severity=INFO" - iam = true - include_children = true - bq_partitioned_table = false - exclusions = {} - } - notice = { - type = "pubsub" - destination = "projects/myproject/topics/mytopic" - filter = "severity=NOTICE" - iam = true - include_children = false - bq_partitioned_table = null - exclusions = {} - } - debug = { - type = "logging" - destination = "projects/myproject/locations/global/buckets/mybucket" - filter = "severity=DEBUG" - iam = true - include_children = false - bq_partitioned_table = null - exclusions = { - no-compute = "logName:compute" - no-container = "logName:container" - } - } - } - """ - _, resources = plan_runner(logging_sinks=logging_sinks) + tfvars = 'test.logging-sinks.tfvars' + _, resources = plan_runner(tf_var_file=tfvars) assert len(resources) == 8 resource_types = Counter([r["type"] for r in resources]) @@ -71,23 +30,21 @@ def test_sinks(plan_runner): "google_storage_bucket_iam_member": 1, } - sinks = [r for r in resources if r["type"] - == "google_logging_organization_sink"] + sinks = [ + r for r in resources if r["type"] == "google_logging_organization_sink" + ] assert sorted([r["index"] for r in sinks]) == [ "debug", "info", "notice", "warning", ] - values = [ - ( - r["index"], - r["values"]["filter"], - r["values"]["destination"], - r["values"]["include_children"], - ) - for r in sinks - ] + values = [( + r["index"], + r["values"]["filter"], + r["values"]["destination"], + r["values"]["include_children"], + ) for r in sinks] assert sorted(values) == [ ( "debug", @@ -114,9 +71,11 @@ def test_sinks(plan_runner): values = [(r["index"], r["type"], r["values"]["role"]) for r in 
bindings] assert sorted(values) == [ ("debug", "google_project_iam_member", "roles/logging.bucketWriter"), - ("info", "google_bigquery_dataset_iam_member", "roles/bigquery.dataEditor"), + ("info", "google_bigquery_dataset_iam_member", + "roles/bigquery.dataEditor"), ("notice", "google_pubsub_topic_iam_member", "roles/pubsub.publisher"), - ("warning", "google_storage_bucket_iam_member", "roles/storage.objectCreator"), + ("warning", "google_storage_bucket_iam_member", + "roles/storage.objectCreator"), ] exclusions = [(r["index"], r["values"]["exclusions"]) for r in sinks] @@ -146,16 +105,15 @@ def test_sinks(plan_runner): def test_exclusions(plan_runner): "Test folder-level logging exclusions." - logging_exclusions = ( - "{" - 'exclusion1 = "resource.type=gce_instance", ' - 'exclusion2 = "severity=NOTICE", ' - "}" - ) + logging_exclusions = ("{" + 'exclusion1 = "resource.type=gce_instance", ' + 'exclusion2 = "severity=NOTICE", ' + "}") _, resources = plan_runner(logging_exclusions=logging_exclusions) assert len(resources) == 2 exclusions = [ - r for r in resources if r["type"] == "google_logging_organization_exclusion" + r for r in resources + if r["type"] == "google_logging_organization_exclusion" ] assert sorted([r["index"] for r in exclusions]) == [ "exclusion1", diff --git a/tests/modules/project/fixture/test.logging-sinks.tfvars b/tests/modules/project/fixture/test.logging-sinks.tfvars new file mode 100644 index 00000000..35f99170 --- /dev/null +++ b/tests/modules/project/fixture/test.logging-sinks.tfvars @@ -0,0 +1,37 @@ +logging_sinks = { + warning = { + destination = { + type = "storage" + target = "mybucket" + } + filter = "severity=WARNING" + } + info = { + destination = { + type = "bigquery" + target = "projects/myproject/datasets/mydataset" + } + filter = "severity=INFO" + disabled = true + } + notice = { + destination = { + type = "pubsub" + target = "projects/myproject/topics/mytopic" + } + filter = "severity=NOTICE" + unique_writer = true + } + debug = 
{ + destination = { + type = "logging" + target = "projects/myproject/locations/global/buckets/mybucket" + } + filter = "severity=DEBUG" + exclusions = { + no-compute = "logName:compute" + no-container = "logName:container" + } + unique_writer = true + } +} diff --git a/tests/modules/project/fixture/variables.tf b/tests/modules/project/fixture/variables.tf index 93843396..4c3474f0 100644 --- a/tests/modules/project/fixture/variables.tf +++ b/tests/modules/project/fixture/variables.tf @@ -110,14 +110,7 @@ variable "services" { } variable "logging_sinks" { - type = map(object({ - destination = string - type = string - filter = string - iam = bool - exclusions = map(string) - unique_writer = bool - })) + type = any default = {} } diff --git a/tests/modules/project/test_plan_logging.py b/tests/modules/project/test_plan_logging.py index 1ad3e64a..59c9179b 100644 --- a/tests/modules/project/test_plan_logging.py +++ b/tests/modules/project/test_plan_logging.py @@ -17,45 +17,8 @@ from collections import Counter def test_sinks(plan_runner): "Test folder-level sinks." 
- logging_sinks = """ { - warning = { - type = "storage" - destination = "mybucket" - filter = "severity=WARNING" - iam = true - exclusions = {} - unique_writer = false - } - info = { - type = "bigquery" - destination = "projects/myproject/datasets/mydataset" - filter = "severity=INFO" - iam = true - exclusions = {} - unique_writer = false - } - notice = { - type = "pubsub" - destination = "projects/myproject/topics/mytopic" - filter = "severity=NOTICE" - iam = true - exclusions = {} - unique_writer = false - } - debug = { - type = "logging" - destination = "projects/myproject/locations/global/buckets/mybucket" - filter = "severity=DEBUG" - iam = true - exclusions = { - no-compute = "logName:compute" - no-container = "logName:container" - } - unique_writer = true - } - } - """ - _, resources = plan_runner(logging_sinks=logging_sinks) + tfvars = 'test.logging-sinks.tfvars' + _, resources = plan_runner(tf_var_file=tfvars) assert len(resources) == 12 resource_types = Counter([r["type"] for r in resources]) @@ -77,15 +40,12 @@ def test_sinks(plan_runner): "notice", "warning", ] - values = [ - ( - r["index"], - r["values"]["filter"], - r["values"]["destination"], - r["values"]["unique_writer_identity"], - ) - for r in sinks - ] + values = [( + r["index"], + r["values"]["filter"], + r["values"]["destination"], + r["values"]["unique_writer_identity"], + ) for r in sinks] assert sorted(values) == [ ( "debug", @@ -103,7 +63,7 @@ def test_sinks(plan_runner): "notice", "severity=NOTICE", "pubsub.googleapis.com/projects/myproject/topics/mytopic", - False, + True, ), ("warning", "severity=WARNING", "storage.googleapis.com/mybucket", False), ] @@ -112,9 +72,11 @@ def test_sinks(plan_runner): values = [(r["index"], r["type"], r["values"]["role"]) for r in bindings] assert sorted(values) == [ ("debug", "google_project_iam_member", "roles/logging.bucketWriter"), - ("info", "google_bigquery_dataset_iam_member", "roles/bigquery.dataEditor"), + ("info", 
"google_bigquery_dataset_iam_member", + "roles/bigquery.dataEditor"), ("notice", "google_pubsub_topic_iam_member", "roles/pubsub.publisher"), - ("warning", "google_storage_bucket_iam_member", "roles/storage.objectCreator"), + ("warning", "google_storage_bucket_iam_member", + "roles/storage.objectCreator"), ] exclusions = [(r["index"], r["values"]["exclusions"]) for r in sinks] @@ -144,12 +106,10 @@ def test_sinks(plan_runner): def test_exclusions(plan_runner): "Test folder-level logging exclusions." - logging_exclusions = ( - "{" - 'exclusion1 = "resource.type=gce_instance", ' - 'exclusion2 = "severity=NOTICE", ' - "}" - ) + logging_exclusions = ("{" + 'exclusion1 = "resource.type=gce_instance", ' + 'exclusion2 = "severity=NOTICE", ' + "}") _, resources = plan_runner(logging_exclusions=logging_exclusions) assert len(resources) == 6 exclusions = [ From c7fe6da12e59d30ede80c6b7c14d526240aed516 Mon Sep 17 00:00:00 2001 From: Julio Castillo Date: Fri, 11 Nov 2022 19:24:18 +0100 Subject: [PATCH 2/7] Update READMEs --- modules/bigquery-dataset/README.md | 17 +++++++++-------- modules/gcs/README.md | 12 ++++++------ modules/logging-bucket/README.md | 3 ++- modules/pubsub/README.md | 9 +++++---- tests/modules/folder/test_plan_logging.py | 2 -- 5 files changed, 22 insertions(+), 21 deletions(-) diff --git a/modules/bigquery-dataset/README.md b/modules/bigquery-dataset/README.md index 29acba39..ebca51e2 100644 --- a/modules/bigquery-dataset/README.md +++ b/modules/bigquery-dataset/README.md @@ -196,13 +196,14 @@ module "bigquery-dataset" { | name | description | sensitive | |---|---|:---:| -| [dataset](outputs.tf#L17) | Dataset resource. | | -| [dataset_id](outputs.tf#L22) | Dataset id. | | -| [id](outputs.tf#L34) | Fully qualified dataset id. | | -| [self_link](outputs.tf#L46) | Dataset self link. | | -| [table_ids](outputs.tf#L58) | Map of fully qualified table ids keyed by table ids. | | -| [tables](outputs.tf#L63) | Table resources. 
| | -| [view_ids](outputs.tf#L68) | Map of fully qualified view ids keyed by view ids. | | -| [views](outputs.tf#L73) | View resources. | | +| [as_logging_destination](outputs.tf#L17) | Parameters to use this dataset as a log sink destination. | | +| [dataset](outputs.tf#L32) | Dataset resource. | | +| [dataset_id](outputs.tf#L37) | Dataset id. | | +| [id](outputs.tf#L49) | Fully qualified dataset id. | | +| [self_link](outputs.tf#L61) | Dataset self link. | | +| [table_ids](outputs.tf#L73) | Map of fully qualified table ids keyed by table ids. | | +| [tables](outputs.tf#L78) | Table resources. | | +| [view_ids](outputs.tf#L83) | Map of fully qualified view ids keyed by view ids. | | +| [views](outputs.tf#L88) | View resources. | | diff --git a/modules/gcs/README.md b/modules/gcs/README.md index 7e6cc22f..11b4b6ba 100644 --- a/modules/gcs/README.md +++ b/modules/gcs/README.md @@ -134,11 +134,11 @@ module "bucket-gcs-notification" { | name | description | sensitive | |---|---|:---:| -| [bucket](outputs.tf#L17) | Bucket resource. | | -| [id](outputs.tf#L28) | Bucket ID (same as name). | | -| [name](outputs.tf#L37) | Bucket name. | | -| [notification](outputs.tf#L46) | GCS Notification self link. | | -| [topic](outputs.tf#L51) | Topic ID used by GCS. | | -| [url](outputs.tf#L56) | Bucket URL. | | +| [as_logging_destination](outputs.tf#L17) | Parameters to use this bucket as a log sink destination. | | +| [bucket](outputs.tf#L29) | Bucket resource. | | +| [name](outputs.tf#L34) | Bucket name. | | +| [notification](outputs.tf#L43) | GCS Notification self link. | | +| [topic](outputs.tf#L48) | Topic ID used by GCS. | | +| [url](outputs.tf#L53) | Bucket URL. 
| | diff --git a/modules/logging-bucket/README.md b/modules/logging-bucket/README.md index 7af82ccb..5328b943 100644 --- a/modules/logging-bucket/README.md +++ b/modules/logging-bucket/README.md @@ -56,6 +56,7 @@ module "bucket-default" { | name | description | sensitive | |---|---|:---:| -| [id](outputs.tf#L17) | ID of the created bucket. | | +| [as_logging_destination](outputs.tf#L17) | Parameters to use this bucket as a log sink destination. | | +| [id](outputs.tf#L30) | ID of the created bucket. | | diff --git a/modules/pubsub/README.md b/modules/pubsub/README.md index b75aaf6d..a8339110 100644 --- a/modules/pubsub/README.md +++ b/modules/pubsub/README.md @@ -111,9 +111,10 @@ module "pubsub" { | name | description | sensitive | |---|---|:---:| -| [id](outputs.tf#L17) | Topic id. | | -| [subscription_id](outputs.tf#L25) | Subscription ids. | | -| [subscriptions](outputs.tf#L35) | Subscription resources. | | -| [topic](outputs.tf#L43) | Topic resource. | | +| [as_logging_destination](outputs.tf#L17) | Parameters to use this topic as a log sink destination. | | +| [id](outputs.tf#L28) | Topic id. | | +| [subscription_id](outputs.tf#L36) | Subscription ids. | | +| [subscriptions](outputs.tf#L46) | Subscription resources. | | +| [topic](outputs.tf#L54) | Topic resource. | | diff --git a/tests/modules/folder/test_plan_logging.py b/tests/modules/folder/test_plan_logging.py index be627155..6b305d0b 100644 --- a/tests/modules/folder/test_plan_logging.py +++ b/tests/modules/folder/test_plan_logging.py @@ -14,8 +14,6 @@ from collections import Counter -from icecream import ic - def test_sinks(plan_runner): "Test folder-level sinks." 
From 8fe19ad7c2232957d84d22ba00d0338b79428d3d Mon Sep 17 00:00:00 2001 From: Julio Castillo Date: Sat, 12 Nov 2022 11:30:34 +0100 Subject: [PATCH 3/7] Rename bigquery_use_partitioned_table --- fast/stages/00-bootstrap/organization.tf | 6 +++--- modules/folder/README.md | 2 +- modules/folder/logging.tf | 4 ++-- modules/folder/variables.tf | 6 +++--- modules/organization/README.md | 18 +++++++++--------- modules/organization/logging.tf | 4 ++-- modules/organization/variables.tf | 8 ++++---- modules/project/README.md | 2 +- modules/project/logging.tf | 4 ++-- modules/project/variables.tf | 8 ++++---- 10 files changed, 31 insertions(+), 31 deletions(-) diff --git a/fast/stages/00-bootstrap/organization.tf b/fast/stages/00-bootstrap/organization.tf index 7a12ea32..297e0f86 100644 --- a/fast/stages/00-bootstrap/organization.tf +++ b/fast/stages/00-bootstrap/organization.tf @@ -192,9 +192,9 @@ module "organization" { } logging_sinks = { for name, attrs in var.log_sinks : name => { - destination = local.log_sink_destinations[name].as_logging_destination - filter = attrs.filter - bigquery_use_partitioned_table = attrs.type == "bigquery" + bq_partitioned_table = attrs.type == "bigquery" + destination = local.log_sink_destinations[name].as_logging_destination + filter = attrs.filter } } } diff --git a/modules/folder/README.md b/modules/folder/README.md index 4e5e752f..c71cdacc 100644 --- a/modules/folder/README.md +++ b/modules/folder/README.md @@ -302,7 +302,7 @@ module "folder" { | [iam_additive_members](variables.tf#L85) | IAM additive bindings in {MEMBERS => [ROLE]} format. This might break if members are dynamic values. | map(list(string)) | | {} | | [id](variables.tf#L92) | Folder ID in case you use folder_create=false. | string | | null | | [logging_exclusions](variables.tf#L98) | Logging exclusions for this folder in the form {NAME -> FILTER}. | map(string) | | {} | -| [logging_sinks](variables.tf#L105) | Logging sinks to create for this folder. 
| map(object({…})) | | {} | | [name](variables.tf#L137) | Folder name. | string | | null | | [org_policies](variables.tf#L143) | Organization policies applied to this folder keyed by policy name. | map(object({…})) | | {} | | [org_policies_data_path](variables.tf#L183) | Path containing org policies in YAML format. | string | | null | diff --git a/modules/folder/logging.tf b/modules/folder/logging.tf index 517b74e1..e733dfbc 100644 --- a/modules/folder/logging.tf +++ b/modules/folder/logging.tf @@ -38,9 +38,9 @@ resource "google_logging_folder_sink" "sink" { disabled = each.value.disabled dynamic "bigquery_options" { - for_each = each.value.bigquery_use_partitioned_table != null ? [""] : [] + for_each = each.value.bq_partitioned_table != null ? [""] : [] content { - use_partitioned_tables = each.value.bigquery_use_partitioned_table + use_partitioned_tables = each.value.bq_partitioned_table } } diff --git a/modules/folder/variables.tf b/modules/folder/variables.tf index b5824884..03e2d7e7 100644 --- a/modules/folder/variables.tf +++ b/modules/folder/variables.tf @@ -105,7 +105,7 @@ variable "logging_sinks" { variable "logging_sinks" { description = "Logging sinks to create for this folder." type = map(object({ - bigquery_use_partitioned_table = optional(bool) + bq_partitioned_table = optional(bool) description = optional(string) destination = object({ type = string @@ -128,9 +128,9 @@ variable "logging_sinks" { validation { condition = alltrue([ for k, v in var.logging_sinks : - v.bigquery_use_partitioned_table != true || v.destination.type == "bigquery" + v.bq_partitioned_table != true || v.destination.type == "bigquery" ]) - error_message = "Can only set bigquery_use_partitioned_table when destination type is `bigquery`." + error_message = "Can only set bq_partitioned_table when destination type is `bigquery`." 
} } diff --git a/modules/organization/README.md b/modules/organization/README.md index 3adbff3d..b99f9a29 100644 --- a/modules/organization/README.md +++ b/modules/organization/README.md @@ -311,22 +311,22 @@ module "org" { logging_sinks = { warnings = { - destination = module.gcs.as_logging_destination - filter = "severity=WARNING" + destination = module.gcs.as_logging_destination + filter = "severity=WARNING" } info = { destination = module.dataset.as_logging_destination filter = "severity=INFO" - bigquery_use_partitioned_table = true + bq_partitioned_table = true } notice = { - destination = module.pubsub.as_logging_destination - filter = "severity=NOTICE" + destination = module.pubsub.as_logging_destination + filter = "severity=NOTICE" } debug = { - destination = module.bucket.as_logging_destination - filter = "severity=DEBUG" - exclusions = { + destination = module.bucket.as_logging_destination + filter = "severity=DEBUG" + exclusions = { no-compute = "logName:compute" } } @@ -425,7 +425,7 @@ module "org" { | [iam_audit_config_authoritative](variables.tf#L105) | IAM Authoritative service audit logging configuration. Service as key, map of log permission (eg DATA_READ) and excluded members as value for each service. Audit config should also be authoritative when using authoritative bindings. Use with caution. | map(map(list(string))) | | null | | [iam_bindings_authoritative](variables.tf#L116) | IAM authoritative bindings, in {ROLE => [MEMBERS]} format. Roles and members not explicitly listed will be cleared. Bindings should also be authoritative when using authoritative audit config. Use with caution. | map(list(string)) | | null | | [logging_exclusions](variables.tf#L122) | Logging exclusions for this organization in the form {NAME -> FILTER}. | map(string) | | {} | -| [logging_sinks](variables.tf#L129) | Logging sinks to create for the organization. 
| map(object({…})) | | {} | +| [logging_sinks](variables.tf#L129) | Logging sinks to create for the organization. | map(object({…})) | | {} | | [org_policies](variables.tf#L161) | Organization policies applied to this organization keyed by policy name. | map(object({…})) | | {} | | [org_policies_data_path](variables.tf#L201) | Path containing org policies in YAML format. | string | | null | | [org_policy_custom_constraints](variables.tf#L207) | Organization policiy custom constraints keyed by constraint name. | map(object({…})) | | {} | diff --git a/modules/organization/logging.tf b/modules/organization/logging.tf index 57f2da1f..042228d4 100644 --- a/modules/organization/logging.tf +++ b/modules/organization/logging.tf @@ -37,9 +37,9 @@ resource "google_logging_organization_sink" "sink" { disabled = each.value.disabled dynamic "bigquery_options" { - for_each = each.value.bigquery_use_partitioned_table != null ? [""] : [] + for_each = each.value.bq_partitioned_table != null ? [""] : [] content { - use_partitioned_tables = each.value.bigquery_use_partitioned_table + use_partitioned_tables = each.value.bq_partitioned_table } } diff --git a/modules/organization/variables.tf b/modules/organization/variables.tf index 3fcbefea..636bba74 100644 --- a/modules/organization/variables.tf +++ b/modules/organization/variables.tf @@ -129,8 +129,8 @@ variable "logging_exclusions" { variable "logging_sinks" { description = "Logging sinks to create for the organization." 
type = map(object({ - bigquery_use_partitioned_table = optional(bool) - description = optional(string) + bq_partitioned_table = optional(bool) + description = optional(string) destination = object({ type = string target = string @@ -152,9 +152,9 @@ variable "logging_sinks" { validation { condition = alltrue([ for k, v in var.logging_sinks : - v.bigquery_use_partitioned_table != true || v.destination.type == "bigquery" + v.bq_partitioned_table != true || v.destination.type == "bigquery" ]) - error_message = "Can only set bigquery_use_partitioned_table when destination type is `bigquery`." + error_message = "Can only set bq_partitioned_table when destination type is `bigquery`." } } diff --git a/modules/project/README.md b/modules/project/README.md index 3ee8e312..55798a54 100644 --- a/modules/project/README.md +++ b/modules/project/README.md @@ -483,7 +483,7 @@ output "compute_robot" { | [labels](variables.tf#L82) | Resource labels. | map(string) | | {} | | [lien_reason](variables.tf#L89) | If non-empty, creates a project lien with this description. | string | | "" | | [logging_exclusions](variables.tf#L95) | Logging exclusions for this project in the form {NAME -> FILTER}. | map(string) | | {} | -| [logging_sinks](variables.tf#L102) | Logging sinks to create for this project. | map(object({…})) | | {} | +| [logging_sinks](variables.tf#L102) | Logging sinks to create for this project. | map(object({…})) | | {} | | [metric_scopes](variables.tf#L135) | List of projects that will act as metric scopes for this project. | list(string) | | [] | | [org_policies](variables.tf#L147) | Organization policies applied to this project keyed by policy name. | map(object({…})) | | {} | | [org_policies_data_path](variables.tf#L187) | Path containing org policies in YAML format. 
| string | | null | diff --git a/modules/project/logging.tf b/modules/project/logging.tf index 6293ac32..1e5948f7 100644 --- a/modules/project/logging.tf +++ b/modules/project/logging.tf @@ -37,9 +37,9 @@ resource "google_logging_project_sink" "sink" { disabled = each.value.disabled dynamic "bigquery_options" { - for_each = each.value.bigquery_use_partitioned_table != null ? [""] : [] + for_each = each.value.bq_partitioned_table != null ? [""] : [] content { - use_partitioned_tables = each.value.bigquery_use_partitioned_table + use_partitioned_tables = each.value.bq_partitioned_table } } diff --git a/modules/project/variables.tf b/modules/project/variables.tf index b3f8fa52..e31ee84c 100644 --- a/modules/project/variables.tf +++ b/modules/project/variables.tf @@ -102,8 +102,8 @@ variable "logging_exclusions" { variable "logging_sinks" { description = "Logging sinks to create for this project." type = map(object({ - bigquery_use_partitioned_table = optional(bool) - description = optional(string) + bq_partitioned_table = optional(bool) + description = optional(string) destination = object({ type = string target = string @@ -126,9 +126,9 @@ variable "logging_sinks" { validation { condition = alltrue([ for k, v in var.logging_sinks : - v.bigquery_use_partitioned_table != true || v.destination.type == "bigquery" + v.bq_partitioned_table != true || v.destination.type == "bigquery" ]) - error_message = "Can only set bigquery_use_partitioned_table when destination type is `bigquery`." + error_message = "Can only set bq_partitioned_table when destination type is `bigquery`." 
} } From daf0fef7cd4512e8777eea857c35653f1c54588e Mon Sep 17 00:00:00 2001 From: Julio Castillo Date: Sat, 12 Nov 2022 12:07:48 +0100 Subject: [PATCH 4/7] Fix folder variables --- modules/folder/variables.tf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/folder/variables.tf b/modules/folder/variables.tf index 03e2d7e7..44bee5da 100644 --- a/modules/folder/variables.tf +++ b/modules/folder/variables.tf @@ -106,7 +106,7 @@ variable "logging_sinks" { description = "Logging sinks to create for this folder." type = map(object({ bq_partitioned_table = optional(bool) - description = optional(string) + description = optional(string) destination = object({ type = string target = string @@ -128,7 +128,7 @@ variable "logging_sinks" { validation { condition = alltrue([ for k, v in var.logging_sinks : - v. != true || v.destination.type == "bigquery" + v.bq_partitioned_table != true || v.destination.type == "bigquery" ]) error_message = "Can only set when destination type is `bigquery`." } From b37ef3a90a5dc99732fec9ac58737bf0ce962bf2 Mon Sep 17 00:00:00 2001 From: Julio Castillo Date: Sat, 12 Nov 2022 12:22:09 +0100 Subject: [PATCH 5/7] Update folder readme --- modules/folder/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/folder/README.md b/modules/folder/README.md index c71cdacc..c8dbcda0 100644 --- a/modules/folder/README.md +++ b/modules/folder/README.md @@ -302,7 +302,7 @@ module "folder" { | [iam_additive_members](variables.tf#L85) | IAM additive bindings in {MEMBERS => [ROLE]} format. This might break if members are dynamic values. | map(list(string)) | | {} | | [id](variables.tf#L92) | Folder ID in case you use folder_create=false. | string | | null | | [logging_exclusions](variables.tf#L98) | Logging exclusions for this folder in the form {NAME -> FILTER}. | map(string) | | {} | -| [logging_sinks](variables.tf#L105) | Logging sinks to create for this folder. 
| map(object({…})) | | {} | +| [logging_sinks](variables.tf#L105) | Logging sinks to create for this folder. | map(object({…})) | | {} | | [name](variables.tf#L137) | Folder name. | string | | null | | [org_policies](variables.tf#L143) | Organization policies applied to this folder keyed by policy name. | map(object({…})) | | {} | | [org_policies_data_path](variables.tf#L183) | Path containing org policies in YAML format. | string | | null | From c83a7de0763064bcac79c9063f731b9d04b46a67 Mon Sep 17 00:00:00 2001 From: Julio Castillo Date: Sat, 12 Nov 2022 19:24:41 +0100 Subject: [PATCH 6/7] Remove as_logging_destination --- fast/stages/00-bootstrap/organization.tf | 3 +- modules/bigquery-dataset/outputs.tf | 15 ----- modules/folder/README.md | 24 ++++---- modules/folder/logging.tf | 18 +++--- modules/folder/variables.tf | 24 ++++---- modules/gcs/outputs.tf | 25 +++++---- modules/logging-bucket/outputs.tf | 13 ----- modules/organization/README.md | 30 +++++----- modules/organization/logging.tf | 18 +++--- modules/organization/variables.tf | 22 ++++---- modules/project/README.md | 55 ++++++++++--------- modules/project/logging.tf | 18 +++--- modules/project/variables.tf | 24 ++++---- modules/pubsub/outputs.tf | 11 ---- .../folder/fixture/test.logging-sinks.tfvars | 30 ++++------ .../fixture/test.logging-sinks.tfvars | 30 ++++------ .../project/fixture/test.logging-sinks.tfvars | 32 ++++------- 17 files changed, 170 insertions(+), 222 deletions(-) diff --git a/fast/stages/00-bootstrap/organization.tf b/fast/stages/00-bootstrap/organization.tf index 297e0f86..0700d564 100644 --- a/fast/stages/00-bootstrap/organization.tf +++ b/fast/stages/00-bootstrap/organization.tf @@ -193,8 +193,9 @@ module "organization" { logging_sinks = { for name, attrs in var.log_sinks : name => { bq_partitioned_table = attrs.type == "bigquery" - destination = local.log_sink_destinations[name].as_logging_destination + destination = local.log_sink_destinations[name].id filter = attrs.filter + 
type = attrs.type } } } diff --git a/modules/bigquery-dataset/outputs.tf b/modules/bigquery-dataset/outputs.tf index 11d7f3bb..dd2da22c 100644 --- a/modules/bigquery-dataset/outputs.tf +++ b/modules/bigquery-dataset/outputs.tf @@ -14,21 +14,6 @@ * limitations under the License. */ -output "as_logging_destination" { - description = "Parameters to use this dataset as a log sink destination." - value = { - type = "bigquery" - target = google_bigquery_dataset.default.id - } - depends_on = [ - google_bigquery_dataset_access.domain, - google_bigquery_dataset_access.group_by_email, - google_bigquery_dataset_access.special_group, - google_bigquery_dataset_access.user_by_email, - google_bigquery_dataset_access.views - ] -} - output "dataset" { description = "Dataset resource." value = google_bigquery_dataset.default diff --git a/modules/folder/README.md b/modules/folder/README.md index c8dbcda0..1943882d 100644 --- a/modules/folder/README.md +++ b/modules/folder/README.md @@ -173,23 +173,27 @@ module "folder-sink" { name = "my-folder" logging_sinks = { warnings = { - destination = module.gcs.as_logging_destination + destination = module.gcs.id filter = "severity=WARNING" + type = "storage" } info = { - destination = module.dataset.as_logging_destination + destination = module.dataset.id filter = "severity=INFO" + type = "bigquery" } notice = { - destination = module.pubsub.as_logging_destination + destination = module.pubsub.id filter = "severity=NOTICE" + type = "pubsub" } debug = { - destination = module.bucket.as_logging_destination + destination = module.bucket.id filter = "severity=DEBUG" exclusions = { no-compute = "logName:compute" } + type = "logging" } } logging_exclusions = { @@ -302,12 +306,12 @@ module "folder" { | [iam_additive_members](variables.tf#L85) | IAM additive bindings in {MEMBERS => [ROLE]} format. This might break if members are dynamic values. | map(list(string)) | | {} | | [id](variables.tf#L92) | Folder ID in case you use folder_create=false. 
| string | | null | | [logging_exclusions](variables.tf#L98) | Logging exclusions for this folder in the form {NAME -> FILTER}. | map(string) | | {} | -| [logging_sinks](variables.tf#L105) | Logging sinks to create for this folder. | map(object({…})) | | {} | -| [name](variables.tf#L137) | Folder name. | string | | null | -| [org_policies](variables.tf#L143) | Organization policies applied to this folder keyed by policy name. | map(object({…})) | | {} | -| [org_policies_data_path](variables.tf#L183) | Path containing org policies in YAML format. | string | | null | -| [parent](variables.tf#L189) | Parent in folders/folder_id or organizations/org_id format. | string | | null | -| [tag_bindings](variables.tf#L199) | Tag bindings for this folder, in key => tag value id format. | map(string) | | null | +| [logging_sinks](variables.tf#L105) | Logging sinks to create for this folder. | map(object({…})) | | {} | +| [name](variables.tf#L135) | Folder name. | string | | null | +| [org_policies](variables.tf#L141) | Organization policies applied to this folder keyed by policy name. | map(object({…})) | | {} | +| [org_policies_data_path](variables.tf#L181) | Path containing org policies in YAML format. | string | | null | +| [parent](variables.tf#L187) | Parent in folders/folder_id or organizations/org_id format. | string | | null | +| [tag_bindings](variables.tf#L197) | Tag bindings for this folder, in key => tag value id format. 
| map(string) | | null | ## Outputs diff --git a/modules/folder/logging.tf b/modules/folder/logging.tf index e733dfbc..6351194a 100644 --- a/modules/folder/logging.tf +++ b/modules/folder/logging.tf @@ -22,7 +22,7 @@ locals { type => { for name, sink in var.logging_sinks : name => sink - if sink.destination.type == type + if sink.type == type } } } @@ -32,7 +32,7 @@ resource "google_logging_folder_sink" "sink" { name = each.key description = coalesce(each.value.description, "${each.key} (Terraform-managed).") folder = local.folder.name - destination = "${each.value.destination.type}.googleapis.com/${each.value.destination.target}" + destination = "${each.value.type}.googleapis.com/${each.value.destination}" filter = each.value.filter include_children = each.value.include_children disabled = each.value.disabled @@ -60,37 +60,37 @@ resource "google_logging_folder_sink" "sink" { resource "google_storage_bucket_iam_member" "gcs-sinks-binding" { for_each = local.sink_bindings["storage"] - bucket = each.value.destination.target + bucket = each.value.destination role = "roles/storage.objectCreator" member = google_logging_folder_sink.sink[each.key].writer_identity } resource "google_bigquery_dataset_iam_member" "bq-sinks-binding" { for_each = local.sink_bindings["bigquery"] - project = split("/", each.value.destination.target)[1] - dataset_id = split("/", each.value.destination.target)[3] + project = split("/", each.value.destination)[1] + dataset_id = split("/", each.value.destination)[3] role = "roles/bigquery.dataEditor" member = google_logging_folder_sink.sink[each.key].writer_identity } resource "google_pubsub_topic_iam_member" "pubsub-sinks-binding" { for_each = local.sink_bindings["pubsub"] - project = split("/", each.value.destination.target)[1] - topic = split("/", each.value.destination.target)[3] + project = split("/", each.value.destination)[1] + topic = split("/", each.value.destination)[3] role = "roles/pubsub.publisher" member = 
google_logging_folder_sink.sink[each.key].writer_identity } resource "google_project_iam_member" "bucket-sinks-binding" { for_each = local.sink_bindings["logging"] - project = split("/", each.value.destination.target)[1] + project = split("/", each.value.destination)[1] role = "roles/logging.bucketWriter" member = google_logging_folder_sink.sink[each.key].writer_identity condition { title = "${each.key} bucket writer" description = "Grants bucketWriter to ${google_logging_folder_sink.sink[each.key].writer_identity} used by log sink ${each.key} on ${local.folder.id}" - expression = "resource.name.endsWith('${each.value.destination.target}')" + expression = "resource.name.endsWith('${each.value.destination}')" } } diff --git a/modules/folder/variables.tf b/modules/folder/variables.tf index 44bee5da..a93ea1aa 100644 --- a/modules/folder/variables.tf +++ b/modules/folder/variables.tf @@ -103,34 +103,32 @@ variable "logging_exclusions" { } variable "logging_sinks" { - description = "Logging sinks to create for this folder." + description = "Logging sinks to create for this folder." type = map(object({ bq_partitioned_table = optional(bool) description = optional(string) - destination = object({ - type = string - target = string - }) - disabled = optional(bool, false) - exclusions = optional(map(string), {}) - filter = string - include_children = optional(bool, true) + destination = string + disabled = optional(bool, false) + exclusions = optional(map(string), {}) + filter = string + include_children = optional(bool, true) + type = string })) default = {} nullable = false validation { condition = alltrue([ for k, v in var.logging_sinks : - contains(["bigquery", "logging", "pubsub", "storage"], v.destination.type) + contains(["bigquery", "logging", "pubsub", "storage"], v.type) ]) - error_message = "Destination type must be one of 'bigquery', 'logging', 'pubsub', 'storage'." + error_message = "Type must be one of 'bigquery', 'logging', 'pubsub', 'storage'." 
} validation { condition = alltrue([ for k, v in var.logging_sinks : - v.bq_partitioned_table != true || v.destination.type == "bigquery" + v.bq_partitioned_table != true || v.type == "bigquery" ]) - error_message = "Can only set when destination type is `bigquery`." + error_message = "Can only set bq_partitioned_table when type is `bigquery`." } } diff --git a/modules/gcs/outputs.tf b/modules/gcs/outputs.tf index a92118da..a00c04cf 100644 --- a/modules/gcs/outputs.tf +++ b/modules/gcs/outputs.tf @@ -14,23 +14,26 @@ * limitations under the License. */ -output "as_logging_destination" { - description = "Parameters to use this bucket as a log sink destination." - value = { - type = "storage" - target = "${local.prefix}${lower(var.name)}" - } +output "bucket" { + description = "Bucket resource." + value = google_storage_bucket.bucket +} + +# We add `id` as an alias to `name` to simplify log sink handling. +# Since all other log destinations (pubsub, logging-bucket, bigquery) +# have an id output, it is convenient to have in this module too to +# handle all log destination as homogeneous objects (i.e. you can +# assume any valid log destination has an `id` output). + +output "id" { + description = "Bucket ID (same as name)." + value = "${local.prefix}${lower(var.name)}" depends_on = [ google_storage_bucket.bucket, google_storage_bucket_iam_binding.bindings ] } -output "bucket" { - description = "Bucket resource." - value = google_storage_bucket.bucket -} - output "name" { description = "Bucket name." value = "${local.prefix}${lower(var.name)}" diff --git a/modules/logging-bucket/outputs.tf b/modules/logging-bucket/outputs.tf index 945c56f9..7100237e 100644 --- a/modules/logging-bucket/outputs.tf +++ b/modules/logging-bucket/outputs.tf @@ -14,19 +14,6 @@ * limitations under the License. */ -output "as_logging_destination" { - description = "Parameters to use this bucket as a log sink destination." 
- value = { - type = "logging" - target = try( - google_logging_project_bucket_config.bucket.0.id, - google_logging_folder_bucket_config.bucket.0.id, - google_logging_organization_bucket_config.bucket.0.id, - google_logging_billing_account_bucket_config.bucket.0.id, - ) - } -} - output "id" { description = "ID of the created bucket." value = try( diff --git a/modules/organization/README.md b/modules/organization/README.md index b99f9a29..d81611ee 100644 --- a/modules/organization/README.md +++ b/modules/organization/README.md @@ -311,24 +311,28 @@ module "org" { logging_sinks = { warnings = { - destination = module.gcs.as_logging_destination + destination = module.gcs.id filter = "severity=WARNING" + type = "storage" } info = { - destination = module.dataset.as_logging_destination - filter = "severity=INFO" bq_partitioned_table = true + destination = module.dataset.id + filter = "severity=INFO" + type = "bigquery" } notice = { - destination = module.pubsub.as_logging_destination + destination = module.pubsub.id filter = "severity=NOTICE" + type = "pubsub" } debug = { - destination = module.bucket.as_logging_destination + destination = module.bucket.id filter = "severity=DEBUG" exclusions = { no-compute = "logName:compute" } + type = "logging" } } logging_exclusions = { @@ -411,7 +415,7 @@ module "org" { | name | description | type | required | default | |---|---|:---:|:---:|:---:| -| [organization_id](variables.tf#L227) | Organization id in organizations/nnnnnn format. | string | ✓ | | +| [organization_id](variables.tf#L225) | Organization id in organizations/nnnnnn format. | string | ✓ | | | [contacts](variables.tf#L17) | List of essential contacts for this resource. Must be in the form EMAIL -> [NOTIFICATION_TYPES]. Valid notification types are ALL, SUSPENSION, SECURITY, TECHNICAL, BILLING, LEGAL, PRODUCT_UPDATES. | map(list(string)) | | {} | | [custom_roles](variables.tf#L24) | Map of role name => list of permissions to create in this project. 
| map(list(string)) | | {} | | [firewall_policies](variables.tf#L31) | Hierarchical firewall policy rules created in the organization. | map(map(object({…}))) | | {} | @@ -425,13 +429,13 @@ module "org" { | [iam_audit_config_authoritative](variables.tf#L105) | IAM Authoritative service audit logging configuration. Service as key, map of log permission (eg DATA_READ) and excluded members as value for each service. Audit config should also be authoritative when using authoritative bindings. Use with caution. | map(map(list(string))) | | null | | [iam_bindings_authoritative](variables.tf#L116) | IAM authoritative bindings, in {ROLE => [MEMBERS]} format. Roles and members not explicitly listed will be cleared. Bindings should also be authoritative when using authoritative audit config. Use with caution. | map(list(string)) | | null | | [logging_exclusions](variables.tf#L122) | Logging exclusions for this organization in the form {NAME -> FILTER}. | map(string) | | {} | -| [logging_sinks](variables.tf#L129) | Logging sinks to create for the organization. | map(object({…})) | | {} | -| [org_policies](variables.tf#L161) | Organization policies applied to this organization keyed by policy name. | map(object({…})) | | {} | -| [org_policies_data_path](variables.tf#L201) | Path containing org policies in YAML format. | string | | null | -| [org_policy_custom_constraints](variables.tf#L207) | Organization policiy custom constraints keyed by constraint name. | map(object({…})) | | {} | -| [org_policy_custom_constraints_data_path](variables.tf#L221) | Path containing org policy custom constraints in YAML format. | string | | null | -| [tag_bindings](variables.tf#L237) | Tag bindings for this organization, in key => tag value id format. | map(string) | | null | -| [tags](variables.tf#L243) | Tags by key name. The `iam` attribute behaves like the similarly named one at module level. 
| map(object({…})) | | null | +| [logging_sinks](variables.tf#L129) | Logging sinks to create for the organization. | map(object({…})) | | {} | +| [org_policies](variables.tf#L159) | Organization policies applied to this organization keyed by policy name. | map(object({…})) | | {} | +| [org_policies_data_path](variables.tf#L199) | Path containing org policies in YAML format. | string | | null | +| [org_policy_custom_constraints](variables.tf#L205) | Organization policiy custom constraints keyed by constraint name. | map(object({…})) | | {} | +| [org_policy_custom_constraints_data_path](variables.tf#L219) | Path containing org policy custom constraints in YAML format. | string | | null | +| [tag_bindings](variables.tf#L235) | Tag bindings for this organization, in key => tag value id format. | map(string) | | null | +| [tags](variables.tf#L241) | Tags by key name. The `iam` attribute behaves like the similarly named one at module level. | map(object({…})) | | null | ## Outputs diff --git a/modules/organization/logging.tf b/modules/organization/logging.tf index 042228d4..a4f90ef5 100644 --- a/modules/organization/logging.tf +++ b/modules/organization/logging.tf @@ -21,7 +21,7 @@ locals { for type in ["bigquery", "logging", "pubsub", "storage"] : type => { for name, sink in var.logging_sinks : - name => sink if sink.destination.type == type + name => sink if sink.type == type } } } @@ -31,7 +31,7 @@ resource "google_logging_organization_sink" "sink" { name = each.key description = coalesce(each.value.description, "${each.key} (Terraform-managed).") org_id = local.organization_id_numeric - destination = "${each.value.destination.type}.googleapis.com/${each.value.destination.target}" + destination = "${each.value.type}.googleapis.com/${each.value.destination}" filter = each.value.filter include_children = each.value.include_children disabled = each.value.disabled @@ -61,37 +61,37 @@ resource "google_logging_organization_sink" "sink" { resource 
"google_storage_bucket_iam_member" "storage-sinks-binding" { for_each = local.sink_bindings["storage"] - bucket = each.value.destination.target + bucket = each.value.destination role = "roles/storage.objectCreator" member = google_logging_organization_sink.sink[each.key].writer_identity } resource "google_bigquery_dataset_iam_member" "bq-sinks-binding" { for_each = local.sink_bindings["bigquery"] - project = split("/", each.value.destination.target)[1] - dataset_id = split("/", each.value.destination.target)[3] + project = split("/", each.value.destination)[1] + dataset_id = split("/", each.value.destination)[3] role = "roles/bigquery.dataEditor" member = google_logging_organization_sink.sink[each.key].writer_identity } resource "google_pubsub_topic_iam_member" "pubsub-sinks-binding" { for_each = local.sink_bindings["pubsub"] - project = split("/", each.value.destination.target)[1] - topic = split("/", each.value.destination.target)[3] + project = split("/", each.value.destination)[1] + topic = split("/", each.value.destination)[3] role = "roles/pubsub.publisher" member = google_logging_organization_sink.sink[each.key].writer_identity } resource "google_project_iam_member" "bucket-sinks-binding" { for_each = local.sink_bindings["logging"] - project = split("/", each.value.destination.target)[1] + project = split("/", each.value.destination)[1] role = "roles/logging.bucketWriter" member = google_logging_organization_sink.sink[each.key].writer_identity condition { title = "${each.key} bucket writer" description = "Grants bucketWriter to ${google_logging_organization_sink.sink[each.key].writer_identity} used by log sink ${each.key} on ${var.organization_id}" - expression = "resource.name.endsWith('${each.value.destination.target}')" + expression = "resource.name.endsWith('${each.value.destination}')" } } diff --git a/modules/organization/variables.tf b/modules/organization/variables.tf index 636bba74..f8a949f5 100644 --- a/modules/organization/variables.tf +++ 
b/modules/organization/variables.tf @@ -131,30 +131,28 @@ variable "logging_sinks" { type = map(object({ bq_partitioned_table = optional(bool) description = optional(string) - destination = object({ - type = string - target = string - }) - disabled = optional(bool, false) - exclusions = optional(map(string), {}) - filter = string - include_children = optional(bool, true) + destination = string + disabled = optional(bool, false) + exclusions = optional(map(string), {}) + filter = string + include_children = optional(bool, true) + type = string })) default = {} nullable = false validation { condition = alltrue([ for k, v in var.logging_sinks : - contains(["bigquery", "logging", "pubsub", "storage"], v.destination.type) + contains(["bigquery", "logging", "pubsub", "storage"], v.type) ]) - error_message = "Destination type must be one of 'bigquery', 'logging', 'pubsub', 'storage'." + error_message = "Type must be one of 'bigquery', 'logging', 'pubsub', 'storage'." } validation { condition = alltrue([ for k, v in var.logging_sinks : - v.bq_partitioned_table != true || v.destination.type == "bigquery" + v.bq_partitioned_table != true || v.type == "bigquery" ]) - error_message = "Can only set bq_partitioned_table when destination type is `bigquery`." + error_message = "Can only set bq_partitioned_table when type is `bigquery`." 
} } diff --git a/modules/project/README.md b/modules/project/README.md index 55798a54..03ea00a1 100644 --- a/modules/project/README.md +++ b/modules/project/README.md @@ -312,23 +312,27 @@ module "project-host" { parent = "folders/1234567890" logging_sinks = { warnings = { - destination = module.gcs.as_logging_destination + destination = module.gcs.id filter = "severity=WARNING" + type = "storage" } info = { - destination = module.dataset.as_logging_destination + destination = module.dataset.id filter = "severity=INFO" + type = "bigquery" } notice = { - destination = module.pubsub.as_logging_destination + destination = module.pubsub.id filter = "severity=NOTICE" + type = "pubsub" } debug = { - destination = module.bucket.as_logging_destination + destination = module.bucket.id filter = "severity=DEBUG" exclusions = { no-compute = "logName:compute" } + type = "logging" } } logging_exclusions = { @@ -357,9 +361,10 @@ module "project-host" { parent = "folders/1234567890" logging_sinks = { warnings = { - destination = module.gcs.as_logging_destination + destination = module.gcs.id filter = "severity=WARNING" unique_writer = true + type = "storage" } } } @@ -469,7 +474,7 @@ output "compute_robot" { | name | description | type | required | default | |---|---|:---:|:---:|:---:| -| [name](variables.tf#L142) | Project name and id suffix. | string | ✓ | | +| [name](variables.tf#L140) | Project name and id suffix. | string | ✓ | | | [auto_create_network](variables.tf#L17) | Whether to create the default network for the project. | bool | | false | | [billing_account](variables.tf#L23) | Billing account id. | string | | null | | [contacts](variables.tf#L29) | List of essential contacts for this resource. Must be in the form EMAIL -> [NOTIFICATION_TYPES]. Valid notification types are ALL, SUSPENSION, SECURITY, TECHNICAL, BILLING, LEGAL, PRODUCT_UPDATES. | map(list(string)) | | {} | @@ -483,25 +488,25 @@ output "compute_robot" { | [labels](variables.tf#L82) | Resource labels. 
| map(string) | | {} | | [lien_reason](variables.tf#L89) | If non-empty, creates a project lien with this description. | string | | "" | | [logging_exclusions](variables.tf#L95) | Logging exclusions for this project in the form {NAME -> FILTER}. | map(string) | | {} | -| [logging_sinks](variables.tf#L102) | Logging sinks to create for this project. | map(object({…})) | | {} | -| [metric_scopes](variables.tf#L135) | List of projects that will act as metric scopes for this project. | list(string) | | [] | -| [org_policies](variables.tf#L147) | Organization policies applied to this project keyed by policy name. | map(object({…})) | | {} | -| [org_policies_data_path](variables.tf#L187) | Path containing org policies in YAML format. | string | | null | -| [oslogin](variables.tf#L193) | Enable OS Login. | bool | | false | -| [oslogin_admins](variables.tf#L199) | List of IAM-style identities that will be granted roles necessary for OS Login administrators. | list(string) | | [] | -| [oslogin_users](variables.tf#L207) | List of IAM-style identities that will be granted roles necessary for OS Login users. | list(string) | | [] | -| [parent](variables.tf#L214) | Parent folder or organization in 'folders/folder_id' or 'organizations/org_id' format. | string | | null | -| [prefix](variables.tf#L224) | Optional prefix used to generate project id and name. | string | | null | -| [project_create](variables.tf#L234) | Create project. When set to false, uses a data source to reference existing project. | bool | | true | -| [service_config](variables.tf#L240) | Configure service API activation. | object({…}) | | {…} | -| [service_encryption_key_ids](variables.tf#L252) | Cloud KMS encryption key in {SERVICE => [KEY_URL]} format. | map(list(string)) | | {} | -| [service_perimeter_bridges](variables.tf#L259) | Name of VPC-SC Bridge perimeters to add project into. See comment in the variables file for format. 
| list(string) | | null | -| [service_perimeter_standard](variables.tf#L266) | Name of VPC-SC Standard perimeter to add project into. See comment in the variables file for format. | string | | null | -| [services](variables.tf#L272) | Service APIs to enable. | list(string) | | [] | -| [shared_vpc_host_config](variables.tf#L278) | Configures this project as a Shared VPC host project (mutually exclusive with shared_vpc_service_project). | object({…}) | | null | -| [shared_vpc_service_config](variables.tf#L287) | Configures this project as a Shared VPC service project (mutually exclusive with shared_vpc_host_config). | object({…}) | | null | -| [skip_delete](variables.tf#L297) | Allows the underlying resources to be destroyed without destroying the project itself. | bool | | false | -| [tag_bindings](variables.tf#L303) | Tag bindings for this project, in key => tag value id format. | map(string) | | null | +| [logging_sinks](variables.tf#L102) | Logging sinks to create for this project. | map(object({…})) | | {} | +| [metric_scopes](variables.tf#L133) | List of projects that will act as metric scopes for this project. | list(string) | | [] | +| [org_policies](variables.tf#L145) | Organization policies applied to this project keyed by policy name. | map(object({…})) | | {} | +| [org_policies_data_path](variables.tf#L185) | Path containing org policies in YAML format. | string | | null | +| [oslogin](variables.tf#L191) | Enable OS Login. | bool | | false | +| [oslogin_admins](variables.tf#L197) | List of IAM-style identities that will be granted roles necessary for OS Login administrators. | list(string) | | [] | +| [oslogin_users](variables.tf#L205) | List of IAM-style identities that will be granted roles necessary for OS Login users. | list(string) | | [] | +| [parent](variables.tf#L212) | Parent folder or organization in 'folders/folder_id' or 'organizations/org_id' format. 
| string | | null | +| [prefix](variables.tf#L222) | Optional prefix used to generate project id and name. | string | | null | +| [project_create](variables.tf#L232) | Create project. When set to false, uses a data source to reference existing project. | bool | | true | +| [service_config](variables.tf#L238) | Configure service API activation. | object({…}) | | {…} | +| [service_encryption_key_ids](variables.tf#L250) | Cloud KMS encryption key in {SERVICE => [KEY_URL]} format. | map(list(string)) | | {} | +| [service_perimeter_bridges](variables.tf#L257) | Name of VPC-SC Bridge perimeters to add project into. See comment in the variables file for format. | list(string) | | null | +| [service_perimeter_standard](variables.tf#L264) | Name of VPC-SC Standard perimeter to add project into. See comment in the variables file for format. | string | | null | +| [services](variables.tf#L270) | Service APIs to enable. | list(string) | | [] | +| [shared_vpc_host_config](variables.tf#L276) | Configures this project as a Shared VPC host project (mutually exclusive with shared_vpc_service_project). | object({…}) | | null | +| [shared_vpc_service_config](variables.tf#L285) | Configures this project as a Shared VPC service project (mutually exclusive with shared_vpc_host_config). | object({…}) | | null | +| [skip_delete](variables.tf#L295) | Allows the underlying resources to be destroyed without destroying the project itself. | bool | | false | +| [tag_bindings](variables.tf#L301) | Tag bindings for this project, in key => tag value id format. 
| map(string) | | null | ## Outputs diff --git a/modules/project/logging.tf b/modules/project/logging.tf index 1e5948f7..bc1b1e8b 100644 --- a/modules/project/logging.tf +++ b/modules/project/logging.tf @@ -21,7 +21,7 @@ locals { for type in ["bigquery", "pubsub", "logging", "storage"] : type => { for name, sink in var.logging_sinks : - name => sink if sink.iam && sink.destination.type == type + name => sink if sink.iam && sink.type == type } } } @@ -31,7 +31,7 @@ resource "google_logging_project_sink" "sink" { name = each.key description = coalesce(each.value.description, "${each.key} (Terraform-managed).") project = local.project.project_id - destination = "${each.value.destination.type}.googleapis.com/${each.value.destination.target}" + destination = "${each.value.type}.googleapis.com/${each.value.destination}" filter = each.value.filter unique_writer_identity = each.value.unique_writer disabled = each.value.disabled @@ -60,37 +60,37 @@ resource "google_logging_project_sink" "sink" { resource "google_storage_bucket_iam_member" "gcs-sinks-binding" { for_each = local.sink_bindings["storage"] - bucket = each.value.destination.target + bucket = each.value.destination role = "roles/storage.objectCreator" member = google_logging_project_sink.sink[each.key].writer_identity } resource "google_bigquery_dataset_iam_member" "bq-sinks-binding" { for_each = local.sink_bindings["bigquery"] - project = split("/", each.value.destination.target)[1] - dataset_id = split("/", each.value.destination.target)[3] + project = split("/", each.value.destination)[1] + dataset_id = split("/", each.value.destination)[3] role = "roles/bigquery.dataEditor" member = google_logging_project_sink.sink[each.key].writer_identity } resource "google_pubsub_topic_iam_member" "pubsub-sinks-binding" { for_each = local.sink_bindings["pubsub"] - project = split("/", each.value.destination.target)[1] - topic = split("/", each.value.destination.target)[3] + project = split("/", each.value.destination)[1] + 
topic = split("/", each.value.destination)[3] role = "roles/pubsub.publisher" member = google_logging_project_sink.sink[each.key].writer_identity } resource "google_project_iam_member" "bucket-sinks-binding" { for_each = local.sink_bindings["logging"] - project = split("/", each.value.destination.target)[1] + project = split("/", each.value.destination)[1] role = "roles/logging.bucketWriter" member = google_logging_project_sink.sink[each.key].writer_identity condition { title = "${each.key} bucket writer" description = "Grants bucketWriter to ${google_logging_project_sink.sink[each.key].writer_identity} used by log sink ${each.key} on ${local.project.project_id}" - expression = "resource.name.endsWith('${each.value.destination.target}')" + expression = "resource.name.endsWith('${each.value.destination}')" } } diff --git a/modules/project/variables.tf b/modules/project/variables.tf index e31ee84c..3769a1fb 100644 --- a/modules/project/variables.tf +++ b/modules/project/variables.tf @@ -104,31 +104,29 @@ variable "logging_sinks" { type = map(object({ bq_partitioned_table = optional(bool) description = optional(string) - destination = object({ - type = string - target = string - }) - disabled = optional(bool, false) - exclusions = optional(map(string), {}) - filter = string - iam = optional(bool, true) - unique_writer = optional(bool) + destination = string + disabled = optional(bool, false) + exclusions = optional(map(string), {}) + filter = string + iam = optional(bool, true) + type = string + unique_writer = optional(bool) })) default = {} nullable = false validation { condition = alltrue([ for k, v in var.logging_sinks : - contains(["bigquery", "logging", "pubsub", "storage"], v.destination.type) + contains(["bigquery", "logging", "pubsub", "storage"], v.type) ]) - error_message = "Destination type must be one of 'bigquery', 'logging', 'pubsub', 'storage'." + error_message = "Type must be one of 'bigquery', 'logging', 'pubsub', 'storage'." 
} validation { condition = alltrue([ for k, v in var.logging_sinks : - v.bq_partitioned_table != true || v.destination.type == "bigquery" + v.bq_partitioned_table != true || v.type == "bigquery" ]) - error_message = "Can only set bq_partitioned_table when destination type is `bigquery`." + error_message = "Can only set bq_partitioned_table when type is `bigquery`." } } diff --git a/modules/pubsub/outputs.tf b/modules/pubsub/outputs.tf index 9a2be657..c26eb4d9 100644 --- a/modules/pubsub/outputs.tf +++ b/modules/pubsub/outputs.tf @@ -14,17 +14,6 @@ * limitations under the License. */ -output "as_logging_destination" { - description = "Parameters to use this topic as a log sink destination." - value = { - type = "pubsub" - target = google_pubsub_topic.default.id - } - depends_on = [ - google_pubsub_topic_iam_binding.default - ] -} - output "id" { description = "Topic id." value = google_pubsub_topic.default.id diff --git a/tests/modules/folder/fixture/test.logging-sinks.tfvars b/tests/modules/folder/fixture/test.logging-sinks.tfvars index b7dc6687..95a272e1 100644 --- a/tests/modules/folder/fixture/test.logging-sinks.tfvars +++ b/tests/modules/folder/fixture/test.logging-sinks.tfvars @@ -1,32 +1,24 @@ logging_sinks = { warning = { - destination = { - type = "storage" - target = "mybucket" - } - filter = "severity=WARNING" + destination = "mybucket" + type = "storage" + filter = "severity=WARNING" } info = { - destination = { - type = "bigquery" - target = "projects/myproject/datasets/mydataset" - } - filter = "severity=INFO" - disabled = true + destination = "projects/myproject/datasets/mydataset" + type = "bigquery" + filter = "severity=INFO" + disabled = true } notice = { - destination = { - type = "pubsub" - target = "projects/myproject/topics/mytopic" - } + destination = "projects/myproject/topics/mytopic" + type = "pubsub" filter = "severity=NOTICE" include_children = false } debug = { - destination = { - type = "logging" - target = 
"projects/myproject/locations/global/buckets/mybucket" - } + destination = "projects/myproject/locations/global/buckets/mybucket" + type = "logging" filter = "severity=DEBUG" include_children = false exclusions = { diff --git a/tests/modules/organization/fixture/test.logging-sinks.tfvars b/tests/modules/organization/fixture/test.logging-sinks.tfvars index b7dc6687..95a272e1 100644 --- a/tests/modules/organization/fixture/test.logging-sinks.tfvars +++ b/tests/modules/organization/fixture/test.logging-sinks.tfvars @@ -1,32 +1,24 @@ logging_sinks = { warning = { - destination = { - type = "storage" - target = "mybucket" - } - filter = "severity=WARNING" + destination = "mybucket" + type = "storage" + filter = "severity=WARNING" } info = { - destination = { - type = "bigquery" - target = "projects/myproject/datasets/mydataset" - } - filter = "severity=INFO" - disabled = true + destination = "projects/myproject/datasets/mydataset" + type = "bigquery" + filter = "severity=INFO" + disabled = true } notice = { - destination = { - type = "pubsub" - target = "projects/myproject/topics/mytopic" - } + destination = "projects/myproject/topics/mytopic" + type = "pubsub" filter = "severity=NOTICE" include_children = false } debug = { - destination = { - type = "logging" - target = "projects/myproject/locations/global/buckets/mybucket" - } + destination = "projects/myproject/locations/global/buckets/mybucket" + type = "logging" filter = "severity=DEBUG" include_children = false exclusions = { diff --git a/tests/modules/project/fixture/test.logging-sinks.tfvars b/tests/modules/project/fixture/test.logging-sinks.tfvars index 35f99170..5c79cfb5 100644 --- a/tests/modules/project/fixture/test.logging-sinks.tfvars +++ b/tests/modules/project/fixture/test.logging-sinks.tfvars @@ -1,33 +1,25 @@ logging_sinks = { warning = { - destination = { - type = "storage" - target = "mybucket" - } - filter = "severity=WARNING" + destination = "mybucket" + type = "storage" + filter = 
"severity=WARNING" } info = { - destination = { - type = "bigquery" - target = "projects/myproject/datasets/mydataset" - } - filter = "severity=INFO" - disabled = true + destination = "projects/myproject/datasets/mydataset" + type = "bigquery" + filter = "severity=INFO" + disabled = true } notice = { - destination = { - type = "pubsub" - target = "projects/myproject/topics/mytopic" - } + destination = "projects/myproject/topics/mytopic" + type = "pubsub" filter = "severity=NOTICE" unique_writer = true } debug = { - destination = { - type = "logging" - target = "projects/myproject/locations/global/buckets/mybucket" - } - filter = "severity=DEBUG" + destination = "projects/myproject/locations/global/buckets/mybucket" + type = "logging" + filter = "severity=DEBUG" exclusions = { no-compute = "logName:compute" no-container = "logName:container" From 4a25129765f4cfa49a503713aa7cff7547984b7e Mon Sep 17 00:00:00 2001 From: Julio Castillo Date: Sat, 12 Nov 2022 19:25:41 +0100 Subject: [PATCH 7/7] Update readme --- modules/bigquery-dataset/README.md | 17 ++++++++--------- modules/gcs/README.md | 12 ++++++------ modules/logging-bucket/README.md | 3 +-- modules/pubsub/README.md | 9 ++++----- 4 files changed, 19 insertions(+), 22 deletions(-) diff --git a/modules/bigquery-dataset/README.md b/modules/bigquery-dataset/README.md index ebca51e2..29acba39 100644 --- a/modules/bigquery-dataset/README.md +++ b/modules/bigquery-dataset/README.md @@ -196,14 +196,13 @@ module "bigquery-dataset" { | name | description | sensitive | |---|---|:---:| -| [as_logging_destination](outputs.tf#L17) | Parameters to use this dataset as a log sink destination. | | -| [dataset](outputs.tf#L32) | Dataset resource. | | -| [dataset_id](outputs.tf#L37) | Dataset id. | | -| [id](outputs.tf#L49) | Fully qualified dataset id. | | -| [self_link](outputs.tf#L61) | Dataset self link. | | -| [table_ids](outputs.tf#L73) | Map of fully qualified table ids keyed by table ids. 
| | -| [tables](outputs.tf#L78) | Table resources. | | -| [view_ids](outputs.tf#L83) | Map of fully qualified view ids keyed by view ids. | | -| [views](outputs.tf#L88) | View resources. | | +| [dataset](outputs.tf#L17) | Dataset resource. | | +| [dataset_id](outputs.tf#L22) | Dataset id. | | +| [id](outputs.tf#L34) | Fully qualified dataset id. | | +| [self_link](outputs.tf#L46) | Dataset self link. | | +| [table_ids](outputs.tf#L58) | Map of fully qualified table ids keyed by table ids. | | +| [tables](outputs.tf#L63) | Table resources. | | +| [view_ids](outputs.tf#L68) | Map of fully qualified view ids keyed by view ids. | | +| [views](outputs.tf#L73) | View resources. | | diff --git a/modules/gcs/README.md b/modules/gcs/README.md index 11b4b6ba..7e6cc22f 100644 --- a/modules/gcs/README.md +++ b/modules/gcs/README.md @@ -134,11 +134,11 @@ module "bucket-gcs-notification" { | name | description | sensitive | |---|---|:---:| -| [as_logging_destination](outputs.tf#L17) | Parameters to use this bucket as a log sink destination. | | -| [bucket](outputs.tf#L29) | Bucket resource. | | -| [name](outputs.tf#L34) | Bucket name. | | -| [notification](outputs.tf#L43) | GCS Notification self link. | | -| [topic](outputs.tf#L48) | Topic ID used by GCS. | | -| [url](outputs.tf#L53) | Bucket URL. | | +| [bucket](outputs.tf#L17) | Bucket resource. | | +| [id](outputs.tf#L28) | Bucket ID (same as name). | | +| [name](outputs.tf#L37) | Bucket name. | | +| [notification](outputs.tf#L46) | GCS Notification self link. | | +| [topic](outputs.tf#L51) | Topic ID used by GCS. | | +| [url](outputs.tf#L56) | Bucket URL. 
| | diff --git a/modules/logging-bucket/README.md b/modules/logging-bucket/README.md index 5328b943..7af82ccb 100644 --- a/modules/logging-bucket/README.md +++ b/modules/logging-bucket/README.md @@ -56,7 +56,6 @@ module "bucket-default" { | name | description | sensitive | |---|---|:---:| -| [as_logging_destination](outputs.tf#L17) | Parameters to use this bucket as a log sink destination. | | -| [id](outputs.tf#L30) | ID of the created bucket. | | +| [id](outputs.tf#L17) | ID of the created bucket. | | diff --git a/modules/pubsub/README.md b/modules/pubsub/README.md index a8339110..b75aaf6d 100644 --- a/modules/pubsub/README.md +++ b/modules/pubsub/README.md @@ -111,10 +111,9 @@ module "pubsub" { | name | description | sensitive | |---|---|:---:| -| [as_logging_destination](outputs.tf#L17) | Parameters to use this topic as a log sink destination. | | -| [id](outputs.tf#L28) | Topic id. | | -| [subscription_id](outputs.tf#L36) | Subscription ids. | | -| [subscriptions](outputs.tf#L46) | Subscription resources. | | -| [topic](outputs.tf#L54) | Topic resource. | | +| [id](outputs.tf#L17) | Topic id. | | +| [subscription_id](outputs.tf#L25) | Subscription ids. | | +| [subscriptions](outputs.tf#L35) | Subscription resources. | | +| [topic](outputs.tf#L43) | Topic resource. | |