Update environments e2e example with new sinks

This commit is contained in:
Julio Castillo 2020-12-05 13:24:44 +01:00
parent 1af70c748c
commit d4201c740d
9 changed files with 30 additions and 243 deletions

View File

@ -36,4 +36,13 @@ locals {
||
substr(var.root_node, 0, 13) == "organizations"
)
logging_sinks = {
audit-logs = {
type = "bigquery"
destination = module.audit-dataset.id
filter = var.audit_filter
iam = true
}
}
root_node_type = split("/", var.root_node)[0]
}

View File

@ -105,8 +105,7 @@ module "audit-project" {
prefix = var.prefix
billing_account = var.billing_account_id
iam = {
"roles/bigquery.dataEditor" = [module.audit-log-sinks.writer_identities[0]]
"roles/viewer" = var.iam_audit_viewers
"roles/viewer" = var.iam_audit_viewers
}
services = concat(var.project_services, [
"bigquery.googleapis.com",
@ -128,17 +127,22 @@ module "audit-dataset" {
}
}
module "audit-log-sinks" {
source = "../../modules/logging-sinks"
parent = var.root_node
destinations = {
audit-logs = "bigquery.googleapis.com/${module.audit-dataset.id}"
}
sinks = {
audit-logs = var.audit_filter
}
# Root node as organization: instantiated only when var.root_node is of the
# form "organizations/...", and attaches the shared logging sink definitions
# computed in locals.
module "root_org" {
count = local.root_node_type == "organizations" ? 1 : 0
source = "../../modules/organization"
organization_id = var.root_node
logging_sinks = local.logging_sinks
}
# Root node as folder: instantiated only when var.root_node is of the form
# "folders/..."; folder_create=false reuses the existing folder instead of
# creating one, and the same shared logging sinks are attached.
module "root_folder" {
count = local.root_node_type == "folders" ? 1 : 0
source = "../../modules/folder"
id = var.root_node
folder_create = false
logging_sinks = local.logging_sinks
}
###############################################################################
# Shared resources (GCR, GCS, KMS, etc.) #
###############################################################################

View File

@ -1,26 +0,0 @@
# Terraform Logging Sinks Module
This module allows easy creation of one or more logging sinks.
## Example
<!-- BEGIN TFDOC -->
## Variables
| name | description | type | required | default |
|---|---|:---: |:---:|:---:|
| destinations | Map of destinations by sink name. | <code title="map&#40;string&#41;">map(string)</code> | ✓ | |
| parent | Resource where the sink will be created, eg 'organizations/nnnnnnnn'. | <code title="">string</code> | ✓ | |
| sinks | Map of sink name / sink filter. | <code title="map&#40;string&#41;">map(string)</code> | ✓ | |
| *default_options* | Default options used for sinks where no specific options are set. | <code title="object&#40;&#123;&#10;bigquery_partitioned_tables &#61; bool&#10;include_children &#61; bool&#10;unique_writer_identity &#61; bool&#10;&#125;&#41;">object({...})</code> | | <code title="&#123;&#10;bigquery_partitioned_tables &#61; true&#10;include_children &#61; true&#10;unique_writer_identity &#61; false&#10;&#125;">...</code> |
| *sink_options* | Optional map of sink name / sink options. If no options are specified for a sink defaults will be used. | <code title="map&#40;object&#40;&#123;&#10;bigquery_partitioned_tables &#61; bool&#10;include_children &#61; bool&#10;unique_writer_identity &#61; bool&#10;&#125;&#41;&#41;">map(object({...}))</code> | | <code title="">{}</code> |
## Outputs
| name | description | sensitive |
|---|---|:---:|
| names | Log sink names. | |
| sinks | Log sink resources. | |
| writer_identities | Log sink writer identities. | |
<!-- END TFDOC -->

View File

@ -1,97 +0,0 @@
/**
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
locals {
# True per sink name when its destination URI starts with "bigquery";
# used to decide whether a bigquery_options block is emitted.
bigquery_destinations = {
for name, destination in var.destinations :
name => substr(destination, 0, 8) == "bigquery"
}
# var.parent is "<type>/<id>" (e.g. "organizations/nnnnnnnn"); split it
# into the resource type selector and the bare id.
resource_type = element(split("/", var.parent), 0)
resource_id = element(split("/", var.parent), 1)
# Effective options per sink: explicit entry in var.sink_options if
# present, otherwise var.default_options.
sink_options = {
for name, _ in var.sinks :
name => lookup(var.sink_options, name, var.default_options)
}
# Flat list of created sink resources; only one of the four for_each
# resource types below is non-empty for a given parent.
sink_resources = concat(
[for _, sink in google_logging_organization_sink.sinks : sink],
[for _, sink in google_logging_billing_account_sink.sinks : sink],
[for _, sink in google_logging_folder_sink.sinks : sink],
[for _, sink in google_logging_project_sink.sinks : sink],
)
}
# Organization-level sinks: one per entry in var.sinks, created only when
# the parent resource type is "organizations".
resource "google_logging_organization_sink" "sinks" {
for_each = local.resource_type == "organizations" ? var.sinks : {}
name = each.key
org_id = local.resource_id
filter = each.value
destination = var.destinations[each.key]
include_children = local.sink_options[each.key].include_children
# Only emitted for BigQuery destinations. The dynamic block label must be
# quoted (unquoted labels are deprecated in 0.12 and rejected by newer
# Terraform); the unused iterator alias was dropped.
dynamic "bigquery_options" {
for_each = local.bigquery_destinations[each.key] ? ["1"] : []
content {
use_partitioned_tables = local.sink_options[each.key].bigquery_partitioned_tables
}
}
}
# Billing-account-level sinks: one per entry in var.sinks, created only
# when the parent resource type is "billing_accounts".
resource "google_logging_billing_account_sink" "sinks" {
for_each = local.resource_type == "billing_accounts" ? var.sinks : {}
name = each.key
billing_account = local.resource_id
filter = each.value
destination = var.destinations[each.key]
# Only emitted for BigQuery destinations. The dynamic block label must be
# quoted (unquoted labels are deprecated in 0.12 and rejected by newer
# Terraform); the unused iterator alias was dropped.
dynamic "bigquery_options" {
for_each = local.bigquery_destinations[each.key] ? ["1"] : []
content {
use_partitioned_tables = local.sink_options[each.key].bigquery_partitioned_tables
}
}
}
# Folder-level sinks: one per entry in var.sinks, created only when the
# parent resource type is "folders". Note this resource takes the full
# "folders/nnn" id (var.parent), not the bare id.
resource "google_logging_folder_sink" "sinks" {
for_each = local.resource_type == "folders" ? var.sinks : {}
name = each.key
folder = var.parent
filter = each.value
destination = var.destinations[each.key]
include_children = local.sink_options[each.key].include_children
# Only emitted for BigQuery destinations. The dynamic block label must be
# quoted (unquoted labels are deprecated in 0.12 and rejected by newer
# Terraform); the unused iterator alias was dropped.
dynamic "bigquery_options" {
for_each = local.bigquery_destinations[each.key] ? ["1"] : []
content {
use_partitioned_tables = local.sink_options[each.key].bigquery_partitioned_tables
}
}
}
# Project-level sinks: one per entry in var.sinks, created only when the
# parent resource type is "projects". unique_writer_identity only applies
# at project level, mirroring the provider schema.
resource "google_logging_project_sink" "sinks" {
for_each = local.resource_type == "projects" ? var.sinks : {}
name = each.key
project = local.resource_id
filter = each.value
destination = var.destinations[each.key]
unique_writer_identity = local.sink_options[each.key].unique_writer_identity
# Only emitted for BigQuery destinations. The dynamic block label must be
# quoted (unquoted labels are deprecated in 0.12 and rejected by newer
# Terraform); the unused iterator alias was dropped.
dynamic "bigquery_options" {
for_each = local.bigquery_destinations[each.key] ? ["1"] : []
content {
use_partitioned_tables = local.sink_options[each.key].bigquery_partitioned_tables
}
}
}

View File

@ -1,30 +0,0 @@
/**
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
# Full resource objects, flattened across the four possible sink types.
output "sinks" {
description = "Log sink resources."
value = local.sink_resources
}
# Convenience projection of just the sink names.
output "names" {
description = "Log sink names."
value = [for sink in local.sink_resources : sink.name]
}
# Service-account identities to grant write access on each destination.
output "writer_identities" {
description = "Log sink writer identities."
value = [for sink in local.sink_resources : sink.writer_identity]
}

View File

@ -1,54 +0,0 @@
/**
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
# Fallback options applied to any sink without an entry in sink_options.
variable "default_options" {
description = "Default options used for sinks where no specific options are set."
type = object({
bigquery_partitioned_tables = bool
include_children = bool
unique_writer_identity = bool
})
default = {
bigquery_partitioned_tables = true
include_children = true
unique_writer_identity = false
}
}
# Keys must match the keys of var.sinks; values are destination URIs
# (e.g. "bigquery.googleapis.com/<dataset id>").
variable "destinations" {
description = "Map of destinations by sink name."
type = map(string)
}
# "<type>/<id>"; the type segment selects which sink resource is created.
variable "parent" {
description = "Resource where the sink will be created, eg 'organizations/nnnnnnnn'."
type = string
}
# Sparse per-sink overrides; sinks absent here fall back to default_options.
variable "sink_options" {
description = "Optional map of sink name / sink options. If no options are specified for a sink defaults will be used."
type = map(object({
bigquery_partitioned_tables = bool
include_children = bool
unique_writer_identity = bool
}))
default = {}
}
# One entry per sink: name => Cloud Logging filter expression.
variable "sinks" {
description = "Map of sink name / sink filter."
type = map(string)
}

View File

@ -1,19 +0,0 @@
/**
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
# 0.12.6 is the minimum version supporting for_each on resources, which
# this module relies on.
terraform {
required_version = ">= 0.12.6"
}

View File

@ -26,12 +26,12 @@ BASEDIR = os.path.dirname(os.path.dirname(__file__))
def _plan_runner():
"Returns a function to run Terraform plan on a fixture."
def run_plan(fixture_path, targets=None, **tf_vars):
def run_plan(fixture_path, targets=None, refresh=True, **tf_vars):
"Runs Terraform plan and returns parsed output."
tf = tftest.TerraformTest(fixture_path, BASEDIR,
os.environ.get('TERRAFORM', 'terraform'))
tf.setup()
return tf.plan(output=True, tf_vars=tf_vars, targets=targets)
return tf.plan(output=True, refresh=refresh, tf_vars=tf_vars, targets=targets)
return run_plan
@ -54,9 +54,9 @@ def plan_runner(_plan_runner):
def e2e_plan_runner(_plan_runner):
"Returns a function to run Terraform plan on an end-to-end fixture."
def run_plan(fixture_path, targets=None, **tf_vars):
def run_plan(fixture_path, targets=None, refresh=True, **tf_vars):
"Runs Terraform plan on an end-to-end module using defaults, returns data."
plan = _plan_runner(fixture_path, targets=targets, **tf_vars)
plan = _plan_runner(fixture_path, targets=targets, refresh=refresh, **tf_vars)
# skip the fixture
root_module = plan.root_module['child_modules'][0]
modules = dict((mod['address'], mod['resources'])

View File

@ -22,7 +22,7 @@ FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'fixture')
def test_folder_roles(e2e_plan_runner):
"Test folder roles."
modules, _ = e2e_plan_runner(FIXTURES_DIR)
modules, _ = e2e_plan_runner(FIXTURES_DIR, refresh=False)
for env in ['test', 'prod']:
resources = modules[f'module.test.module.environment-folders["{env}"]']
folders = [r for r in resources if r['type'] == 'google_folder']
@ -41,7 +41,7 @@ def test_org_roles(e2e_plan_runner):
'organization_id': 'organizations/123',
'iam_xpn_config': '{grant = true, target_org = true}'
}
modules, _ = e2e_plan_runner(FIXTURES_DIR, **tf_vars)
modules, _ = e2e_plan_runner(FIXTURES_DIR, refresh=False, **tf_vars)
for env in ['test', 'prod']:
resources = modules[f'module.test.module.environment-folders["{env}"]']
folder_bindings = [r['index']