diff --git a/cloud-operations/scheduled-asset-inventory-export-bq/README.md b/cloud-operations/scheduled-asset-inventory-export-bq/README.md
index 1abecdd9..ef8bca88 100644
--- a/cloud-operations/scheduled-asset-inventory-export-bq/README.md
+++ b/cloud-operations/scheduled-asset-inventory-export-bq/README.md
@@ -43,7 +43,7 @@ You can also create a dashboard connecting [Datalab](https://datastudio.google.c
 
 | name | description | type | required | default |
 |---|---|:---: |:---:|:---:|
-| cai_config | Cloud Asset inventory export config. | object({...}) | ✓ | |
+| cai_config | Cloud Asset inventory export config. | object({...}) | ✓ | |
 | project_id | Project id that references existing project. | string | ✓ | |
 | *billing_account* | Billing account id used as default for new projects. | string | | null |
 | *bundle_path* | Path used to write the intermediate Cloud Function code bundle. | string | | ./bundle.zip |
diff --git a/cloud-operations/scheduled-asset-inventory-export-bq/cf/main.py b/cloud-operations/scheduled-asset-inventory-export-bq/cf/main.py
index 77768190..ad97c326 100755
--- a/cloud-operations/scheduled-asset-inventory-export-bq/cf/main.py
+++ b/cloud-operations/scheduled-asset-inventory-export-bq/cf/main.py
@@ -50,17 +50,18 @@ def _configure_logging(verbose=True):
 @click.option('--bq-project', required=True, help='Bigquery project to use.')
 @click.option('--bq-dataset', required=True, help='Bigquery dataset to use.')
 @click.option('--bq-table', required=True, help='Bigquery table name to use.')
+@click.option('--target-node', required=True, help='Node in Google Cloud resource hierarchy.')
 @click.option('--read-time', required=False, help=(
     'Day to take an asset snapshot in \'YYYYMMDD\' format, uses current day '
     ' as default. Export will run at midnight of the specified day.'))
 @click.option('--verbose', is_flag=True, help='Verbose output')
-def main_cli(project=None, bq_project=None, bq_dataset=None, bq_table=None,
+def main_cli(project=None, bq_project=None, bq_dataset=None, bq_table=None, target_node=None,
              read_time=None, verbose=False):
   '''Trigger Cloud Asset inventory export to Bigquery. Data will be stored in
   the dataset specified on a dated table with the name specified.
   '''
   try:
-    _main(project, bq_project, bq_dataset, bq_table, read_time, verbose)
+    _main(project, bq_project, bq_dataset, bq_table, target_node, read_time, verbose)
   except RuntimeError:
     logging.exception('exception raised')
 
@@ -78,25 +79,25 @@ def main(event, context):
     logging.exception('exception in cloud function entry point')
 
 
-def _main(project=None, bq_project=None, bq_dataset=None, bq_table=None, read_time=None, verbose=False):
+def _main(project=None, bq_project=None, bq_dataset=None, bq_table=None, target_node=None, read_time=None, verbose=False):
   'Module entry point used by cli and cloud function wrappers.'
   _configure_logging(verbose)
   if not read_time:
     read_time = datetime.datetime.now()
   client = asset_v1.AssetServiceClient()
-  parent = 'projects/%s' % project
   content_type = asset_v1.ContentType.RESOURCE
   output_config = asset_v1.OutputConfig()
   output_config.bigquery_destination.dataset = 'projects/%s/datasets/%s' % (
       bq_project, bq_dataset)
   output_config.bigquery_destination.table = '%s_%s' % (
       bq_table, read_time.strftime('%Y%m%d'))
+  output_config.bigquery_destination.separate_tables_per_asset_type = True
   output_config.bigquery_destination.force = True
   try:
     response = client.export_assets(
         request={
-            'parent': parent,
+            'parent': target_node,
             'read_time': read_time,
             'content_type': content_type,
             'output_config': output_config
@@ -105,7 +106,7 @@ def _main(project=None, bq_project=None, bq_dataset=None, bq_table=None, read_ti
   except (GoogleAPIError, googleapiclient.errors.HttpError) as e:
     logging.debug('API Error: %s', e, exc_info=True)
     raise RuntimeError(
-        'Error fetching Asset Inventory entries (project: %s)' % parent, e)
+        'Error fetching Asset Inventory entries (resource manager node: %s)' % target_node, e)
 
 
 if __name__ == '__main__':
diff --git a/cloud-operations/scheduled-asset-inventory-export-bq/main.tf b/cloud-operations/scheduled-asset-inventory-export-bq/main.tf
index 1b5306c4..0052401d 100644
--- a/cloud-operations/scheduled-asset-inventory-export-bq/main.tf
+++ b/cloud-operations/scheduled-asset-inventory-export-bq/main.tf
@@ -73,6 +73,7 @@ module "pubsub" {
 module "cf" {
   source      = "../../modules/cloud-function"
   project_id  = module.project.project_id
+  region      = var.region
   name        = var.name
   bucket_name = "${var.name}-${random_pet.random.id}"
   bucket_config = {
@@ -108,8 +109,8 @@ resource "google_app_engine_application" "app" {
 resource "google_cloud_scheduler_job" "job" {
   project     = google_app_engine_application.app.project
   region      = var.region
-  name        = "test-job"
-  description = "test http job"
+  name        = "cai-export-job"
+  description = "CAI Export Job"
   schedule    = "* 9 * * 1"
   time_zone   = "Etc/UTC"
 
@@ -117,10 +118,11 @@
     attributes = {}
     topic_name = module.pubsub.topic.id
     data = base64encode(jsonencode({
-      project    = module.project.project_id
-      bq_project = module.project.project_id
-      bq_dataset = var.cai_config.bq_dataset
-      bq_table   = var.cai_config.bq_table
+      project     = module.project.project_id
+      bq_project  = module.project.project_id
+      bq_dataset  = var.cai_config.bq_dataset
+      bq_table    = var.cai_config.bq_table
+      target_node = var.cai_config.target_node
     }))
   }
 }
@@ -133,6 +135,7 @@ module "bq" {
   source     = "../../modules/bigquery-dataset"
   project_id = module.project.project_id
   id         = var.cai_config.bq_dataset
+  location   = var.region
   access = {
     owner = { role = "OWNER", type = "user" }
   }
diff --git a/cloud-operations/scheduled-asset-inventory-export-bq/variables.tf b/cloud-operations/scheduled-asset-inventory-export-bq/variables.tf
index 6f8217d3..5bb62166 100644
--- a/cloud-operations/scheduled-asset-inventory-export-bq/variables.tf
+++ b/cloud-operations/scheduled-asset-inventory-export-bq/variables.tf
@@ -29,8 +29,9 @@ variable "bundle_path" {
 variable "cai_config" {
   description = "Cloud Asset inventory export config."
   type = object({
-    bq_dataset = string
-    bq_table   = string
+    bq_dataset  = string
+    bq_table    = string
+    target_node = string
   })
 }
 
diff --git a/tests/cloud_operations/scheduled_asset_inventory_export_bq/fixture/variables.tf b/tests/cloud_operations/scheduled_asset_inventory_export_bq/fixture/variables.tf
index 67d2624b..1d70f827 100644
--- a/tests/cloud_operations/scheduled_asset_inventory_export_bq/fixture/variables.tf
+++ b/tests/cloud_operations/scheduled_asset_inventory_export_bq/fixture/variables.tf
@@ -19,12 +19,14 @@ variable "billing_account" {
 
 variable "cai_config" {
   type = object({
-    bq_dataset = string
-    bq_table   = string
+    bq_dataset  = string
+    bq_table    = string
+    target_node = string
   })
   default = {
-    bq_dataset = "my-dataset"
-    bq_table   = "my_table"
+    bq_dataset  = "my-dataset"
+    bq_table    = "my_table"
+    target_node = "organizations/1234567890"
   }
 }
 
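Note on usage: the new `target_node` value flows from the scheduler payload through the Cloud Function into the `export_assets` call as its `parent`, so it has to use one of the resource-hierarchy formats the Cloud Asset Inventory API accepts (`organizations/<id>`, `folders/<id>` or `projects/<id>`). As a rough sketch, a tfvars file for the updated variable could look like the following; every id and name here is a placeholder, not a value from this change:

```hcl
# terraform.tfvars (illustrative values only)
project_id = "my-cai-project"

cai_config = {
  bq_dataset  = "my-dataset"
  bq_table    = "my_table"
  # also accepts folders/<folder_id> or projects/<project_id>
  target_node = "organizations/1234567890"
}
```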