Add target_node and minor fixes (#389)

* Add separate BQ tables per asset type

* Add missing BQ location

* Add missing CF region

* Change Scheduler job to non-generic

* Add target_node, pass tests

* Do tfdoc.py

* Do terraform fmt

* Do terraform fmt -recursive for tests' fixtures

* Fix typo in test's variables
arsenyspb authored 2021-12-15 21:07:48 +08:00, committed by GitHub
commit 5beba11058 (parent a4c48b8950)
5 changed files with 26 additions and 19 deletions


@@ -43,7 +43,7 @@ You can also create a dashboard connecting [Datalab](https://datastudio.google.c
 
 | name | description | type | required | default |
 |---|---|:---: |:---:|:---:|
-| cai_config | Cloud Asset inventory export config. | <code title="object&#40;&#123;&#10;bq_dataset &#61; string&#10;bq_table &#61; string&#10;&#125;&#41;">object({...})</code> | ✓ | |
+| cai_config | Cloud Asset inventory export config. | <code title="object&#40;&#123;&#10;bq_dataset &#61; string&#10;bq_table &#61; string&#10;target_node &#61; string&#10;&#125;&#41;">object({...})</code> | ✓ | |
 | project_id | Project id that references existing project. | <code title="">string</code> | ✓ | |
 | *billing_account* | Billing account id used as default for new projects. | <code title="">string</code> | | <code title="">null</code> |
 | *bundle_path* | Path used to write the intermediate Cloud Function code bundle. | <code title="">string</code> | | <code title="">./bundle.zip</code> |


@@ -50,17 +50,18 @@ def _configure_logging(verbose=True):
 @click.option('--bq-project', required=True, help='Bigquery project to use.')
 @click.option('--bq-dataset', required=True, help='Bigquery dataset to use.')
 @click.option('--bq-table', required=True, help='Bigquery table name to use.')
+@click.option('--target-node', required=True, help='Node in Google Cloud resource hierarchy.')
 @click.option('--read-time', required=False, help=(
     'Day to take an asset snapshot in \'YYYYMMDD\' format, uses current day '
     ' as default. Export will run at midnight of the specified day.'))
 @click.option('--verbose', is_flag=True, help='Verbose output')
-def main_cli(project=None, bq_project=None, bq_dataset=None, bq_table=None,
+def main_cli(project=None, bq_project=None, bq_dataset=None, bq_table=None, target_node=None,
              read_time=None, verbose=False):
   '''Trigger Cloud Asset inventory export to Bigquery. Data will be stored in
   the dataset specified on a dated table with the name specified.
   '''
   try:
-    _main(project, bq_project, bq_dataset, bq_table, read_time, verbose)
+    _main(project, bq_project, bq_dataset, bq_table, target_node, read_time, verbose)
   except RuntimeError:
     logging.exception('exception raised')
 
@@ -78,25 +79,25 @@ def main(event, context):
     logging.exception('exception in cloud function entry point')
 
 
-def _main(project=None, bq_project=None, bq_dataset=None, bq_table=None, read_time=None, verbose=False):
+def _main(project=None, bq_project=None, bq_dataset=None, bq_table=None, target_node=None, read_time=None, verbose=False):
   'Module entry point used by cli and cloud function wrappers.'
   _configure_logging(verbose)
   if not read_time:
     read_time = datetime.datetime.now()
   client = asset_v1.AssetServiceClient()
-  parent = 'projects/%s' % project
   content_type = asset_v1.ContentType.RESOURCE
   output_config = asset_v1.OutputConfig()
   output_config.bigquery_destination.dataset = 'projects/%s/datasets/%s' % (
       bq_project, bq_dataset)
   output_config.bigquery_destination.table = '%s_%s' % (
       bq_table, read_time.strftime('%Y%m%d'))
+  output_config.bigquery_destination.separate_tables_per_asset_type = True
   output_config.bigquery_destination.force = True
   try:
     response = client.export_assets(
         request={
-            'parent': parent,
+            'parent': target_node,
             'read_time': read_time,
             'content_type': content_type,
             'output_config': output_config
         })
@@ -105,7 +106,7 @@ def _main(project=None, bq_project=None, bq_dataset=None, bq_table=None, read_ti
   except (GoogleAPIError, googleapiclient.errors.HttpError) as e:
     logging.debug('API Error: %s', e, exc_info=True)
     raise RuntimeError(
-        'Error fetching Asset Inventory entries (project: %s)' % parent, e)
+        'Error fetching Asset Inventory entries (resource manager node: %s)' % target_node, e)
 
 
 if __name__ == '__main__':
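For context, the export the function now performs boils down to the standalone sketch below. It is illustrative only: it assumes the google-cloud-asset client library and application-default credentials, and every resource name in it is a placeholder, not a value from this commit.

```python
# Minimal sketch of a CAI export to BigQuery with an explicit target node.
# Assumes google-cloud-asset is installed and ADC credentials are available;
# all project/dataset/table names below are placeholders.
import datetime

from google.cloud import asset_v1

client = asset_v1.AssetServiceClient()
output_config = asset_v1.OutputConfig()
output_config.bigquery_destination.dataset = 'projects/my-bq-project/datasets/my_dataset'
output_config.bigquery_destination.table = 'my_table_%s' % (
    datetime.datetime.now().strftime('%Y%m%d'))
output_config.bigquery_destination.separate_tables_per_asset_type = True
output_config.bigquery_destination.force = True

# The parent may be any resource manager node:
#   'projects/<id-or-number>', 'folders/<number>', 'organizations/<number>'
operation = client.export_assets(request={
    'parent': 'organizations/1234567890',
    'content_type': asset_v1.ContentType.RESOURCE,
    'output_config': output_config,
})
print(operation.result())  # blocks until the long-running export completes
```

The parent string is exactly what the new target_node argument carries, which is what lets the export cover a whole folder or organization instead of a single project.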


@@ -73,6 +73,7 @@ module "pubsub" {
 module "cf" {
   source      = "../../modules/cloud-function"
   project_id  = module.project.project_id
+  region      = var.region
   name        = var.name
   bucket_name = "${var.name}-${random_pet.random.id}"
   bucket_config = {
@@ -108,8 +109,8 @@ resource "google_app_engine_application" "app" {
 
 resource "google_cloud_scheduler_job" "job" {
   project     = google_app_engine_application.app.project
   region      = var.region
-  name        = "test-job"
-  description = "test http job"
+  name        = "cai-export-job"
+  description = "CAI Export Job"
   schedule    = "* 9 * * 1"
   time_zone   = "Etc/UTC"
@@ -117,10 +118,11 @@ resource "google_cloud_scheduler_job" "job" {
     attributes = {}
     topic_name = module.pubsub.topic.id
     data = base64encode(jsonencode({
-      project    = module.project.project_id
-      bq_project = module.project.project_id
-      bq_dataset = var.cai_config.bq_dataset
-      bq_table   = var.cai_config.bq_table
+      project     = module.project.project_id
+      bq_project  = module.project.project_id
+      bq_dataset  = var.cai_config.bq_dataset
+      bq_table    = var.cai_config.bq_table
+      target_node = var.cai_config.target_node
     }))
   }
 }
@@ -133,6 +135,7 @@ module "bq" {
   source     = "../../modules/bigquery-dataset"
   project_id = module.project.project_id
   id         = var.cai_config.bq_dataset
+  location   = var.region
   access = {
     owner = { role = "OWNER", type = "user" }
   }
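On the consuming side, the Cloud Function receives this Scheduler payload base64-encoded inside the Pub/Sub event. A minimal sketch of the unpacking step, assuming the standard background-function event shape (the helper name is illustrative, not the module's actual code):

```python
import base64
import json


def parse_scheduler_payload(event):
  'Decode the base64encode(jsonencode({...})) body published by the job.'
  # Pub/Sub-triggered functions receive the message body as a base64
  # string in event['data'].
  payload = json.loads(base64.b64decode(event['data']).decode('utf-8'))
  # The keys mirror the jsonencode() map above, including target_node.
  return (payload['project'], payload['bq_project'], payload['bq_dataset'],
          payload['bq_table'], payload['target_node'])
```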


@@ -29,8 +29,9 @@ variable "bundle_path" {
 
 variable "cai_config" {
   description = "Cloud Asset inventory export config."
   type = object({
-    bq_dataset = string
-    bq_table   = string
+    bq_dataset  = string
+    bq_table    = string
+    target_node = string
   })
 }
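The object type constrains the keys but not the shape of target_node itself. If stricter checking is wanted at runtime, a guard along these lines could sit on the Python side before the API call (purely illustrative, not part of this commit):

```python
import re

# Parent formats accepted by the CAI export API; hypothetical helper.
_NODE_RE = re.compile(
    r'^(projects/[a-z0-9-]+|folders/[0-9]+|organizations/[0-9]+)$')


def validate_target_node(node):
  'Raise ValueError unless node names a project, folder or organization.'
  if not _NODE_RE.match(node):
    raise ValueError('invalid target node: %r' % node)
  return node
```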


@@ -19,12 +19,14 @@ variable "billing_account" {
 
 variable "cai_config" {
   type = object({
-    bq_dataset = string
-    bq_table   = string
+    bq_dataset  = string
+    bq_table    = string
+    target_node = string
   })
   default = {
-    bq_dataset = "my-dataset"
-    bq_table   = "my_table"
+    bq_dataset  = "my-dataset"
+    bq_table    = "my_table"
+    target_node = "organization/1234567890"
   }
 }
 