Add bq_table_overwrite handling

Arseny Chernov 2021-12-16 15:39:49 +08:00
parent 0fe5b6b03f
commit 976fabdf86
4 changed files with 32 additions and 23 deletions

@@ -50,18 +50,19 @@ def _configure_logging(verbose=True):
@click.option('--bq-project', required=True, help='Bigquery project to use.')
@click.option('--bq-dataset', required=True, help='Bigquery dataset to use.')
@click.option('--bq-table', required=True, help='Bigquery table name to use.')
@click.option('--bq-table-overwrite', required=True, help='If true, overwrite a single <table>_latest BQ table; otherwise create a new dated <table>_YYYYMMDD one.')
@click.option('--target-node', required=True, help='Node in Google Cloud resource hierarchy.')
@click.option('--read-time', required=False, help=(
'Day to take an asset snapshot in \'YYYYMMDD\' format, uses current day '
' as default. Export will run at midnight of the specified day.'))
@click.option('--verbose', is_flag=True, help='Verbose output')
def main_cli(project=None, bq_project=None, bq_dataset=None, bq_table=None, target_node=None,
def main_cli(project=None, bq_project=None, bq_dataset=None, bq_table=None, bq_table_overwrite=None, target_node=None,
read_time=None, verbose=False):
'''Trigger Cloud Asset inventory export to Bigquery. Data will be stored in
the dataset specified, on a dated table with the name specified or on a
single '<name>_latest' table when --bq-table-overwrite is true.
'''
try:
_main(project, bq_project, bq_dataset, bq_table, target_node, read_time, verbose)
_main(project, bq_project, bq_dataset, bq_table, bq_table_overwrite, target_node, read_time, verbose)
except RuntimeError:
logging.exception('exception raised')
@@ -79,19 +80,22 @@ def main(event, context):
logging.exception('exception in cloud function entry point')
def _main(project=None, bq_project=None, bq_dataset=None, bq_table=None, target_node=None, read_time=None, verbose=False):
def _main(project=None, bq_project=None, bq_dataset=None, bq_table=None, bq_table_overwrite=None, target_node=None, read_time=None, verbose=False):
'Module entry point used by cli and cloud function wrappers.'
_configure_logging(verbose)
if not read_time:
read_time = datetime.datetime.now()
client = asset_v1.AssetServiceClient()
content_type = asset_v1.ContentType.RESOURCE
output_config = asset_v1.OutputConfig()
client = asset_v1.AssetServiceClient()
if bq_table_overwrite == False:
read_time = datetime.datetime.now()
output_config.bigquery_destination.table = '%s_%s' % (
bq_table, read_time.strftime('%Y%m%d'))
else:
output_config.bigquery_destination.table = '%s_latest' % (
bq_table)
content_type = asset_v1.ContentType.RESOURCE
output_config.bigquery_destination.dataset = 'projects/%s/datasets/%s' % (
bq_project, bq_dataset)
output_config.bigquery_destination.table = '%s_%s' % (
bq_table, read_time.strftime('%Y%m%d'))
output_config.bigquery_destination.separate_tables_per_asset_type = True
output_config.bigquery_destination.force = True
try:
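
A hedged paraphrase of the table-naming branch added above, for readability only (the helper below is not part of the commit; it mirrors the logic, with the commit's explicit '== False' check treated as a plain falsy test):

import datetime

def destination_table_name(bq_table, bq_table_overwrite, read_time=None):
    # Illustrative helper: a dated table per export when overwrite is off,
    # a single rolling '<table>_latest' table when it is on.
    if not bq_table_overwrite:
        read_time = read_time or datetime.datetime.now()
        return '%s_%s' % (bq_table, read_time.strftime('%Y%m%d'))
    return '%s_latest' % bq_table

# destination_table_name('my_table', False) -> 'my_table_20211216' for a
# 2021-12-16 read time; destination_table_name('my_table', True) -> 'my_table_latest'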

@@ -118,11 +118,12 @@ resource "google_cloud_scheduler_job" "job" {
attributes = {}
topic_name = module.pubsub.topic.id
data = base64encode(jsonencode({
project = module.project.project_id
bq_project = module.project.project_id
bq_dataset = var.cai_config.bq_dataset
bq_table = var.cai_config.bq_table
target_node = var.cai_config.target_node
project = module.project.project_id
bq_project = module.project.project_id
bq_dataset = var.cai_config.bq_dataset
bq_table = var.cai_config.bq_table
bq_table_overwrite = var.cai_config.bq_table_overwrite
target_node = var.cai_config.target_node
}))
}
}
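
For context, a hedged sketch of how a Pub/Sub-triggered entry point such as main(event, context) could decode the base64-encoded JSON payload published by this scheduler job, including the new bq_table_overwrite key. The commit's own decoding is not shown in this diff, and the helper name below is illustrative:

import base64
import json

def parse_scheduler_payload(event):
    # Assumes the background-function convention that the Pub/Sub message body
    # arrives base64-encoded in event['data'], as produced by the
    # jsonencode/base64encode pair above.
    payload = json.loads(base64.b64decode(event['data']).decode('utf-8'))
    # Expected keys: project, bq_project, bq_dataset, bq_table,
    # bq_table_overwrite (a JSON boolean) and target_node.
    return payload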

@@ -29,9 +29,10 @@ variable "bundle_path" {
variable "cai_config" {
description = "Cloud Asset inventory export config."
type = object({
bq_dataset = string
bq_table = string
target_node = string
bq_dataset = string
bq_table = string
bq_table_overwrite = bool
target_node = string
})
}

@@ -19,17 +19,20 @@ variable "billing_account" {
variable "cai_config" {
type = object({
bq_dataset = string
bq_table = string
target_node = string
bq_dataset = string
bq_table = string
bq_table_overwrite = bool
target_node = string
})
default = {
bq_dataset = "my-dataset"
bq_table = "my_table"
target_node = "organization/1234567890"
bq_dataset = "my-dataset"
bq_table = "my_table"
bq_table_overwrite = "true"
target_node = "organization/1234567890"
}
}
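
To connect these defaults to the Python entry point shown earlier, a hedged wiring sketch (project ids are placeholders and the call is commented out because it would trigger a real Cloud Asset export against GCP):

kwargs = {
    'project': 'my-project',
    'bq_project': 'my-project',
    'bq_dataset': 'my-dataset',
    'bq_table': 'my_table',
    'bq_table_overwrite': True,  # bool, matching the variable type above
    'target_node': 'organization/1234567890',
}
# _main(**kwargs)  # with True this targets the rolling 'my_table_latest' table;
#                  # with False it targets a dated 'my_table_YYYYMMDD' table
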
variable "project_create" {
type = bool
default = true