2023-03-24 05:28:32 -07:00
|
|
|
# Copyright 2023 Google LLC
|
2022-12-04 12:42:14 -08:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2022-12-05 15:02:16 -08:00
|
|
|
"""Common fixtures."""
|
|
|
|
|
2022-12-04 12:42:14 -08:00
|
|
|
import collections
|
2022-12-06 06:08:06 -08:00
|
|
|
import contextlib
|
2023-03-24 05:28:32 -07:00
|
|
|
import glob
|
2022-12-04 12:42:14 -08:00
|
|
|
import os
|
|
|
|
import shutil
|
|
|
|
import tempfile
|
2023-10-20 00:59:52 -07:00
|
|
|
import time
|
2022-12-04 12:42:14 -08:00
|
|
|
from pathlib import Path
|
|
|
|
|
|
|
|
import pytest
|
|
|
|
import tftest
|
|
|
|
import yaml
|
|
|
|
|
2023-04-22 15:40:05 -07:00
|
|
|
# Absolute path to the repository root (this file lives one level below it,
# in the tests/ directory).
_REPO_ROOT = Path(__file__).parents[1]

# Summary of a terraform plan:
# - values: plan address -> JSON attribute values of the resource
# - counts: resource type -> number of planned instances
# - outputs: planned outputs of the root module
PlanSummary = collections.namedtuple('PlanSummary', 'values counts outputs')
|
|
|
|
|
|
|
|
|
2022-12-06 06:08:06 -08:00
|
|
|
@contextlib.contextmanager
|
|
|
|
def _prepare_root_module(path):
|
|
|
|
"""Context manager to prepare a terraform module to be tested.
|
FAST multitenant bootstrap and resource management, rename org-level FAST stages (#1052)
* rename stages
* remove support for external org billing, rename output files
* resman: make groups optional, align on new billing account variable
* bootstrap: multitenant outputs
* tenant bootstrap stage, untested
* fix folder name
* fix stage 0 output names
* optional creation for tag keys in organization module
* single tenant bootstrap minus tag
* rename output files, add tenant tag key
* fix organization module tag values output
* test skipping creation for tags in organization module
* single tenant bootstrap plan working
* multitenant bootstrap
* tfdoc
* fix check links error messages
* fix links
* tfdoc
* fix links
* rename fast tests, fix bootstrap tests
* multitenant stages have their own folder, simplify stage numbering
* stage renumbering
* wip
* rename tests
* exclude fast providers in fixture
* stage 0 tests
* stage 1 tests
* network stages tests
* stage tests
* tfdoc
* fix links
* tfdoc
* multitenant tests
* remove local files
* stage links command
* fix links script, TODO
* wip
* wip single tenant bootstrap
* working tenant bootstrap
* update gitignore
* remove local files
* tfdoc
* remove local files
* allow tests for tenant bootstrap stage
* tenant bootstrap proxies stage 1 tfvars
* stage 2 and 3 service accounts and IAM in tenant bootstrap
* wip
* wip
* wip
* drop multitenant bootstrap
* tfdoc
* add missing stage 2 SAs, fix org-level IAM condition
* wip
* wip
* optional tag value creation in organization module
* stage 1 working
* linting
* linting
* READMEs
* wip
* Make stage-links script work in old macos bash
* stage links command help
* fix output file names
* diagrams
* fix svg
* stage 0 skeleton and diagram
* test svg
* test svg
* test diagram
* diagram
* readme
* fix stage links script
* stage 0 readme
* README changes
* stage readmes
* fix outputs order
* fix link
* fix tests
* stage 1 test
* skip stage example
* boilerplate
* fix tftest skip
* default bootstrap stage log sinks to log buckets
* add logging to tenant bootstrap
* move iam variables out of tenant config
* fix cicd, reintroduce missing variable
* use optional in stage 1 cicd variable
* rename extras stage
* rename and move identity providers local, use optional for cicd variable
* tfdoc
* add support for wif pool and providers, ci/cd
* tfdoc
* fix links
* better handling of modules repository
* add missing role on logging project
* fix cicd pools in locals, test cicd
* fix workflow extension
* fix module source replacement
* allow tenant bootstrap cicd sa to impersonate resman sa
* tenant workflow templates fix for no providers file
* fix output files, push github workflow template to new repository
* remove try from outpout files
* align stage 1 cicd internals to stage 0
* tfdoc
* tests
* fix tests
* tests
* improve variable descriptions
* use optional in fast features
* actually create tenant log sinks, and allow the resman sa to do it
* test
* tests
* aaaand tests again
* fast features tenant override
* fast features tenant override
* fix wording
* add missing comment
* configure pf service accounts
* add missing comment
* tfdoc
* tests
* IAM docs
* update copyright
---------
Co-authored-by: Julio Castillo <jccb@google.com>
2023-02-04 06:00:45 -08:00
|
|
|
|
2022-12-06 06:08:06 -08:00
|
|
|
If the TFTEST_COPY environment variable is set, `path` is copied to
|
|
|
|
a temporary directory and a few terraform files (e.g.
|
2023-03-24 05:28:32 -07:00
|
|
|
terraform.tfvars) are deleted to ensure a clean test environment.
|
2022-12-06 06:08:06 -08:00
|
|
|
Otherwise, `path` is simply returned untouched.
|
|
|
|
"""
|
2023-10-20 00:59:52 -07:00
|
|
|
# if we're copying the module, we might as well ignore files and
|
|
|
|
# directories that are automatically read by terraform. Useful
|
|
|
|
# to avoid surprises if, for example, you have an active fast
|
|
|
|
# deployment with links to configs)
|
|
|
|
ignore_patterns = shutil.ignore_patterns('*.auto.tfvars',
|
|
|
|
'*.auto.tfvars.json',
|
|
|
|
'[0-9]-*-providers.tf',
|
|
|
|
'terraform.tfstate*',
|
|
|
|
'.terraform.lock.hcl',
|
|
|
|
'terraform.tfvars', '.terraform')
|
|
|
|
|
2022-12-06 06:08:06 -08:00
|
|
|
if os.environ.get('TFTEST_COPY'):
|
|
|
|
# if the TFTEST_COPY is set, create temp dir and copy the root
|
|
|
|
# module there
|
|
|
|
with tempfile.TemporaryDirectory(dir=path.parent) as tmp_path:
|
|
|
|
tmp_path = Path(tmp_path)
|
|
|
|
|
2023-12-29 01:43:44 -08:00
|
|
|
# Running tests in a copy made with symlinks=True makes them run
|
|
|
|
# ~20% slower than when run in a copy made with symlinks=False.
|
|
|
|
shutil.copytree(path, tmp_path, dirs_exist_ok=True, symlinks=False,
|
2022-12-06 06:08:06 -08:00
|
|
|
ignore=ignore_patterns)
|
2023-04-22 15:40:05 -07:00
|
|
|
lockfile = _REPO_ROOT / 'tools' / 'lockfile' / '.terraform.lock.hcl'
|
|
|
|
if lockfile.exists():
|
|
|
|
shutil.copy(lockfile, tmp_path / '.terraform.lock.hcl')
|
2022-12-06 06:08:06 -08:00
|
|
|
|
|
|
|
yield tmp_path
|
|
|
|
else:
|
2023-10-20 00:59:52 -07:00
|
|
|
# check if any ignore_patterns files are present in path
|
|
|
|
if unwanted_files := ignore_patterns(path, os.listdir(path=path)):
|
|
|
|
# prevent shooting yourself in the foot (unexpected test results) when ignored files are present
|
|
|
|
raise RuntimeError(
|
|
|
|
f'Test in path {path} contains {", ".join(unwanted_files)} which may affect '
|
|
|
|
f'test results. Please run tests with TFTEST_COPY=1 environment variable'
|
|
|
|
)
|
2022-12-06 06:08:06 -08:00
|
|
|
# if TFTEST_COPY is not set, just return the same path
|
|
|
|
yield path
|
|
|
|
|
|
|
|
|
2023-03-24 05:28:32 -07:00
|
|
|
def plan_summary(module_path, basedir, tf_var_files=None, extra_files=None,
                 **tf_vars):
  """Run a terraform plan on the module at `module_path` and summarize it.

  Args:
    module_path: terraform root module to run; absolute, or relative to
      the root of the repository.
    basedir: directory root used to resolve relative paths in
      tf_var_files.
    tf_var_files: terraform variable files (tfvars) passed to terraform.
    extra_files: glob patterns, relative to the module, of files to copy
      next to the module before planning.
    **tf_vars: individual terraform variables passed to the plan.

  Returns:
    A PlanSummary with three attributes:
    - values: dict mapping terraform plan addresses to the JSON
      representation (as python types) of the resource attribute values.
    - counts: dict mapping terraform resource types to the number of
      times each type appears in the plan.
    - outputs: dict of the module outputs determinable at plan time.

  See [1] for more details on the structure of values and outputs.

  [1] https://developer.hashicorp.com/terraform/internals/json-format
  """
  # anchor module_path at the repo root; absolute paths pass through
  module_path = _REPO_ROOT / module_path
  with _prepare_root_module(module_path) as test_path:
    tf = tftest.TerraformTest(test_path,
                              binary=os.environ.get('TERRAFORM', 'terraform'))
    extra_files = [(module_path / match).resolve()
                   for pattern in extra_files or []
                   for match in glob.glob(pattern, root_dir=module_path)]
    tf.setup(extra_files=extra_files, upgrade=True)
    tf_var_files = [(basedir / f).resolve() for f in tf_var_files or []]
    plan = tf.plan(output=True, tf_var_file=tf_var_files, tf_vars=tf_vars)

    # breadth-first walk of the plan tree collecting the address->values
    # map and per-type resource counts
    values = {}
    counts = collections.defaultdict(int)
    counts['modules'] = counts['resources'] = 0
    queue = collections.deque([plan.root_module])
    while queue:
      node = queue.popleft()
      if 'type' in node:
        counts[node['type']] += 1
      if 'values' in node:
        values[node['address']] = node['values']
      for resource in node.get('resources', []):
        counts['resources'] += 1
        queue.append(resource)
      for child in node.get('child_modules', []):
        counts['modules'] += 1
        queue.append(child)

    # extract planned outputs
    outputs = plan.get('planned_values', {}).get('outputs', {})

    # force the destruction of the tftest object here, otherwise pytest
    # complains about unraisable exceptions caused by the context manager
    # deleting temporary files, including the extra_files that tftest
    # tries to remove on cleanup
    del tf
    return PlanSummary(values, dict(counts), outputs)
|
|
|
|
|
|
|
|
|
2022-12-05 02:20:20 -08:00
|
|
|
@pytest.fixture(name='plan_summary')
def plan_summary_fixture(request):
  """Return a function to generate a PlanSummary.

  The returned function wraps plan_summary, making `basedir` optional:
  it defaults to the directory of the calling test.
  """

  def _run(module_path, basedir=None, tf_var_files=None, extra_files=None,
           **tf_vars):
    if basedir is None:
      basedir = Path(request.fspath).parent
    return plan_summary(module_path=module_path, basedir=basedir,
                        tf_var_files=tf_var_files, extra_files=extra_files,
                        **tf_vars)

  return _run
|
|
|
|
|
|
|
|
|
2022-12-05 02:20:20 -08:00
|
|
|
def plan_validator(module_path, inventory_paths, basedir, tf_var_files=None,
                   extra_files=None, **tf_vars):
  """Run a plan for `module_path` and validate it against YAML inventories.

  Args:
    module_path: terraform root module to plan; absolute or relative to
      the repository root.
    inventory_paths: a single inventory path, or a list of them, relative
      to `basedir`. Each inventory may contain `values`, `counts` and
      `outputs` sections.
    basedir: directory root used to resolve relative inventory and
      tfvars paths.
    tf_var_files: terraform variable files (tfvars) passed to the plan.
    extra_files: glob patterns of files copied next to the module.
    **tf_vars: individual terraform variables passed to the plan.

  Returns:
    The PlanSummary produced by plan_summary.

  Raises:
    RuntimeError: when an inventory file cannot be read or parsed.
    AssertionError: when the plan does not match an inventory.
  """
  summary = plan_summary(module_path=module_path, tf_var_files=tf_var_files,
                         extra_files=extra_files, basedir=basedir, **tf_vars)

  # allow a single string for inventory_paths
  if not isinstance(inventory_paths, list):
    inventory_paths = [inventory_paths]

  for path in inventory_paths:
    # allow tfvars and inventory to be relative to the caller
    path = basedir / path
    relative_path = path.relative_to(_REPO_ROOT)
    try:
      inventory = yaml.safe_load(path.read_text())
    except (OSError, yaml.YAMLError) as e:
      # IOError is an alias of OSError, so OSError covers both; chain the
      # original exception so its traceback is preserved
      raise RuntimeError(f'cannot read test inventory {path}: {e}') from e

    # don't fail if the inventory is empty
    inventory = inventory or {}

    # If you add additional asserts to this function:
    # - put the values coming from the plan on the left side of
    #   any comparison operators
    # - put the values coming from user's inventory on the right
    #   side of any comparison operators
    # - include a descriptive error message in the assert

    if 'values' in inventory:
      validate_plan_object(inventory['values'], summary.values, relative_path,
                           "")

    if 'counts' in inventory:
      try:
        expected_counts = inventory['counts']
        for type_, expected_count in expected_counts.items():
          assert type_ in summary.counts, \
              f'{relative_path}: module does not create any resources of type `{type_}`'
          plan_count = summary.counts[type_]
          assert plan_count == expected_count, \
              f'{relative_path}: count of {type_} resources failed. Got {plan_count}, expected {expected_count}'
      except AssertionError:
        # dump the actual counts to make fixing the inventory easier
        print(yaml.dump({'counts': summary.counts}))
        raise

    if 'outputs' in inventory:
      _buffer = None
      try:
        expected_outputs = inventory['outputs']
        for output_name, expected_output in expected_outputs.items():
          assert output_name in summary.outputs, \
              f'{relative_path}: module does not output `{output_name}`'
          output = summary.outputs[output_name]
          # 'value' is absent when the output is sensitive or only known
          # after apply, hence the sentinel default
          plan_output = output.get('value', '__missing__')
          _buffer = {output_name: plan_output}
          assert plan_output == expected_output, \
              f'{relative_path}: output {output_name} failed. Got `{plan_output}`, expected `{expected_output}`'
      except AssertionError:
        # dump the last compared output to make fixing the inventory easier
        if _buffer:
          print(yaml.dump(_buffer))
        raise

  return summary
|
|
|
|
|
|
|
|
|
2023-10-20 00:59:52 -07:00
|
|
|
def validate_plan_object(expected_value, plan_value, relative_path,
                         relative_address):
  """Recursively validate that a plan object matches the inventory value.

  1. Every address present in the user's inventory must exist in the plan.
  2. For dicts present in both, the inventory keys must be a subset of
     the plan keys, and their values are compared recursively.
  3. Lists must have the same length and their members are compared
     element by element (via this same function).
  Anything else is compared by plain equality.
  """
  # mapping vs mapping: recurse over the inventory's keys
  if isinstance(expected_value, dict) and isinstance(plan_value, dict):
    for key, expected_item in expected_value.items():
      assert key in plan_value, \
          f'{relative_path}: {key} is not a valid address in the plan'
      validate_plan_object(expected_item, plan_value[key], relative_path,
                           f'{relative_address}.{key}')
    return

  # sequence vs sequence: same length, then element-wise recursion
  if isinstance(expected_value, list) and isinstance(plan_value, list):
    assert len(plan_value) == len(expected_value), \
        f'{relative_path}: {relative_address} has different length. Got {plan_value}, expected {expected_value}'
    for i, (expected_item, plan_item) in enumerate(
        zip(expected_value, plan_value)):
      validate_plan_object(expected_item, plan_item, relative_path,
                           f'{relative_address}[{i}]')
    return

  # scalars (or mismatched container types): plain equality
  assert plan_value == expected_value, \
      f'{relative_path}: {relative_address} failed. Got `{plan_value}`, expected `{expected_value}`'
|
|
|
|
|
|
|
|
|
2022-12-05 02:20:20 -08:00
|
|
|
@pytest.fixture(name='plan_validator')
def plan_validator_fixture(request):
  """Return a function to build a PlanSummary and compare it to a YAML inventory.

  In the returned function `basedir` becomes optional and it defaults
  to the directory of the calling test.
  """

  def inner(module_path, inventory_paths, basedir=None, tf_var_files=None,
            extra_files=None, **tf_vars):
    if basedir is None:
      basedir = Path(request.fspath).parent
    # forward extra_files too, for consistency with the plan_summary
    # fixture (plan_validator already supports it)
    return plan_validator(module_path=module_path,
                          inventory_paths=inventory_paths, basedir=basedir,
                          tf_var_files=tf_var_files, extra_files=extra_files,
                          **tf_vars)

  return inner
|
2022-12-05 15:02:16 -08:00
|
|
|
|
|
|
|
|
2023-11-27 13:26:06 -08:00
|
|
|
def get_tfvars_for_e2e():
  """Collect end-to-end test variables from TFTEST_E2E_* env variables.

  Returns a dict mapping each expected variable name to the value of the
  matching TFTEST_E2E_<name> environment variable (None when unset).
  """
  names = ("billing_account", "group_email", "organization_id", "parent",
           "prefix", "region")
  env_values = {}
  for name in names:
    env_values[name] = os.environ.get(f"TFTEST_E2E_{name}")
  return env_values
|
|
|
|
|
|
|
|
|
2023-10-20 00:59:52 -07:00
|
|
|
def e2e_validator(module_path, extra_files, tf_var_files, basedir=None):
  """Verify a module end to end by running apply, plan and destroy.

  1. Tests whether apply does not return errors
  2. Tests whether plan after apply is empty
  3. Tests whether destroy does not return errors (destroy always runs,
     even when the assertions above fail)

  Args:
    module_path: terraform root module; absolute or relative to the
      repository root.
    extra_files: glob patterns, relative to the module, of files copied
      next to the module before running.
    tf_var_files: tfvars files passed to terraform, relative to basedir.
    basedir: directory root used to resolve relative paths in
      tf_var_files.
  """
  module_path = _REPO_ROOT / module_path
  with _prepare_root_module(module_path) as test_path:
    binary = os.environ.get('TERRAFORM', 'terraform')
    tf = tftest.TerraformTest(test_path, binary=binary)
    extra_files = [(module_path / filename).resolve()
                   for x in extra_files or []
                   for filename in glob.glob(x, root_dir=module_path)]
    tf.setup(extra_files=extra_files, upgrade=True)
    tf_var_files = [(basedir / x).resolve() for x in tf_var_files or []]

    # we need only the prefix variable to run the example test, all the
    # others are passed in the terraform.tfvars file
    prefix = get_tfvars_for_e2e()["prefix"]
    # to allow different tests to create projects (or other globally
    # unique resources) with the same name, bump the prefix forward on
    # each test execution
    tf_vars = {
        "prefix":
            f'{prefix}-{int(time.time())}{os.environ.get("PYTEST_XDIST_WORKER", "0")[-2:]}'
    }
    try:
      # 1. apply must not fail (tftest raises on terraform errors)
      tf.apply(tf_var_file=tf_var_files, tf_vars=tf_vars)
      # 2. a plan right after apply must be empty
      plan = tf.plan(output=True, tf_var_file=tf_var_files, tf_vars=tf_vars)
      changes = {
          resource_name: value['change']
          for resource_name, value in plan.resource_changes.items()
          if value.get('change', {}).get('actions') != ['no-op']
      }

      # compare before with after to raise a more meaningful failure to
      # the user, i.e. one that shows how the resource will change
      plan_before_state = {k: v.get('before') for k, v in changes.items()}
      plan_after_state = {k: v.get('after') for k, v in changes.items()}
      assert plan_before_state == plan_after_state, 'Plan not empty after apply for values'

      plan_before_sensitive_state = {
          k: v.get('before_sensitive') for k, v in changes.items()
      }
      plan_after_sensitive_state = {
          k: v.get('after_sensitive') for k, v in changes.items()
      }
      assert plan_before_sensitive_state == plan_after_sensitive_state, 'Plan not empty after apply for sensitive values'

      # If above did not fail, this should not either, but left as a safety check
      assert changes == {}, f'Plan not empty for following resources: {", ".join(changes.keys())}'
    finally:
      # 3. destroy must not fail, and must run even on assertion failure
      tf.destroy(tf_var_file=tf_var_files, tf_vars=tf_vars)
|
2023-10-20 00:59:52 -07:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(name='e2e_validator')
def e2e_validator_fixture(request):
  """Return a function to run an end-to-end test.

  The returned function wraps e2e_validator, making `basedir` optional:
  it defaults to the directory of the calling test.
  """

  def _validate(module_path: str, extra_files: list, tf_var_files: list,
                basedir: os.PathLike = None):
    if basedir is None:
      basedir = Path(request.fspath).parent
    return e2e_validator(module_path=module_path, extra_files=extra_files,
                         tf_var_files=tf_var_files, basedir=basedir)

  return _validate
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(scope='session', name='e2e_tfvars_path')
def e2e_tfvars_path():
  """Fixture preparing the end-to-end test environment.

  If TFTEST_E2E_TFVARS_PATH is set in the environment, assume the
  environment is already provisioned and the necessary variables are set
  in the file that variable points to.

  Otherwise, create a unique test environment (with multiple workers, as
  many environments as there are workers) that will be injected into each
  example test instead of `tests/examples/variables.tf`.

  Yields the path to a tfvars file describing the environment to use for
  the tests.
  """
  if tfvars_path := os.environ.get('TFTEST_E2E_TFVARS_PATH'):
    # environment already provisioned: no need to set up the project
    if int(os.environ.get('PYTEST_XDIST_WORKER_COUNT', '0')) > 1:
      # a single pre-provisioned environment cannot be shared safely
      # across xdist workers
      raise RuntimeError(
          'Setting TFTEST_E2E_TFVARS_PATH is not compatible with running tests in parallel'
      )
    yield tfvars_path
  else:
    # provision a fresh environment from the setup module, tear it down
    # at the end of the session
    with _prepare_root_module(_REPO_ROOT / 'tests' / 'examples_e2e' /
                              'setup_module') as test_path:
      binary = os.environ.get('TERRAFORM', 'terraform')
      tf = tftest.TerraformTest(test_path, binary=binary)
      tf_vars_file = None
      tf_vars = get_tfvars_for_e2e()
      # make the environment unique per xdist worker
      tf_vars['suffix'] = os.environ.get(
          "PYTEST_XDIST_WORKER",
          "0")[-2:]  # take at most 2 last chars for suffix
      tf_vars['timestamp'] = str(int(time.time()))

      # optional extra tfvars file for the setup module itself
      if 'TFTEST_E2E_SETUP_TFVARS_PATH' in os.environ:
        tf_vars_file = os.environ["TFTEST_E2E_SETUP_TFVARS_PATH"]
      tf.setup(upgrade=True)
      tf.apply(tf_vars=tf_vars, tf_var_file=tf_vars_file)
      # the setup module is expected to write this tfvars file on apply
      yield test_path / "e2e_tests.tfvars"
      # teardown runs when the session ends and control returns here
      tf.destroy(tf_vars=tf_vars, tf_var_file=tf_vars_file)
|
|
|
|
|
|
|
|
|
2022-12-05 15:02:16 -08:00
|
|
|
# @pytest.fixture
|
|
|
|
# def repo_root():
|
|
|
|
# 'Return a pathlib.Path to the root of the repository'
|
|
|
|
# return Path(__file__).parents[1]
|