Commit 5493d6c

Additional override locations (#68)

Authored Mar 7, 2025 · 1 parent aad63b0 · commit 5493d6c

4 files changed (+166 -49 lines)
 

Diff for: .gitignore (+1)

@@ -116,3 +116,4 @@ fabric.properties
 .ionide
 
 # End of https://www.toptal.com/developers/gitignore/api/pycharm+all,visualstudiocode
+.venv/

Diff for: README.md (+28)

@@ -42,14 +42,42 @@ The following environment variables can be configured:
   * falls back to the default `AWS_ACCESS_KEY_ID` mock value
 * `AWS_ACCESS_KEY_ID`: AWS Access Key ID to use for multi account setups (default: `test` -> account ID: `000000000000`)
 * `SKIP_ALIASES`: Allows to skip generating AWS provider overrides for specified aliased providers, e.g. `SKIP_ALIASES=aws_secrets,real_aws`
+* `ADDITIONAL_TF_OVERRIDE_LOCATIONS`: Comma-separated list of folder paths that will also receive a temporary `localstack_providers_override.tf` file
 
 ## Usage
 
 The `tflocal` command has the same usage as the `terraform` command. For detailed usage,
 please refer to the man pages of `terraform --help`.
 
+### Validation errors when using local terraform modules
+
+Note that if your project uses local Terraform modules, and those modules reference providers, those folders *also* need to receive a temporary `localstack_providers_override.tf` file. Without it, you would get an error like the following when Terraform starts processing code inside the module:
+
+```
+
+│ Error: No valid credential sources found
+
+│ with module.lambda.provider["registry.terraform.io/hashicorp/aws"],
+│ on ../../providers.tf line 11, in provider "aws":
+│ 11: provider "aws" {
+
+│ Please see https://registry.terraform.io/providers/hashicorp/aws
+│ for more information about providing credentials.
+
+│ Error: failed to refresh cached credentials, no EC2 IMDS role found, operation error ec2imds: GetMetadata, access disabled to EC2 IMDS via client option, or "AWS_EC2_METADATA_DISABLED" environment variable
+```
+
+To address this issue, you may include a comma-separated list of folder paths that will receive additional override files via an environment variable:
+
+```
+ADDITIONAL_TF_OVERRIDE_LOCATIONS=/path/to/module1,path/to/module2 tflocal plan
+```
+
+[See this issue for more discussion](https://github.com/localstack/terraform-local/issues/67)
+
 ## Change Log
 
+* v0.21.0: Add ability to drop an override file in additional locations
 * v0.20.1: Fix list config rendering
 * v0.20.0: Fix S3 backend option merging
 * v0.19.0: Add `SKIP_ALIASES` configuration environment variable

Diff for: bin/tflocal (+136 -48)

@@ -18,7 +18,7 @@ import textwrap
 
 from packaging import version
 from urllib.parse import urlparse
-from typing import Optional
+from typing import Iterable, Optional
 
 PARENT_FOLDER = os.path.realpath(os.path.join(os.path.dirname(__file__), ".."))
 if os.path.isdir(os.path.join(PARENT_FOLDER, ".venv")):
@@ -31,14 +31,30 @@ DRY_RUN = str(os.environ.get("DRY_RUN")).strip().lower() in ["1", "true"]
 DEFAULT_REGION = "us-east-1"
 DEFAULT_ACCESS_KEY = "test"
 AWS_ENDPOINT_URL = os.environ.get("AWS_ENDPOINT_URL")
-CUSTOMIZE_ACCESS_KEY = str(os.environ.get("CUSTOMIZE_ACCESS_KEY")).strip().lower() in ["1", "true"]
+CUSTOMIZE_ACCESS_KEY = str(os.environ.get("CUSTOMIZE_ACCESS_KEY")).strip().lower() in [
+    "1",
+    "true",
+]
 LOCALHOST_HOSTNAME = "localhost.localstack.cloud"
 S3_HOSTNAME = os.environ.get("S3_HOSTNAME") or f"s3.{LOCALHOST_HOSTNAME}"
 USE_EXEC = str(os.environ.get("USE_EXEC")).strip().lower() in ["1", "true"]
 TF_CMD = os.environ.get("TF_CMD") or "terraform"
-TF_UNPROXIED_CMDS = os.environ.get("TF_UNPROXIED_CMDS").split(sep=",") if os.environ.get("TF_UNPROXIED_CMDS") else ("fmt", "validate", "version")
-LS_PROVIDERS_FILE = os.environ.get("LS_PROVIDERS_FILE") or "localstack_providers_override.tf"
-LOCALSTACK_HOSTNAME = urlparse(AWS_ENDPOINT_URL).hostname or os.environ.get("LOCALSTACK_HOSTNAME") or "localhost"
+ADDITIONAL_TF_OVERRIDE_LOCATIONS = os.environ.get(
+    "ADDITIONAL_TF_OVERRIDE_LOCATIONS", default=""
+)
+TF_UNPROXIED_CMDS = (
+    os.environ.get("TF_UNPROXIED_CMDS").split(sep=",")
+    if os.environ.get("TF_UNPROXIED_CMDS")
+    else ("fmt", "validate", "version")
+)
+LS_PROVIDERS_FILE = (
+    os.environ.get("LS_PROVIDERS_FILE") or "localstack_providers_override.tf"
+)
+LOCALSTACK_HOSTNAME = (
+    urlparse(AWS_ENDPOINT_URL).hostname
+    or os.environ.get("LOCALSTACK_HOSTNAME")
+    or "localhost"
+)
 EDGE_PORT = int(urlparse(AWS_ENDPOINT_URL).port or os.environ.get("EDGE_PORT") or 4566)
 TF_VERSION: Optional[version.Version] = None
 TF_PROVIDER_CONFIG = """
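A note on the reworked constants above: the hostname and edge port still resolve from `AWS_ENDPOINT_URL` first, then the dedicated environment variables, then the built-in defaults. A minimal standalone sketch of that precedence (the helper name and the sample values are illustrative, not part of the script):

```python
from urllib.parse import urlparse


def resolve_hostname_and_port(env):
    """Illustrative helper: AWS_ENDPOINT_URL wins, then LOCALSTACK_HOSTNAME / EDGE_PORT, then defaults."""
    parsed = urlparse(env.get("AWS_ENDPOINT_URL") or "")
    hostname = parsed.hostname or env.get("LOCALSTACK_HOSTNAME") or "localhost"
    port = int(parsed.port or env.get("EDGE_PORT") or 4566)
    return hostname, port


print(resolve_hostname_and_port({"AWS_ENDPOINT_URL": "http://localstack:4567"}))  # ('localstack', 4567)
print(resolve_hostname_and_port({}))                                              # ('localhost', 4566)
```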
@@ -133,11 +149,19 @@ SERVICE_REPLACEMENTS = {
 # CONFIG GENERATION UTILS
 # ---
 
-def create_provider_config_file(provider_aliases=None):
+
+def create_provider_config_file(provider_file_path: str, provider_aliases=None) -> None:
     provider_aliases = provider_aliases or []
 
     # Force service alias replacements
-    SERVICE_REPLACEMENTS.update({alias: alias_pairs[0] for alias_pairs in SERVICE_ALIASES for alias in alias_pairs if alias != alias_pairs[0]})
+    SERVICE_REPLACEMENTS.update(
+        {
+            alias: alias_pairs[0]
+            for alias_pairs in SERVICE_ALIASES
+            for alias in alias_pairs
+            if alias != alias_pairs[0]
+        }
+    )
 
     # create list of service names
     services = list(config.get_service_ports())
@@ -162,9 +186,11 @@ def create_provider_config_file(provider_aliases=None):
     for provider in provider_aliases:
         provider_config = TF_PROVIDER_CONFIG.replace(
             "<access_key>",
-            get_access_key(provider) if CUSTOMIZE_ACCESS_KEY else DEFAULT_ACCESS_KEY
+            get_access_key(provider) if CUSTOMIZE_ACCESS_KEY else DEFAULT_ACCESS_KEY,
+        )
+        endpoints = "\n".join(
+            [f' {s} = "{get_service_endpoint(s)}"' for s in services]
         )
-        endpoints = "\n".join([f' {s} = "{get_service_endpoint(s)}"' for s in services])
         provider_config = provider_config.replace("<endpoints>", endpoints)
         additional_configs = []
         if use_s3_path_style():
@@ -178,7 +204,9 @@ def create_provider_config_file(provider_aliases=None):
         if isinstance(region, list):
             region = region[0]
         additional_configs += [f'region = "{region}"']
-        provider_config = provider_config.replace("<configs>", "\n".join(additional_configs))
+        provider_config = provider_config.replace(
+            "<configs>", "\n".join(additional_configs)
+        )
         provider_configs.append(provider_config)
 
     # construct final config file content
@@ -188,10 +216,7 @@ def create_provider_config_file(provider_aliases=None):
         tf_config += generate_s3_backend_config()
 
     # write temporary config file
-    providers_file = get_providers_file_path()
-    write_provider_config_file(providers_file, tf_config)
-
-    return providers_file
+    write_provider_config_file(provider_file_path, tf_config)
 
 
 def write_provider_config_file(providers_file, tf_config):
@@ -200,12 +225,18 @@ def write_provider_config_file(providers_file, tf_config):
         fp.write(tf_config)
 
 
-def get_providers_file_path() -> str:
-    """Determine the path under which the providers override file should be stored"""
+def get_default_provider_folder_path() -> str:
+    """Determine the folder under which the providers override file should be stored"""
     chdir = [arg for arg in sys.argv if arg.startswith("-chdir=")]
     base_dir = "."
     if chdir:
         base_dir = chdir[0].removeprefix("-chdir=")
+
+    return os.path.abspath(base_dir)
+
+
+def get_providers_file_path(base_dir) -> str:
+    """Retrieve the path under which the providers override file should be stored"""
     return os.path.join(base_dir, LS_PROVIDERS_FILE)
 

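Splitting the old helper into `get_default_provider_folder_path()` and `get_providers_file_path(base_dir)` keeps the `-chdir=` handling for the default location while letting additional folders be passed in explicitly. A small illustration of the `-chdir=` path resolution (the argv value is made up):

```python
import os

# Hypothetical command line: tflocal -chdir=envs/dev plan
argv = ["-chdir=envs/dev", "plan"]
chdir = [arg for arg in argv if arg.startswith("-chdir=")]
base_dir = chdir[0].removeprefix("-chdir=") if chdir else "."

# The default override file is then placed inside that working directory.
print(os.path.join(os.path.abspath(base_dir), "localstack_providers_override.tf"))
```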
@@ -217,7 +248,11 @@ def determine_provider_aliases() -> list:
     for _file, obj in tf_files.items():
         try:
             providers = ensure_list(obj.get("provider", []))
-            aws_providers = [prov["aws"] for prov in providers if prov.get("aws") and prov.get("aws").get("alias") not in skipped]
+            aws_providers = [
+                prov["aws"]
+                for prov in providers
+                if prov.get("aws") and prov.get("aws").get("alias") not in skipped
+            ]
             result.extend(aws_providers)
         except Exception as e:
             print(f"Warning: Unable to extract providers from {_file}:", e)
@@ -258,7 +293,6 @@ def generate_s3_backend_config() -> str:
         "skip_credentials_validation": True,
         "skip_metadata_api_check": True,
         "secret_key": "test",
-
         "endpoints": {
             "s3": get_service_endpoint("s3"),
             "iam": get_service_endpoint("iam"),
@@ -269,23 +303,37 @@ def generate_s3_backend_config() -> str:
     }
     # Merge in legacy endpoint configs if not existing already
     if is_tf_legacy and backend_config.get("endpoints"):
-        print("Warning: Unsupported backend option(s) detected (`endpoints`). Please make sure you always use the corresponding options to your Terraform version.")
+        print(
+            "Warning: Unsupported backend option(s) detected (`endpoints`). Please make sure you always use the corresponding options to your Terraform version."
+        )
         exit(1)
     for legacy_endpoint, endpoint in legacy_endpoint_mappings.items():
-        if legacy_endpoint in backend_config and backend_config.get("endpoints") and endpoint in backend_config["endpoints"]:
+        if (
+            legacy_endpoint in backend_config
+            and backend_config.get("endpoints")
+            and endpoint in backend_config["endpoints"]
+        ):
             del backend_config[legacy_endpoint]
             continue
-        if legacy_endpoint in backend_config and (not backend_config.get("endpoints") or endpoint not in backend_config["endpoints"]):
+        if legacy_endpoint in backend_config and (
+            not backend_config.get("endpoints")
+            or endpoint not in backend_config["endpoints"]
+        ):
             if not backend_config.get("endpoints"):
                 backend_config["endpoints"] = {}
-            backend_config["endpoints"].update({endpoint: backend_config[legacy_endpoint]})
+            backend_config["endpoints"].update(
+                {endpoint: backend_config[legacy_endpoint]}
+            )
             del backend_config[legacy_endpoint]
     # Add any missing default endpoints
     if backend_config.get("endpoints"):
         backend_config["endpoints"] = {
             k: backend_config["endpoints"].get(k) or v
-            for k, v in configs["endpoints"].items()}
-    backend_config["access_key"] = get_access_key(backend_config) if CUSTOMIZE_ACCESS_KEY else DEFAULT_ACCESS_KEY
+            for k, v in configs["endpoints"].items()
+        }
+    backend_config["access_key"] = (
+        get_access_key(backend_config) if CUSTOMIZE_ACCESS_KEY else DEFAULT_ACCESS_KEY
+    )
     configs.update(backend_config)
     if not DRY_RUN:
         get_or_create_bucket(configs["bucket"])
@@ -298,22 +346,27 @@ def generate_s3_backend_config() -> str:
         elif isinstance(value, dict):
             if key == "endpoints" and is_tf_legacy:
                 for legacy_endpoint, endpoint in legacy_endpoint_mappings.items():
-                    config_options += f'\n {legacy_endpoint} = "{configs[key][endpoint]}"'
+                    config_options += (
+                        f'\n {legacy_endpoint} = "{configs[key][endpoint]}"'
+                    )
                 continue
             else:
                 value = textwrap.indent(
-                    text=f"{key} = {{\n" + "\n".join([f' {k} = "{v}"' for k, v in value.items()]) + "\n}",
-                    prefix=" " * 4)
+                    text=f"{key} = {{\n"
+                    + "\n".join([f' {k} = "{v}"' for k, v in value.items()])
+                    + "\n}",
+                    prefix=" " * 4,
+                )
             config_options += f"\n{value}"
             continue
         elif isinstance(value, list):
            # TODO this will break if it's a list of dicts or other complex object
            # this serialization logic should probably be moved to a separate recursive function
            as_string = [f'"{item}"' for item in value]
-           value = f'[{", ".join(as_string)}]'
+           value = f"[{', '.join(as_string)}]"
        else:
            value = f'"{str(value)}"'
-        config_options += f'\n {key} = {value}'
+        config_options += f"\n {key} = {value}"
     result = result.replace("<configs>", config_options)
     return result
 

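One behaviour worth calling out in the backend handling above is the final merge of user-supplied `endpoints` with the generated defaults: user values win, missing keys fall back to the LocalStack endpoints. A sketch with made-up endpoint values (the real ones come from `get_service_endpoint()`):

```python
# Illustrative defaults; the script derives these from get_service_endpoint().
default_endpoints = {
    "s3": "http://s3.localhost.localstack.cloud:4566",
    "iam": "http://localhost:4566",
    "dynamodb": "http://localhost:4566",
}
# A user backend block that only overrides the S3 endpoint.
user_endpoints = {"s3": "http://my-s3-proxy:4566"}

# Same comprehension shape as above: keep user values, fill gaps with defaults.
merged = {k: user_endpoints.get(k) or v for k, v in default_endpoints.items()}
print(merged["s3"])        # user value wins
print(merged["dynamodb"])  # default fills the gap
```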
@@ -337,6 +390,7 @@ def check_override_file(providers_file: str) -> None:
 # AWS CLIENT UTILS
 # ---
 
+
 def use_s3_path_style() -> bool:
     """
     Whether to use S3 path addressing (depending on the configured S3 endpoint)
@@ -361,6 +415,7 @@ def get_region() -> str:
         # Note that boto3 is currently not included in the dependencies, to
         # keep the library lightweight.
         import boto3
+
         region = boto3.session.Session().region_name
     except Exception:
         pass
@@ -369,7 +424,9 @@ def get_region() -> str:
 
 
 def get_access_key(provider: dict) -> str:
-    access_key = str(os.environ.get("AWS_ACCESS_KEY_ID") or provider.get("access_key", "")).strip()
+    access_key = str(
+        os.environ.get("AWS_ACCESS_KEY_ID") or provider.get("access_key", "")
+    ).strip()
     if access_key and access_key != DEFAULT_ACCESS_KEY:
         # Change live access key to mocked one
         return deactivate_access_key(access_key)
@@ -378,6 +435,7 @@ def get_access_key(provider: dict) -> str:
         # Note that boto3 is currently not included in the dependencies, to
         # keep the library lightweight.
         import boto3
+
         access_key = boto3.session.Session().get_credentials().access_key
     except Exception:
         pass
@@ -387,7 +445,7 @@ def get_access_key(provider: dict) -> str:
 
 def deactivate_access_key(access_key: str) -> str:
     """Safe guarding user from accidental live credential usage by deactivating access key IDs.
-    See more: https://docs.localstack.cloud/references/credentials/"""
+    See more: https://docs.localstack.cloud/references/credentials/"""
     return "L" + access_key[1:] if access_key[0] == "A" else access_key
 

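`deactivate_access_key` (reformatted above, unchanged in behaviour) is the guard that keeps a real AWS key from being used against LocalStack: a leading `A` is swapped for `L`. A minimal self-contained illustration using AWS's documented example key ID:

```python
def deactivate_access_key(access_key: str) -> str:
    # Same logic as in the script: only key IDs starting with "A" are rewritten.
    return "L" + access_key[1:] if access_key[0] == "A" else access_key


print(deactivate_access_key("AKIAIOSFODNN7EXAMPLE"))  # LKIAIOSFODNN7EXAMPLE
print(deactivate_access_key("test"))                  # test (the default mock key is untouched)
```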
@@ -413,10 +471,14 @@ def get_service_endpoint(service: str) -> str:
 
 def connect_to_service(service: str, region: str = None):
     import boto3
+
     region = region or get_region()
     return boto3.client(
-        service, endpoint_url=get_service_endpoint(service), region_name=region,
-        aws_access_key_id="test", aws_secret_access_key="test",
+        service,
+        endpoint_url=get_service_endpoint(service),
+        region_name=region,
+        aws_access_key_id="test",
+        aws_secret_access_key="test",
     )
 
 
@@ -440,9 +502,10 @@ def get_or_create_ddb_table(table_name: str, region: str = None):
         return ddb_client.describe_table(TableName=table_name)
     except Exception:
         return ddb_client.create_table(
-            TableName=table_name, BillingMode="PAY_PER_REQUEST",
+            TableName=table_name,
+            BillingMode="PAY_PER_REQUEST",
             KeySchema=[{"AttributeName": "LockID", "KeyType": "HASH"}],
-            AttributeDefinitions=[{"AttributeName": "LockID", "AttributeType": "S"}]
+            AttributeDefinitions=[{"AttributeName": "LockID", "AttributeType": "S"}],
         )
 
 
@@ -469,13 +532,15 @@ def parse_tf_files() -> dict:
 
 def get_tf_version(env):
     global TF_VERSION
-    output = subprocess.run([f"{TF_CMD}", "version", "-json"], env=env, check=True, capture_output=True).stdout.decode("utf-8")
+    output = subprocess.run(
+        [f"{TF_CMD}", "version", "-json"], env=env, check=True, capture_output=True
+    ).stdout.decode("utf-8")
     TF_VERSION = version.parse(json.loads(output)["terraform_version"])
 
 
 def run_tf_exec(cmd, env):
     """Run terraform using os.exec - can be useful as it does not require any I/O
-    handling for stdin/out/err. Does *not* allow us to perform any cleanup logic."""
+    handling for stdin/out/err. Does *not* allow us to perform any cleanup logic."""
     os.execvpe(cmd[0], cmd, env=env)
 

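`get_tf_version` above shells out to `terraform version -json` and keeps only the parsed version string. A standalone sketch of the parsing step against a hand-written sample of that JSON output:

```python
import json

from packaging import version

# Hand-written sample of `terraform version -json` output; only the field the script reads matters here.
sample_output = '{"terraform_version": "1.5.7", "platform": "linux_amd64"}'

tf_version = version.parse(json.loads(sample_output)["terraform_version"])
print(tf_version)                           # 1.5.7
print(tf_version < version.parse("1.6.0"))  # True, so comparisons like this stay meaningful
```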
@@ -485,18 +550,41 @@ def run_tf_subprocess(cmd, env):
 
     # register signal handlers
     import signal
+
     signal.signal(signal.SIGINT, signal_handler)
 
     PROCESS = subprocess.Popen(
-        cmd, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stdout)
+        cmd, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stdout
+    )
     PROCESS.communicate()
     sys.exit(PROCESS.returncode)
 
 
+def cleanup_override_files(override_files: Iterable[str]):
+    for file_path in override_files:
+        try:
+            os.remove(file_path)
+        except Exception:
+            print(
+                f"Could not clean up '{file_path}'. This is not normally a problem but you can delete this file manually."
+            )
+
+
+def get_folder_paths_that_require_an_override_file() -> Iterable[str]:
+    if not is_override_needed(sys.argv[1:]):
+        return
+
+    yield get_default_provider_folder_path()
+    for path in ADDITIONAL_TF_OVERRIDE_LOCATIONS.split(sep=","):
+        if path.strip():
+            yield path
+
+
 # ---
 # UTIL FUNCTIONS
 # ---
 
+
 def signal_handler(sig, frame):
     PROCESS.send_signal(sig)
 

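The two helpers added above drive the override lifecycle that `main()` uses below: one yields every folder that needs an override file, the other removes those files afterwards. A sketch of how the folder list expands for a hypothetical invocation (the paths and env value are invented for illustration):

```python
import os

# Hypothetical configuration: ADDITIONAL_TF_OVERRIDE_LOCATIONS=modules/lambda,modules/queue
additional_locations = "modules/lambda,modules/queue"
default_folder = os.path.abspath(".")  # or the -chdir= directory, as above

folders = [default_folder]
for path in additional_locations.split(sep=","):
    if path.strip():
        folders.append(path)

# One localstack_providers_override.tf is written per folder and removed again after terraform exits.
print([os.path.join(folder, "localstack_providers_override.tf") for folder in folders])
```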
@@ -517,6 +605,7 @@ def to_str(obj) -> bytes:
 # MAIN ENTRYPOINT
 # ---
 
+
 def main():
     env = dict(os.environ)
     cmd = [TF_CMD] + sys.argv[1:]
@@ -529,26 +618,25 @@ def main():
         print(f"Unable to determine version. See error message for details: {e}")
         exit(1)
 
-    if is_override_needed(sys.argv[1:]):
-        check_override_file(get_providers_file_path())
+    config_override_files = []
+
+    for folder_path in get_folder_paths_that_require_an_override_file():
+        config_file_path = get_providers_file_path(folder_path)
+        check_override_file(config_file_path)
 
-        # create TF provider config file
         providers = determine_provider_aliases()
-        config_file = create_provider_config_file(providers)
-    else:
-        config_file = None
+        create_provider_config_file(config_file_path, providers)
+        config_override_files.append(config_file_path)
 
     # call terraform command if not dry-run or any of the commands
-    if not DRY_RUN or not is_override_needed(sys.argv[1:]):
+    if not DRY_RUN or not config_override_files:
         try:
             if USE_EXEC:
                 run_tf_exec(cmd, env)
             else:
                 run_tf_subprocess(cmd, env)
         finally:
-            # fall through if haven't set during dry-run
-            if config_file:
-                os.remove(config_file)
+            cleanup_override_files(config_override_files)
 
 
 if __name__ == "__main__":

Diff for: setup.cfg (+1 -1)

@@ -1,6 +1,6 @@
 [metadata]
 name = terraform-local
-version = 0.20.1
+version = 0.21.0
 url = https://github.com/localstack/terraform-local
 author = LocalStack Team
 author_email = info@localstack.cloud
