atlas-init 0.3.7__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -246,22 +246,32 @@ def create_mock_data(
         request_path = rt.request.path
         method = rt.request.method
         spec_path = find_normalized_path(request_path, api_spec_paths[method])
-        rt_variables = spec_path.variables(request_path)
-        normalized_path = spec_path.path
-        try:
-            mock_data.update_variables(rt_variables)
-        except VariablesChangedError as e:
-            for change in e.changes:
-                rt_variables.pop(change.var_name)
-                rt_variables[change.new_var_name] = change.new
-            normalized_path = normalize_text(request_path, rt_variables)
-        for modifier in modifiers:
-            if modifier.match(rt, normalized_path):
-                modifier.modification(rt)
-        normalized_text = normalize_text(rt.request.text, mock_data.variables, expect_json=True)
-        normalized_response_text = normalize_text(rt.response.text, mock_data.variables, expect_json=True)
+        normalized_path, normalized_text, normalized_response_text = normalize_rt(modifiers, mock_data, rt, spec_path)
         mock_data.add_roundtrip(rt, normalized_path, normalized_text, normalized_response_text, is_diff(rt))
     mock_data.replace_text_variables()
     if prune_duplicates:
         mock_data.prune_duplicate_responses()  # better to keep duplicates to stay KISS
     return mock_data
+
+
+def normalize_rt(
+    modifiers: list[RTModifier],
+    mock_data: MockRequestData,
+    rt: SDKRoundtrip,
+    spec_path: ApiSpecPath,
+):
+    request_path = rt.request.path
+    rt_variables = spec_path.variables(request_path)
+    try:
+        mock_data.update_variables(rt_variables)
+    except VariablesChangedError as e:
+        for change in e.changes:
+            rt_variables.pop(change.var_name)
+            rt_variables[change.new_var_name] = change.new
+    normalized_path = normalize_text(request_path, rt_variables)
+    for modifier in modifiers:
+        if modifier.match(rt, normalized_path):
+            modifier.modification(rt)
+    normalized_text = normalize_text(rt.request.text, mock_data.variables, expect_json=True)
+    normalized_response_text = normalize_text(rt.response.text, mock_data.variables, expect_json=True)
+    return normalized_path, normalized_text, normalized_response_text
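
The `except VariablesChangedError` branch implements a rename-on-conflict step: when a captured path variable clashes with one recorded from an earlier roundtrip, it is re-keyed before the path is normalized. A standalone sketch of just that pattern, using hypothetical stand-in types (the real error and change objects belong to atlas-init):

# hypothetical stand-ins; only the pop/re-key logic mirrors the code above
class Change:
    def __init__(self, var_name: str, new_var_name: str, new: str):
        self.var_name, self.new_var_name, self.new = var_name, new_var_name, new


def apply_renames(rt_variables: dict[str, str], changes: list[Change]) -> dict[str, str]:
    for change in changes:
        rt_variables.pop(change.var_name)  # drop the clashing key
        rt_variables[change.new_var_name] = change.new  # re-add under the new name
    return rt_variables


print(apply_renames({"groupId": "abc123"}, [Change("groupId", "groupId2", "abc123")]))
# -> {'groupId2': 'abc123'}
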
@@ -127,6 +127,8 @@ def find_job_test_runs(workflow_dir: Path, job: WorkflowJob) -> list[GoTestRun]:


 def parse_job_logs(job: WorkflowJob, logs_path: Path) -> list[GoTestRun]:
+    if job.conclusion in {"skipped", "cancelled", None}:
+        return []
     step, logs_lines = select_step_and_log_content(job, logs_path)
     return list(parse(logs_lines, job, step))

@@ -151,7 +153,7 @@ def download_job_safely(workflow_dir: Path, job: WorkflowJob) -> Path | None:
 def logs_dir() -> Path:
     logs_dir_str = os.environ.get(GITHUB_CI_RUN_LOGS_ENV_NAME)
     if not logs_dir_str:
-        logger.warning(f"using {DEFAULT_GITHUB_CI_RUN_LOGS} to store github ci logs!")
+        logger.info(f"using {DEFAULT_GITHUB_CI_RUN_LOGS} to store github ci logs!")
         return DEFAULT_GITHUB_CI_RUN_LOGS
     return Path(logs_dir_str)

@@ -159,7 +161,7 @@ def logs_dir() -> Path:
 def summary_dir(summary_name: str) -> Path:
     summary_dir_str = os.environ.get(GITHUB_CI_SUMMARY_DIR_ENV_NAME)
     if not summary_dir_str:
-        logger.warning(f"using {DEFAULT_GITHUB_SUMMARY_DIR / summary_name} to store summaries")
+        logger.info(f"using {DEFAULT_GITHUB_SUMMARY_DIR / summary_name} to store summaries")
         return DEFAULT_GITHUB_SUMMARY_DIR / summary_name
     return Path(summary_dir_str) / summary_name

@@ -222,7 +224,8 @@ def select_step_and_log_content(job: WorkflowJob, logs_path: Path) -> tuple[int,

 def test_step(steps: list[WorkflowStep]) -> int:
     for i, step in enumerate(steps, 1):
-        if "test" in step.name.lower():
+        name_lower = step.name.lower()
+        if "acceptance test" in name_lower and "mocked" not in name_lower:
             return i
     last_step = len(steps)
     logger.warning(f"using {last_step} as final step, unable to find 'test' in {steps}")
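
A minimal illustration of the tightened step matching, applied to plain step names (the real function receives WorkflowStep objects, so the names below are placeholders):

steps = ["Set up Go", "Run mocked acceptance tests", "Run acceptance tests", "Upload artifacts"]
for i, name in enumerate(steps, 1):
    name_lower = name.lower()
    if "acceptance test" in name_lower and "mocked" not in name_lower:
        print(i, name)  # -> 3 Run acceptance tests
        break
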
@@ -128,7 +128,7 @@ def mock_tf_log_cmd(
     log_diff_roundtrips: bool = typer.Option(
         False, "-l", "--log-diff-roundtrips", help="print out the roundtrips used in diffs"
     ),
-    package_name: str = typer.Option("", "-p", "--package-name", help="the package name to use for modifiers"),
+    package_name: str = typer.Option("-p", "--package-name", prompt=True, help="the package name to use for modifiers"),
 ):
     cwd = Path.cwd()
     default_testdir = cwd / "testdata"
atlas_init/cloud/aws.py CHANGED
@@ -1,13 +1,19 @@
 import logging
 from collections.abc import Callable
 from concurrent.futures import ThreadPoolExecutor, wait
+from pathlib import Path
+from tempfile import TemporaryDirectory
 from typing import Annotated, TypeVar

 import stringcase
 from pydantic import AfterValidator, ConfigDict
+from zero_3rdparty.file_utils import copy, file_modified_time, iter_paths_and_relative
 from zero_3rdparty.iter_utils import flat_map
 from zero_3rdparty.object_name import as_name

+from atlas_init.cli_helper.run import run_binary_command_is_ok
+from atlas_init.cli_root import is_dry_run
+
 logger = logging.getLogger(__name__)
 PascalAlias = ConfigDict(alias_generator=stringcase.pascalcase, populate_by_name=True)
 REGIONS = "af-south-1,ap-east-1,ap-northeast-1,ap-northeast-2,ap-northeast-3,ap-south-1,ap-southeast-1,ap-southeast-2,ap-southeast-3,ca-central-1,eu-central-1,eu-north-1,eu-south-1,eu-west-1,eu-west-2,eu-west-3,me-south-1,sa-east-1,us-east-1,us-east-2,us-west-1,us-west-2,ap-south-2,ap-southeast-4,eu-central-2,eu-south-2,me-central-1,il-central-1".split(
@@ -62,3 +68,60 @@ def run_in_regions(call: Callable[[str], T], regions: list[str] | None = None) -
         except Exception:
             logger.exception(f"failed to call {name} in region = {region}, error 👆")
     return region_responses
+
+
+def upload_to_s3(profile_path: Path, s3_bucket: str, s3_prefix: str = ""):
+    profiles_path = profile_path.parent
+    assert profiles_path.name == "profiles"
+    excluded = [".DS_Store", ".terraform/*"]
+    excluded_str = " ".join([f'--exclude "{pattern}"' for pattern in excluded])
+    dest_path = _s3_path(s3_bucket, profile_path.name, "", s3_prefix=s3_prefix)
+    assert run_binary_command_is_ok(
+        "aws",
+        f"s3 sync {profile_path.name} {dest_path} {excluded_str}",
+        profiles_path,
+        logger=logger,
+        dry_run=is_dry_run(),
+    )
+
+
+def _s3_path(s3_bucket: str, profile_name: str, rel_path: str, s3_prefix: str = "") -> str:
+    return f"s3://{s3_bucket}//{s3_prefix}profiles/{profile_name}/{rel_path}"
+
+
+def download_from_s3(profile_path: Path, s3_bucket: str, s3_prefix: str = ""):
+    profiles_path = profile_path.parent
+    assert profiles_path.name == "profiles"
+    src_path = _s3_path(s3_bucket, profile_path.name, "", s3_prefix)
+    copy_dir = f"{profile_path.name}_copy/"
+    with TemporaryDirectory() as tmp_dir:
+        copy_dir = Path(tmp_dir) / f"safe-{profile_path.name}"
+        assert run_binary_command_is_ok(
+            "aws",
+            f"s3 sync {src_path} {copy_dir}",
+            profiles_path,
+            logger=logger,
+            dry_run=is_dry_run(),
+        )
+        copy_new_files(copy_dir, profile_path)
+
+
+_aws_keys = (
+    "AWS_ACCESS_KEY_ID",
+    "AWS_SECRET_ACCESS_KEY",
+    "AWS_PROFILE",
+)
+
+
+def copy_new_files(src_dir: Path, dest_dir: Path):
+    for src_path, rel_path in iter_paths_and_relative(src_dir, "*", only_files=True):
+        dest_path = dest_dir / rel_path
+        if not dest_path.exists() or file_modified_time(src_path) > file_modified_time(dest_path):
+            dest_path.parent.mkdir(parents=True, exist_ok=True)
+            if src_path.name == ".env-manual":
+                if dest_path.exists():
+                    continue  # never overwrite the manual file
+                lines_no_aws = [line for line in src_path.read_text().splitlines() if not line.startswith(_aws_keys)]
+                dest_path.write_text("\n".join(lines_no_aws) + "\n")
+            else:
+                copy(src_path, dest_path)
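
A sketch of how the new S3 helpers might be called, assuming the usual atlas-init layout where a profile directory sits under a parent named "profiles" (the functions assert this); the path and bucket below are placeholders:

from pathlib import Path

from atlas_init.cloud.aws import download_from_s3, upload_to_s3

profile_dir = Path.home() / ".atlas-init" / "profiles" / "default"  # placeholder location
bucket = "my-atlas-init-profiles"  # placeholder bucket name

upload_to_s3(profile_dir, bucket)  # syncs the profile to s3://<bucket>//profiles/default/
download_from_s3(profile_dir, bucket)  # pulls it back; only files newer than the local copy are applied
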
@@ -65,6 +65,11 @@ class TerraformVars(Entity):
         return config


+class PyHook(Entity):
+    name: str
+    locate: str
+
+
 @dump_ignore_falsy
 @total_ordering
 class TestSuite(Entity):
@@ -75,6 +80,7 @@ class TestSuite(Entity):
     repo_go_packages: dict[str, list[str]] = Field(default_factory=dict)
     repo_globs: dict[str, list[str]] = Field(default_factory=dict)
     vars: TerraformVars = Field(default_factory=TerraformVars)  # type: ignore
+    post_apply_hooks: list[PyHook] = Field(default_factory=list)

     def __lt__(self, other) -> bool:
         if not isinstance(other, TestSuite):  # type: ignore
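
A sketch of the new hook model on its own; the module path is an assumption (the hunk does not name the file), and the field values are hypothetical:

from atlas_init.settings.config import PyHook  # assumed module path

hook = PyHook(name="seed_data", locate="my_pkg.hooks:seed_data")  # hypothetical values
print(hook.name, hook.locate)
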
@@ -2,13 +2,13 @@ from __future__ import annotations

 import logging
 import os
+from contextlib import suppress
 from functools import cached_property
 from pathlib import Path
-from typing import Any, NamedTuple
+from typing import Any, NamedTuple, TypeVar

-import typer
-from model_lib import field_names, parse_payload
-from pydantic import field_validator, model_validator
+from model_lib import parse_payload
+from pydantic import ValidationError, field_validator, model_validator
 from pydantic_settings import BaseSettings, SettingsConfigDict

 from atlas_init.cloud.aws import AwsRegion
@@ -19,11 +19,13 @@ from atlas_init.settings.config import (
 from atlas_init.settings.config import (
     active_suites as config_active_suites,
 )
+from atlas_init.settings.env_vars_generated import AtlasSettings
 from atlas_init.settings.path import (
     DEFAULT_CONFIG_PATH,
     DEFAULT_PROFILES_PATH,
     DEFAULT_SCHEMA_CONFIG_PATH,
     DEFAULT_TF_PATH,
+    dump_dotenv,
     load_dotenv,
     repo_path_rel_path,
 )
@@ -31,25 +33,36 @@ from atlas_init.settings.path import (
 logger = logging.getLogger(__name__)
 ENV_PREFIX = "ATLAS_INIT_"
 DEFAULT_PROFILE = "default"
+ENV_S3_PROFILE_BUCKET = f"{ENV_PREFIX}S3_PROFILE_BUCKET"
+ENV_PROJECT_NAME = f"{ENV_PREFIX}PROJECT_NAME"
+ENV_PROFILE = f"{ENV_PREFIX}PROFILE"
+ENV_PROFILES_PATH = f"{ENV_PREFIX}PROFILES_PATH"
+ENV_TEST_SUITES = f"{ENV_PREFIX}TEST_SUITES"
+ENV_CLIPBOARD_COPY = f"{ENV_PREFIX}CLIPBOARD_COPY"
 REQUIRED_FIELDS = [
     "MONGODB_ATLAS_ORG_ID",
     "MONGODB_ATLAS_PRIVATE_KEY",
     "MONGODB_ATLAS_PUBLIC_KEY",
 ]
+FILENAME_ENV_MANUAL = ".env-manual"
+T = TypeVar("T")


-class ExternalSettings(BaseSettings):
-    model_config = SettingsConfigDict(env_prefix="")
+class ExternalSettings(AtlasSettings):
+    model_config = SettingsConfigDict(env_prefix="", extra="ignore")

     TF_CLI_CONFIG_FILE: str = ""
     AWS_PROFILE: str = ""
     AWS_REGION: AwsRegion = "us-east-1"
-    MONGODB_ATLAS_ORG_ID: str
-    MONGODB_ATLAS_PRIVATE_KEY: str
-    MONGODB_ATLAS_PUBLIC_KEY: str
-    MONGODB_ATLAS_BASE_URL: str = "https://cloud-dev.mongodb.com/"
     non_interactive: bool = False

+    @property
+    def realm_url(self) -> str:
+        assert not self.is_mongodbgov_cloud, "realm_url is not supported for mongodbgov cloud"
+        if "cloud-dev." in self.MONGODB_ATLAS_BASE_URL:
+            return "https://services.cloud-dev.mongodb.com/"
+        return "https://services.cloud.mongodb.com/"
+
     @property
     def is_interactive(self) -> bool:
         return not self.non_interactive
@@ -59,49 +72,10 @@ class ExternalSettings(BaseSettings):
         return "mongodbgov" in self.MONGODB_ATLAS_BASE_URL


-def as_env_var_name(field_name: str) -> str:
-    names = set(field_names(AtlasInitSettings))
-    assert (
-        field_name in names or field_name.lower() in names
-    ), f"unknown field name for {AtlasInitSettings}: {field_name}"
-    external_settings_names = set(field_names(ExternalSettings))
-    if field_name in external_settings_names:
-        return field_name.upper()
-    return f"{ENV_PREFIX}{field_name}".upper()
-
-
-def dump_manual_dotenv_from_env(path: Path) -> None:
-    env_vars: dict[str, str] = {}
-    names = field_names(AtlasInitSettings)
-    ext_settings_names = field_names(ExternalSettings)
-    path_settings_names = field_names(AtlasInitPaths)
-    names = set(names + ext_settings_names + path_settings_names)
-    os_env = os.environ
-    for name in sorted(names):
-        env_name = as_env_var_name(name)
-        if env_name.lower() in os_env or env_name.upper() in os_env:
-            env_value = os_env.get(env_name.upper()) or os_env.get(env_name.lower())
-            if env_value:
-                env_vars[env_name] = env_value
-
-    content = "\n".join(f"{k}={v}" for k, v in env_vars.items())
-    path.parent.mkdir(parents=True, exist_ok=True)
-    path.write_text(content)
-
-
-def env_var_names(field_name: str) -> list[str]:
-    return [f"{ENV_PREFIX}{name}" for name in (field_name, field_name.lower(), field_name.upper())]
-
-
-def read_from_env(field_name: str, default: str = "") -> str:
-    assert as_env_var_name(field_name)
-    for name in [field_name, field_name.lower(), field_name.upper()]:
+def read_from_env(env_key: str, default: str = "") -> str:
+    for name in [env_key, env_key.lower(), env_key.upper()]:
         if name in os.environ:
             return os.environ[name]
-        prefix_name = f"{ENV_PREFIX}{name}"
-        if prefix_name in os.environ:
-            return os.environ[prefix_name]
-    logger.info(f"field not found in env: {field_name}, using default: {default}")
     return default


@@ -114,6 +88,7 @@ class AtlasInitPaths(BaseSettings):
     profiles_path: Path = DEFAULT_PROFILES_PATH
     tf_schema_config_path: Path = DEFAULT_SCHEMA_CONFIG_PATH
     schema_out_path: Path | None = None
+    s3_profile_bucket: str = ""

     @property
     def schema_out_path_computed(self) -> Path:
@@ -125,14 +100,12 @@ class AtlasInitPaths(BaseSettings):

     @property
     def env_file_manual(self) -> Path:
-        return self.profile_dir / ".env-manual"
+        return self.profile_dir / FILENAME_ENV_MANUAL

     @property
     def manual_env_vars(self) -> dict[str, str]:
         env_manual_path = self.env_file_manual
-        if env_manual_path.exists():
-            return load_dotenv(env_manual_path)
-        return {}
+        return load_dotenv(env_manual_path) if env_manual_path.exists() else {}

     @property
     def env_vars_generated(self) -> Path:
@@ -165,23 +138,40 @@ class AtlasInitPaths(BaseSettings):
     def load_env_vars(self, path: Path) -> dict[str, str]:
         return load_dotenv(path)

-    def load_env_vars_generated(self) -> dict[str, str]:
-        env_path = self.env_vars_generated
+    def load_env_vars_full(self) -> dict[str, str]:
+        env_path = self.env_vars_vs_code
         assert env_path.exists(), f"no env-vars exist {env_path} have you forgotten apply?"
         return load_dotenv(env_path)

+    def env_vars_cls_or_none(self, t: type[T], *, path: Path | None = None) -> T | None:
+        with suppress(ValidationError):
+            return self.env_vars_cls(t, path=path)
+
+    def env_vars_cls(self, t: type[T], *, path: Path | None = None) -> T:
+        path = path or self.env_vars_vs_code
+        env_vars = self.load_env_vars(path) if path.exists() else {}
+        return t(**env_vars)
+
     def load_profile_manual_env_vars(self, *, skip_os_update: bool = False) -> dict[str, str]:
         # sourcery skip: dict-assign-update-to-union
         manual_env_vars = self.manual_env_vars
         if manual_env_vars:
             if skip_os_update:
                 return manual_env_vars
-            logger.warning(f"loading manual env-vars from {self.env_file_manual}")
-            os.environ.update(manual_env_vars)
+            if new_updates := {k: v for k, v in manual_env_vars.items() if k not in os.environ}:
+                logger.info(f"loading manual env-vars {','.join(new_updates)}")
+                os.environ.update(new_updates)
         else:
-            logger.warning(f"no {self.env_file_manual}")
+            logger.warning(f"no {self.env_file_manual} exists")
         return manual_env_vars

+    def include_extra_env_vars_in_vscode(self, extra_env_vars: dict[str, str]) -> None:
+        extra_name = ", ".join(extra_env_vars.keys())
+        original_env_vars = self.load_env_vars(self.env_vars_vs_code)
+        new_env_vars = original_env_vars | extra_env_vars
+        dump_dotenv(self.env_vars_vs_code, new_env_vars)
+        logger.info(f"done {self.env_vars_vs_code} updated with {extra_name} env-vars ✅")
+

 class EnvVarsCheck(NamedTuple):
     missing: list[str]
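
A sketch of the new typed env-var loaders, assuming the paths class lives in atlas_init.settings.env_vars (the hunk does not name the file) and that a profile may or may not have been applied yet:

from atlas_init.settings.env_vars import AtlasInitPaths  # assumed module path
from atlas_init.settings.env_vars_generated import RealmSettings

paths = AtlasInitPaths()  # profile resolved from env-vars/defaults
realm = paths.env_vars_cls_or_none(RealmSettings)
if realm is None:
    print("realm env-vars not generated yet (ValidationError suppressed)")
else:
    print(realm.MONGODB_REALM_APP_ID)
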
@@ -196,42 +186,36 @@ class AtlasInitSettings(AtlasInitPaths, ExternalSettings):
     cfn_use_kms_key: bool = False
     project_name: str = ""

-    skip_copy: bool = False
+    cliboard_copy: str = ""
     test_suites: str = ""

     @classmethod
     def check_env_vars(
         cls,
         profile: str = DEFAULT_PROFILE,
-        required_extra_fields: list[str] | None = None,
-        explicit_env_vars: dict[str, str] | None = None,
+        required_env_vars: list[str] | None = None,
     ) -> EnvVarsCheck:
-        """side effect of loading env-vars and set profile"""
-        os.environ[as_env_var_name("profile")] = profile
-        required_extra_fields = required_extra_fields or []
-        explicit_env_vars = explicit_env_vars or {}
-        path_settings = AtlasInitPaths()
-        manual_env_vars = path_settings.load_profile_manual_env_vars()
+        required_env_vars = required_env_vars or []
+        path_settings = AtlasInitPaths(profile=profile)
+        manual_env_vars = path_settings.manual_env_vars
         ambiguous: list[str] = []
-        for env_name, env_value in explicit_env_vars.items():
-            manual_value = manual_env_vars.get(env_name)
-            if manual_value and manual_value != env_value:
+        for env_name, manual_value in manual_env_vars.items():
+            env_value = read_from_env(env_name)
+            if env_value and manual_value != env_value:
                 ambiguous.append(env_name)
-            else:
-                os.environ[env_name] = env_value
         missing_env_vars = sorted(
-            as_env_var_name(field_name)
-            for field_name in REQUIRED_FIELDS + required_extra_fields
-            if read_from_env(field_name) == ""
+            env_name
+            for env_name in REQUIRED_FIELDS + required_env_vars
+            if read_from_env(env_name) == "" and env_name not in manual_env_vars
         )
         return EnvVarsCheck(missing=missing_env_vars, ambiguous=sorted(ambiguous))

     @classmethod
-    def safe_settings(cls) -> AtlasInitSettings:
-        """loads .env_manual before creating the settings"""
-        path_settings = AtlasInitPaths()
-        path_settings.load_profile_manual_env_vars()
-        ext_settings = ExternalSettings()  # type: ignore
+    def safe_settings(cls, profile: str, *, ext_settings: ExternalSettings | None = None) -> AtlasInitSettings:
+        """side effect of loading manual env-vars and set profile"""
+        os.environ[ENV_PROFILE] = profile
+        AtlasInitPaths(profile=profile).load_profile_manual_env_vars()
+        ext_settings = ext_settings or ExternalSettings()  # type: ignore
         path_settings = AtlasInitPaths()
         return cls(**path_settings.model_dump(), **ext_settings.model_dump())

@@ -257,16 +241,17 @@ class AtlasInitSettings(AtlasInitPaths, ExternalSettings):
     def test_suites_parsed(self) -> list[str]:
         return [t for t in self.test_suites.split(",") if t]

-    def cfn_config(self) -> dict[str, Any]:
+    def tf_vars(self) -> dict[str, Any]:
+        variables = {}
         if self.cfn_profile:
-            return {
-                "cfn_config": {
-                    "profile": self.cfn_profile,
-                    "region": self.cfn_region,
-                    "use_kms_key": self.cfn_use_kms_key,
-                }
+            variables["cfn_config"] = {
+                "profile": self.cfn_profile,
+                "region": self.cfn_region,
+                "use_kms_key": self.cfn_use_kms_key,
             }
-        return {}
+        if self.s3_profile_bucket:
+            variables["use_aws_s3"] = True
+        return variables


 def active_suites(settings: AtlasInitSettings) -> list[TestSuite]:
@@ -274,17 +259,45 @@ def active_suites(settings: AtlasInitSettings) -> list[TestSuite]:
     return config_active_suites(settings.config, repo_path, cwd_rel_path, settings.test_suites_parsed)


-def init_settings() -> AtlasInitSettings:
+_sentinel = object()
+PLACEHOLDER_VALUE = "PLACEHOLDER"
+
+
+class EnvVarsError(Exception):
+    def __init__(self, missing: list[str], ambiguous: list[str]):
+        self.missing = missing
+        self.ambiguous = ambiguous
+        super().__init__(f"missing: {missing}, ambiguous: {ambiguous}")
+
+    def __str__(self) -> str:
+        return f"missing: {self.missing}, ambiguous: {self.ambiguous}"
+
+
+def init_settings(
+    required_env_vars: list[str] | object = _sentinel,
+    *,
+    non_required: bool = False,
+) -> AtlasInitSettings:
+    if required_env_vars is _sentinel:
+        required_env_vars = [ENV_PROJECT_NAME]
+    if non_required:
+        required_env_vars = []
+    profile = os.getenv("ATLAS_INIT_PROFILE", DEFAULT_PROFILE)
     missing_env_vars, ambiguous_env_vars = AtlasInitSettings.check_env_vars(
-        os.getenv("ATLAS_INIT_PROFILE", DEFAULT_PROFILE),
-        required_extra_fields=["project_name"],
+        profile,
+        required_env_vars=required_env_vars,  # type: ignore
     )
-    if missing_env_vars:
-        typer.echo(f"missing env_vars: {missing_env_vars}")
+    if missing_env_vars and not non_required:
+        logger.warning(f"missing env_vars: {missing_env_vars}")
     if ambiguous_env_vars:
-        typer.echo(
-            f"amiguous env_vars: {ambiguous_env_vars} (specified both in cli & in .env-manual file with different values)"
+        logger.warning(
+            f"amiguous env_vars: {ambiguous_env_vars} (specified both in cli/env & in .env-manual file with different values)"
         )
+    ext_settings = None
+    if non_required and missing_env_vars:
+        placeholders = {k: PLACEHOLDER_VALUE for k in missing_env_vars}
+        missing_env_vars = []
+        ext_settings = ExternalSettings(**placeholders)  # type: ignore
     if missing_env_vars or ambiguous_env_vars:
-        raise typer.Exit(1)
-    return AtlasInitSettings.safe_settings()
+        raise EnvVarsError(missing_env_vars, ambiguous_env_vars)
+    return AtlasInitSettings.safe_settings(profile, ext_settings=ext_settings)
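
A sketch of the caller-side flow this enables, again assuming the atlas_init.settings.env_vars module path:

from atlas_init.settings.env_vars import EnvVarsError, init_settings  # assumed module path

try:
    settings = init_settings()  # checks the MONGODB_ATLAS_* vars and, by default, ATLAS_INIT_PROJECT_NAME
except EnvVarsError as e:
    print(f"cannot build settings, missing={e.missing} ambiguous={e.ambiguous}")
else:
    print(settings.project_name)

# non_required=True appears to fill missing Atlas credentials with PLACEHOLDER instead of raising
relaxed = init_settings(non_required=True)
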
@@ -0,0 +1,34 @@
+import random
+
+from pydantic import ConfigDict, Field
+from pydantic_settings import BaseSettings
+
+
+class _EnvVarsGenerated(BaseSettings):
+    model_config = ConfigDict(extra="ignore")  # type: ignore
+
+
+class AtlasSettings(_EnvVarsGenerated):
+    MONGODB_ATLAS_ORG_ID: str
+    MONGODB_ATLAS_PRIVATE_KEY: str
+    MONGODB_ATLAS_PUBLIC_KEY: str
+    MONGODB_ATLAS_BASE_URL: str = "https://cloud-dev.mongodb.com/"
+
+
+class RealmSettings(_EnvVarsGenerated):
+    MONGODB_REALM_APP_ID: str
+    MONGODB_REALM_SERVICE_ID: str
+    MONGODB_REALM_FUNCTION_ID: str
+    MONGODB_REALM_FUNCTION_NAME: str
+    MONGODB_REALM_BASE_URL: str
+    RANDOM_INT_100K: str = Field(default_factory=lambda: str(random.randint(0, 100_000)))  # noqa: S311 # not used for cryptographic purposes
+
+
+class EnvVarsGenerated(AtlasSettings):
+    MONGODB_ATLAS_PROJECT_ID: str
+
+
+class TFModuleCluster(_EnvVarsGenerated):
+    MONGODB_ATLAS_CLUSTER_NAME: str
+    MONGODB_ATLAS_CONTAINER_ID: str
+    MONGODB_URL: str
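
The classes in this new module are plain pydantic-settings models, so they can be populated from process env-vars or built directly from already-loaded values; a sketch with placeholder values:

import os

from atlas_init.settings.env_vars_generated import AtlasSettings, TFModuleCluster

# BaseSettings picks matching environment variables up automatically
os.environ.setdefault("MONGODB_ATLAS_ORG_ID", "org-placeholder")
os.environ.setdefault("MONGODB_ATLAS_PRIVATE_KEY", "private-placeholder")
os.environ.setdefault("MONGODB_ATLAS_PUBLIC_KEY", "public-placeholder")
atlas = AtlasSettings()
print(atlas.MONGODB_ATLAS_BASE_URL)  # default: https://cloud-dev.mongodb.com/

# or construct explicitly, e.g. from a parsed dotenv mapping
cluster = TFModuleCluster(
    MONGODB_ATLAS_CLUSTER_NAME="cluster-placeholder",
    MONGODB_ATLAS_CONTAINER_ID="container-placeholder",
    MONGODB_URL="mongodb+srv://placeholder",
)
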
@@ -1,6 +1,7 @@
 import logging
 from typing import Literal

+import typer
 from pydantic import BaseModel
 from rich.logging import RichHandler

@@ -44,13 +45,20 @@ def hide_secrets(handler: logging.Handler, secrets_dict: dict[str, str]) -> None
     handler.addFilter(SecretsHider(list(secrets_to_hide), name="secrets-hider"))


-def configure_logging(log_level: str = "INFO") -> logging.Handler:
+def configure_logging(
+    app: typer.Typer, log_level: str = "INFO", *, is_running_in_repo: bool = False
+) -> logging.Handler:
     _LogLevel(log_level=log_level)  # type: ignore
-    handler = RichHandler(rich_tracebacks=False)
+    handler = RichHandler(rich_tracebacks=False, level=log_level)
     logging.basicConfig(
-        level=logging.getLevelName(log_level),
+        level=log_level,
         format="%(message)s",
         datefmt="[%X]",
         handlers=[handler],
     )
+    if not is_running_in_repo or handler.level >= logging.WARNING:
+        logging.warning("using basic tracebacks/errors")
+        app.pretty_exceptions_enable = False
+        app.pretty_exceptions_show_locals = False
+
     return handler