atlas-init 0.3.0__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
atlas_init/__init__.py CHANGED
@@ -1,6 +1,6 @@
  from pathlib import Path
 
- VERSION = "0.3.0"
+ VERSION = "0.3.1"
 
 
  def running_in_repo() -> bool:
atlas_init/cli_tf/app.py CHANGED
@@ -30,7 +30,6 @@ from atlas_init.cli_tf.go_test_summary import (
  )
  from atlas_init.cli_tf.mock_tf_log import mock_tf_log_cmd
  from atlas_init.cli_tf.schema import (
- download_admin_api,
  dump_generator_config,
  parse_py_terraform_schema,
  update_provider_code_spec,
@@ -42,6 +41,7 @@ from atlas_init.cli_tf.schema_v2 import (
  )
  from atlas_init.cli_tf.schema_v2_api_parsing import add_api_spec_info
  from atlas_init.cli_tf.schema_v2_sdk import generate_model_go, parse_sdk_model
+ from atlas_init.repos.go_sdk import download_admin_api
  from atlas_init.repos.path import Repo, current_repo_path
  from atlas_init.settings.env_vars import init_settings
  from atlas_init.settings.interactive import confirm
atlas_init/cli_tf/debug_logs_test_data.py CHANGED
@@ -31,14 +31,7 @@ class RequestInfo(Entity):
 
  @property
  def id(self):
- return "__".join( # noqa: FLY002
- [
- self.method,
- self.path,
- self.version,
- self.text,
- ] # need to include text to differentiate between requests
- )
+ return "__".join((self.method, self.path, self.version, self.text)) # noqa: FLY002
 
 
  class StepRequests(Entity):
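
For context, the simplified RequestInfo.id still joins the request body text into the id, so two requests hitting the same method/path/version with different payloads remain distinct. A minimal illustration with made-up values (only the join format comes from the change above):

    method, path, version, text = "POST", "/api/atlas/v2/groups", "2024-08-05", '{"name": "g1"}'
    request_id = "__".join((method, path, version, text))
    # 'POST__/api/atlas/v2/groups__2024-08-05__{"name": "g1"}'
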
atlas_init/cli_tf/mock_tf_log.py CHANGED
@@ -1,5 +1,6 @@
  import json
  import logging
+ import time
  from pathlib import Path
  from typing import Self
 
@@ -15,7 +16,8 @@ from atlas_init.cli_tf.debug_logs import (
  parse_test_name,
  )
  from atlas_init.cli_tf.debug_logs_test_data import create_mock_data, default_is_diff
- from atlas_init.repos.go_sdk import parse_api_spec_paths
+ from atlas_init.repos.go_sdk import api_spec_path_transformed, download_admin_api, parse_api_spec_paths
+ from atlas_init.settings.path import DEFAULT_DOWNLOADS_DIR
 
  logger = logging.getLogger(__name__)
 
@@ -23,18 +25,18 @@ logger = logging.getLogger(__name__)
  class MockTFLog(Entity):
  log_path: Path
  output_dir: Path
- sdk_path: Path
+ admin_api_path: Path
  diff_skip_suffixes: list[str] = Field(default_factory=list)
  keep_duplicates: bool = False
 
  @model_validator(mode="after")
  def ensure_paths_exist(self) -> Self:
  if not self.log_path.exists():
- raise ValueError(f"log_path: {self.log_path} doesn't exist")
- if not self.sdk_path.exists():
- raise ValueError(f"sdk_path: {self.sdk_path} doesn't exist")
+ raise ValueError(f"log_path: '{self.log_path}' doesn't exist")
+ if not self.admin_api_path.exists():
+ raise ValueError(f"admin_api_path: '{self.admin_api_path}' doesn't exist")
  if not self.output_dir.exists():
- raise ValueError(f"output_dir: {self.output_dir} doesn't exist")
+ raise ValueError(f"output_dir: '{self.output_dir}' doesn't exist")
  assert self.output_dir.name == "testdata", "output_path should be a directory named testdata"
  return self
 
@@ -46,7 +48,7 @@ def mock_tf_log(req: MockTFLog) -> None:
  log_file_text = req.log_path.read_text()
  test_name = parse_test_name(log_file_text)
  roundtrips = parse_http_requests(log_file_text)
- api_spec_paths = parse_api_spec_paths(req.sdk_path)
+ api_spec_paths = parse_api_spec_paths(req.admin_api_path)
  data = create_mock_data(
  roundtrips,
  api_spec_paths,
@@ -62,24 +64,55 @@ def mock_tf_log(req: MockTFLog) -> None:
 
  def mock_tf_log_cmd(
  log_path: str = typer.Argument(..., help="the path to the log file generated with TF_LOG_PATH"),
- sdk_repo_path_str: str = option_sdk_repo_path,
  output_testdir: str = typer.Option(
  "",
  "-o",
  "--output-testdir",
- help="the path to the output test directory, for example: internal/service/advancedclustertpf/testdata/",
+ help="the path to the output test directory, for example: internal/service/advancedclustertpf/testdata/, uses cwd/testdata by default",
+ ),
+ sdk_repo_path_str: str = option_sdk_repo_path,
+ sdk_branch: str = typer.Option("main", "-b", "--branch", help="the branch for downloading openapi spec"),
+ admin_api_path: str = typer.Option(
+ "", "-a", "--admin-api-path", help="the path to store/download the openapi spec"
  ),
  diff_skip_suffixes: list[str] = typer.Option(..., "-s", "--skip-suffixes", default_factory=list),
  keep_duplicates: bool = typer.Option(False, "-keep", "--keep-duplicates", help="keep duplicate requests"),
  ):
  cwd = Path.cwd()
- default_sdk_path = cwd.parent / "atlas-sdk-go"
- default_testdir = cwd.parent / "testdata"
+ default_testdir = cwd / "testdata"
+ resolved_admin_api_path = resolve_admin_api_path(sdk_repo_path_str, sdk_branch, admin_api_path)
  event_in = MockTFLog(
  log_path=Path(log_path),
  output_dir=Path(output_testdir) if output_testdir else default_testdir,
- sdk_path=Path(sdk_repo_path_str) if sdk_repo_path_str else default_sdk_path,
+ admin_api_path=resolved_admin_api_path,
  diff_skip_suffixes=diff_skip_suffixes,
  keep_duplicates=keep_duplicates,
  )
  mock_tf_log(event_in)
+
+
+ def is_cache_up_to_date(cache_path: Path, cache_ttl: int) -> bool:
+ if cache_path.exists():
+ modified_ts = file_utils.file_modified_time(cache_path)
+ if modified_ts > time.time() - cache_ttl:
+ logger.info(f"using cached admin api: {cache_path} downloaded {time.time()-modified_ts:.0f}s ago")
+ return True
+ return False
+
+
+ def resolve_admin_api_path(sdk_repo_path_str: str, sdk_branch: str, admin_api_path: str) -> Path:
+ if admin_api_path:
+ resolved_admin_api_path = Path(admin_api_path)
+ if not resolved_admin_api_path.exists():
+ download_admin_api(resolved_admin_api_path, sdk_branch)
+ elif sdk_repo_path_str:
+ sdk_repo_path = Path(sdk_repo_path_str)
+ assert sdk_repo_path.exists(), f"not found sdk_repo_path={sdk_repo_path}"
+ resolved_admin_api_path = api_spec_path_transformed(sdk_repo_path)
+ else:
+ resolved_admin_api_path = DEFAULT_DOWNLOADS_DIR / "atlas-api-transformed.yaml"
+ if not is_cache_up_to_date(resolved_admin_api_path, 3600):
+ download_admin_api(resolved_admin_api_path, sdk_branch)
+ assert resolved_admin_api_path.exists(), f"unable to resolve admin_api_path={resolved_admin_api_path}"
+ assert resolved_admin_api_path.is_file(), f"not a file admin_api_path={resolved_admin_api_path}"
+ return resolved_admin_api_path
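
For context, the new resolve_admin_api_path helper picks the OpenAPI spec in this order: an explicit --admin-api-path (downloaded if the file is missing), otherwise the transformed spec inside --sdk-repo-path, otherwise a cached copy under DEFAULT_DOWNLOADS_DIR that is re-downloaded once it is older than 3600 seconds. A minimal usage sketch, assuming atlas-init 0.3.1 is installed; the log file name and paths below are made up:

    from pathlib import Path

    from atlas_init.cli_tf.mock_tf_log import MockTFLog, mock_tf_log, resolve_admin_api_path

    # no explicit spec path and no SDK repo: falls back to the cached download
    admin_api = resolve_admin_api_path(sdk_repo_path_str="", sdk_branch="main", admin_api_path="")
    mock_tf_log(
        MockTFLog(
            log_path=Path("tf.log"),             # TF_LOG_PATH output (hypothetical file)
            output_dir=Path.cwd() / "testdata",  # must exist and be named "testdata"
            admin_api_path=admin_api,
        )
    )
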
atlas_init/cli_tf/schema.py CHANGED
@@ -5,7 +5,6 @@ from pathlib import Path
  from typing import Annotated, Literal, NamedTuple
 
  import pydantic
- import requests
  from model_lib import Entity, dump, field_names, parse_model
  from zero_3rdparty import dict_nested
  from zero_3rdparty.enum_utils import StrEnum
@@ -66,14 +65,19 @@ class SkipValidators(Entity):
  type: Literal["skip_validators"] = "skip_validators"
 
 
- Extension = Annotated[IgnoreNested | RenameAttribute | ChangeAttributeType | SkipValidators, pydantic.Field("type")]
+ Extension = Annotated[
+ IgnoreNested | RenameAttribute | ChangeAttributeType | SkipValidators,
+ pydantic.Field("type"),
+ ]
 
 
  class TFResource(Entity):
  model_config = pydantic.ConfigDict(extra="allow")
  name: str
  extensions: list[Extension] = pydantic.Field(default_factory=list)
- provider_spec_attributes: list[ProviderSpecAttribute] = pydantic.Field(default_factory=list)
+ provider_spec_attributes: list[ProviderSpecAttribute] = pydantic.Field(
+ default_factory=list
+ )
 
  def dump_generator_config(self) -> dict:
  names = field_names(self)
@@ -129,7 +133,9 @@ class ProviderCodeSpec(Entity):
  raise ValueError(f"{self.root_name(name, is_datasource)} not found!")
  return root_value
 
- def schema_attributes(self, name: str, is_datasource: bool = False) -> list: # noqa: FBT002
+ def schema_attributes(
+ self, name: str, is_datasource: bool = False
+ ) -> list:
  root_dict = self.root_dict(name, is_datasource)
  return root_dict["schema"]["attributes"]
 
@@ -139,16 +145,26 @@
  def root_name(self, name: str, is_datasource: bool):
  return f"{self._type_name(is_datasource)}.{name}"
 
- def attribute_names(self, name: str, is_datasource: bool = False) -> list[str]: # noqa: FBT002
- return [a["name"] for a in self.schema_attributes(name, is_datasource=is_datasource)]
+ def attribute_names(
+ self, name: str, is_datasource: bool = False
+ ) -> list[str]:
+ return [
+ a["name"] for a in self.schema_attributes(name, is_datasource=is_datasource)
+ ]
 
- def iter_all_attributes(self, name: str, is_datasource: bool = False) -> Iterable[AttributeTuple]: # noqa: FBT002
+ def iter_all_attributes(
+ self, name: str, is_datasource: bool = False
+ ) -> Iterable[AttributeTuple]:
  for attribute in self.schema_attributes(name=name, is_datasource=is_datasource):
  yield AttributeTuple(attribute["name"], "", attribute)
  yield from self.iter_nested_attributes(name, is_datasource=is_datasource)
 
- def iter_nested_attributes(self, name: str, is_datasource: bool = False) -> Iterable[AttributeTuple]: # noqa: FBT002
- for i, attribute in enumerate(self.schema_attributes(name=name, is_datasource=is_datasource)):
+ def iter_nested_attributes(
+ self, name: str, is_datasource: bool = False
+ ) -> Iterable[AttributeTuple]:
+ for i, attribute in enumerate(
+ self.schema_attributes(name=name, is_datasource=is_datasource)
+ ):
  for path, attr_dict in dict_nested.iter_nested_key_values(
  attribute, type_filter=dict, include_list_indexes=True
  ):
@@ -156,32 +172,53 @@ class ProviderCodeSpec(Entity):
  if name := attr_dict.get("name", ""):
  yield AttributeTuple(name, full_path, attr_dict)
 
- def remove_nested_attribute(self, name: str, path: str, is_datasource: bool = False) -> None: # noqa: FBT002
+ def remove_nested_attribute(
+ self, name: str, path: str, is_datasource: bool = False
+ ) -> None:
  root_name = self.root_name(name, is_datasource)
  logger.info(f"will remove attribute from {root_name} with path: {path}")
  root_attributes = self.root_dict(name, is_datasource)
  full_path = f"schema.attributes.{path}"
  popped = dict_nested.pop_nested(root_attributes, full_path, "")
  if popped == "":
- raise ValueError(f"failed to remove attribute from resource {name} with path: {full_path}")
- assert isinstance(popped, dict), f"expected removed attribute to be a dict, got: {popped}"
+ raise ValueError(
+ f"failed to remove attribute from resource {name} with path: {full_path}"
+ )
+ assert isinstance(
+ popped, dict
+ ), f"expected removed attribute to be a dict, got: {popped}"
  logger.info(f"removal ok, attribute_name: '{root_name}.{popped.get('name')}'")
 
- def read_attribute(self, name: str, path: str, *, is_datasource: bool = False) -> dict:
+ def read_attribute(
+ self, name: str, path: str, *, is_datasource: bool = False
+ ) -> dict:
  if "." not in path:
- attribute_dict = next((a for a in self.schema_attributes(name, is_datasource) if a["name"] == path), None)
+ attribute_dict = next(
+ (
+ a
+ for a in self.schema_attributes(name, is_datasource)
+ if a["name"] == path
+ ),
+ None,
+ )
  else:
  root_dict = self.root_dict(name, is_datasource)
- attribute_dict = dict_nested.read_nested_or_none(root_dict, f"schema.attributes.{path}")
+ attribute_dict = dict_nested.read_nested_or_none(
+ root_dict, f"schema.attributes.{path}"
+ )
  if attribute_dict is None:
- raise ValueError(f"attribute {path} not found in {self.root_name(name, is_datasource)}")
+ raise ValueError(
+ f"attribute {path} not found in {self.root_name(name, is_datasource)}"
+ )
  assert isinstance(
  attribute_dict, dict
  ), f"expected attribute to be a dict, got: {attribute_dict} @ {path} for resource={name}"
  return attribute_dict
 
 
- def update_provider_code_spec(schema: PyTerraformSchema, provider_code_spec_path: Path) -> str:
+ def update_provider_code_spec(
+ schema: PyTerraformSchema, provider_code_spec_path: Path
+ ) -> str:
  spec = parse_model(provider_code_spec_path, t=ProviderCodeSpec)
  for resource in schema.resources:
  resource_name = resource.name
@@ -192,41 +229,67 @@ def update_provider_code_spec(schema: PyTerraformSchema, provider_code_spec_path
  for data_source in schema.data_sources:
  data_source_name = data_source.name
  if extra_spec_attributes := data_source.provider_spec_attributes:
- add_explicit_attributes(spec, data_source_name, extra_spec_attributes, is_datasource=True)
+ add_explicit_attributes(
+ spec, data_source_name, extra_spec_attributes, is_datasource=True
+ )
  for extension in data_source.extensions:
  apply_extension(extension, spec, data_source_name, is_datasource=True)
  return dump(spec, "json")
 
 
  def add_explicit_attributes(
- spec: ProviderCodeSpec, name: str, extra_spec_attributes: list[ProviderSpecAttribute], *, is_datasource=False
+ spec: ProviderCodeSpec,
+ name: str,
+ extra_spec_attributes: list[ProviderSpecAttribute],
+ *,
+ is_datasource=False,
  ):
  resource_attributes = spec.schema_attributes(name, is_datasource=is_datasource)
  existing_names = spec.attribute_names(name, is_datasource=is_datasource)
  new_names = [extra.name for extra in extra_spec_attributes]
  if both := set(existing_names) & set(new_names):
  raise ValueError(f"resource: {name}, has already: {both} attributes")
- resource_attributes.extend(extra.dump_provider_code_spec() for extra in extra_spec_attributes)
+ resource_attributes.extend(
+ extra.dump_provider_code_spec() for extra in extra_spec_attributes
+ )
 
 
  @singledispatch
- def apply_extension(extension: object, spec: ProviderCodeSpec, resource_name: str, *, is_datasource: bool = False): # noqa: ARG001
+ def apply_extension(
+ extension: object,
+ spec: ProviderCodeSpec,
+ resource_name: str,
+ *,
+ is_datasource: bool = False,
+ ):
  raise NotImplementedError(f"unsupported extension: {extension!r}")
 
 
  @apply_extension.register # type: ignore
- def _ignore_nested(extension: IgnoreNested, spec: ProviderCodeSpec, resource_name: str, *, is_datasource: bool = False):
+ def _ignore_nested(
+ extension: IgnoreNested,
+ spec: ProviderCodeSpec,
+ resource_name: str,
+ *,
+ is_datasource: bool = False,
+ ):
  if extension.use_wildcard:
  name_to_remove = extension.path.removeprefix("*.")
- assert "*" not in name_to_remove, f"only prefix *. is allowed for wildcard in path {extension.path}"
+ assert (
+ "*" not in name_to_remove
+ ), f"only prefix *. is allowed for wildcard in path {extension.path}"
  found_paths = [
  path
- for name, path, attribute_dict in spec.iter_nested_attributes(resource_name, is_datasource=is_datasource)
+ for name, path, attribute_dict in spec.iter_nested_attributes(
+ resource_name, is_datasource=is_datasource
+ )
  if name == name_to_remove
  ]
  while found_paths:
  next_to_remove = found_paths.pop()
- spec.remove_nested_attribute(resource_name, next_to_remove, is_datasource=is_datasource)
+ spec.remove_nested_attribute(
+ resource_name, next_to_remove, is_datasource=is_datasource
+ )
  found_paths = [
  path
  for name, path, attribute_dict in spec.iter_nested_attributes(
@@ -241,9 +304,15 @@ def _ignore_nested(extension: IgnoreNested, spec: ProviderCodeSpec, resource_nam
 
  @apply_extension.register # type: ignore
  def _rename_attribute(
- extension: RenameAttribute, spec: ProviderCodeSpec, resource_name: str, *, is_datasource: bool = False
+ extension: RenameAttribute,
+ spec: ProviderCodeSpec,
+ resource_name: str,
+ *,
+ is_datasource: bool = False,
  ):
- for attribute_dict in spec.schema_attributes(resource_name, is_datasource=is_datasource):
+ for attribute_dict in spec.schema_attributes(
+ resource_name, is_datasource=is_datasource
+ ):
  if attribute_dict.get("name") == extension.from_name:
  logger.info(
  f"renaming attribute for {spec.root_name(resource_name, is_datasource)}: {extension.from_name} -> {extension.to_name}"
@@ -253,9 +322,15 @@ def _rename_attribute(
 
  @apply_extension.register # type: ignore
  def _change_attribute_type(
- extension: ChangeAttributeType, spec: ProviderCodeSpec, resource_name: str, *, is_datasource: bool = False
+ extension: ChangeAttributeType,
+ spec: ProviderCodeSpec,
+ resource_name: str,
+ *,
+ is_datasource: bool = False,
  ):
- attribute_dict = spec.read_attribute(resource_name, extension.path, is_datasource=is_datasource)
+ attribute_dict = spec.read_attribute(
+ resource_name, extension.path, is_datasource=is_datasource
+ )
  old_value = extension.read_value(attribute_dict)
  if old_value == extension.new_value:
  logger.info(
@@ -270,31 +345,25 @@ def _change_attribute_type(
 
 
  @apply_extension.register # type: ignore
- def _skip_validators(_: SkipValidators, spec: ProviderCodeSpec, resource_name: str, *, is_datasource: bool = False):
- for attr_tuple in spec.iter_all_attributes(resource_name, is_datasource=is_datasource):
+ def _skip_validators(
+ _: SkipValidators,
+ spec: ProviderCodeSpec,
+ resource_name: str,
+ *,
+ is_datasource: bool = False,
+ ):
+ for attr_tuple in spec.iter_all_attributes(
+ resource_name, is_datasource=is_datasource
+ ):
  attribute_dict = attr_tuple.attribute_dict
  paths_to_pop = [
  f"{path}.validators"
- for path, nested_dict in dict_nested.iter_nested_key_values(attribute_dict, type_filter=dict)
+ for path, nested_dict in dict_nested.iter_nested_key_values(
+ attribute_dict, type_filter=dict
+ )
  if "validators" in nested_dict
  ]
  if paths_to_pop:
  logger.info(f"popping validators from '{attr_tuple.attribute_path}'")
  for path in paths_to_pop:
  dict_nested.pop_nested(attribute_dict, path)
-
-
- # reusing url from terraform-provider-mongodbatlas/scripts/schema-scaffold.sh
- ADMIN_API_URL = "https://raw.githubusercontent.com/mongodb/atlas-sdk-go/main/openapi/atlas-api-transformed.yaml"
-
-
- def admin_api_url(branch: str) -> str:
- return ADMIN_API_URL.replace("/main/", f"/{branch}/")
-
-
- def download_admin_api(dest: Path, branch: str = "main") -> None:
- url = admin_api_url(branch)
- logger.info(f"downloading admin api to {dest} from {url}")
- response = requests.get(url, timeout=10)
- response.raise_for_status()
- dest.write_bytes(response.content)
atlas_init/repos/go_sdk.py CHANGED
@@ -1,9 +1,11 @@
  from collections import defaultdict
  from pathlib import Path
 
+ import requests
  from model_lib import parse_model
 
  from atlas_init.cli_tf.debug_logs_test_data import ApiSpecPath
+ from atlas_init.cli_tf.schema import logger
  from atlas_init.cli_tf.schema_v2_api_parsing import OpenapiSchema
 
 
@@ -15,11 +17,31 @@ def go_sdk_breaking_changes(repo_path: Path, go_sdk_rel_path: str = "../atlas-sd
  return breaking_changes_dir
 
 
- def parse_api_spec_paths(sdk_repo_path: Path) -> dict[str, list[ApiSpecPath]]:
- api_spec_path = sdk_repo_path / "openapi/atlas-api-transformed.yaml"
+ def api_spec_path_transformed(sdk_repo_path: Path) -> Path:
+ return sdk_repo_path / "openapi/atlas-api-transformed.yaml"
+
+
+ def parse_api_spec_paths(api_spec_path: Path) -> dict[str, list[ApiSpecPath]]:
  model = parse_model(api_spec_path, t=OpenapiSchema)
  paths: dict[str, list[ApiSpecPath]] = defaultdict(list)
  for path, path_dict in model.paths.items():
  for method in path_dict:
  paths[method.upper()].append(ApiSpecPath(path=path))
  return paths
+
+
+ # reusing url from terraform-provider-mongodbatlas/scripts/schema-scaffold.sh
+ ADMIN_API_URL = "https://raw.githubusercontent.com/mongodb/atlas-sdk-go/main/openapi/atlas-api-transformed.yaml"
+
+
+ def admin_api_url(branch: str) -> str:
+ return ADMIN_API_URL.replace("/main/", f"/{branch}/")
+
+
+ def download_admin_api(dest: Path, branch: str = "main") -> None:
+ dest.parent.mkdir(parents=True, exist_ok=True)
+ url = admin_api_url(branch)
+ logger.info(f"downloading admin api to {dest} from {url}")
+ response = requests.get(url, timeout=10)
+ response.raise_for_status()
+ dest.write_bytes(response.content)
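
For context, download_admin_api and the branch-aware admin_api_url moved here from cli_tf/schema.py, and parse_api_spec_paths now takes the spec file itself rather than the SDK repo root. A small sketch of the relocated helpers, assuming network access to raw.githubusercontent.com; the local file name below is made up:

    from pathlib import Path

    from atlas_init.repos.go_sdk import admin_api_url, download_admin_api, parse_api_spec_paths

    url = admin_api_url("main")  # .../atlas-sdk-go/main/openapi/atlas-api-transformed.yaml
    spec = Path("downloads/atlas-api-transformed.yaml")
    download_admin_api(spec, branch="main")        # creates parent dirs, fetches and writes the spec
    methods_to_paths = parse_api_spec_paths(spec)  # e.g. {"GET": [ApiSpecPath(...), ...], ...}
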
atlas_init/settings/path.py CHANGED
@@ -28,6 +28,7 @@ DEFAULT_CONFIG_PATH = ROOT_PATH / "atlas_init.yaml"
  DEFAULT_SCHEMA_CONFIG_PATH = ROOT_PATH / "terraform.yaml"
  DEFAULT_GITHUB_CI_RUN_LOGS = ROOT_PATH / "github_ci_run_logs"
  DEFAULT_GITHUB_SUMMARY_DIR = ROOT_PATH / "github_ci_summary"
+ DEFAULT_DOWNLOADS_DIR = ROOT_PATH / "downloads"
 
 
  def load_dotenv(env_path: Path) -> dict[str, str]:
atlas_init/settings/rich_utils.py CHANGED
@@ -46,7 +46,7 @@ def hide_secrets(handler: logging.Handler, secrets_dict: dict[str, str]) -> None
 
  def configure_logging(log_level: str = "INFO") -> logging.Handler:
  _LogLevel(log_level=log_level) # type: ignore
- handler = RichHandler(rich_tracebacks=True)
+ handler = RichHandler(rich_tracebacks=False)
  logging.basicConfig(
  level=logging.getLevelName(log_level),
  format="%(message)s",
atlas_init/typer_app.py CHANGED
@@ -59,4 +59,8 @@ def main(
  logger.info(f"running in repo: {running_in_repo()} python location:{sys.executable}")
  if not show_secrets:
  hide_secrets(log_handler, {**os.environ})
- logger.info(f"in the app callback, log-level: {log_level}, command: {ctx.command}")
+ logger.info(f"in the app callback, log-level: {log_level}, command: {format_cmd(ctx)}")
+
+
+ def format_cmd(ctx: typer.Context) -> str:
+ return f"'{ctx.info_name} {ctx.invoked_subcommand}'"
atlas_init-0.3.0.dist-info/METADATA → atlas_init-0.3.1.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: atlas-init
- Version: 0.3.0
+ Version: 0.3.1
  Project-URL: Documentation, https://github.com/EspenAlbert/atlas-init#readme
  Project-URL: Issues, https://github.com/EspenAlbert/atlas-init/issues
  Project-URL: Source, https://github.com/EspenAlbert/atlas-init
atlas_init-0.3.0.dist-info/RECORD → atlas_init-0.3.1.dist-info/RECORD
@@ -1,11 +1,11 @@
- atlas_init/__init__.py,sha256=wefdLatWmYi2mgkWhGxe2aEGq2miR5yH7tbuwRX6TAc,372
+ atlas_init/__init__.py,sha256=Mja1wzBho0kdXUYEG2JEXu72cDv6N_Uf0xLaUN7aQsA,372
  atlas_init/__main__.py,sha256=dY1dWWvwxRZMmnOFla6RSfti-hMeLeKdoXP7SVYqMUc,52
  atlas_init/atlas_init.yaml,sha256=GMyJVhKKRc7WzEu7fafmWgeTsDaExTLv7QvXOmE_Brg,1907
  atlas_init/cli.py,sha256=IiOEC_Jry6vrSDH3_OvsU50F-_3iVIS4tV6-R7659fY,9642
  atlas_init/cli_args.py,sha256=tiwUYAE0JBSl9lHV6VJ41vFCU90ChBZ4mKvi-YoF_HY,541
  atlas_init/humps.py,sha256=l0ZXXuI34wwd9TskXhCjULfGbUyK-qNmiyC6_2ow6kU,7339
  atlas_init/terraform.yaml,sha256=qPrnbzBEP-JAQVkYadHsggRnDmshrOJyiv0ckyZCxwY,2734
- atlas_init/typer_app.py,sha256=nxwKwr2GNZ6Zd8uXfJV8mZucEHT0KvfRyF_qxJ3IpYQ,1962
+ atlas_init/typer_app.py,sha256=zbvYUlZrF4TZEPEwpa33fVSLVKcxRamuXCgF1FCUhCU,2068
  atlas_init/cli_cfn/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  atlas_init/cli_cfn/app.py,sha256=iMukpUDgsAgZh_U_APGZB3gmewOo-3vtFK0byJuDz9w,6649
  atlas_init/cli_cfn/aws.py,sha256=GbohR7uczSGwQjLEYozCmlxbeIHo1uwQIJMwsh7kF7M,17894
@@ -21,16 +21,16 @@ atlas_init/cli_helper/tf_runner.py,sha256=OYdC-Y6i-xRh8_LCudKdtP7CEYEO9e67nVhhol
  atlas_init/cli_root/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  atlas_init/cli_root/trigger.py,sha256=oEgqb_l25tyYgUaFHEuChcOCJA7k3mnRa4D-Myz-Igs,5789
  atlas_init/cli_tf/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- atlas_init/cli_tf/app.py,sha256=-50lyNwG5eh8nVCN6mCcIB_b4xd5aJjhEoK-XVHD8XU,11359
+ atlas_init/cli_tf/app.py,sha256=0Y5c-Pc9ibOz6kXvFlL-yhH_fx1nHLgBgK9OAVqjX9s,11390
  atlas_init/cli_tf/changelog.py,sha256=biWYKf1pZvXZ-jEgcZ5q9sY7nTGrL2PuI0h9mCILf_g,3181
  atlas_init/cli_tf/debug_logs.py,sha256=lnB5BpcEooVzGd2RLxbwAVQs0ZYXzRKy5sHa0hftHI8,8799
- atlas_init/cli_tf/debug_logs_test_data.py,sha256=CP1FbRvaFXOOr5m0u-Y6YAvWpqn6WzH-XoPYBJZN0bE,9594
+ atlas_init/cli_tf/debug_logs_test_data.py,sha256=bv4gqhHSNEnQqIijrcjvEUA0M6S-aeo73V4mji0pKCM,9435
  atlas_init/cli_tf/github_logs.py,sha256=VD7qhlXNuG21eTuJ5VI7rsflp5WHSodfngkRVgQlumw,8114
  atlas_init/cli_tf/go_test_run.py,sha256=ZoQSvIasmWauFxZJrWL0ObFX-P0k-D3c_ep3OnPY4zs,5842
  atlas_init/cli_tf/go_test_run_format.py,sha256=OUd6QPHDeTzbwVuh6MhP-xXgjOOGP9W_sCLJ8KylBTs,1201
  atlas_init/cli_tf/go_test_summary.py,sha256=agr4SITgxchjgOzRpScoTUk-iG38QDLkpnsMtTW9GTY,5382
- atlas_init/cli_tf/mock_tf_log.py,sha256=tefWI5mS5lwOWCUidI_p7LeTaoyYCQl951LFgLYGEy8,3157
- atlas_init/cli_tf/schema.py,sha256=q0RUHb-6ORbQ41u2bZXaWWQLB-xE8Q_O7U1AFq2Gfng,12398
+ atlas_init/cli_tf/mock_tf_log.py,sha256=c0geBR74UkHiyElnV0R_yTuXUgP4F_H53rbGj6D99yc,4958
+ atlas_init/cli_tf/schema.py,sha256=iwvb4wD2Wba0MMu7ooTNAIi1jHbpLiXGPOT51_o_YW8,12431
  atlas_init/cli_tf/schema_go_parser.py,sha256=PiRfFFVnkhltxcGFfOCgH53wwzIEynw2BXmSfaINLL8,8294
  atlas_init/cli_tf/schema_inspection.py,sha256=ujLvGfg3baByND4nRD0drZoI45STxo3VfYvim-PfVOc,1764
  atlas_init/cli_tf/schema_table.py,sha256=1i6urBFNVpyopmLbDkYhL3pceKc9NJBCphfVIbm-K6Y,5229
@@ -50,14 +50,14 @@ atlas_init/cloud/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
  atlas_init/cloud/aws.py,sha256=97kkURWHFAKDIw4704aFmyoeAfQKL11IXMyaQbZUt80,2473
  atlas_init/repos/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  atlas_init/repos/cfn.py,sha256=rjyVVxRhWL65tdAbEHT72UReK2h99Bj6RA4O2pBO-bc,2466
- atlas_init/repos/go_sdk.py,sha256=nh3lw9iw4lDGdHnhC8KK0PZTDMUGKvCHTMTuEtIUKNg,1058
+ atlas_init/repos/go_sdk.py,sha256=1OzM9DjHEAzAAuI9ygoRRuhUK2gqpOhXExXRqhqa0tg,1793
  atlas_init/repos/path.py,sha256=wrT8e01OBoAHj8iMrxqutgqWu-BHPe9-bEWtcZRu238,4187
  atlas_init/settings/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  atlas_init/settings/config.py,sha256=HIytZom8RRvpLGy6u8CpZ83tmFXI6v1tO3iSiuo08kc,6259
  atlas_init/settings/env_vars.py,sha256=q8Hj2LPJIg-PK0fCjrEigoPwTGIEbqjLEZckwgnkG8s,9688
  atlas_init/settings/interactive.py,sha256=Xy1Z5WMAOSaJ-vQI_4xjAbSR92rWQgnffwVoDT27L68,340
- atlas_init/settings/path.py,sha256=FQ6-SVgc_KyxUjU9GWqq5YjViLDEEpojnApk8EwlS6U,2570
- atlas_init/settings/rich_utils.py,sha256=JyVD_RuZTBeZdbnXN7B3WsYGkROhmypgzAym-zD92B4,1703
+ atlas_init/settings/path.py,sha256=KkXysu6-0AuSjsvYGknYGJX1hL2j1RD-Fpf8KsVYpkE,2618
+ atlas_init/settings/rich_utils.py,sha256=5LgJUmc9wyJTsoS6xWKadrT0MoQREDaKvEOCuBLDXRg,1704
  atlas_init/tf/.terraform.lock.hcl,sha256=DIojR50rr4fyLShYiQ-UpRV8z6vuBjwGWdK60FODoyM,6876
  atlas_init/tf/always.tf,sha256=ij6QKI8Lg0140bFZwOyiYK5c-2p5e7AGZ1qKbYyv6Os,1359
  atlas_init/tf/main.tf,sha256=DH0C8y9RDEHnSAZvL-TjE5MQjxj5ALfgk5zVO88cpZw,3960
@@ -86,7 +86,7 @@ atlas_init/tf/modules/vpc_peering/vpc_peering.tf,sha256=hJ3KJdGbLpOQednUpVuiJ0Cq
  atlas_init/tf/modules/vpc_privatelink/atlas-privatelink.tf,sha256=FloaaX1MNDvoMZxBnEopeLKyfIlq6kaX2dmx8WWlXNU,1298
  atlas_init/tf/modules/vpc_privatelink/variables.tf,sha256=gktHCDYD4rz6CEpLg5aiXcFbugw4L5S2Fqc52QYdJyc,255
  atlas_init/tf/modules/vpc_privatelink/versions.tf,sha256=G0u5V_Hvvrkux_tqfOY05pA-GzSp_qILpfx1dZaTGDc,237
- atlas_init-0.3.0.dist-info/METADATA,sha256=Kgl-C8C71uv9NU9s6q1uOfhF2fcUIAmYfU5mdzR1vSE,5650
- atlas_init-0.3.0.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
- atlas_init-0.3.0.dist-info/entry_points.txt,sha256=oSNFIEAS9nUZyyZ8Fc-0F0U5j-NErygy01LpJVSHapQ,57
- atlas_init-0.3.0.dist-info/RECORD,,
+ atlas_init-0.3.1.dist-info/METADATA,sha256=vb-qsxAC6t4yI4ec_x1L-nYaGT2sGSt0LCIRHBPSgl8,5650
+ atlas_init-0.3.1.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
+ atlas_init-0.3.1.dist-info/entry_points.txt,sha256=oSNFIEAS9nUZyyZ8Fc-0F0U5j-NErygy01LpJVSHapQ,57
+ atlas_init-0.3.1.dist-info/RECORD,,