atlas-init 0.2.0__py3-none-any.whl → 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- atlas_init/__init__.py +1 -1
- atlas_init/cli_tf/app.py +1 -1
- atlas_init/cli_tf/debug_logs_test_data.py +1 -8
- atlas_init/cli_tf/mock_tf_log.py +45 -12
- atlas_init/cli_tf/schema.py +117 -48
- atlas_init/repos/go_sdk.py +24 -2
- atlas_init/settings/env_vars.py +13 -0
- atlas_init/settings/path.py +8 -3
- atlas_init/settings/rich_utils.py +2 -2
- atlas_init/typer_app.py +8 -18
- {atlas_init-0.2.0.dist-info → atlas_init-0.3.1.dist-info}/METADATA +14 -30
- {atlas_init-0.2.0.dist-info → atlas_init-0.3.1.dist-info}/RECORD +14 -14
- atlas_init-0.3.1.dist-info/entry_points.txt +2 -0
- atlas_init-0.2.0.dist-info/entry_points.txt +0 -2
- {atlas_init-0.2.0.dist-info → atlas_init-0.3.1.dist-info}/WHEEL +0 -0
atlas_init/__init__.py
CHANGED
atlas_init/cli_tf/app.py
CHANGED
@@ -30,7 +30,6 @@ from atlas_init.cli_tf.go_test_summary import (
 )
 from atlas_init.cli_tf.mock_tf_log import mock_tf_log_cmd
 from atlas_init.cli_tf.schema import (
-    download_admin_api,
     dump_generator_config,
     parse_py_terraform_schema,
     update_provider_code_spec,
@@ -42,6 +41,7 @@ from atlas_init.cli_tf.schema_v2 import (
 )
 from atlas_init.cli_tf.schema_v2_api_parsing import add_api_spec_info
 from atlas_init.cli_tf.schema_v2_sdk import generate_model_go, parse_sdk_model
+from atlas_init.repos.go_sdk import download_admin_api
 from atlas_init.repos.path import Repo, current_repo_path
 from atlas_init.settings.env_vars import init_settings
 from atlas_init.settings.interactive import confirm
atlas_init/cli_tf/debug_logs_test_data.py
CHANGED
@@ -31,14 +31,7 @@ class RequestInfo(Entity):
 
     @property
     def id(self):
-        return "__".join(  # noqa: FLY002
-            [
-                self.method,
-                self.path,
-                self.version,
-                self.text,
-            ]  # need to include text to differentiate between requests
-        )
+        return "__".join((self.method, self.path, self.version, self.text))  # noqa: FLY002
 
 
 class StepRequests(Entity):
atlas_init/cli_tf/mock_tf_log.py
CHANGED
@@ -1,5 +1,6 @@
 import json
 import logging
+import time
 from pathlib import Path
 from typing import Self
 
@@ -15,7 +16,8 @@ from atlas_init.cli_tf.debug_logs import (
     parse_test_name,
 )
 from atlas_init.cli_tf.debug_logs_test_data import create_mock_data, default_is_diff
-from atlas_init.repos.go_sdk import parse_api_spec_paths
+from atlas_init.repos.go_sdk import api_spec_path_transformed, download_admin_api, parse_api_spec_paths
+from atlas_init.settings.path import DEFAULT_DOWNLOADS_DIR
 
 logger = logging.getLogger(__name__)
 
@@ -23,18 +25,18 @@ logger = logging.getLogger(__name__)
 class MockTFLog(Entity):
     log_path: Path
     output_dir: Path
-
+    admin_api_path: Path
     diff_skip_suffixes: list[str] = Field(default_factory=list)
     keep_duplicates: bool = False
 
     @model_validator(mode="after")
     def ensure_paths_exist(self) -> Self:
         if not self.log_path.exists():
-            raise ValueError(f"log_path: {self.log_path} doesn't exist")
-        if not self.
-            raise ValueError(f"
+            raise ValueError(f"log_path: '{self.log_path}' doesn't exist")
+        if not self.admin_api_path.exists():
+            raise ValueError(f"admin_api_path: '{self.admin_api_path}' doesn't exist")
         if not self.output_dir.exists():
-            raise ValueError(f"output_dir: {self.output_dir} doesn't exist")
+            raise ValueError(f"output_dir: '{self.output_dir}' doesn't exist")
         assert self.output_dir.name == "testdata", "output_path should be a directory named testdata"
         return self
 
@@ -46,7 +48,7 @@ def mock_tf_log(req: MockTFLog) -> None:
     log_file_text = req.log_path.read_text()
     test_name = parse_test_name(log_file_text)
     roundtrips = parse_http_requests(log_file_text)
-    api_spec_paths = parse_api_spec_paths(req.
+    api_spec_paths = parse_api_spec_paths(req.admin_api_path)
     data = create_mock_data(
         roundtrips,
         api_spec_paths,
@@ -62,24 +64,55 @@ def mock_tf_log(req: MockTFLog) -> None:
 
 def mock_tf_log_cmd(
     log_path: str = typer.Argument(..., help="the path to the log file generated with TF_LOG_PATH"),
-    sdk_repo_path_str: str = option_sdk_repo_path,
     output_testdir: str = typer.Option(
         "",
         "-o",
         "--output-testdir",
-        help="the path to the output test directory, for example: internal/service/advancedclustertpf/testdata/",
+        help="the path to the output test directory, for example: internal/service/advancedclustertpf/testdata/, uses cwd/testdata by default",
+    ),
+    sdk_repo_path_str: str = option_sdk_repo_path,
+    sdk_branch: str = typer.Option("main", "-b", "--branch", help="the branch for downloading openapi spec"),
+    admin_api_path: str = typer.Option(
+        "", "-a", "--admin-api-path", help="the path to store/download the openapi spec"
     ),
     diff_skip_suffixes: list[str] = typer.Option(..., "-s", "--skip-suffixes", default_factory=list),
    keep_duplicates: bool = typer.Option(False, "-keep", "--keep-duplicates", help="keep duplicate requests"),
 ):
     cwd = Path.cwd()
-
-
+    default_testdir = cwd / "testdata"
+    resolved_admin_api_path = resolve_admin_api_path(sdk_repo_path_str, sdk_branch, admin_api_path)
     event_in = MockTFLog(
         log_path=Path(log_path),
         output_dir=Path(output_testdir) if output_testdir else default_testdir,
-
+        admin_api_path=resolved_admin_api_path,
         diff_skip_suffixes=diff_skip_suffixes,
         keep_duplicates=keep_duplicates,
     )
     mock_tf_log(event_in)
+
+
+def is_cache_up_to_date(cache_path: Path, cache_ttl: int) -> bool:
+    if cache_path.exists():
+        modified_ts = file_utils.file_modified_time(cache_path)
+        if modified_ts > time.time() - cache_ttl:
+            logger.info(f"using cached admin api: {cache_path} downloaded {time.time()-modified_ts:.0f}s ago")
+            return True
+    return False
+
+
+def resolve_admin_api_path(sdk_repo_path_str: str, sdk_branch: str, admin_api_path: str) -> Path:
+    if admin_api_path:
+        resolved_admin_api_path = Path(admin_api_path)
+        if not resolved_admin_api_path.exists():
+            download_admin_api(resolved_admin_api_path, sdk_branch)
+    elif sdk_repo_path_str:
+        sdk_repo_path = Path(sdk_repo_path_str)
+        assert sdk_repo_path.exists(), f"not found sdk_repo_path={sdk_repo_path}"
+        resolved_admin_api_path = api_spec_path_transformed(sdk_repo_path)
+    else:
+        resolved_admin_api_path = DEFAULT_DOWNLOADS_DIR / "atlas-api-transformed.yaml"
+        if not is_cache_up_to_date(resolved_admin_api_path, 3600):
+            download_admin_api(resolved_admin_api_path, sdk_branch)
+    assert resolved_admin_api_path.exists(), f"unable to resolve admin_api_path={resolved_admin_api_path}"
+    assert resolved_admin_api_path.is_file(), f"not a file admin_api_path={resolved_admin_api_path}"
+    return resolved_admin_api_path
atlas_init/cli_tf/schema.py
CHANGED
@@ -5,7 +5,6 @@ from pathlib import Path
 from typing import Annotated, Literal, NamedTuple
 
 import pydantic
-import requests
 from model_lib import Entity, dump, field_names, parse_model
 from zero_3rdparty import dict_nested
 from zero_3rdparty.enum_utils import StrEnum
@@ -66,14 +65,19 @@ class SkipValidators(Entity):
     type: Literal["skip_validators"] = "skip_validators"
 
 
-Extension = Annotated[
+Extension = Annotated[
+    IgnoreNested | RenameAttribute | ChangeAttributeType | SkipValidators,
+    pydantic.Field("type"),
+]
 
 
 class TFResource(Entity):
     model_config = pydantic.ConfigDict(extra="allow")
     name: str
     extensions: list[Extension] = pydantic.Field(default_factory=list)
-    provider_spec_attributes: list[ProviderSpecAttribute] = pydantic.Field(
+    provider_spec_attributes: list[ProviderSpecAttribute] = pydantic.Field(
+        default_factory=list
+    )
 
     def dump_generator_config(self) -> dict:
         names = field_names(self)
@@ -129,7 +133,9 @@ class ProviderCodeSpec(Entity):
             raise ValueError(f"{self.root_name(name, is_datasource)} not found!")
         return root_value
 
-    def schema_attributes(
+    def schema_attributes(
+        self, name: str, is_datasource: bool = False
+    ) -> list:
         root_dict = self.root_dict(name, is_datasource)
         return root_dict["schema"]["attributes"]
 
@@ -139,16 +145,26 @@ class ProviderCodeSpec(Entity):
     def root_name(self, name: str, is_datasource: bool):
         return f"{self._type_name(is_datasource)}.{name}"
 
-    def attribute_names(
-
+    def attribute_names(
+        self, name: str, is_datasource: bool = False
+    ) -> list[str]:
+        return [
+            a["name"] for a in self.schema_attributes(name, is_datasource=is_datasource)
+        ]
 
-    def iter_all_attributes(
+    def iter_all_attributes(
+        self, name: str, is_datasource: bool = False
+    ) -> Iterable[AttributeTuple]:
         for attribute in self.schema_attributes(name=name, is_datasource=is_datasource):
             yield AttributeTuple(attribute["name"], "", attribute)
         yield from self.iter_nested_attributes(name, is_datasource=is_datasource)
 
-    def iter_nested_attributes(
-
+    def iter_nested_attributes(
+        self, name: str, is_datasource: bool = False
+    ) -> Iterable[AttributeTuple]:
+        for i, attribute in enumerate(
+            self.schema_attributes(name=name, is_datasource=is_datasource)
+        ):
            for path, attr_dict in dict_nested.iter_nested_key_values(
                 attribute, type_filter=dict, include_list_indexes=True
             ):
@@ -156,32 +172,53 @@ class ProviderCodeSpec(Entity):
                 if name := attr_dict.get("name", ""):
                     yield AttributeTuple(name, full_path, attr_dict)
 
-    def remove_nested_attribute(
+    def remove_nested_attribute(
+        self, name: str, path: str, is_datasource: bool = False
+    ) -> None:
         root_name = self.root_name(name, is_datasource)
         logger.info(f"will remove attribute from {root_name} with path: {path}")
         root_attributes = self.root_dict(name, is_datasource)
         full_path = f"schema.attributes.{path}"
         popped = dict_nested.pop_nested(root_attributes, full_path, "")
         if popped == "":
-            raise ValueError(
-
+            raise ValueError(
+                f"failed to remove attribute from resource {name} with path: {full_path}"
+            )
+        assert isinstance(
+            popped, dict
+        ), f"expected removed attribute to be a dict, got: {popped}"
         logger.info(f"removal ok, attribute_name: '{root_name}.{popped.get('name')}'")
 
-    def read_attribute(
+    def read_attribute(
+        self, name: str, path: str, *, is_datasource: bool = False
+    ) -> dict:
         if "." not in path:
-            attribute_dict = next(
+            attribute_dict = next(
+                (
+                    a
+                    for a in self.schema_attributes(name, is_datasource)
+                    if a["name"] == path
+                ),
+                None,
+            )
         else:
             root_dict = self.root_dict(name, is_datasource)
-            attribute_dict = dict_nested.read_nested_or_none(
+            attribute_dict = dict_nested.read_nested_or_none(
+                root_dict, f"schema.attributes.{path}"
+            )
         if attribute_dict is None:
-            raise ValueError(
+            raise ValueError(
+                f"attribute {path} not found in {self.root_name(name, is_datasource)}"
+            )
         assert isinstance(
             attribute_dict, dict
         ), f"expected attribute to be a dict, got: {attribute_dict} @ {path} for resource={name}"
         return attribute_dict
 
 
-def update_provider_code_spec(
+def update_provider_code_spec(
+    schema: PyTerraformSchema, provider_code_spec_path: Path
+) -> str:
     spec = parse_model(provider_code_spec_path, t=ProviderCodeSpec)
     for resource in schema.resources:
         resource_name = resource.name
@@ -192,41 +229,67 @@ def update_provider_code_spec(schema: PyTerraformSchema, provider_code_spec_path
     for data_source in schema.data_sources:
         data_source_name = data_source.name
         if extra_spec_attributes := data_source.provider_spec_attributes:
-            add_explicit_attributes(
+            add_explicit_attributes(
+                spec, data_source_name, extra_spec_attributes, is_datasource=True
+            )
         for extension in data_source.extensions:
             apply_extension(extension, spec, data_source_name, is_datasource=True)
     return dump(spec, "json")
 
 
 def add_explicit_attributes(
-    spec: ProviderCodeSpec,
+    spec: ProviderCodeSpec,
+    name: str,
+    extra_spec_attributes: list[ProviderSpecAttribute],
+    *,
+    is_datasource=False,
 ):
     resource_attributes = spec.schema_attributes(name, is_datasource=is_datasource)
     existing_names = spec.attribute_names(name, is_datasource=is_datasource)
     new_names = [extra.name for extra in extra_spec_attributes]
     if both := set(existing_names) & set(new_names):
         raise ValueError(f"resource: {name}, has already: {both} attributes")
-    resource_attributes.extend(
+    resource_attributes.extend(
+        extra.dump_provider_code_spec() for extra in extra_spec_attributes
+    )
 
 
 @singledispatch
-def apply_extension(
+def apply_extension(
+    extension: object,
+    spec: ProviderCodeSpec,
+    resource_name: str,
+    *,
+    is_datasource: bool = False,
+):
     raise NotImplementedError(f"unsupported extension: {extension!r}")
 
 
 @apply_extension.register  # type: ignore
-def _ignore_nested(
+def _ignore_nested(
+    extension: IgnoreNested,
+    spec: ProviderCodeSpec,
+    resource_name: str,
+    *,
+    is_datasource: bool = False,
+):
     if extension.use_wildcard:
         name_to_remove = extension.path.removeprefix("*.")
-        assert
+        assert (
+            "*" not in name_to_remove
+        ), f"only prefix *. is allowed for wildcard in path {extension.path}"
         found_paths = [
             path
-            for name, path, attribute_dict in spec.iter_nested_attributes(
+            for name, path, attribute_dict in spec.iter_nested_attributes(
+                resource_name, is_datasource=is_datasource
+            )
             if name == name_to_remove
         ]
         while found_paths:
             next_to_remove = found_paths.pop()
-            spec.remove_nested_attribute(
+            spec.remove_nested_attribute(
+                resource_name, next_to_remove, is_datasource=is_datasource
+            )
             found_paths = [
                 path
                 for name, path, attribute_dict in spec.iter_nested_attributes(
@@ -241,9 +304,15 @@ def _ignore_nested(extension: IgnoreNested, spec: ProviderCodeSpec, resource_nam
 
 @apply_extension.register  # type: ignore
 def _rename_attribute(
-    extension: RenameAttribute,
+    extension: RenameAttribute,
+    spec: ProviderCodeSpec,
+    resource_name: str,
+    *,
+    is_datasource: bool = False,
 ):
-    for attribute_dict in spec.schema_attributes(
+    for attribute_dict in spec.schema_attributes(
+        resource_name, is_datasource=is_datasource
+    ):
         if attribute_dict.get("name") == extension.from_name:
             logger.info(
                 f"renaming attribute for {spec.root_name(resource_name, is_datasource)}: {extension.from_name} -> {extension.to_name}"
@@ -253,9 +322,15 @@ def _rename_attribute(
 
 @apply_extension.register  # type: ignore
 def _change_attribute_type(
-    extension: ChangeAttributeType,
+    extension: ChangeAttributeType,
+    spec: ProviderCodeSpec,
+    resource_name: str,
+    *,
+    is_datasource: bool = False,
 ):
-    attribute_dict = spec.read_attribute(
+    attribute_dict = spec.read_attribute(
+        resource_name, extension.path, is_datasource=is_datasource
+    )
     old_value = extension.read_value(attribute_dict)
     if old_value == extension.new_value:
         logger.info(
@@ -270,31 +345,25 @@ def _change_attribute_type(
 
 
 @apply_extension.register  # type: ignore
-def _skip_validators(
-
+def _skip_validators(
+    _: SkipValidators,
+    spec: ProviderCodeSpec,
+    resource_name: str,
+    *,
+    is_datasource: bool = False,
+):
+    for attr_tuple in spec.iter_all_attributes(
+        resource_name, is_datasource=is_datasource
+    ):
         attribute_dict = attr_tuple.attribute_dict
         paths_to_pop = [
             f"{path}.validators"
-            for path, nested_dict in dict_nested.iter_nested_key_values(
+            for path, nested_dict in dict_nested.iter_nested_key_values(
+                attribute_dict, type_filter=dict
+            )
             if "validators" in nested_dict
         ]
         if paths_to_pop:
             logger.info(f"popping validators from '{attr_tuple.attribute_path}'")
             for path in paths_to_pop:
                 dict_nested.pop_nested(attribute_dict, path)
-
-
-# reusing url from terraform-provider-mongodbatlas/scripts/schema-scaffold.sh
-ADMIN_API_URL = "https://raw.githubusercontent.com/mongodb/atlas-sdk-go/main/openapi/atlas-api-transformed.yaml"
-
-
-def admin_api_url(branch: str) -> str:
-    return ADMIN_API_URL.replace("/main/", f"/{branch}/")
-
-
-def download_admin_api(dest: Path, branch: str = "main") -> None:
-    url = admin_api_url(branch)
-    logger.info(f"downloading admin api to {dest} from {url}")
-    response = requests.get(url, timeout=10)
-    response.raise_for_status()
-    dest.write_bytes(response.content)
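Aside from dropping download_admin_api (moved to atlas_init/repos/go_sdk.py), most of the schema.py changes above are re-wrapped signatures; the extension handlers keep the functools.singledispatch pattern, with apply_extension dispatching on the concrete extension type. A simplified, self-contained sketch of that pattern (the classes and spec shape here are stand-ins, not the real module):

```python
# Simplified sketch of the singledispatch-based extension handling used by
# apply_extension in atlas_init/cli_tf/schema.py; classes and spec are stand-ins.
from dataclasses import dataclass
from functools import singledispatch


@dataclass
class RenameAttribute:
    from_name: str
    to_name: str


@dataclass
class SkipValidators:
    pass


@singledispatch
def apply_extension(extension: object, spec: dict) -> None:
    # Fallback for unregistered extension types, mirroring the NotImplementedError above.
    raise NotImplementedError(f"unsupported extension: {extension!r}")


@apply_extension.register
def _rename_attribute(extension: RenameAttribute, spec: dict) -> None:
    for attribute in spec["attributes"]:
        if attribute["name"] == extension.from_name:
            attribute["name"] = extension.to_name


@apply_extension.register
def _skip_validators(_: SkipValidators, spec: dict) -> None:
    for attribute in spec["attributes"]:
        attribute.pop("validators", None)


spec = {"attributes": [{"name": "old", "validators": ["v1"]}]}
apply_extension(RenameAttribute("old", "new"), spec)
apply_extension(SkipValidators(), spec)
print(spec)  # {'attributes': [{'name': 'new'}]}
```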
atlas_init/repos/go_sdk.py
CHANGED
@@ -1,9 +1,11 @@
 from collections import defaultdict
 from pathlib import Path
 
+import requests
 from model_lib import parse_model
 
 from atlas_init.cli_tf.debug_logs_test_data import ApiSpecPath
+from atlas_init.cli_tf.schema import logger
 from atlas_init.cli_tf.schema_v2_api_parsing import OpenapiSchema
 
 
@@ -15,11 +17,31 @@ def go_sdk_breaking_changes(repo_path: Path, go_sdk_rel_path: str = "../atlas-sd
     return breaking_changes_dir
 
 
-def
-
+def api_spec_path_transformed(sdk_repo_path: Path) -> Path:
+    return sdk_repo_path / "openapi/atlas-api-transformed.yaml"
+
+
+def parse_api_spec_paths(api_spec_path: Path) -> dict[str, list[ApiSpecPath]]:
     model = parse_model(api_spec_path, t=OpenapiSchema)
     paths: dict[str, list[ApiSpecPath]] = defaultdict(list)
     for path, path_dict in model.paths.items():
         for method in path_dict:
             paths[method.upper()].append(ApiSpecPath(path=path))
     return paths
+
+
+# reusing url from terraform-provider-mongodbatlas/scripts/schema-scaffold.sh
+ADMIN_API_URL = "https://raw.githubusercontent.com/mongodb/atlas-sdk-go/main/openapi/atlas-api-transformed.yaml"
+
+
+def admin_api_url(branch: str) -> str:
+    return ADMIN_API_URL.replace("/main/", f"/{branch}/")
+
+
+def download_admin_api(dest: Path, branch: str = "main") -> None:
+    dest.parent.mkdir(parents=True, exist_ok=True)
+    url = admin_api_url(branch)
+    logger.info(f"downloading admin api to {dest} from {url}")
+    response = requests.get(url, timeout=10)
+    response.raise_for_status()
+    dest.write_bytes(response.content)
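download_admin_api now lives in atlas_init/repos/go_sdk.py, creates the destination's parent directory, and substitutes the branch into the raw GitHub URL before fetching with requests. A small usage sketch, assuming the definitions shown above (the destination path and branch name are illustrative):

```python
# Hypothetical usage of the relocated helpers, assuming the 0.3.1 definitions above.
from pathlib import Path

from atlas_init.repos.go_sdk import admin_api_url, download_admin_api, parse_api_spec_paths

dest = Path("downloads/atlas-api-transformed.yaml")  # hypothetical destination
print(admin_api_url("some-branch"))  # raw URL with /main/ replaced by /some-branch/
download_admin_api(dest, branch="main")  # creates parent dirs, then fetches the spec

paths_by_method = parse_api_spec_paths(dest)
print(sorted(paths_by_method))  # HTTP methods found in the spec, e.g. ['DELETE', 'GET', ...]
```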
atlas_init/settings/env_vars.py
CHANGED
@@ -6,6 +6,7 @@ from functools import cached_property
 from pathlib import Path
 from typing import Any, NamedTuple
 
+import typer
 from model_lib import field_names, parse_payload
 from pydantic import field_validator, model_validator
 from pydantic_settings import BaseSettings, SettingsConfigDict
@@ -271,4 +272,16 @@ def active_suites(settings: AtlasInitSettings) -> list[TestSuite]:
 
 
 def init_settings() -> AtlasInitSettings:
+    missing_env_vars, ambiguous_env_vars = AtlasInitSettings.check_env_vars(
+        os.getenv("ATLAS_INIT_PROFILE", DEFAULT_PROFILE),
+        required_extra_fields=["project_name"],
+    )
+    if missing_env_vars:
+        typer.echo(f"missing env_vars: {missing_env_vars}")
+    if ambiguous_env_vars:
+        typer.echo(
+            f"amiguous env_vars: {ambiguous_env_vars} (specified both in cli & in .env-manual file with different values)"
+        )
+    if missing_env_vars or ambiguous_env_vars:
+        raise typer.Exit(1)
     return AtlasInitSettings.safe_settings()
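init_settings now runs the missing/ambiguous env-var check itself and aborts with typer.Exit(1) instead of leaving that to the Typer callback. A minimal sketch of the same fail-fast pattern; the check function below is a stand-in for AtlasInitSettings.check_env_vars:

```python
# Illustrative fail-fast pattern matching the init_settings change; the check
# function is a stand-in, not the real AtlasInitSettings.check_env_vars.
import typer


def check_env_vars(profile: str, required_extra_fields: list[str]) -> tuple[list[str], list[str]]:
    # Stand-in: pretend one required variable is missing.
    return ["ATLAS_INIT_PROJECT_NAME"], []


def init_settings_sketch(profile: str = "default") -> None:
    missing_env_vars, ambiguous_env_vars = check_env_vars(profile, required_extra_fields=["project_name"])
    if missing_env_vars:
        typer.echo(f"missing env_vars: {missing_env_vars}")
    if ambiguous_env_vars:
        typer.echo(f"ambiguous env_vars: {ambiguous_env_vars}")
    if missing_env_vars or ambiguous_env_vars:
        raise typer.Exit(1)  # non-zero exit code propagates through the Typer app


if __name__ == "__main__":
    init_settings_sketch()
```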
atlas_init/settings/path.py
CHANGED
@@ -1,12 +1,15 @@
+import logging
 import os
 from collections.abc import Callable
 from pathlib import Path
 
 import dotenv
+from appdirs import user_data_dir
 from zero_3rdparty.file_utils import ensure_parents_write_text
 
 from atlas_init import running_in_repo
 
+logger = logging.getLogger(__name__)
 """WARNING these variables should only be used through the AtlasInitSettings, not directly"""
 if running_in_repo():
     ROOT_PATH = Path(__file__).parent.parent.parent.parent  # atlas_init REPO_PATH
@@ -14,9 +17,10 @@ if running_in_repo():
 else:
     ROOT_PATH = Path(__file__).parent.parent  # site package install directory
 _default_profiles_path = os.environ.get("ATLAS_INIT_PROFILES_PATH")
-
-_default_profiles_path
-
+if not _default_profiles_path:
+    _default_profiles_path = Path(user_data_dir("atlas_init")) / "profiles"
+    warning_msg = f"os.environ['ATLAS_INIT_PROFILES_PATH'] is not set using default: {_default_profiles_path}"
+    logger.warning(warning_msg)
 DEFAULT_PROFILES_PATH = Path(_default_profiles_path)
 DEFAULT_PROFILES_PATH.mkdir(exist_ok=True, parents=True)
 DEFAULT_TF_PATH = ROOT_PATH / "tf"
@@ -24,6 +28,7 @@ DEFAULT_CONFIG_PATH = ROOT_PATH / "atlas_init.yaml"
 DEFAULT_SCHEMA_CONFIG_PATH = ROOT_PATH / "terraform.yaml"
 DEFAULT_GITHUB_CI_RUN_LOGS = ROOT_PATH / "github_ci_run_logs"
 DEFAULT_GITHUB_SUMMARY_DIR = ROOT_PATH / "github_ci_summary"
+DEFAULT_DOWNLOADS_DIR = ROOT_PATH / "downloads"
 
 
 def load_dotenv(env_path: Path) -> dict[str, str]:
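When ATLAS_INIT_PROFILES_PATH is unset, the profiles directory now falls back to a per-user data directory via appdirs (pinned as appdirs==1.4.4 in the new METADATA) and logs a warning. A short sketch of that fallback logic on its own:

```python
# Sketch of the new default-profiles-path fallback; mirrors the logic above
# without the surrounding module-level constants.
import logging
import os
from pathlib import Path

from appdirs import user_data_dir

logger = logging.getLogger(__name__)


def default_profiles_path() -> Path:
    configured = os.environ.get("ATLAS_INIT_PROFILES_PATH")
    if configured:
        return Path(configured)
    fallback = Path(user_data_dir("atlas_init")) / "profiles"
    logger.warning("os.environ['ATLAS_INIT_PROFILES_PATH'] is not set, using default: %s", fallback)
    return fallback


profiles = default_profiles_path()
profiles.mkdir(exist_ok=True, parents=True)  # same mkdir call as DEFAULT_PROFILES_PATH above
```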
atlas_init/settings/rich_utils.py
CHANGED
@@ -35,7 +35,7 @@ def hide_secrets(handler: logging.Handler, secrets_dict: dict[str, str]) -> None
         if not isinstance(value, str):
             continue
         key_lower = key.lower()
-        if key_lower in {"true", "false"}:
+        if key_lower in {"true", "false"} or value.lower() in {"true", "false"}:
             continue
         if any(safe in key_lower for safe in safe_keys):
             continue
@@ -46,7 +46,7 @@ def hide_secrets(handler: logging.Handler, secrets_dict: dict[str, str]) -> None
 
 def configure_logging(log_level: str = "INFO") -> logging.Handler:
     _LogLevel(log_level=log_level)  # type: ignore
-    handler = RichHandler(rich_tracebacks=
+    handler = RichHandler(rich_tracebacks=False)
     logging.basicConfig(
         level=logging.getLevelName(log_level),
         format="%(message)s",
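The hide_secrets tweak skips redaction when either the key or the value is a boolean-like string, so env vars holding plain true/false values are no longer masked. A reduced sketch of just that skip/redact decision; the safe_keys tuple below is an illustrative placeholder, not the real list:

```python
# Reduced sketch of the value-filtering decision in hide_secrets; safe_keys is
# an illustrative subset, not the actual list used by atlas-init.
def should_redact(key: str, value: str, safe_keys: tuple[str, ...] = ("profile", "region")) -> bool:
    key_lower = key.lower()
    if key_lower in {"true", "false"} or value.lower() in {"true", "false"}:
        return False  # boolean-like flags are not secrets (the new value check)
    if any(safe in key_lower for safe in safe_keys):
        return False
    return True


print(should_redact("MY_FEATURE_FLAG", "true"))              # False, skipped by the new value check
print(should_redact("MONGODB_ATLAS_PRIVATE_KEY", "abc123"))  # True, still redacted
```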
atlas_init/typer_app.py
CHANGED
@@ -10,7 +10,6 @@ from atlas_init.cli_cfn.app import app as app_cfn
 from atlas_init.cli_tf.app import app as app_tf
 from atlas_init.settings.env_vars import (
     DEFAULT_PROFILE,
-    AtlasInitSettings,
     as_env_var_name,
     env_var_names,
 )
@@ -52,25 +51,16 @@ def main(
     ),
     show_secrets: bool = typer.Option(False, help="show secrets in the logs"),
 ):
-
+    if profile != DEFAULT_PROFILE:
+        os.environ[as_env_var_name("profile")] = profile
     if project_name != "":
-
+        os.environ[as_env_var_name("project_name")] = project_name
     log_handler = configure_logging(log_level)
     logger.info(f"running in repo: {running_in_repo()} python location:{sys.executable}")
-    missing_env_vars, ambiguous_env_vars = AtlasInitSettings.check_env_vars(
-        profile,
-        required_extra_fields=["project_name"],
-        explicit_env_vars=explicit_env_vars,
-    )
-    if missing_env_vars:
-        typer.echo(f"missing env_vars: {missing_env_vars}")
-    if ambiguous_env_vars:
-        typer.echo(
-            f"amiguous env_vars: {missing_env_vars} (specified both in cli & in .env-manual file with different values)"
-        )
-    if missing_env_vars or ambiguous_env_vars:
-        raise typer.Exit(1)
     if not show_secrets:
         hide_secrets(log_handler, {**os.environ})
-    command
-
+    logger.info(f"in the app callback, log-level: {log_level}, command: {format_cmd(ctx)}")
+
+
+def format_cmd(ctx: typer.Context) -> str:
+    return f"'{ctx.info_name} {ctx.invoked_subcommand}'"
{atlas_init-0.2.0.dist-info → atlas_init-0.3.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: atlas-init
-Version: 0.2.0
+Version: 0.3.1
 Project-URL: Documentation, https://github.com/EspenAlbert/atlas-init#readme
 Project-URL: Issues, https://github.com/EspenAlbert/atlas-init/issues
 Project-URL: Source, https://github.com/EspenAlbert/atlas-init
@@ -10,6 +10,7 @@ Classifier: Development Status :: 4 - Beta
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3.12
 Requires-Python: >=3.12
+Requires-Dist: appdirs==1.4.4
 Requires-Dist: boto3==1.34.55
 Requires-Dist: gitpython==3.1.42
 Requires-Dist: humanize==4.9.0
@@ -46,7 +47,15 @@ Currently, used with
 - <https://github.com/mongodb/mongodb/mongodbatlas-cloudformation-resources>
 - see [atlas_init#repo_aliases](atlas_init.yaml) for an up-to-date list
 
-##
+## Quickstart
+Recommended as a tool with [uvx](https://docs.astral.sh/uv/guides/tools/#running-tools)
+```shell
+uvx atlas-init # help info
+uvx atlas-init tf # help for tf specific commands
+uvx atlas-init cfn # help for cfn specific commands
+```
+
+## Profile Configuration
 1. [Create an organization](https://cloud-dev.mongodb.com/v2#/preferences/organizations)
 2. Go to `access_manager` and click `Create Api Key`: <https://cloud-dev.mongodb.com/v2#/org/{ORG_ID_IN_URL_FROM_1}/access/apiKeys>
    - Tick all permissions
@@ -72,17 +81,7 @@ ATLAS_INIT_CFN_PROFILE=YOUR_NAME
 ATLAS_INIT_CFN_REGION=eu-south-2 # find a region with few other profiles
 ```
 
-##
-
-### 1. `pip install` normal user
-
-```shell
-source .venv/bin/activate # ensure you are in your preferred python env
-(uv) pip install atlas-init
-# use export ATLAS_INIT_PROFILES_PATH=/somewhere/to/store/your/env-vars/and/tf/state
-```
-
-### 2. Local development, run from github repo
+## Local development
 
 ```shell
 git clone https://github.com/EspenAlbert/atlas-init
@@ -117,27 +116,11 @@ echo "alias atlas_init='export PYTHONPATH=$pypath && \"$VENV_PYTHON\" -m atlas_i
 atlas_init # should show how to use the cli
 ```
 
-###
-- will be used by the CI in other repos
+### CI Installation Tests (`pip install` local wheel)
 - [atlasci_local_install](atlasci_local_install.sh)
 - creates a local `.venv` builds the wheel from this repo and installs it
 - use `export ATLAS_INIT_PROFILES_PATH=/somewhere/to/store/your/env-vars/and/tf/state`
 
-## Commands
-
-```shell
-cd terraform/cfn/{YOUR_RESOURCE_PATH}
-# if you used `pip install` replace `atlas_init` with `atlasci`
-atlas_init # help info
-atlas_init # initialize the terraform providers
-atlas_init tf # help for tf specific commands
-atlas_init cfn # help for cfn specific commands
-atals_init apply # `terraform apply`
-# use cmd+v if you plan on using other tools, e.g., cfn make commands
-# see appendix on how to configure .vscode test env-vars
-atals_init destroy # `terraform destroy`
-```
-
 
 
 ## Appendix
@@ -177,3 +160,4 @@ terraform providers lock \
 -platform=darwin_arm64 \
 -platform=linux_arm64
 # -platform=windows_amd64 \
+```
{atlas_init-0.2.0.dist-info → atlas_init-0.3.1.dist-info}/RECORD
CHANGED
@@ -1,11 +1,11 @@
-atlas_init/__init__.py,sha256=
+atlas_init/__init__.py,sha256=Mja1wzBho0kdXUYEG2JEXu72cDv6N_Uf0xLaUN7aQsA,372
 atlas_init/__main__.py,sha256=dY1dWWvwxRZMmnOFla6RSfti-hMeLeKdoXP7SVYqMUc,52
 atlas_init/atlas_init.yaml,sha256=GMyJVhKKRc7WzEu7fafmWgeTsDaExTLv7QvXOmE_Brg,1907
 atlas_init/cli.py,sha256=IiOEC_Jry6vrSDH3_OvsU50F-_3iVIS4tV6-R7659fY,9642
 atlas_init/cli_args.py,sha256=tiwUYAE0JBSl9lHV6VJ41vFCU90ChBZ4mKvi-YoF_HY,541
 atlas_init/humps.py,sha256=l0ZXXuI34wwd9TskXhCjULfGbUyK-qNmiyC6_2ow6kU,7339
 atlas_init/terraform.yaml,sha256=qPrnbzBEP-JAQVkYadHsggRnDmshrOJyiv0ckyZCxwY,2734
-atlas_init/typer_app.py,sha256=
+atlas_init/typer_app.py,sha256=zbvYUlZrF4TZEPEwpa33fVSLVKcxRamuXCgF1FCUhCU,2068
 atlas_init/cli_cfn/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 atlas_init/cli_cfn/app.py,sha256=iMukpUDgsAgZh_U_APGZB3gmewOo-3vtFK0byJuDz9w,6649
 atlas_init/cli_cfn/aws.py,sha256=GbohR7uczSGwQjLEYozCmlxbeIHo1uwQIJMwsh7kF7M,17894
@@ -21,16 +21,16 @@ atlas_init/cli_helper/tf_runner.py,sha256=OYdC-Y6i-xRh8_LCudKdtP7CEYEO9e67nVhhol
 atlas_init/cli_root/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 atlas_init/cli_root/trigger.py,sha256=oEgqb_l25tyYgUaFHEuChcOCJA7k3mnRa4D-Myz-Igs,5789
 atlas_init/cli_tf/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-atlas_init/cli_tf/app.py,sha256
+atlas_init/cli_tf/app.py,sha256=0Y5c-Pc9ibOz6kXvFlL-yhH_fx1nHLgBgK9OAVqjX9s,11390
 atlas_init/cli_tf/changelog.py,sha256=biWYKf1pZvXZ-jEgcZ5q9sY7nTGrL2PuI0h9mCILf_g,3181
 atlas_init/cli_tf/debug_logs.py,sha256=lnB5BpcEooVzGd2RLxbwAVQs0ZYXzRKy5sHa0hftHI8,8799
-atlas_init/cli_tf/debug_logs_test_data.py,sha256=
+atlas_init/cli_tf/debug_logs_test_data.py,sha256=bv4gqhHSNEnQqIijrcjvEUA0M6S-aeo73V4mji0pKCM,9435
 atlas_init/cli_tf/github_logs.py,sha256=VD7qhlXNuG21eTuJ5VI7rsflp5WHSodfngkRVgQlumw,8114
 atlas_init/cli_tf/go_test_run.py,sha256=ZoQSvIasmWauFxZJrWL0ObFX-P0k-D3c_ep3OnPY4zs,5842
 atlas_init/cli_tf/go_test_run_format.py,sha256=OUd6QPHDeTzbwVuh6MhP-xXgjOOGP9W_sCLJ8KylBTs,1201
 atlas_init/cli_tf/go_test_summary.py,sha256=agr4SITgxchjgOzRpScoTUk-iG38QDLkpnsMtTW9GTY,5382
-atlas_init/cli_tf/mock_tf_log.py,sha256=
-atlas_init/cli_tf/schema.py,sha256=
+atlas_init/cli_tf/mock_tf_log.py,sha256=c0geBR74UkHiyElnV0R_yTuXUgP4F_H53rbGj6D99yc,4958
+atlas_init/cli_tf/schema.py,sha256=iwvb4wD2Wba0MMu7ooTNAIi1jHbpLiXGPOT51_o_YW8,12431
 atlas_init/cli_tf/schema_go_parser.py,sha256=PiRfFFVnkhltxcGFfOCgH53wwzIEynw2BXmSfaINLL8,8294
 atlas_init/cli_tf/schema_inspection.py,sha256=ujLvGfg3baByND4nRD0drZoI45STxo3VfYvim-PfVOc,1764
 atlas_init/cli_tf/schema_table.py,sha256=1i6urBFNVpyopmLbDkYhL3pceKc9NJBCphfVIbm-K6Y,5229
@@ -50,14 +50,14 @@ atlas_init/cloud/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
 atlas_init/cloud/aws.py,sha256=97kkURWHFAKDIw4704aFmyoeAfQKL11IXMyaQbZUt80,2473
 atlas_init/repos/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 atlas_init/repos/cfn.py,sha256=rjyVVxRhWL65tdAbEHT72UReK2h99Bj6RA4O2pBO-bc,2466
-atlas_init/repos/go_sdk.py,sha256=
+atlas_init/repos/go_sdk.py,sha256=1OzM9DjHEAzAAuI9ygoRRuhUK2gqpOhXExXRqhqa0tg,1793
 atlas_init/repos/path.py,sha256=wrT8e01OBoAHj8iMrxqutgqWu-BHPe9-bEWtcZRu238,4187
 atlas_init/settings/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 atlas_init/settings/config.py,sha256=HIytZom8RRvpLGy6u8CpZ83tmFXI6v1tO3iSiuo08kc,6259
-atlas_init/settings/env_vars.py,sha256=
+atlas_init/settings/env_vars.py,sha256=q8Hj2LPJIg-PK0fCjrEigoPwTGIEbqjLEZckwgnkG8s,9688
 atlas_init/settings/interactive.py,sha256=Xy1Z5WMAOSaJ-vQI_4xjAbSR92rWQgnffwVoDT27L68,340
-atlas_init/settings/path.py,sha256=
-atlas_init/settings/rich_utils.py,sha256=
+atlas_init/settings/path.py,sha256=KkXysu6-0AuSjsvYGknYGJX1hL2j1RD-Fpf8KsVYpkE,2618
+atlas_init/settings/rich_utils.py,sha256=5LgJUmc9wyJTsoS6xWKadrT0MoQREDaKvEOCuBLDXRg,1704
 atlas_init/tf/.terraform.lock.hcl,sha256=DIojR50rr4fyLShYiQ-UpRV8z6vuBjwGWdK60FODoyM,6876
 atlas_init/tf/always.tf,sha256=ij6QKI8Lg0140bFZwOyiYK5c-2p5e7AGZ1qKbYyv6Os,1359
 atlas_init/tf/main.tf,sha256=DH0C8y9RDEHnSAZvL-TjE5MQjxj5ALfgk5zVO88cpZw,3960
@@ -86,7 +86,7 @@ atlas_init/tf/modules/vpc_peering/vpc_peering.tf,sha256=hJ3KJdGbLpOQednUpVuiJ0Cq
 atlas_init/tf/modules/vpc_privatelink/atlas-privatelink.tf,sha256=FloaaX1MNDvoMZxBnEopeLKyfIlq6kaX2dmx8WWlXNU,1298
 atlas_init/tf/modules/vpc_privatelink/variables.tf,sha256=gktHCDYD4rz6CEpLg5aiXcFbugw4L5S2Fqc52QYdJyc,255
 atlas_init/tf/modules/vpc_privatelink/versions.tf,sha256=G0u5V_Hvvrkux_tqfOY05pA-GzSp_qILpfx1dZaTGDc,237
-atlas_init-0.
-atlas_init-0.
-atlas_init-0.
-atlas_init-0.
+atlas_init-0.3.1.dist-info/METADATA,sha256=vb-qsxAC6t4yI4ec_x1L-nYaGT2sGSt0LCIRHBPSgl8,5650
+atlas_init-0.3.1.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
+atlas_init-0.3.1.dist-info/entry_points.txt,sha256=oSNFIEAS9nUZyyZ8Fc-0F0U5j-NErygy01LpJVSHapQ,57
+atlas_init-0.3.1.dist-info/RECORD,,
{atlas_init-0.2.0.dist-info → atlas_init-0.3.1.dist-info}/WHEEL
File without changes