atlas-init 0.4.5__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. atlas_init/__init__.py +1 -1
  2. atlas_init/cli.py +2 -0
  3. atlas_init/cli_cfn/cfn_parameter_finder.py +59 -51
  4. atlas_init/cli_cfn/example.py +8 -16
  5. atlas_init/cli_helper/go.py +6 -10
  6. atlas_init/cli_root/mms_released.py +46 -0
  7. atlas_init/cli_tf/app.py +3 -84
  8. atlas_init/cli_tf/ci_tests.py +493 -0
  9. atlas_init/cli_tf/codegen/__init__.py +0 -0
  10. atlas_init/cli_tf/codegen/models.py +97 -0
  11. atlas_init/cli_tf/codegen/openapi_minimal.py +74 -0
  12. atlas_init/cli_tf/github_logs.py +7 -94
  13. atlas_init/cli_tf/go_test_run.py +385 -132
  14. atlas_init/cli_tf/go_test_summary.py +331 -4
  15. atlas_init/cli_tf/go_test_tf_error.py +380 -0
  16. atlas_init/cli_tf/hcl/modifier.py +14 -12
  17. atlas_init/cli_tf/hcl/modifier2.py +87 -0
  18. atlas_init/cli_tf/mock_tf_log.py +1 -1
  19. atlas_init/cli_tf/{schema_v2_api_parsing.py → openapi.py} +95 -17
  20. atlas_init/cli_tf/schema_v2.py +43 -1
  21. atlas_init/crud/__init__.py +0 -0
  22. atlas_init/crud/mongo_client.py +115 -0
  23. atlas_init/crud/mongo_dao.py +296 -0
  24. atlas_init/crud/mongo_utils.py +239 -0
  25. atlas_init/repos/go_sdk.py +12 -3
  26. atlas_init/repos/path.py +110 -7
  27. atlas_init/settings/config.py +3 -6
  28. atlas_init/settings/env_vars.py +5 -1
  29. atlas_init/settings/interactive2.py +134 -0
  30. atlas_init/tf/.terraform.lock.hcl +59 -59
  31. atlas_init/tf/always.tf +5 -5
  32. atlas_init/tf/main.tf +3 -3
  33. atlas_init/tf/modules/aws_kms/aws_kms.tf +1 -1
  34. atlas_init/tf/modules/aws_s3/provider.tf +2 -1
  35. atlas_init/tf/modules/aws_vpc/provider.tf +2 -1
  36. atlas_init/tf/modules/cfn/cfn.tf +0 -8
  37. atlas_init/tf/modules/cfn/kms.tf +5 -5
  38. atlas_init/tf/modules/cfn/provider.tf +7 -0
  39. atlas_init/tf/modules/cfn/variables.tf +1 -1
  40. atlas_init/tf/modules/cloud_provider/cloud_provider.tf +1 -1
  41. atlas_init/tf/modules/cloud_provider/provider.tf +2 -1
  42. atlas_init/tf/modules/cluster/cluster.tf +31 -31
  43. atlas_init/tf/modules/cluster/provider.tf +2 -1
  44. atlas_init/tf/modules/encryption_at_rest/provider.tf +2 -1
  45. atlas_init/tf/modules/federated_vars/federated_vars.tf +1 -1
  46. atlas_init/tf/modules/federated_vars/provider.tf +2 -1
  47. atlas_init/tf/modules/project_extra/project_extra.tf +1 -10
  48. atlas_init/tf/modules/project_extra/provider.tf +8 -0
  49. atlas_init/tf/modules/stream_instance/provider.tf +8 -0
  50. atlas_init/tf/modules/stream_instance/stream_instance.tf +0 -9
  51. atlas_init/tf/modules/vpc_peering/provider.tf +10 -0
  52. atlas_init/tf/modules/vpc_peering/vpc_peering.tf +0 -10
  53. atlas_init/tf/modules/vpc_privatelink/versions.tf +2 -1
  54. atlas_init/tf/outputs.tf +1 -0
  55. atlas_init/tf/providers.tf +1 -1
  56. atlas_init/tf/variables.tf +7 -7
  57. atlas_init/typer_app.py +4 -8
  58. {atlas_init-0.4.5.dist-info → atlas_init-0.6.0.dist-info}/METADATA +7 -4
  59. atlas_init-0.6.0.dist-info/RECORD +121 -0
  60. atlas_init-0.4.5.dist-info/RECORD +0 -105
  61. {atlas_init-0.4.5.dist-info → atlas_init-0.6.0.dist-info}/WHEEL +0 -0
  62. {atlas_init-0.4.5.dist-info → atlas_init-0.6.0.dist-info}/entry_points.txt +0 -0
  63. {atlas_init-0.4.5.dist-info → atlas_init-0.6.0.dist-info}/licenses/LICENSE +0 -0
atlas_init/__init__.py CHANGED
@@ -1,6 +1,6 @@
  from pathlib import Path

- VERSION = "0.4.5"
+ VERSION = "0.6.0"


  def running_in_repo() -> bool:
atlas_init/cli.py CHANGED
@@ -5,6 +5,7 @@ from pydoc import locate
  from typing import Literal

  import typer
+ from ask_shell.typer_command import configure_logging
  from model_lib import dump, parse_payload
  from zero_3rdparty.file_utils import iter_paths

@@ -255,6 +256,7 @@ def repo_clone():

  def typer_main():
      extra_root_commands()
+     configure_logging(app)
      app()


atlas_init/cli_cfn/cfn_parameter_finder.py CHANGED
@@ -8,12 +8,14 @@ from mypy_boto3_cloudformation.type_defs import ParameterTypeDef
  from pydantic import ConfigDict, Field
  from rich import prompt
  from zero_3rdparty.file_utils import clean_dir
+ from zero_3rdparty.dict_nested import iter_nested_key_values, update

  from atlas_init.cli_cfn.files import create_sample_file, default_log_group_name
  from atlas_init.cloud.aws import PascalAlias
  from atlas_init.repos.cfn import CfnType, cfn_examples_dir, cfn_type_normalized

  logger = logging.getLogger(__name__)
+ UNKNOWN_PLACEHOLDER = "UNKNOWN"


  class TemplatePathNotFoundError(Exception):
@@ -35,21 +37,25 @@ def infer_template_path(repo_path: Path, type_name: str, stack_name: str, exampl
      if not template_paths:
          raise TemplatePathNotFoundError(type_name, examples_dir)
      if len(template_paths) > 1:
-         expected_folder = cfn_type_normalized(type_name)
-         if (expected_folders := [p for p in template_paths if p.parent.name == expected_folder]) and len(
-             expected_folders
-         ) == 1:
-             logger.info(f"using template: {expected_folders[0]}")
-             return expected_folders[0]
-         choices = {p.stem: p for p in template_paths}
-         if stack_path := choices.get(stack_name):
-             logger.info(f"using template @ {stack_path} based on stack name: {stack_name}")
-             return stack_path
-         selected_path = prompt.Prompt("Choose example template: ", choices=list(choices))()
-         return choices[selected_path]
+         return choose_template_path(type_name, template_paths, stack_name)
      return template_paths[0]


+ def choose_template_path(type_name: str, template_paths: list[Path], stack_name: str) -> Path:
+     expected_folder = cfn_type_normalized(type_name)
+     if (expected_folders := [p for p in template_paths if p.parent.name == expected_folder]) and len(
+         expected_folders
+     ) == 1:
+         logger.info(f"using template: {expected_folders[0]}")
+         return expected_folders[0]
+     choices = {p.stem: p for p in template_paths}
+     if stack_path := choices.get(stack_name):
+         logger.info(f"using template @ {stack_path} based on stack name: {stack_name}")
+         return stack_path
+     selected_path = prompt.Prompt("Choose example template: ", choices=list(choices))()
+     return choices[selected_path]
+
+
  parameters_exported_env_vars = {
      "OrgId": "MONGODB_ATLAS_ORG_ID",
      "Profile": "ATLAS_INIT_CFN_PROFILE",
@@ -122,10 +128,6 @@ class CfnTemplate(Entity):
          assert self.find_resource(type_name)
          return cfn_type_normalized(type_name)

-     def add_resource_params(self, type_name: str, resources: dict[str, Any]):
-         resource = self.find_resource(type_name)
-         resource.properties.update(resources)
-
      def get_resource_properties(self, type_name: str, parameters: list[ParameterTypeDef]) -> dict:
          resource = self.find_resource(type_name)
          properties = resource.properties
@@ -152,40 +154,30 @@ class CfnTemplate(Entity):
          return properties


- def updated_template_path(path: Path) -> Path:
-     old_stem = path.stem
-     new_name = path.name.replace(old_stem, f"{old_stem}-updated")
-     return path.with_name(new_name)
+ class CfnTemplateUnknownParametersError(Exception):
+     def __init__(self, unknown_params: list[str]) -> None:
+         self.unknown_params = unknown_params


- def decode_parameters(
-     template_path: Path,
-     type_name: str,
-     stack_name: str,
-     force_params: dict[str, Any] | None = None,
-     resource_params: dict[str, Any] | None = None,
- ) -> tuple[Path, list[ParameterTypeDef], set[str]]:
-     cfn_template = parse_model(template_path, t=CfnTemplate)
-     if resource_params:
-         cfn_template.add_resource_params(type_name, resource_params)
-         template_path = updated_template_path(template_path)
-         logger.info(f"updating template {template_path} with {resource_params}")
-         raw_dict = cfn_template.model_dump(by_alias=True, exclude_unset=True)
-         file_extension = template_path.suffix.lstrip(".")
-         dump_format = "pretty_json" if file_extension == "json" else file_extension
-         template_str = dump(raw_dict, format=dump_format)
-         template_path.write_text(template_str)
-     parameters_dict: dict[str, Any] = {}
+ def infer_template_parameters(
+     path: Path, type_name: str, stack_name: str, explicit_params: dict[str, Any]
+ ) -> list[ParameterTypeDef]:
+     cfn_template = parse_model(path, t=CfnTemplate)
+     parameters_dict: dict[str, Any] = {key: UNKNOWN_PLACEHOLDER for key in cfn_template.parameters.keys()}
      type_defaults = type_names_defaults.get(cfn_template.normalized_type_name(type_name), {})
      if stack_name_param := type_defaults.pop(STACK_NAME_PARAM, None):
          type_defaults[stack_name_param] = stack_name
-
      for param_name, param in cfn_template.parameters.items():
+         explicit_value = explicit_params.get(param_name)
+         if explicit_value is not None:
+             logger.info(f"using explicit value for {param_name}={explicit_value}")
+             parameters_dict[param_name] = explicit_value
+             continue
          if type_default := type_defaults.get(param_name):
              logger.info(f"using type default for {param_name}={type_default}")
              parameters_dict[param_name] = type_default
              continue
-         if env_key := parameters_exported_env_vars.get(param_name):  # noqa: SIM102
+         if env_key := parameters_exported_env_vars.get(param_name):
              if env_value := os.environ.get(env_key):
                  logger.info(f"using {env_key} to fill parameter: {param_name}")
                  parameters_dict[param_name] = env_value
@@ -195,19 +187,12 @@ def decode_parameters(
              parameters_dict[param_name] = "false"
              continue
          if default := param.default:
+             logger.info(f"using default for {param_name}={default}")
              parameters_dict[param_name] = default
              continue
-         logger.warning(f"unable to auto-filll param: {param_name}")
-         parameters_dict[param_name] = "UNKNOWN"
-
-     if force_params:
-         logger.warning(f"overiding params: {force_params} for {stack_name}")
-         parameters_dict |= force_params
-     unknown_params = {key for key, value in parameters_dict.items() if value == "UNKNOWN"}
-     parameters: list[ParameterTypeDef] = [
-         {"ParameterKey": key, "ParameterValue": value} for key, value in parameters_dict.items()
-     ]
-     return template_path, parameters, unknown_params
+     if unknown_params := {key for key, value in parameters_dict.items() if value == UNKNOWN_PLACEHOLDER}:
+         raise CfnTemplateUnknownParametersError(sorted(unknown_params))
+     return [{"ParameterKey": key, "ParameterValue": value} for key, value in parameters_dict.items()]


  def dump_resource_to_file(
@@ -240,3 +225,26 @@ def dump_sample_file(
          prev_resource_state={},
      )
      return samples_path
+
+
+ def modify_resource_with_params(resource: CfnResource, resource_params: dict[str, Any]) -> None:
+     updates: dict[str, tuple[str, Any]] = {}
+     resource_properties = resource.properties
+     for path, value in iter_nested_key_values(resource_properties, include_list_indexes=True):
+         if not isinstance(value, dict):
+             continue
+         if "Ref" not in value:
+             continue
+         param_name = value["Ref"]
+         assert isinstance(param_name, str), f"Ref must be a string, {path}, got={param_name!r}"
+         if param_value := resource_params.get(param_name):
+             updates[param_name] = (path, param_value)
+         else:
+             logger.warning(f"unable to find parameter {param_name} in resource params, path={path}")
+
+     for param_name, param_value in resource_params.items():
+         if update_path_value := updates.get(param_name):
+             update(resource_properties, *update_path_value)
+         else:
+             logger.warning(f"No ref found for {param_name} assumming top level on resource")
+             resource_properties[param_name] = param_value
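Editor's note: the new parameter flow raises instead of returning a set of unresolved names. A minimal usage sketch based on the signatures above; the template path, type name, stack name, and explicit values below are hypothetical, not from the package:

    from pathlib import Path
    from atlas_init.cli_cfn.cfn_parameter_finder import (
        CfnTemplateUnknownParametersError,
        infer_template_parameters,
    )

    try:
        parameters = infer_template_parameters(
            Path("cfn-examples/cluster/template.json"),  # hypothetical template path
            "MongoDB::Atlas::Cluster",                   # hypothetical type name
            "my-stack",
            {"Profile": "dev"},  # explicit values win over type defaults, env vars, and template defaults
        )
    except CfnTemplateUnknownParametersError as err:
        print(f"fill these parameters manually: {err.unknown_params}")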
atlas_init/cli_cfn/example.py CHANGED
@@ -15,9 +15,9 @@ from atlas_init.cli_cfn.aws import (
  from atlas_init.cli_cfn.aws import delete_stack as delete_stack_aws
  from atlas_init.cli_cfn.cfn_parameter_finder import (
      CfnTemplate,
-     decode_parameters,
      dump_resource_to_file,
      dump_sample_file,
+     infer_template_parameters,
      infer_template_path,
  )
  from atlas_init.repos.cfn import CfnType, Operation, infer_cfn_type_name
@@ -142,6 +142,13 @@ def example_handler(
      delete_first = inputs.delete_stack_first
      force_deregister = inputs.force_deregister
      execution_role = inputs.execution_role
+
+     template_path = infer_template_path(repo_path, type_name, stack_name, inputs.example_name)
+     parameters = infer_template_parameters(template_path, type_name, stack_name, inputs.resource_params or {})
+     logger.info(f"parameters: {parameters}")
+     if not prompt.Confirm("parameters 👆looks good?")():
+         raise typer.Abort
+
      logger.info(f"using execution role: {execution_role}")
      if not inputs.is_export and not inputs.force_keep:
          ensure_resource_type_activated(
@@ -157,14 +164,6 @@ def example_handler(
          delete_stack_aws(region, stack_name, execution_role)
          if not delete_first:
              return
-     template_path = infer_template_path(repo_path, type_name, stack_name, inputs.example_name)
-     template_path, parameters, not_found = decode_parameters(
-         template_path=template_path,
-         stack_name=stack_name,
-         force_params=inputs.resource_params,
-         resource_params=inputs.resource_params,
-         type_name=type_name,
-     )
      if inputs.register_all_types_in_example:
          extra_example_types = [t for t in CfnTemplate.read_template_types(template_path) if t != type_name]
          for extra_type in extra_example_types:
@@ -177,13 +176,6 @@ def example_handler(
                  resource_path,
                  execution_role,
              )
-     logger.info(f"parameters: {parameters}")
-     if not_found:
-         # TODO: support specifying these extra
-         logger.critical(f"need to fill out parameters manually: {not_found} for {type_name}")
-         raise typer.Exit(1)
-     if not prompt.Confirm("parameters 👆looks good?")():
-         raise typer.Abort

      if inputs.export_example_to_inputs:
          out_inputs = dump_resource_to_file(resource_path / "inputs", template_path, type_name, parameters)
atlas_init/cli_helper/go.py CHANGED
@@ -9,10 +9,8 @@ from pydantic import Field

  from atlas_init.cli_helper.run import run_command_is_ok_output
  from atlas_init.cli_tf.go_test_run import (
-     GoTestContext,
-     GoTestContextStep,
      GoTestRun,
-     parse,
+     parse_tests,
  )
  from atlas_init.settings.config import TestSuite
  from atlas_init.settings.env_vars import AtlasInitSettings
@@ -179,7 +177,7 @@ def resolve_env_vars(
      test_env_vars |= {
          "TF_ACC": "1",
          "TF_LOG": "DEBUG",
-         "MONGODB_ATLAS_PREVIEW_PROVIDER_V2_ADVANCED_CLUSTER": "false" if use_old_schema else "true",
+         "MONGODB_ATLAS_PREVIEW_PROVIDER_V2_ADVANCED_CLUSTER": ("false" if use_old_schema else "true"),
      }
      test_env_vars |= env_vars_for_capture(capture_mode)
      logger.info(f"go test env-vars-extra: {sorted(test_env_vars)}")
@@ -231,17 +229,15 @@ def _run_tests(
              logger.exception(f"failed to run command for {name}")
              results.failure_names.add(name)
              continue
-         context = GoTestContext(
-             name=name,
-             html_url=f"file://{_log_path(logs_dir, name)}",
-             steps=[GoTestContextStep(name="local-run")],
-         )
          try:
-             parsed_tests = list(parse(command_out.splitlines(), context, test_step_nr=0))
+             parsed_tests = parse_tests(command_out.splitlines())
          except Exception:
              logger.exception(f"failed to parse tests for {name}")
              results.failure_names.add(name)
              continue
+         for test in parsed_tests:
+             test.log_path = _log_path(logs_dir, name)
+             # todo: possible add other fields
          if not parsed_tests and not ok:
              results.failure_names.add(name)
              logger.error(f"failed to run tests for {name}: {command_out}")
atlas_init/cli_root/mms_released.py ADDED
@@ -0,0 +1,46 @@
+ import logging
+ from pathlib import Path
+
+ import requests
+ import typer
+ from atlas_init.typer_app import app_command
+ from git import Repo
+
+ logger = logging.getLogger(__name__)
+
+
+ @app_command()
+ def mms_released(
+     mms_repo: Path = typer.Option(..., "-r", "--mms-repo", help="the path to the mms repo"),
+     commit_shas: list[str] = typer.Option(
+         ..., "-c", "--commit-shas", help="the commit shas to check for release, can be set multiple times"
+     ),
+     sha_url: str = typer.Option(
+         "https://cloud.mongodb.com/api/private/unauth/version",
+         "-u",
+         "--url",
+         help="the url to get the current sha from",
+     ),
+ ):
+     assert mms_repo.exists(), f"mms repo not found @ {mms_repo}"
+     git_repo = Repo(mms_repo)
+     git_repo.git.fetch("origin")
+     for sha in commit_shas:
+         assert git_repo.commit(sha), f"commit {sha} not found in {mms_repo}"
+     current_sha_response = requests.get(sha_url, timeout=10)
+     current_sha_response.raise_for_status()
+     current_sha = current_sha_response.text.strip()
+     assert current_sha, f"unable to get current sha from {current_sha_response.url}"
+     logger.info(f"current sha of prod: {current_sha}")
+     assert git_repo.commit(current_sha)
+     remaining_shas = set(commit_shas)
+     for commit in git_repo.iter_commits(rev=current_sha):
+         commit_sha = commit.hexsha
+         if commit_sha in commit_shas:
+             commit_message = commit.message.rstrip("\n")  # type: ignore
+             logger.info(f"found commit {commit_sha} with message {commit_message}")
+             remaining_shas.remove(commit_sha)
+             if not remaining_shas:
+                 logger.info("all commits found ✅")
+                 return
+     logger.info(f"remaining shas: {','.join(sorted(remaining_shas))} ❌")
atlas_init/cli_tf/app.py CHANGED
@@ -1,34 +1,18 @@
  import logging
- import os
  import sys
- from collections import defaultdict
- from datetime import timedelta
  from pathlib import Path

  import typer
- from zero_3rdparty.datetime_utils import utc_now
  from zero_3rdparty.file_utils import clean_dir

  from atlas_init.cli_args import option_sdk_repo_path
  from atlas_init.cli_helper.run import (
-     add_to_clipboard,
      run_binary_command_is_ok,
      run_command_exit_on_failure,
-     run_command_receive_result,
  )
+ from atlas_init.cli_tf.ci_tests import ci_tests
  from atlas_init.cli_tf.changelog import convert_to_changelog
  from atlas_init.cli_tf.example_update import update_example_cmd
- from atlas_init.cli_tf.github_logs import (
-     GH_TOKEN_ENV_NAME,
-     find_test_runs,
-     include_filestems,
-     include_test_jobs,
- )
- from atlas_init.cli_tf.go_test_run import GoTestRun
- from atlas_init.cli_tf.go_test_summary import (
-     create_detailed_summary,
-     create_short_summary,
- )
  from atlas_init.cli_tf.log_clean import log_clean
  from atlas_init.cli_tf.mock_tf_log import mock_tf_log_cmd
  from atlas_init.cli_tf.schema import (
@@ -41,7 +25,7 @@ from atlas_init.cli_tf.schema_v2 import (
      generate_resource_go_resource_schema,
      parse_schema,
  )
- from atlas_init.cli_tf.schema_v2_api_parsing import add_api_spec_info
+ from atlas_init.cli_tf.openapi import add_api_spec_info
  from atlas_init.cli_tf.schema_v2_sdk import generate_model_go, parse_sdk_model
  from atlas_init.repos.go_sdk import download_admin_api
  from atlas_init.repos.path import Repo, current_repo_path
@@ -52,6 +36,7 @@ app = typer.Typer(no_args_is_help=True)
  app.command(name="mock-tf-log")(mock_tf_log_cmd)
  app.command(name="example-update")(update_example_cmd)
  app.command(name="log-clean")(log_clean)
+ app.command(name="ci-tests")(ci_tests)
  logger = logging.getLogger(__name__)


@@ -153,72 +138,6 @@ def example_gen(
          file_utils.copy(path, dest_path, clean_dest=False)


- @app.command()
- def ci_tests(
-     test_group_name: str = typer.Option("", "-g"),
-     max_days_ago: int = typer.Option(1, "-d", "--days"),
-     branch: str = typer.Option("master", "-b", "--branch"),
-     workflow_file_stems: str = typer.Option("test-suite,terraform-compatibility-matrix", "-w", "--workflow"),
-     only_last_workflow: bool = typer.Option(False, "-l", "--last"),
-     names: str = typer.Option(
-         "",
-         "-n",
-         "--test-names",
-         help="comma separated list of test names to filter, e.g., TestAccCloudProviderAccessAuthorizationAzure_basic,TestAccBackupSnapshotExportBucket_basicAzure",
-     ),
-     summary_name: str = typer.Option(
-         "",
-         "-s",
-         "--summary",
-         help="the name of the summary directory to store detailed test results",
-     ),
- ):  # sourcery skip: use-named-expression
-     names_set: set[str] = set()
-     if names:
-         names_set.update(names.split(","))
-         logger.info(f"filtering tests by names: {names_set}")
-     repo_path = current_repo_path(Repo.TF)
-     token = run_command_receive_result("gh auth token", cwd=repo_path, logger=logger)
-     os.environ[GH_TOKEN_ENV_NAME] = token
-     end_test_date = utc_now()
-     start_test_date = end_test_date - timedelta(days=max_days_ago)
-     job_runs = find_test_runs(
-         start_test_date,
-         include_job=include_test_jobs(test_group_name),
-         branch=branch,
-         include_workflow=include_filestems(set(workflow_file_stems.split(","))),
-     )
-     test_results: dict[str, list[GoTestRun]] = defaultdict(list)
-     workflow_ids = set()
-     for key in sorted(job_runs.keys(), reverse=True):
-         workflow_id, job_id = key
-         workflow_ids.add(workflow_id)
-         if only_last_workflow and len(workflow_ids) > 1:
-             logger.info("only showing last workflow")
-             break
-         runs = job_runs[key]
-         if not runs:
-             logger.warning(f"no go tests for job_id={job_id}")
-             continue
-         for run in runs:
-             test_name = run.name
-             if names_set and test_name not in names_set:
-                 continue
-             test_results[test_name].append(run)
-
-     if summary_name:
-         summary = create_detailed_summary(summary_name, end_test_date, start_test_date, test_results, names_set)
-     else:
-         failing_names = [name for name, name_runs in test_results.items() if all(run.is_failure for run in name_runs)]
-         if not failing_names:
-             logger.info("ALL TESTS PASSED! ✅")
-             return
-         summary = create_short_summary(test_results, failing_names)
-     summary_str = "\n".join(summary)
-     add_to_clipboard(summary_str, logger)
-     logger.info(summary_str)
-
-
  @app.command()
  def schema2(
      resource: str = typer.Argument(
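Editor's note: the ci-tests implementation now lives in atlas_init/cli_tf/ci_tests.py (a new file in this release) and app.py only registers it, as the context lines above show. A minimal, self-contained sketch of that Typer registration pattern; the placeholder body below is mine, not the package's:

    import typer

    app = typer.Typer(no_args_is_help=True)

    def ci_tests() -> None:
        """Placeholder body; the real command in atlas_init/cli_tf/ci_tests.py takes many options."""
        ...

    app.command(name="ci-tests")(ci_tests)

    if __name__ == "__main__":
        app()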