atlas-init 0.4.4__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff shows the changes between publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.
Files changed (66)
  1. atlas_init/__init__.py +1 -1
  2. atlas_init/cli.py +2 -0
  3. atlas_init/cli_cfn/app.py +3 -4
  4. atlas_init/cli_cfn/cfn_parameter_finder.py +61 -53
  5. atlas_init/cli_cfn/contract.py +4 -7
  6. atlas_init/cli_cfn/example.py +8 -18
  7. atlas_init/cli_helper/go.py +7 -11
  8. atlas_init/cli_root/mms_released.py +46 -0
  9. atlas_init/cli_root/trigger.py +6 -6
  10. atlas_init/cli_tf/app.py +3 -84
  11. atlas_init/cli_tf/ci_tests.py +493 -0
  12. atlas_init/cli_tf/codegen/__init__.py +0 -0
  13. atlas_init/cli_tf/codegen/models.py +97 -0
  14. atlas_init/cli_tf/codegen/openapi_minimal.py +74 -0
  15. atlas_init/cli_tf/github_logs.py +7 -94
  16. atlas_init/cli_tf/go_test_run.py +385 -132
  17. atlas_init/cli_tf/go_test_summary.py +331 -4
  18. atlas_init/cli_tf/go_test_tf_error.py +380 -0
  19. atlas_init/cli_tf/hcl/modifier.py +14 -12
  20. atlas_init/cli_tf/hcl/modifier2.py +87 -0
  21. atlas_init/cli_tf/mock_tf_log.py +1 -1
  22. atlas_init/cli_tf/{schema_v2_api_parsing.py → openapi.py} +95 -17
  23. atlas_init/cli_tf/schema_v2.py +43 -1
  24. atlas_init/crud/__init__.py +0 -0
  25. atlas_init/crud/mongo_client.py +115 -0
  26. atlas_init/crud/mongo_dao.py +296 -0
  27. atlas_init/crud/mongo_utils.py +239 -0
  28. atlas_init/repos/go_sdk.py +12 -3
  29. atlas_init/repos/path.py +110 -7
  30. atlas_init/settings/config.py +3 -6
  31. atlas_init/settings/env_vars.py +22 -31
  32. atlas_init/settings/interactive2.py +134 -0
  33. atlas_init/tf/.terraform.lock.hcl +59 -59
  34. atlas_init/tf/always.tf +5 -5
  35. atlas_init/tf/main.tf +3 -3
  36. atlas_init/tf/modules/aws_kms/aws_kms.tf +1 -1
  37. atlas_init/tf/modules/aws_s3/provider.tf +2 -1
  38. atlas_init/tf/modules/aws_vpc/provider.tf +2 -1
  39. atlas_init/tf/modules/cfn/cfn.tf +0 -8
  40. atlas_init/tf/modules/cfn/kms.tf +5 -5
  41. atlas_init/tf/modules/cfn/provider.tf +7 -0
  42. atlas_init/tf/modules/cfn/variables.tf +1 -1
  43. atlas_init/tf/modules/cloud_provider/cloud_provider.tf +1 -1
  44. atlas_init/tf/modules/cloud_provider/provider.tf +2 -1
  45. atlas_init/tf/modules/cluster/cluster.tf +31 -31
  46. atlas_init/tf/modules/cluster/provider.tf +2 -1
  47. atlas_init/tf/modules/encryption_at_rest/provider.tf +2 -1
  48. atlas_init/tf/modules/federated_vars/federated_vars.tf +1 -1
  49. atlas_init/tf/modules/federated_vars/provider.tf +2 -1
  50. atlas_init/tf/modules/project_extra/project_extra.tf +1 -10
  51. atlas_init/tf/modules/project_extra/provider.tf +8 -0
  52. atlas_init/tf/modules/stream_instance/provider.tf +8 -0
  53. atlas_init/tf/modules/stream_instance/stream_instance.tf +0 -9
  54. atlas_init/tf/modules/vpc_peering/provider.tf +10 -0
  55. atlas_init/tf/modules/vpc_peering/vpc_peering.tf +0 -10
  56. atlas_init/tf/modules/vpc_privatelink/versions.tf +2 -1
  57. atlas_init/tf/outputs.tf +1 -0
  58. atlas_init/tf/providers.tf +1 -1
  59. atlas_init/tf/variables.tf +7 -7
  60. atlas_init/typer_app.py +4 -8
  61. {atlas_init-0.4.4.dist-info → atlas_init-0.6.0.dist-info}/METADATA +7 -4
  62. atlas_init-0.6.0.dist-info/RECORD +121 -0
  63. atlas_init-0.4.4.dist-info/RECORD +0 -105
  64. {atlas_init-0.4.4.dist-info → atlas_init-0.6.0.dist-info}/WHEEL +0 -0
  65. {atlas_init-0.4.4.dist-info → atlas_init-0.6.0.dist-info}/entry_points.txt +0 -0
  66. {atlas_init-0.4.4.dist-info → atlas_init-0.6.0.dist-info}/licenses/LICENSE +0 -0
atlas_init/cli_tf/github_logs.py +7 -94

@@ -1,12 +1,8 @@
 import logging
 import os
-from collections import defaultdict
 from collections.abc import Callable
-from concurrent.futures import Future, ThreadPoolExecutor, wait
-from datetime import datetime
 from functools import lru_cache
 from pathlib import Path
-from typing import NamedTuple
 
 import requests
 from github import Auth, Github
@@ -16,7 +12,6 @@ from github.WorkflowRun import WorkflowRun
 from github.WorkflowStep import WorkflowStep
 from zero_3rdparty import datetime_utils, file_utils
 
-from atlas_init.cli_tf.go_test_run import GoTestRun, parse
 from atlas_init.repos.path import (
     GH_OWNER_TERRAFORM_PROVIDER_MONGODBATLAS,
 )
@@ -25,7 +20,6 @@ from atlas_init.settings.env_vars import init_settings
 logger = logging.getLogger(__name__)
 
 GH_TOKEN_ENV_NAME = "GH_TOKEN" # noqa: S105 #nosec
-GITHUB_CI_SUMMARY_DIR_ENV_NAME = "GITHUB_CI_SUMMARY_DIR_ENV_NAME"
 REQUIRED_GH_ENV_VARS = [GH_TOKEN_ENV_NAME]
 MAX_DOWNLOADS = 5
 
@@ -44,101 +38,20 @@ def get_repo(repo_id: str) -> Repository:
     return g.get_repo(repo_id)
 
 
-_DEFAULT_FILESTEMS = {
-    "test-suite",
-    "terraform-compatibility-matrix",
-    # "acceptance-tests",
-}
-
-
-def include_filestems(stems: set[str]) -> Callable[[WorkflowRun], bool]:
-    def inner(run: WorkflowRun) -> bool:
-        workflow_stem = stem_name(run.path)
-        return workflow_stem in stems
-
-    return inner
-
-
-def stem_name(workflow_path: str) -> str:
-    return Path(workflow_path).stem
-
-
 def tf_repo() -> Repository:
     return get_repo(GH_OWNER_TERRAFORM_PROVIDER_MONGODBATLAS)
 
 
-class WorkflowJobId(NamedTuple):
-    workflow_id: int
-    job_id: int
-
-
-def find_test_runs(
-    since: datetime,
-    include_workflow: Callable[[WorkflowRun], bool] | None = None,
-    include_job: Callable[[WorkflowJob], bool] | None = None,
-    branch: str = "master",
-) -> dict[WorkflowJobId, list[GoTestRun]]:
-    include_workflow = include_workflow or include_filestems(_DEFAULT_FILESTEMS)
-    include_job = include_job or include_test_jobs()
-    jobs_found = defaultdict(list)
-    repository = tf_repo()
-    for workflow in repository.get_workflow_runs(
-        created=f">{since.strftime('%Y-%m-%d')}",
-        branch=branch, # type: ignore
-        exclude_pull_requests=True, # type: ignore
-    ):
-        if not include_workflow(workflow):
-            continue
-        workflow_dir = workflow_logs_dir(workflow)
-        paginated_jobs = workflow.jobs("all")
-        worker_count = min(paginated_jobs.totalCount, 10) or 1
-        with ThreadPoolExecutor(max_workers=worker_count) as pool:
-            futures: dict[Future[list[GoTestRun]], WorkflowJob] = {}
-            for job in paginated_jobs:
-                if not include_job(job):
-                    continue
-                future = pool.submit(find_job_test_runs, workflow_dir, job)
-                futures[future] = job
-            done, not_done = wait(futures.keys(), timeout=300)
-            for f in not_done:
-                logger.warning(f"timeout to find go tests for job = {futures[f].html_url}")
-            workflow_id = workflow.id
-            for f in done:
-                job = futures[f]
-                try:
-                    go_test_runs: list[GoTestRun] = f.result()
-                except Exception:
-                    job_log_path = logs_file(workflow_dir, job)
-                    logger.exception(
-                        f"failed to find go tests for job: {job.html_url}, error 👆, local_path: {job_log_path}"
-                    )
-                    continue
-                jobs_found[WorkflowJobId(workflow_id, job.id)].extend(go_test_runs)
-    return jobs_found
-
-
-def find_job_test_runs(workflow_dir: Path, job: WorkflowJob) -> list[GoTestRun]:
-    jobs_log_path = download_job_safely(workflow_dir, job)
-    return [] if jobs_log_path is None else parse_job_logs(job, jobs_log_path)
-
-
-def parse_job_logs(job: WorkflowJob, logs_path: Path) -> list[GoTestRun]:
-    if job.conclusion in {"skipped", "cancelled", None}:
-        return []
-    step, logs_lines = select_step_and_log_content(job, logs_path)
-    test_runs = list(parse(logs_lines, job, step))
-    for run in test_runs:
-        run.log_path = logs_path
-    return test_runs
-
-
 def download_job_safely(workflow_dir: Path, job: WorkflowJob) -> Path | None:
+    if job.conclusion in {"skipped", "cancelled", None}:
+        logger.debug(f"not downloading job: {job.html_url}, conclusion: {job.conclusion}")
+        return None
     path = logs_file(workflow_dir, job)
     job_summary = f"found test job: {job.name}, attempt {job.run_attempt}, {job.created_at}, url: {job.html_url}"
     if path.exists():
-        logger.info(f"{job_summary} exist @ {path}")
+        logger.debug(f"{job_summary} exist @ {path}")
         return path
-    logger.info(f"{job_summary}\n\t\t downloading to {path}")
+    logger.debug(f"{job_summary}\n\t\t downloading to {path}")
     try:
         logs_response = requests.get(job.logs_url(), timeout=60)
         logs_response.raise_for_status()
@@ -160,14 +73,14 @@ def summary_dir(summary_name: str) -> Path:
 def workflow_logs_dir(workflow: WorkflowRun) -> Path:
     dt = workflow.created_at
     date_str = datetime_utils.get_date_as_rfc3339_without_time(dt)
-    workflow_name = stem_name(workflow.path)
+    workflow_name = Path(workflow.path).stem
     return logs_dir() / f"{date_str}/{workflow.id}_{workflow_name}"
 
 
 def logs_file(workflow_dir: Path, job: WorkflowJob) -> Path:
     if job.run_attempt != 1:
        workflow_dir = workflow_dir.with_name(f"{workflow_dir.name}_attempt{job.run_attempt}")
-    filename = f"{job.id}_" + job.name.replace(" ", "").replace("/", "_").replace("__", "_") + ".txt"
+    filename = f"{job.id}_" + job.name.replace(" ", "").replace("/", "_").replace("__", "_") + ".log"
     return workflow_dir / filename
 
 
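Note on the github_logs.py hunks above: the `find_test_runs`/`parse_job_logs` pipeline (including its ThreadPoolExecutor fan-out) is removed from this module, the skipped/cancelled/unfinished guard now sits at the top of `download_job_safely`, log files switch from `*.txt` to `*.log`, and test discovery presumably moves to the new `atlas_init/cli_tf/ci_tests.py`. A minimal caller sketch under those assumptions; `collect_job_logs` is a hypothetical helper, not part of the package, and uses only names visible in this diff plus PyGithub's `WorkflowRun.jobs()`:

from pathlib import Path

from github.WorkflowRun import WorkflowRun

from atlas_init.cli_tf.github_logs import download_job_safely, workflow_logs_dir


def collect_job_logs(workflow: WorkflowRun) -> list[Path]:
    """Hypothetical example: download logs for every finished job of a workflow run."""
    workflow_dir = workflow_logs_dir(workflow)
    log_paths: list[Path] = []
    for job in workflow.jobs("all"):  # same PyGithub call the removed find_test_runs used
        # In 0.6.0, download_job_safely returns None for skipped/cancelled/unfinished jobs
        # (the guard that used to live in parse_job_logs); otherwise it returns the cached
        # or freshly downloaded <job_id>_<job_name>.log path.
        path = download_job_safely(workflow_dir, job)
        if path is not None:
            log_paths.append(path)
    return log_paths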