atlas-init 0.3.7__py3-none-any.whl → 0.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. atlas_init/__init__.py +1 -1
  2. atlas_init/atlas_init.yaml +9 -0
  3. atlas_init/cli.py +9 -3
  4. atlas_init/cli_cfn/app.py +11 -19
  5. atlas_init/cli_cfn/aws.py +3 -3
  6. atlas_init/cli_cfn/contract.py +227 -0
  7. atlas_init/cli_cfn/example.py +17 -5
  8. atlas_init/cli_cfn/files.py +21 -2
  9. atlas_init/cli_helper/go.py +7 -4
  10. atlas_init/cli_helper/run.py +23 -25
  11. atlas_init/cli_helper/run_manager.py +272 -0
  12. atlas_init/cli_helper/tf_runner.py +7 -14
  13. atlas_init/cli_root/__init__.py +10 -0
  14. atlas_init/cli_root/go_test.py +2 -0
  15. atlas_init/cli_root/trigger.py +149 -61
  16. atlas_init/cli_tf/debug_logs.py +3 -3
  17. atlas_init/cli_tf/debug_logs_test_data.py +24 -14
  18. atlas_init/cli_tf/github_logs.py +8 -5
  19. atlas_init/cli_tf/go_test_run.py +1 -1
  20. atlas_init/cli_tf/hcl/parser.py +1 -1
  21. atlas_init/cli_tf/mock_tf_log.py +1 -1
  22. atlas_init/cli_tf/schema_table.py +1 -3
  23. atlas_init/cli_tf/schema_v3.py +1 -1
  24. atlas_init/cloud/aws.py +63 -0
  25. atlas_init/settings/config.py +6 -0
  26. atlas_init/settings/env_vars.py +113 -100
  27. atlas_init/settings/env_vars_generated.py +34 -0
  28. atlas_init/settings/rich_utils.py +11 -3
  29. atlas_init/tf/modules/cfn/cfn.tf +1 -1
  30. atlas_init/tf/modules/cloud_provider/cloud_provider.tf +1 -1
  31. atlas_init/typer_app.py +66 -11
  32. {atlas_init-0.3.7.dist-info → atlas_init-0.4.1.dist-info}/METADATA +8 -8
  33. {atlas_init-0.3.7.dist-info → atlas_init-0.4.1.dist-info}/RECORD +35 -33
  34. {atlas_init-0.3.7.dist-info → atlas_init-0.4.1.dist-info}/WHEEL +1 -1
  35. atlas_init/cli_tf/go_test_run_format.py +0 -31
  36. {atlas_init-0.3.7.dist-info → atlas_init-0.4.1.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,272 @@
1
+ from __future__ import annotations
2
+
3
+ import logging
4
+ import os
5
+ import signal
6
+ import subprocess # nosec
7
+ import sys
8
+ import threading
9
+ import time
10
+ from collections import deque
11
+ from collections.abc import Callable
12
+ from concurrent.futures import Future, ThreadPoolExecutor
13
+ from dataclasses import dataclass, field
14
+ from logging import Logger
15
+ from pathlib import Path
16
+ from time import monotonic
17
+
18
+ from atlas_init.cli_helper.run import LOG_CMD_PREFIX, find_binary_on_path
19
+
20
+ default_logger = logging.getLogger(__name__)
21
+
22
+
23
class ResultInProgressError(Exception):
    """Raised when a result is queried before its process has finished."""
25
+
26
+
27
class ResultDoneError(Exception):
    """Raised when an operation requires a process that is still running."""
29
+
30
+
31
class LogTextNotFoundError(Exception):
    """Raised when the awaited log text never appeared in a process's output."""

    def __init__(self, store: ResultStore) -> None:
        # keep the store around so callers can inspect the captured output
        self.store = store
        super().__init__(store)
35
+
36
+
37
@dataclass
class WaitOnText:
    """Text to watch for in process output, with a deadline in seconds."""

    line: str  # substring that must appear in one output line
    timeout: float  # maximum seconds to keep scanning the output
41
+
42
+
43
@dataclass
class ResultStore:
    """Collects the output lines and exit status of one managed subprocess.

    Attributes:
        wait_condition: optional text/timeout pair consumed by `wait()`.
        result: every output line, in arrival order.
        exit_code: process return code; None while still running.
    """

    wait_condition: WaitOnText | None = None
    _recent_lines: deque = field(default_factory=lambda: deque(maxlen=1000))

    result: list[str] = field(default_factory=list)
    exit_code: int | None = None
    _aborted: bool = False
    _terminated: bool = False
    _killed: bool = False

    @property
    def result_str(self) -> str:
        """Full captured output as a single string."""
        return "".join(self.result)

    @property
    def is_ok(self) -> bool:
        """True when the process finished with exit code 0.

        Raises:
            ResultInProgressError: if the process has not finished yet.
        """
        if self.in_progress():
            raise ResultInProgressError
        return self.exit_code == 0

    def _add_line(self, line: str) -> None:
        # `_recent_lines` feeds `wait()`; `result` keeps the full transcript
        self._recent_lines.append(line)
        self.result.append(line)

    def unexpected_error(self) -> bool:
        """True when the process failed without being deliberately aborted."""
        if self.in_progress():
            raise ResultInProgressError
        return self.exit_code != 0 and not self._aborted

    def force_stopped(self) -> bool:
        """True when the process was stopped via SIGTERM or SIGKILL."""
        if self.in_progress():
            raise ResultInProgressError
        return self._killed or self._terminated

    def in_progress(self) -> bool:
        return self.exit_code is None

    def wait(self) -> None:
        """Block until `wait_condition.line` shows up in the output.

        Raises:
            LogTextNotFoundError: if the process exits or `timeout` elapses
                before the text is seen.
        """
        condition = self.wait_condition
        if not condition:
            return
        timeout = condition.timeout
        start = monotonic()
        while monotonic() - start < timeout:
            # Drain buffered lines *before* checking liveness: a process that
            # printed the awaited text and then exited immediately would
            # otherwise raise even though the text was produced.
            while self._recent_lines:
                line = self._recent_lines.popleft()
                if condition.line in line:
                    return
            if not self.in_progress():
                raise LogTextNotFoundError(self)
            time.sleep(0.1)
        raise LogTextNotFoundError(self)

    def _abort(self) -> None:
        self._aborted = True

    def _terminate(self) -> None:
        self._terminated = True

    def _kill(self) -> None:
        self._killed = True
105
+
106
+
107
class RunManager:
    """Runs shell commands in worker threads and cleans them up on exit.

    Each worker thread owns at most one subprocess; `processes` and `results`
    are keyed by thread ident. On `__exit__`, still-running process groups
    receive SIGINT, then SIGTERM, then SIGKILL, pausing the matching
    `signal_*_timeout_s` between escalations and stopping early once every
    process has exited.
    """

    def __init__(
        self,
        worker_count: int = 100,
        signal_int_timeout_s: float = 0.2,
        signal_term_timeout_s: float = 0.2,
        signal_kill_timeout_s: float = 0.2,
        *,
        dry_run: bool = False,
    ):
        """
        Args:
            worker_count: the number of workers to run in parallel
            signal_int_timeout_s: time to wait after SIGINT before sending SIGTERM
            signal_term_timeout_s: time to wait after SIGTERM before sending SIGKILL
            signal_kill_timeout_s: time to wait after SIGKILL before giving up
            dry_run: when True, commands are logged and recorded but not executed
        """
        self.processes: dict[int, subprocess.Popen] = {}
        self.results: dict[int, ResultStore] = {}
        self.lock = threading.RLock()
        self.pool = ThreadPoolExecutor(max_workers=worker_count)
        self.signal_int_timeout_s = signal_int_timeout_s
        self.signal_term_timeout_s = signal_term_timeout_s
        self.signal_kill_timeout_s = signal_kill_timeout_s
        self.dry_run = dry_run

    def set_timeouts(self, timeout: float):
        """Use the same timeout for every signal escalation step."""
        self.signal_int_timeout_s = timeout
        self.signal_term_timeout_s = timeout
        self.signal_kill_timeout_s = timeout

    def __enter__(self):
        self.pool.__enter__()
        return self

    def run_process_wait_on_log(
        self,
        command: str,
        cwd: Path,
        logger: Logger | None,
        env: dict | None = None,
        result_store: ResultStore | None = None,
        *,
        line_in_log: str,
        timeout: float,
        binary: str = "",
    ) -> Future[ResultStore]:
        """Start `command` and block until `line_in_log` appears in its output.

        Returns the future of the still-running process. `ResultStore.wait`
        raises LogTextNotFoundError when the text never shows up in time.
        """
        command = self._resolve_command(binary, command, logger)
        store = result_store or ResultStore()
        store.wait_condition = WaitOnText(line=line_in_log, timeout=timeout)
        future = self.pool.submit(self._run, command, cwd, logger, env, store)
        if not self.dry_run:
            store.wait()
        return future

    def run_process(
        self,
        command: str,
        cwd: Path,
        logger: Logger | None,
        env: dict | None = None,
        result_store: ResultStore | None = None,
        *,
        binary: str = "",
    ) -> Future[ResultStore]:
        """Start `command` in a worker thread and return its future immediately."""
        command = self._resolve_command(binary, command, logger)
        return self.pool.submit(self._run, command, cwd, logger, env, result_store)

    def _resolve_command(self, binary: str, command: str, logger: Logger | None):
        # prefix the command with the resolved binary path when one is requested
        if binary:
            binary_path = find_binary_on_path(binary, logger or default_logger, allow_missing=self.dry_run)
            command = f"{binary_path} {command}"
        return command

    def _run(
        self,
        command: str,
        cwd: Path,
        logger: Logger | None,
        env: dict | None = None,
        result: ResultStore | None = None,
    ) -> ResultStore:
        """Execute `command` in a shell, streaming its output into `result`."""
        result = result or ResultStore()
        logger = logger or default_logger

        def read_output(process: subprocess.Popen):
            for line in process.stdout:  # type: ignore
                result._add_line(line)

        sys_stderr = sys.stderr

        def read_stderr(process: subprocess.Popen):
            # mirror stderr to the parent's stderr while also capturing it
            for line in process.stderr:  # type: ignore
                sys_stderr.write(line)
                result._add_line(line)

        logger.info(f"{LOG_CMD_PREFIX}{command}' from '{cwd}'")
        if self.dry_run:
            result.exit_code = 0
            result.result.append(f"DRY RUN: {command}")
            return result
        with subprocess.Popen(
            command,
            cwd=cwd,
            env=env,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            stdin=sys.stdin,
            start_new_session=True,  # own process group, so signals reach the whole tree
            shell=True,  # noqa: S602 # We control the calls to this function and don't suspect any shell injection #nosec
            bufsize=0,
            text=True,  # This makes it return strings instead of bytes
        ) as process:
            with self.lock:
                self.processes[threading.get_ident()] = process
                self.results[threading.get_ident()] = result
            read_future_out = self.pool.submit(read_output, process)
            read_future_err = self.pool.submit(read_stderr, process)
            try:
                process.wait()
            except Exception:
                logger.exception(f"failed to run command: {command}")
            finally:
                for std_name, future in zip(["stdout", "stderr"], [read_future_out, read_future_err], strict=False):
                    try:
                        future.result(1)
                    except BaseException:
                        logger.exception(f"failed to read output ({std_name}) for command: {command}")
                with self.lock:
                    del self.processes[threading.get_ident()]
                    del self.results[threading.get_ident()]
        result.exit_code = process.returncode
        if result.unexpected_error():
            logger.error(f"command failed '{command}', error code: {result.exit_code}")
        if result.force_stopped():
            logger.error(f"command killed '{command}'")
        return result

    def __exit__(self, *_):
        self.pool.shutdown(wait=False, cancel_futures=True)  # wait happens in __exit__, avoid new futures starting
        self.terminate_all()
        self.pool.__exit__(None, None, None)

    def terminate_all(self):
        """Stop all tracked processes, escalating SIGINT -> SIGTERM -> SIGKILL.

        Skips the next escalation as soon as every process has exited.
        """
        self._send_signal_to_all(signal.SIGINT, ResultStore._abort)
        if self.wait_for_processes_ok(self.signal_int_timeout_s):
            return
        self._send_signal_to_all(signal.SIGTERM, ResultStore._terminate)
        if self.wait_for_processes_ok(self.signal_term_timeout_s):
            return
        self._send_signal_to_all(signal.SIGKILL, ResultStore._kill)
        self.wait_for_processes_ok(self.signal_kill_timeout_s)

    def _send_signal_to_all(self, signal_type: signal.Signals, result_call: Callable[[ResultStore], None]):
        """Mark each result via `result_call` and signal its whole process group."""
        with self.lock:
            for ident, process in self.processes.items():
                result_call(self.results[ident])
                try:
                    os.killpg(os.getpgid(process.pid), signal_type)
                except ProcessLookupError:
                    # the process (group) exited between bookkeeping and signaling
                    continue

    def wait_for_processes_ok(self, timeout: float):
        """Poll until every tracked process has finished or `timeout` elapses."""
        start = monotonic()
        if not self.processes:
            return True
        while monotonic() - start < timeout:
            with self.lock:
                if not any(result.in_progress() for result in self.results.values()):
                    return True
            time.sleep(0.1)
        return False
@@ -8,8 +8,7 @@ from model_lib import dump
8
8
  from zero_3rdparty.file_utils import copy, iter_paths_and_relative
9
9
 
10
10
  from atlas_init.cli_helper.run import (
11
- add_to_clipboard,
12
- run_command_is_ok,
11
+ run_binary_command_is_ok,
13
12
  run_command_receive_result,
14
13
  )
15
14
  from atlas_init.settings.config import TerraformVars, TestSuite
@@ -31,7 +30,7 @@ def get_tf_vars(settings: AtlasInitSettings, active_groups: list[TestSuite]) ->
31
30
  "project_name": settings.project_name,
32
31
  "out_dir": settings.profile_dir,
33
32
  "extra_env_vars": settings.manual_env_vars,
34
- **settings.cfn_config(),
33
+ **settings.tf_vars(),
35
34
  **tf_vars.as_configs(),
36
35
  }
37
36
 
@@ -63,34 +62,28 @@ def run_terraform(settings: AtlasInitSettings, command: str, extra_args: list[st
63
62
 
64
63
  def _run_terraform(settings: AtlasInitSettings, command: str, extra_args: list[str]):
65
64
  command_parts = [
66
- "terraform",
67
65
  command,
68
66
  "-var-file",
69
67
  str(settings.tf_vars_path),
70
68
  *extra_args,
71
69
  ]
72
- is_ok = run_command_is_ok(
73
- command_parts,
70
+ is_ok = run_binary_command_is_ok(
71
+ "terraform",
72
+ " ".join(command_parts),
74
73
  env=os.environ | {"TF_DATA_DIR": settings.tf_data_dir},
75
74
  cwd=settings.tf_path,
76
75
  logger=logger,
77
76
  )
78
77
  if not is_ok:
79
78
  raise TerraformRunError
80
- if settings.skip_copy:
81
- return
82
- env_generated = settings.env_vars_generated
83
- if env_generated.exists():
84
- clipboard_content = "\n".join(f"export {line}" for line in env_generated.read_text().splitlines())
85
- add_to_clipboard(clipboard_content, logger)
86
- logger.info("loaded env-vars to clipboard ✅")
87
79
 
88
80
 
89
81
  def dump_tf_vars(settings: AtlasInitSettings, tf_vars: dict[str, Any]):
90
82
  tf_vars_path = settings.tf_vars_path
91
83
  tf_vars_path.parent.mkdir(exist_ok=True, parents=True)
92
84
  tf_vars_str = dump(tf_vars, "pretty_json")
93
- logger.info(f"writing tf vars to {tf_vars_path}: \n{tf_vars_str}")
85
+ logger.info(f"writing tf vars to {tf_vars_path}")
86
+ logger.debug(f"tf vars:\n{tf_vars_str}")
94
87
  tf_vars_path.write_text(tf_vars_str)
95
88
 
96
89
 
@@ -0,0 +1,10 @@
1
# Module-wide dry-run flag, toggled once at CLI startup instead of threading
# a `dry_run` argument through every call site.
_dry_run = False


def is_dry_run() -> bool:
    """Return whether dry-run mode is currently enabled."""
    return _dry_run
6
+
7
+
8
def set_dry_run(dry_run: bool) -> None:
    """Enable or disable dry-run mode for the whole process."""
    global _dry_run  # noqa: PLW0603 # instead of passing dry_run everywhere we can use this global variable
    _dry_run = dry_run
@@ -26,6 +26,7 @@ def go_test(
26
26
  names: list[str] = typer.Option(
27
27
  ..., "-n", "--names", default_factory=list, help="run only the tests with these names"
28
28
  ),
29
+ use_replay_mode: bool = typer.Option(False, "--replay", help="use replay mode and stored responses"),
29
30
  ):
30
31
  if export_mock_tf_log and mode != GoTestMode.individual:
31
32
  err_msg = "exporting mock-tf-log is only supported for individual tests"
@@ -54,6 +55,7 @@ def go_test(
54
55
  re_run=re_run,
55
56
  env_vars=env_method,
56
57
  names=set(names),
58
+ use_replay_mode=use_replay_mode,
57
59
  )
58
60
  case _:
59
61
  raise NotImplementedError
@@ -1,10 +1,16 @@
1
1
  import logging
2
2
 
3
3
  import requests
4
+ from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed
4
5
  from zero_3rdparty.id_creator import simple_id
5
6
 
6
7
  from atlas_init.settings.env_vars import init_settings
7
- from atlas_init.settings.path import dump_dotenv, dump_vscode_dotenv
8
+ from atlas_init.settings.env_vars_generated import (
9
+ EnvVarsGenerated,
10
+ RealmSettings,
11
+ TFModuleCluster,
12
+ )
13
+ from atlas_init.settings.path import dump_dotenv
8
14
  from atlas_init.typer_app import app_command
9
15
 
10
16
  logger = logging.getLogger(__name__)
@@ -12,67 +18,78 @@ logger = logging.getLogger(__name__)
12
18
 
13
19
  @app_command()
14
20
  def trigger_app():
21
+ create_realm_app()
22
+
23
+
24
+ def create_realm_app():
15
25
  settings = init_settings()
16
- login_req = {
17
- "username": settings.MONGODB_ATLAS_PUBLIC_KEY,
18
- "apiKey": settings.MONGODB_ATLAS_PRIVATE_KEY,
19
- }
20
- response = requests.post(
21
- "https://realm-dev.mongodb.com/api/admin/v3.0/auth/providers/mongodb-cloud/login",
22
- json=login_req,
23
- headers={"Accept": "application/json", "Content-Type": "application/json"},
24
- timeout=10,
25
- )
26
- response.raise_for_status()
27
- token_response = response.json()
28
- access_token = token_response["access_token"]
29
- logger.info(f"token: {access_token}")
30
- auth_headers = {"Authorization": f"Bearer {access_token}"}
31
- env_vars = settings.load_env_vars_generated()
32
- project_id = env_vars["MONGODB_ATLAS_PROJECT_ID"]
33
- cluster_name = env_vars["MONGODB_ATLAS_CLUSTER_NAME"]
34
- apps = list_apps(auth_headers, project_id)
35
- if apps:
26
+ base_url = settings.realm_url
27
+ project_id = settings.env_vars_cls(EnvVarsGenerated).MONGODB_ATLAS_PROJECT_ID
28
+ cluster_name = settings.env_vars_cls(TFModuleCluster).MONGODB_ATLAS_CLUSTER_NAME
29
+ auth_headers = login_to_realm(settings, base_url)
30
+ realm_settings = settings.env_vars_cls_or_none(RealmSettings, path=settings.env_vars_trigger)
31
+ if realm_settings and function_exists(
32
+ base_url,
33
+ auth_headers,
34
+ project_id,
35
+ realm_settings.MONGODB_REALM_APP_ID,
36
+ realm_settings.MONGODB_REALM_FUNCTION_ID,
37
+ ):
38
+ logger.info(f"function {realm_settings.MONGODB_REALM_FUNCTION_NAME} already exists ✅")
39
+ settings.include_extra_env_vars_in_vscode(realm_settings.model_dump())
40
+ return
41
+ logger.info("creating new realm app")
42
+ if apps := list_apps(base_url, auth_headers, project_id):
36
43
  logger.info(f"got apps: {apps}")
37
44
  app_id = apps[0]["_id"]
38
45
  else:
39
46
  logger.info("no apps found, creating one")
40
- app = create_app(auth_headers, project_id, cluster_name)
47
+ app = create_app(base_url, auth_headers, project_id, cluster_name, settings.AWS_REGION)
41
48
  logger.info(f"created app: {app}")
42
49
  app_id = app["_id"]
43
50
  logger.info(f"using app_id: {app_id}")
44
51
  suffix = simple_id(length=5)
45
- service = create_service(auth_headers, project_id, cluster_name, app_id, suffix)
52
+ service = create_service(base_url, auth_headers, project_id, cluster_name, app_id, suffix)
46
53
  logger.info(f"new service: {service}")
47
54
  service_id = service["_id"]
48
55
  logger.info(f"using service_id: {service_id}")
49
- func_response = create_function(auth_headers, project_id, app_id, suffix)
56
+ func_response = create_function(base_url, auth_headers, project_id, app_id, suffix)
50
57
  logger.info(f"new function: {func_response}")
51
58
  func_id = func_response["_id"]
52
59
  func_name = func_response["name"]
53
60
  logger.info(f"using func_id: {func_id}")
54
- extra_env_vars = {
55
- "MONGODB_REALM_APP_ID": app_id,
56
- "MONGODB_REALM_SERVICE_ID": service_id,
57
- "MONGODB_REALM_FUNCTION_ID": func_id,
58
- "MONGODB_REALM_FUNCTION_NAME": func_name,
59
- "MONGODB_REALM_BASE_URL": "https://realm-dev.mongodb.com/",
60
- }
61
+ realm_settings = RealmSettings(
62
+ MONGODB_REALM_APP_ID=app_id,
63
+ MONGODB_REALM_SERVICE_ID=service_id,
64
+ MONGODB_REALM_FUNCTION_ID=func_id,
65
+ MONGODB_REALM_FUNCTION_NAME=func_name,
66
+ MONGODB_REALM_BASE_URL=base_url,
67
+ )
68
+ extra_env_vars = realm_settings.model_dump()
61
69
  dump_dotenv(settings.env_vars_trigger, extra_env_vars)
62
70
  logger.info(f"done {settings.env_vars_trigger} created with trigger env-vars ✅")
71
+ settings.include_extra_env_vars_in_vscode(extra_env_vars)
63
72
 
64
- generated_env_vars = settings.load_env_vars_generated()
65
- generated_env_vars.update(extra_env_vars)
66
- dump_dotenv(settings.env_vars_generated, generated_env_vars)
67
- logger.info(f"done {settings.env_vars_generated} updated with trigger env-vars ✅")
68
73
 
69
- dump_vscode_dotenv(settings.env_vars_generated, settings.env_vars_vs_code, **extra_env_vars)
70
- logger.info(f"done {settings.env_vars_vs_code} updated with trigger env-vars ✅")
74
def login_to_realm(settings, base_url):
    """Exchange the Atlas programmatic API key for a realm admin token.

    Returns request headers carrying the bearer token for admin API calls.
    """
    credentials = {
        "username": settings.MONGODB_ATLAS_PUBLIC_KEY,
        "apiKey": settings.MONGODB_ATLAS_PRIVATE_KEY,
    }
    token_response = _request_post_call(
        f"{base_url}api/admin/v3.0/auth/providers/mongodb-cloud/login",
        data=credentials,
        headers={"Accept": "application/json", "Content-Type": "application/json"},
        timeout=10,
    )
    access_token = token_response["access_token"]
    logger.debug(f"token: {access_token}")
    return {"Authorization": f"Bearer {access_token}"}
71
88
 
72
89
 
73
- def list_apps(auth_headers: dict[str, str], project_id: str) -> list[dict]:
90
+ def list_apps(base_url: str, auth_headers: dict[str, str], project_id: str) -> list[dict]:
74
91
  existing_apps_response = requests.get(
75
- f"https://realm-dev.mongodb.com/api/admin/v3.0/groups/{project_id}/apps",
92
+ f"{base_url}api/admin/v3.0/groups/{project_id}/apps",
76
93
  headers=auth_headers,
77
94
  timeout=10,
78
95
  )
@@ -82,13 +99,30 @@ def list_apps(auth_headers: dict[str, str], project_id: str) -> list[dict]:
82
99
  return apps
83
100
 
84
101
 
85
- def create_app(auth_headers: dict[str, str], project_id: str, cluster_name: str) -> dict:
102
+ # https://www.mongodb.com/docs/atlas/app-services/apps/deployment-models-and-regions/#cloud-deployment-regions
103
+ _cloud_deployment_regions = {
104
+ "aws-eu-west-1": "IE",
105
+ "aws-us-west-2": "US-OR",
106
+ "aws-ap-southeast-2": "AU",
107
+ "aws-us-east-1": "US-VA",
108
+ }
109
+
110
+
111
+ def create_app(
112
+ base_url: str, auth_headers: dict[str, str], project_id: str, cluster_name: str, aws_region: str
113
+ ) -> dict:
114
+ provider_region = f"aws-{aws_region}"
115
+ location = _cloud_deployment_regions.get(provider_region)
116
+ if not location:
117
+ raise ValueError(
118
+ f"unknown location for provider_region: {provider_region}, only supports: {_cloud_deployment_regions}"
119
+ )
86
120
  create_app_req = {
87
121
  "name": "atlas-init-app",
88
- "location": "US-VA",
122
+ "location": location,
89
123
  "deployment_model": "GLOBAL",
90
124
  "environment": "production",
91
- "provider_region": "aws-us-east-1",
125
+ "provider_region": provider_region,
92
126
  "data_source": {
93
127
  "name": "mongodb-atlas",
94
128
  "type": "mongodb-atlas",
@@ -99,19 +133,25 @@ def create_app(auth_headers: dict[str, str], project_id: str, cluster_name: str)
99
133
  },
100
134
  },
101
135
  }
102
- create_app_response = requests.post(
103
- f"https://realm-dev.mongodb.com/api/admin/v3.0/groups/{project_id}/apps",
104
- json=create_app_req,
136
+ app_response = _request_post_call(
137
+ f"{base_url}api/admin/v3.0/groups/{project_id}/apps",
138
+ data=create_app_req,
105
139
  headers=auth_headers,
106
140
  timeout=10,
141
+ log_data_on_failure=True,
107
142
  )
108
- create_app_response.raise_for_status()
109
- app = create_app_response.json()
110
- assert isinstance(app, dict), f"expected dict, got: {app!r}"
111
- return app
143
+ assert isinstance(app_response, dict), f"expected dict, got: {app_response!r}"
144
+ return app_response
112
145
 
113
146
 
114
- def create_service(auth_headers: dict[str, str], project_id: str, cluster_name: str, app_id: str, suffix: str) -> dict:
147
+ def create_service(
148
+ base_url: str,
149
+ auth_headers: dict[str, str],
150
+ project_id: str,
151
+ cluster_name: str,
152
+ app_id: str,
153
+ suffix: str,
154
+ ) -> dict:
115
155
  create_service_req = {
116
156
  "name": f"atlas-init-{suffix}",
117
157
  "type": "mongodb-atlas",
@@ -121,19 +161,24 @@ def create_service(auth_headers: dict[str, str], project_id: str, cluster_name:
121
161
  "wireProtocolEnabled": True,
122
162
  },
123
163
  }
124
- create_service_response = requests.post(
125
- f"https://realm-dev.mongodb.com/api/admin/v3.0/groups/{project_id}/apps/{app_id}/services",
126
- json=create_service_req,
164
+ service = _request_post_call(
165
+ f"{base_url}api/admin/v3.0/groups/{project_id}/apps/{app_id}/services",
166
+ data=create_service_req,
127
167
  headers=auth_headers,
128
168
  timeout=10,
169
+ log_data_on_failure=True,
129
170
  )
130
- create_service_response.raise_for_status()
131
- service = create_service_response.json()
132
171
  assert isinstance(service, dict), f"expected dict, got: {service}"
133
172
  return service
134
173
 
135
174
 
136
- def create_function(auth_headers: dict[str, str], project_id: str, app_id: str, suffix: str) -> dict:
175
+ def create_function(
176
+ base_url: str,
177
+ auth_headers: dict[str, str],
178
+ project_id: str,
179
+ app_id: str,
180
+ suffix: str,
181
+ ) -> dict:
137
182
  create_func_req = {
138
183
  "can_evaluate": {},
139
184
  "name": f"testfunc-{suffix}",
@@ -141,13 +186,56 @@ def create_function(auth_headers: dict[str, str], project_id: str, app_id: str,
141
186
  "source": 'exports = function(changeEvent) {console.log("New Document Inserted")};',
142
187
  "run_as_system": True,
143
188
  }
144
- create_func_response = requests.post(
145
- f"https://realm-dev.mongodb.com/api/admin/v3.0/groups/{project_id}/apps/{app_id}/functions",
146
- json=create_func_req,
189
+ func = _request_post_call(
190
+ f"{base_url}api/admin/v3.0/groups/{project_id}/apps/{app_id}/functions",
191
+ data=create_func_req,
147
192
  headers=auth_headers,
148
193
  timeout=10,
194
+ log_data_on_failure=True,
149
195
  )
150
- create_func_response.raise_for_status()
151
- func = create_func_response.json()
152
196
  assert isinstance(func, dict), f"expected dict, got: {func}"
153
197
  return func
198
+
199
+
200
def function_exists(
    base_url: str,
    auth_headers: dict[str, str],
    project_id: str,
    app_id: str,
    func_id: str,
) -> bool:
    """Return True when the realm function can be fetched; False on a 404."""
    # https://services.cloud.mongodb.com/api/admin/v3.0/groups/{groupId}/apps/{appId}/functions/{functionId}
    response = requests.get(
        f"{base_url}api/admin/v3.0/groups/{project_id}/apps/{app_id}/functions/{func_id}",
        headers=auth_headers,
        timeout=10,
    )
    if response.status_code == 404:  # noqa: PLR2004
        return False
    # any non-404 error (401/403/5xx) is unexpected here and should propagate
    response.raise_for_status()
    func = response.json()
    assert isinstance(func, dict), f"expected dict response, got: {func}"
    return True
219
+
220
+
221
+ class _RetryPostRequestError(Exception):
222
+ pass
223
+
224
+
225
@retry(
    stop=stop_after_attempt(5),
    wait=wait_fixed(10),
    # Retry only the dedicated 5xx marker error; a bare retry_if_exception_type()
    # defaults to retrying *every* Exception, including 4xx HTTPError from
    # raise_for_status, which should fail fast instead.
    retry=retry_if_exception_type(_RetryPostRequestError),
    reraise=True,
)
def _request_post_call(
    url: str, data: dict, headers: dict[str, str], timeout: int, *, log_data_on_failure: bool = False
) -> dict:
    """POST `data` as JSON to `url` and return the decoded JSON response.

    Server errors (status >= 500) are logged and raised as
    _RetryPostRequestError so tenacity retries them; other HTTP errors
    propagate immediately via `raise_for_status`.
    """
    response = requests.post(url, json=data, headers=headers, timeout=timeout)
    if response.status_code >= 500:  # noqa: PLR2004
        logger.warning(f"failed to post to {url}, status_code: {response.status_code}, response: {response.text}")
        if log_data_on_failure:
            logger.warning(f"data: {data}")
        raise _RetryPostRequestError(f"status_code: {response.status_code}, response: {response.text}")
    response.raise_for_status()
    return response.json()
@@ -154,7 +154,7 @@ MARKER_TEST = "Starting TestSteps: "
154
154
 
155
155
 
156
156
  class FileRef(NamedTuple):
157
- index: int
157
+ request_index: int
158
158
  line_start: int
159
159
  line_end: int
160
160
 
@@ -249,12 +249,12 @@ def parse_raw_req_responses(
249
249
  in_response = True
250
250
  current_start = i + 1
251
251
  if in_request and line.startswith(MARKER_END):
252
- key = FileRef(index=request_count, line_start=current_start, line_end=i)
252
+ key = FileRef(request_index=request_count, line_start=current_start, line_end=i)
253
253
  requests[key] = log_lines[current_start:i]
254
254
  request_count += 1
255
255
  in_request = False
256
256
  if in_response and line.startswith(MARKER_END):
257
- key = FileRef(index=request_count, line_start=current_start, line_end=i)
257
+ key = FileRef(request_index=request_count, line_start=current_start, line_end=i)
258
258
  responses[key] = log_lines[current_start:i]
259
259
  response_count += 1
260
260
  in_response = False