artefacts-cli 0.6.8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55) hide show
  1. artefacts_cli-0.6.8/PKG-INFO +101 -0
  2. artefacts_cli-0.6.8/README.md +63 -0
  3. artefacts_cli-0.6.8/README_INTERNAL.md +100 -0
  4. artefacts_cli-0.6.8/artefacts/cli/__init__.py +342 -0
  5. artefacts_cli-0.6.8/artefacts/cli/app.py +617 -0
  6. artefacts_cli-0.6.8/artefacts/cli/bagparser.py +98 -0
  7. artefacts_cli-0.6.8/artefacts/cli/constants.py +16 -0
  8. artefacts_cli-0.6.8/artefacts/cli/other.py +40 -0
  9. artefacts_cli-0.6.8/artefacts/cli/parameters.py +23 -0
  10. artefacts_cli-0.6.8/artefacts/cli/ros1.py +240 -0
  11. artefacts_cli-0.6.8/artefacts/cli/ros2.py +125 -0
  12. artefacts_cli-0.6.8/artefacts/cli/utils.py +35 -0
  13. artefacts_cli-0.6.8/artefacts/cli/utils_ros.py +68 -0
  14. artefacts_cli-0.6.8/artefacts/cli/version.py +16 -0
  15. artefacts_cli-0.6.8/artefacts/wrappers/artefacts_ros1_meta.launch +45 -0
  16. artefacts_cli-0.6.8/artefacts.yaml +58 -0
  17. artefacts_cli-0.6.8/artefacts_cli.egg-info/PKG-INFO +101 -0
  18. artefacts_cli-0.6.8/artefacts_cli.egg-info/SOURCES.txt +53 -0
  19. artefacts_cli-0.6.8/artefacts_cli.egg-info/dependency_links.txt +1 -0
  20. artefacts_cli-0.6.8/artefacts_cli.egg-info/entry_points.txt +2 -0
  21. artefacts_cli-0.6.8/artefacts_cli.egg-info/requires.txt +27 -0
  22. artefacts_cli-0.6.8/artefacts_cli.egg-info/top_level.txt +1 -0
  23. artefacts_cli-0.6.8/bin/release +33 -0
  24. artefacts_cli-0.6.8/infra-tests/turtlesim1/ros_workspace/src/turtle_odometry/CMakeLists.txt +205 -0
  25. artefacts_cli-0.6.8/infra-tests/turtlesim1/ros_workspace/src/turtle_odometry/launch/test_meta.launch +50 -0
  26. artefacts_cli-0.6.8/infra-tests/turtlesim1/ros_workspace/src/turtle_odometry/launch/test_turtle.launch +49 -0
  27. artefacts_cli-0.6.8/infra-tests/turtlesim1/ros_workspace/src/turtle_odometry/launch/turtle_odometry.launch +43 -0
  28. artefacts_cli-0.6.8/infra-tests/turtlesim1/ros_workspace/src/turtle_odometry/package.xml +68 -0
  29. artefacts_cli-0.6.8/infra-tests/turtlesim1/ros_workspace/src/turtle_odometry/setup.py +8 -0
  30. artefacts_cli-0.6.8/infra-tests/turtlesim1/ros_workspace/src/turtle_odometry/src/TestTurtle.py +87 -0
  31. artefacts_cli-0.6.8/infra-tests/turtlesim1/ros_workspace/src/turtle_odometry/src/__init__.py +0 -0
  32. artefacts_cli-0.6.8/infra-tests/turtlesim1/ros_workspace/src/turtle_odometry/src/turtle_odom.py +104 -0
  33. artefacts_cli-0.6.8/infra-tests/turtlesim1/ros_workspace/src/turtle_odometry/src/turtle_post_process.py +265 -0
  34. artefacts_cli-0.6.8/infra-tests/turtlesim1/ros_workspace/src/turtle_odometry/src/turtle_trajectory.py +123 -0
  35. artefacts_cli-0.6.8/infra-tests/turtlesim1/ros_workspace/src/turtle_odometry/test/viz_turtle_odom.xml +64 -0
  36. artefacts_cli-0.6.8/infra-tests/turtlesim2/launch_turtle.py +41 -0
  37. artefacts_cli-0.6.8/infra-tests/turtlesim2/sample_node.py +39 -0
  38. artefacts_cli-0.6.8/pyproject.toml +87 -0
  39. artefacts_cli-0.6.8/pytest.ini +7 -0
  40. artefacts_cli-0.6.8/setup.cfg +4 -0
  41. artefacts_cli-0.6.8/tests/__init__.py +0 -0
  42. artefacts_cli-0.6.8/tests/cli/__init__.py +0 -0
  43. artefacts_cli-0.6.8/tests/cli/test_cli.py +141 -0
  44. artefacts_cli-0.6.8/tests/cli/test_config_validation.py +70 -0
  45. artefacts_cli-0.6.8/tests/cli/test_other.py +22 -0
  46. artefacts_cli-0.6.8/tests/cli/test_parameters.py +36 -0
  47. artefacts_cli-0.6.8/tests/cli/test_ros1.py +49 -0
  48. artefacts_cli-0.6.8/tests/cli/test_ros2.py +40 -0
  49. artefacts_cli-0.6.8/tests/cli/test_warp.py +117 -0
  50. artefacts_cli-0.6.8/tests/conftest.py +8 -0
  51. artefacts_cli-0.6.8/tests/fixtures/artefacts_deprecated.yaml +23 -0
  52. artefacts_cli-0.6.8/tests/fixtures/artefacts_ros1.yaml +32 -0
  53. artefacts_cli-0.6.8/tests/fixtures/warp-env-param.yaml +29 -0
  54. artefacts_cli-0.6.8/tests/fixtures/warp.yaml +118 -0
  55. artefacts_cli-0.6.8/tests/test_config_validation.py +70 -0
@@ -0,0 +1,101 @@
1
+ Metadata-Version: 2.1
2
+ Name: artefacts_cli
3
+ Version: 0.6.8
4
+ Author-email: FD <fabian@artefacts.com>, AGC <alejandro@artefacts.com>, TN <tomo@artefacts.com>, EP <eric@artefacts.com>
5
+ Project-URL: Homepage, https://github.com/art-e-fact/artefacts-client
6
+ Project-URL: Bug Tracker, https://github.com/art-e-fact/artefacts-client/issues
7
+ Classifier: Programming Language :: Python :: 3
8
+ Classifier: License :: OSI Approved :: Apache Software License
9
+ Classifier: Operating System :: OS Independent
10
+ Requires-Python: >=3.8
11
+ Description-Content-Type: text/markdown
12
+ Requires-Dist: artefacts-copava>=0.1.11
13
+ Requires-Dist: click>=8.0.4
14
+ Requires-Dist: gitignore_parser>=0.1.11
15
+ Requires-Dist: junitparser>=2.5
16
+ Requires-Dist: mcap
17
+ Requires-Dist: mcap-ros2-support
18
+ Requires-Dist: PyYAML>=6.0
19
+ Requires-Dist: requests>=2.27.1
20
+ Provides-Extra: dev
21
+ Requires-Dist: awscli; extra == "dev"
22
+ Requires-Dist: build; extra == "dev"
23
+ Requires-Dist: lark; extra == "dev"
24
+ Requires-Dist: pyre-check; extra == "dev"
25
+ Requires-Dist: pytest; extra == "dev"
26
+ Requires-Dist: pytest-cov; extra == "dev"
27
+ Requires-Dist: pytest-env; extra == "dev"
28
+ Requires-Dist: pytest-mock; extra == "dev"
29
+ Requires-Dist: ruff; extra == "dev"
30
+ Requires-Dist: setuptools-scm; extra == "dev"
31
+ Requires-Dist: twine; extra == "dev"
32
+ Requires-Dist: mkdocs-click==0.8.0; extra == "dev"
33
+ Requires-Dist: mkdocs-material==8.5.6; extra == "dev"
34
+ Requires-Dist: mkdocs-mermaid2-plugin==0.6.0; extra == "dev"
35
+ Requires-Dist: mkdocs==1.4.2; extra == "dev"
36
+ Requires-Dist: pre-commit; extra == "dev"
37
+ Requires-Dist: python-markdown-math; extra == "dev"
38
+
39
+ # Artefacts CLI
40
+
41
+ CLI to the Artefacts platform.
42
+
43
+ [![Documentation](https://img.shields.io/badge/documentation-blue.svg?style=flat-square)](https://docs.artefacts.com/)
44
+ [![Code style: Black-compatible with Ruff](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
45
+
46
+ ## Requirements
47
+
48
+ * Currently working partially where Python can run.
49
+ * Fully working on ROS-compatible and ready environments. Notably need for packages like `ros-<dist>-rclpy` and `ros-<dist>-rosidl-runtime-py`.
50
+
51
+
52
+ ## Usage
53
+
54
+ To install:
55
+ ```
56
+ pip install artefacts-cli
57
+ ```
58
+
59
+ Check configuration: After creating `project-name` from the web UI and getting an API key, try:
60
+
61
+ ```
62
+ artefacts config add [project-name]
63
+ ```
64
+
65
+ and enter your `API_KEY` for the project when prompted.
66
+
67
+ You can then do
68
+
69
+ ```
70
+ artefacts hello [project-name]
71
+ ```
72
+
73
+ alternatively, you can specify your API key via an environment variable
74
+
75
+ ```
76
+ ARTEFACTS_KEY=[your-key] artefacts hello [project-name]
77
+ ```
78
+
79
+ To run a job locally, for example the turtlesim example (need ros2 installed).
80
+ First edit `artefacts.yaml` to change the project name, then:
81
+
82
+ ```
83
+ cd examples/turtlesim
84
+ artefacts run basic_tests
85
+ ```
86
+
87
+
88
+ ## Configuration file syntax
89
+
90
+ See [the configuration syntax documentation](docs/configuration-syntax.md)
91
+
92
+
93
+ ## User docs
94
+
95
+ You can serve docs locally using mkdocs:
96
+
97
+ ```
98
+ mkdocs serve -a 127.0.0.1:7000
99
+ ```
100
+
101
+ The docs are automatically deployed by the documentation workflow.
@@ -0,0 +1,63 @@
1
+ # Artefacts CLI
2
+
3
+ CLI to the Artefacts platform.
4
+
5
+ [![Documentation](https://img.shields.io/badge/documentation-blue.svg?style=flat-square)](https://docs.artefacts.com/)
6
+ [![Code style: Black-compatible with Ruff](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
7
+
8
+ ## Requirements
9
+
10
+ * Currently working partially where Python can run.
11
+ * Fully working on ROS-compatible and ready environments. Notably need for packages like `ros-<dist>-rclpy` and `ros-<dist>-rosidl-runtime-py`.
12
+
13
+
14
+ ## Usage
15
+
16
+ To install:
17
+ ```
18
+ pip install artefacts-cli
19
+ ```
20
+
21
+ Check configuration: After creating `project-name` from the web UI and getting an API key, try:
22
+
23
+ ```
24
+ artefacts config add [project-name]
25
+ ```
26
+
27
+ and enter your `API_KEY` for the project when prompted.
28
+
29
+ You can then do
30
+
31
+ ```
32
+ artefacts hello [project-name]
33
+ ```
34
+
35
+ alternatively, you can specify your API key via an environment variable
36
+
37
+ ```
38
+ ARTEFACTS_KEY=[your-key] artefacts hello [project-name]
39
+ ```
40
+
41
+ To run a job locally, for example the turtlesim example (need ros2 installed).
42
+ First edit `artefacts.yaml` to change the project name, then:
43
+
44
+ ```
45
+ cd examples/turtlesim
46
+ artefacts run basic_tests
47
+ ```
48
+
49
+
50
+ ## Configuration file syntax
51
+
52
+ See [the configuration syntax documentation](docs/configuration-syntax.md)
53
+
54
+
55
+ ## User docs
56
+
57
+ You can serve docs locally using mkdocs:
58
+
59
+ ```
60
+ mkdocs serve -a 127.0.0.1:7000
61
+ ```
62
+
63
+ The docs are automatically deployed by the documentation workflow.
@@ -0,0 +1,100 @@
1
+ # ARTEFACTS Client
2
+
3
+ Python client and CLI for ARTEFACTS
4
+
5
+ [![Documentation](https://img.shields.io/badge/documentation-blue.svg?style=flat-square)](https://docs.artefacts.com/)
6
+ [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
7
+
8
+
9
+ ## CLI
10
+
11
+ To install:
12
+ ```
13
+ pip install --editable "."
14
+ ```
15
+
16
+ Check configuration: after creating `project-name` from the web UI and getting an API key, try:
17
+
18
+ ```
19
+ artefacts config add [project-name]
20
+ ```
21
+
22
+ and enter your `API_KEY` for the project when prompted.
23
+
24
+ You can then do
25
+
26
+ ```
27
+ artefacts hello [project-name]
28
+ ```
29
+
30
+ alternatively, you can specify your API key via an environment variable
31
+
32
+ ```
33
+ ARTEFACTS_KEY=[your-key] artefacts hello [project-name]
34
+ ```
35
+
36
+ To run a job locally, for example the turtlesim example (need ros2 installed).
37
+ First edit `artefacts.yaml` to change the project name, then:
38
+
39
+ ```
40
+ cd examples/turtlesim
41
+ artefacts run basic_tests
42
+ ```
43
+
44
+ ## Configuration file syntax
45
+
46
+ See [the configuration syntax documentation](docs/configuration-syntax.md)
47
+
48
+ ## Development
49
+
50
+ For the CLI dev environment:
51
+
52
+ ```
53
+ pip install --editable ".[dev]"
54
+ ```
55
+
56
+ You can run the tests with:
57
+
58
+ ```
59
+ pytest
60
+ ```
61
+
62
+ If you need to change the API url, you can:
63
+
64
+ * Edit `~/.artefacts/config`, and add `apiurl = http://localhost:5000/api` in the `[DEFAULT]` section
65
+ * Using an environment variable, `ARTEFACTS_API_URL=http://localhost:5000/api artefacts hello [project-name]`
66
+
67
+ You can setup the pre-commit hooks with:
68
+ ```
69
+ pre-commit install --install-hooks
70
+ ```
71
+ This will automatically run the black formatter on the files staged for commit whenever you run `git commit`
72
+
73
+
74
+ ### _Note_ when using Docker to Run a Job Locally
75
+
76
+ When using the client/cli dev environment on your machine, but building and running a job through Docker, e.g
77
+ ```
78
+ docker run --env WARP_KEY=<ApiKey> --env ARTEFACTS_JOB_NAME=basic_tests --env ARTEFACTS_API_URL=<yourlocalhostUrl> <tag>
79
+ ```
80
+ (such as the Dockerfile in the [dolly-demo](https://github.com/art-e-fact/dolly-demo/blob/main/Dockerfile) repo)
81
+
82
+ You need to point the ARTEFACTS_API_URL back to your host machine which is `host.docker.internal` i.e `ARTEFACTS_API_URL=http://host.docker.internal:5000`
83
+
84
+ ### Testing on Infra
85
+
86
+ See [here](./internal-docs/testing-on-infra.md)
87
+
88
+ ## Release management
89
+
90
+ Releases are managed with Twine through the `bin/release` script. By default it releases to TestPyPi. Passing the `production` parameter releases to PyPi. Note the script currently requires a tag being issued (semver) to let a release proceed.
91
+
92
+ ## User docs
93
+
94
+ You can serve docs locally using mkdocs:
95
+
96
+ ```
97
+ mkdocs serve -a 127.0.0.1:7000
98
+ ```
99
+
100
+ The docs are automatically deployed by the documentation workflow.
@@ -0,0 +1,342 @@
1
+ from importlib.metadata import version, PackageNotFoundError
2
+ import json
3
+ import glob
4
+ from datetime import datetime, timezone
5
+ import logging
6
+ import os
7
+ import math
8
+ import requests
9
+ import copy
10
+ from typing import Optional
11
+
12
+ from .parameters import iter_grid
13
+
14
+ logging.basicConfig(level=logging.INFO)
15
+
16
+
17
+ try:
18
+ __version__ = version("package-name")
19
+ except PackageNotFoundError:
20
+ try:
21
+ # Package is not installed, most likely dev/test mode
22
+ from setuptools_scm import get_version
23
+
24
+ __version__ = get_version()
25
+ except Exception as e:
26
+ logging.warning(f"Could not determine package version: {e}. Default to 0.0.0")
27
+ __version__ = "0.0.0"
28
+
29
+
30
class AuthenticationError(Exception):
    """Error raised when authentication against the Artefacts API fails."""
34
+
35
+
36
class WarpJob:
    """One job on the Artefacts ("Warp") platform.

    Unless ``dryrun`` is set or ``ARTEFACTS_JOB_ID`` is present in the
    environment, construction registers the job with the remote API and
    stores the returned job id. Per-scenario executions are tracked as
    :class:`WarpRun` instances created via :meth:`new_run`.
    """

    def __init__(
        self,
        project_id,
        api_conf,
        jobname,
        jobconf,
        dryrun=False,
        nosim=False,
        noisolation=False,
        context=None,
        run_offset=0,
    ):
        self.project_id = project_id
        # Reuse an externally assigned job id when running inside the platform.
        self.job_id = os.environ.get("ARTEFACTS_JOB_ID", None)
        self.api_conf = api_conf  # object exposing `api_url` and `headers`
        self.start = datetime.now(timezone.utc).timestamp()
        self.uploads = {}
        self.jobname = jobname
        self.params = jobconf
        self.success = False
        # Next run index; offset allows parallel workers to interleave runs.
        self.n_runs = run_offset
        self.dryrun = dryrun
        self.nosim = nosim
        self.noisolation = noisolation
        self.context = context

        if dryrun:
            self.job_id = "dryrun"
        if self.job_id is None:
            # Only create a new job if job_id is not specified
            data = {
                "start": round(self.start),
                "status": "in progress",
                "params": json.dumps(self.params),
                "project": self.project_id,
                "jobname": self.jobname,
                # timeout is configured in minutes; the API expects seconds
                "timeout": self.params.get("timeout", 5) * 60,
            }
            if context is not None:
                data["message"] = context["description"]
                data["commit"] = context["commit"]
                data["ref"] = context["ref"]
            response = requests.post(
                f"{api_conf.api_url}/{self.project_id}/job",
                json=data,
                headers=api_conf.headers,
            )
            if response.status_code != 200:
                if response.status_code == 403:
                    msg = response.json()["message"]
                    logging.warning(msg)
                    raise AuthenticationError(msg)
                # NOTE(review): non-403 failures are also surfaced as
                # AuthenticationError (carrying the status code as text).
                logging.warning(f"Error on job creation: {response.status_code}")
                logging.warning(response.text)
                raise AuthenticationError(str(response.status_code))
            self.job_id = response.json()["job_id"]
        self.output_path = self.params.get("output_path", f"/tmp/{self.job_id}")
        os.makedirs(self.output_path, exist_ok=True)
        return

    def log_tests_result(self, success):
        # Overall job success; aggregated across runs by the caller.
        self.success = success

    def stop(self):
        """Mark the job finished on the remote API (no-op for dry runs)."""
        end = datetime.now(timezone.utc).timestamp()
        if self.dryrun:
            return
        # Log metadata
        data = {
            "end": round(end),
            "duration": round(end - self.start),
            "success": self.success,  # need to be determined based on all runs, can be an AND in the API
            "status": "finished",  # need to be determined based on all runs
        }
        response = requests.put(
            f"{self.api_conf.api_url}/{self.project_id}/job/{self.job_id}",
            json=data,
            headers=self.api_conf.headers,
        )

        return

    def new_run(self, scenario):
        """Create the WarpRun for `scenario` and advance the run counter."""
        run = WarpRun(self, scenario, self.n_runs)
        self.n_runs += 1
        return run
123
+
124
+
125
class WarpRun:
    """One run (a single scenario execution) within a :class:`WarpJob`.

    On construction the run is registered with the remote API (skipped for
    dry runs). Metrics, test results and artifact files are accumulated
    locally and pushed to the API by :meth:`stop`.
    """

    def __init__(self, job, scenario, run_n):
        """Register run ``run_n`` of ``job`` for the given ``scenario`` dict.

        Raises AuthenticationError when the API rejects the registration.
        """
        self.job = job
        self.start = datetime.now(timezone.utc).timestamp()
        self.uploads = {}
        self.params = scenario
        self.metrics = {}
        self.run_n = run_n
        self.output_path = self.params.get(
            "output_path", f"{self.job.output_path}/{self.run_n}"
        )
        os.makedirs(self.output_path, exist_ok=True)
        data = {
            "job_id": job.job_id,
            "run_n": self.run_n,
            "start": round(self.start),
            "tests": [],
            "params": json.dumps(self.params),
        }

        if self.job.dryrun:
            return
        query_url = (
            f"{self.job.api_conf.api_url}/{self.job.project_id}/job/{job.job_id}/run"
        )
        response = requests.post(
            query_url,
            json=data,
            headers=self.job.api_conf.headers,
        )
        if response.status_code != 200:
            if response.status_code == 403:
                msg = response.json()["message"]
                logging.warning(msg)
                raise AuthenticationError(msg)
            # Non-403 failures are surfaced the same way, carrying the code.
            logging.warning(f"Error on scenario creation: {response.status_code}")
            logging.warning(response.text)
            raise AuthenticationError(str(response.status_code))
        return

    def log_params(self, params):
        # Replace the run's parameter dict (logged to the API on stop()).
        self.params = params

    def log_metric(self, name, value):
        # Record a single named metric value.
        self.metrics[name] = value

    def log_metrics(self):
        """Load metrics from the scenario's `metrics` file, when configured.

        The `metrics` setting may be a filename (relative to the run's
        output path) pointing at a JSON object of name -> value pairs.
        """
        metrics = self.params.get("metrics", None)
        # isinstance instead of `type(...) ==`: correct idiom, also accepts
        # str subclasses.
        if isinstance(metrics, str):
            with open(f"{self.output_path}/{metrics}") as f:
                metric_values = json.load(f)
            for k, v in metric_values.items():
                self.log_metric(k, v)

    def log_post_process_metrics(self, post_process_folder):
        """Merge metrics from `<post_process_folder>/metrics.json`, if any.

        Best-effort by design: a missing, unreadable or malformed file is
        ignored. (Previously a bare `except:` swallowed *every* exception,
        including programming errors; now only file/parse problems are
        ignored.)
        """
        try:
            with open(f"{post_process_folder}/metrics.json") as f:
                metric_values = json.load(f)
            for k, v in metric_values.items():
                self.log_metric(k, v)
        except (OSError, ValueError):
            # OSError: file missing/unreadable; ValueError: invalid JSON
            # (json.JSONDecodeError is a ValueError subclass).
            pass

    def log_tests_results(self, test_results, success):
        # Store the per-test results and the overall run verdict.
        self.test_results = test_results
        self.success = success

    def log_artifacts(self, output_path, prefix=None):
        """log all files within folder output_path"""

        def _get_filename(root_path, full_path):
            # Path of full_path relative to root_path.
            filename = full_path.split(f"{root_path}/")[-1]
            return filename

        files = [
            f
            for f in glob.glob(f"{output_path}/**", recursive=True)
            if "." in f and f[-1] != "/"
        ]
        # careful: glob with recursive sometimes returns non existent paths!
        # https://stackoverflow.com/questions/72366844/unexpected-result-with-recursive-glob-glob-using-pattern

        # update dictionary of uploads
        # key = filename: value = file path
        # Note: filename must not be empty string (happened when '.ros' in root path)
        if prefix is not None:
            self.uploads.update(
                {f"{prefix}/{_get_filename(output_path, f)}": f for f in files}
            )
        else:
            self.uploads.update(
                {
                    _get_filename(output_path, f): f
                    for f in files
                    if _get_filename(output_path, f) != ""
                }
            )

    def log_single_artifact(self, filename, prefix=None):
        """log a single file filename"""

        def _get_filename(path):
            # Basename only; "/" was needlessly written as an f-string before.
            return path.split("/")[-1]

        if prefix is not None:
            self.uploads.update({f"{prefix}/{_get_filename(filename)}": filename})
        else:
            self.uploads.update({_get_filename(filename): filename})

    def stop(self):
        """Push the run's results to the API and upload collected artifacts.

        No-op for dry runs. Artifacts are uploaded through S3 presigned
        POST URLs returned by the run-update endpoint.
        """
        end = datetime.now(timezone.utc).timestamp()
        if self.job.dryrun:
            return
        # Log metadata
        data = {
            "job_id": self.job.job_id,
            "run_n": self.run_n,
            "start": math.floor(self.start),
            "params": json.dumps(self.params),
            "end": round(end),
            "duration": math.ceil(end - self.start),
            "tests": self.test_results,
            "success": self.success,
            "uploads": self.uploads,
            "metrics": self.metrics,
        }
        response = requests.put(
            f"{self.job.api_conf.api_url}/{self.job.project_id}/job/{self.job.job_id}/run/{self.run_n}",
            json=data,
            headers=self.job.api_conf.headers,
        )
        # use s3 presigned urls to upload the artifacts
        # NOTE(review): assumes a successful response containing
        # "upload_urls" — a failed update would raise KeyError here.
        upload_urls = response.json()["upload_urls"]
        for key, file_name in self.uploads.items():
            upload_info = upload_urls[key]
            file_size_mb = os.path.getsize(file_name) / 1024 / 1024
            try:
                print(f"Uploading {file_name} ({file_size_mb:.2f} MB)")
                # TODO: add a retry policy
                # `with` ensures the handle is closed even on upload failure
                # (previously each file was opened and never closed).
                with open(file_name, "rb") as fh:
                    r = requests.post(
                        upload_info["url"],
                        data=upload_info["fields"],
                        files={"file": fh},
                    )
            except OverflowError:
                logging.warning(f"File too large: {file_name} could not be uploaded")
            except Exception as e:
                logging.warning(f"Error uploading {file_name}: {e}, skipping")
275
+
276
+
277
def init_job(
    project_id: str,
    api_token: str,
    jobname: str,
    jobconf: dict,
    dryrun: bool = False,
    nosim: bool = False,
    noisolation: bool = False,
    context: Optional[dict] = None,
    run_offset=0,
):
    """Create and register a new :class:`WarpJob`.

    Thin convenience wrapper that forwards all arguments positionally to
    the WarpJob constructor.

    NOTE(review): ``api_token`` is received by WarpJob as its ``api_conf``
    parameter and used as an object with ``api_url`` and ``headers``
    attributes — the ``str`` annotation looks inaccurate; confirm callers.
    """
    return WarpJob(
        project_id,
        api_token,
        jobname,
        jobconf,
        dryrun,
        nosim,
        noisolation,
        context,
        run_offset,
    )
299
+
300
+
301
def generate_scenarios(jobconf, scenario_n=None):
    """Build the concrete scenario list for a job configuration.

    Steps:
      1. when ``scenario_n`` is given, select only that named scenario
         (used for parallel processing),
      2. overlay each scenario's settings onto the job-level defaults,
      3. expand ``params`` into one scenario per parameter-grid point.

    Returns a tuple ``(scenarios, first_index)`` where ``first_index`` is
    the offset of the selected scenario within the full expansion.
    """
    named = sorted(jobconf["scenarios"]["settings"], key=lambda s: s["name"])
    base = jobconf["scenarios"].get("defaults", {})
    start_idx = 0
    end_idx = None
    expanded = []
    for idx, settings in enumerate(named):
        if scenario_n is not None:
            # Track the slice of the expansion that belongs to scenario_n.
            if idx == scenario_n:
                start_idx = len(expanded)
            if idx == scenario_n + 1:
                end_idx = len(expanded)
        # Overlay this scenario on the defaults. Dict-valued keys
        # ("params" / "launch_arguments") are merged key-by-key; any other
        # key simply overwrites the default value.
        merged = copy.deepcopy(base)  # deepcopy mandatory
        for key, value in settings.items():
            if key in ("params", "launch_arguments"):
                merged[key] = {**merged.get(key, {}), **value}
            else:
                merged[key] = value

        if "params" in merged:
            # One scenario per combination of parameter values (grid coverage).
            for point in iter_grid(merged["params"]):
                variant = merged.copy()
                variant["params"] = point
                expanded.append(variant)
        else:
            expanded.append(merged)
    return expanded[start_idx:end_idx], start_idx