artefacts-cli 0.6.17__py3-none-any.whl → 0.6.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- artefacts/cli/__init__.py +39 -31
- artefacts/cli/app.py +13 -7
- artefacts/cli/app_containers.py +4 -3
- artefacts/cli/containers/__init__.py +14 -14
- artefacts/cli/containers/docker.py +30 -19
- artefacts/cli/containers/utils.py +8 -4
- artefacts/cli/errors.py +1 -0
- artefacts/cli/logger.py +10 -0
- artefacts/cli/parameters.py +1 -1
- artefacts/cli/ros1.py +7 -7
- artefacts/cli/ros2.py +2 -2
- artefacts/cli/utils.py +1 -1
- artefacts/cli/utils_ros.py +1 -1
- artefacts/cli/version.py +2 -2
- {artefacts_cli-0.6.17.dist-info → artefacts_cli-0.6.19.dist-info}/METADATA +10 -11
- artefacts_cli-0.6.19.dist-info/RECORD +23 -0
- {artefacts_cli-0.6.17.dist-info → artefacts_cli-0.6.19.dist-info}/WHEEL +1 -1
- artefacts_cli-0.6.17.dist-info/RECORD +0 -21
- {artefacts_cli-0.6.17.dist-info → artefacts_cli-0.6.19.dist-info}/entry_points.txt +0 -0
- {artefacts_cli-0.6.17.dist-info → artefacts_cli-0.6.19.dist-info}/top_level.txt +0 -0
artefacts/cli/__init__.py
CHANGED
@@ -2,7 +2,6 @@ from importlib.metadata import version, PackageNotFoundError
 import json
 import glob
 from datetime import datetime, timezone
-import logging
 import os
 import math
 import requests
@@ -10,8 +9,7 @@ import copy
 from typing import Optional
 
 from .parameters import iter_grid
-
-logging.basicConfig(level=logging.INFO)
+from .logger import logger
 
 
 try:
@@ -23,7 +21,7 @@ except PackageNotFoundError:
 
     __version__ = get_version()
 except Exception as e:
-
+    logger.warning(f"Could not determine package version: {e}. Default to 0.0.0")
    __version__ = "0.0.0"
 
 
@@ -42,6 +40,7 @@ class WarpJob:
         jobconf,
         dryrun=False,
         nosim=False,
+        noupload=False,
         noisolation=False,
         context=None,
         run_offset=0,
@@ -57,6 +56,7 @@ class WarpJob:
         self.n_runs = run_offset
         self.dryrun = dryrun
         self.nosim = nosim
+        self.noupload = noupload
         self.noisolation = noisolation
         self.context = context
 
@@ -84,10 +84,10 @@ class WarpJob:
         if response.status_code != 200:
             if response.status_code == 403:
                 msg = response.json()["message"]
-
+                logger.warning(msg)
                 raise AuthenticationError(msg)
-
-
+            logger.warning(f"Error on job creation: {response.status_code}")
+            logger.warning(response.text)
             raise AuthenticationError(str(response.status_code))
         self.job_id = response.json()["job_id"]
         self.output_path = self.params.get("output_path", f"/tmp/{self.job_id}")
@@ -108,7 +108,7 @@ class WarpJob:
             "success": self.success,  # need to be determined based on all runs, can be an AND in the API
             "status": "finished",  # need to be determined based on all runs
         }
-
+        requests.put(
            f"{self.api_conf.api_url}/{self.project_id}/job/{self.job_id}",
            json=data,
            headers=self.api_conf.headers,
@@ -155,10 +155,10 @@ class WarpRun:
         if response.status_code != 200:
             if response.status_code == 403:
                 msg = response.json()["message"]
-
+                logger.warning(msg)
                 raise AuthenticationError(msg)
-
-
+            logger.warning(f"Error on scenario creation: {response.status_code}")
+            logger.warning(response.text)
             raise AuthenticationError(str(response.status_code))
         return
 
@@ -170,7 +170,7 @@ class WarpRun:
 
     def log_metrics(self):
         metrics = self.params.get("metrics", None)
-        if type(metrics)
+        if type(metrics) is str:
             with open(f"{self.output_path}/{metrics}") as f:
                 metric_values = json.load(f)
                 for k, v in metric_values.items():
@@ -182,7 +182,8 @@ class WarpRun:
                 metric_values = json.load(f)
                 for k, v in metric_values.items():
                     self.log_metric(k, v)
-        except:
+        except FileNotFoundError:
+            # if the metrics.json file does not exist, do nothing
             pass
 
     def log_tests_results(self, test_results, success):
@@ -225,7 +226,7 @@ class WarpRun:
         """log a single file filename"""
 
         def _get_filename(path):
-            return path.split(
+            return path.split("/")[-1]
 
         if prefix is not None:
             self.uploads.update({f"{prefix}/{_get_filename(filename)}": filename})
@@ -255,23 +256,28 @@ class WarpRun:
             headers=self.job.api_conf.headers,
         )
         # use s3 presigned urls to upload the artifacts
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        if self.job.noupload:
+            print(
+                "noupload: job artifacts are not uploaded to cloud, including the ones specified in output_dirs"
+            )
+        else:
+            upload_urls = response.json()["upload_urls"]
+            for key, file_name in self.uploads.items():
+                files = {"file": open(file_name, "rb")}
+                upload_info = upload_urls[key]
+                file_size_mb = os.path.getsize(file_name) / 1024 / 1024
+                try:
+                    print(f"Uploading {file_name} ({file_size_mb:.2f} MB)")
+                    # TODO: add a retry policy
+                    requests.post(
+                        upload_info["url"],
+                        data=upload_info["fields"],
+                        files=files,
+                    )
+                except OverflowError:
+                    logger.warning(f"File too large: {file_name} could not be uploaded")
+                except Exception as e:
+                    logger.warning(f"Error uploading {file_name}: {e}, skipping")
 
 
 def init_job(
@@ -281,6 +287,7 @@ def init_job(
     jobconf: dict,
     dryrun: bool = False,
     nosim: bool = False,
+    noupload: bool = False,
     noisolation: bool = False,
     context: Optional[dict] = None,
     run_offset=0,
@@ -292,6 +299,7 @@ def init_job(
         jobconf,
         dryrun,
         nosim,
+        noupload,
        noisolation,
        context,
        run_offset,
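The new upload branch above carries a `# TODO: add a retry policy` next to the presigned-URL `requests.post` call. A minimal sketch of what such a retry could look like, assuming a plain fixed-backoff loop (the `upload_with_retry` helper and its parameters are hypothetical and not part of the package):

```python
import time

import requests


def upload_with_retry(upload_info: dict, file_name: str, attempts: int = 3, backoff_s: float = 2.0):
    """Hypothetical sketch: POST a file to an S3 presigned URL, retrying transient failures."""
    for attempt in range(1, attempts + 1):
        try:
            with open(file_name, "rb") as f:
                response = requests.post(
                    upload_info["url"],
                    data=upload_info["fields"],
                    files={"file": f},
                )
            response.raise_for_status()
            return response
        except requests.RequestException:
            if attempt == attempts:
                raise
            # Transient failure: wait, then try again with a growing delay.
            time.sleep(backoff_s * attempt)
```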
artefacts/cli/app.py
CHANGED
@@ -5,7 +5,6 @@ import os
 import platform
 import random
 import subprocess
-import sys
 import tarfile
 import tempfile
 import time
@@ -22,7 +21,6 @@ from artefacts.cli import init_job, generate_scenarios, AuthenticationError, __version__
 from artefacts.cli import app_containers as containers
 from artefacts.cli.constants import DEPRECATED_FRAMEWORKS, SUPPORTED_FRAMEWORKS
 from artefacts.cli.utils import read_config, config_validation
-import artefacts_copava as copava
 
 HOME = os.path.expanduser("~")
 CONFIG_DIR = f"{HOME}/.artefacts"
@@ -213,6 +211,12 @@ def hello(project_name):
     default=False,
     help="nosim: no simulator resource provided by Artefacts",
 )
+@click.option(
+    "--noupload",
+    is_flag=True,
+    default=False,
+    help="noupload: rosbags are not uploaded to cloud",
+)
 @click.option(
     "--noisolation",
     is_flag=True,
@@ -262,6 +266,7 @@ def run(
     jobname,
     dryrun,
     nosim,
+    noupload,
     noisolation,
     description="",
     skip_validation=False,
@@ -337,6 +342,7 @@ def run(
         jobconf,
         dryrun,
         nosim,
+        noupload,
         noisolation,
         context,
         first,
@@ -349,7 +355,7 @@ def run(
     job_success = True
     for scenario_n, scenario in enumerate(scenarios):
         click.echo(
-            f"Starting scenario {scenario_n+1}/{len(scenarios)}: {scenario['name']}"
+            f"Starting scenario {scenario_n + 1}/{len(scenarios)}: {scenario['name']}"
         )
         try:
             run = warpjob.new_run(scenario)
@@ -410,7 +416,7 @@ def run(
             results, success = run_other_tests(run)
             if not success:
                 job_success = False
-        if type(run.params.get("metrics", []))
+        if type(run.params.get("metrics", [])) is str:
             run.log_metrics()
 
         run.stop()
@@ -480,7 +486,7 @@ def run_remote(config, description, jobname, skip_validation=False):
     if "on" in run_config:
         del run_config["on"]
 
-    click.echo(
+    click.echo("Packaging source...")
 
     with tempfile.NamedTemporaryFile(
         prefix=project_id.split("/")[-1], suffix=".tgz", delete=True
@@ -490,7 +496,7 @@ def run_remote(config, description, jobname, skip_validation=False):
         try:
             ignore_matches = parse_gitignore(ignore_file)
         except FileNotFoundError:
-            ignore_matches = lambda x: False
+            ignore_matches = lambda x: False  # noqa: E731
         with tarfile.open(fileobj=temp_file, mode="w:gz") as tar_file:
             for root, dirs, files in os.walk(project_folder):
                 for file in files:
@@ -604,7 +610,7 @@ def run_remote(config, description, jobname, skip_validation=False):
     )
 
     click.echo(
-        f"Uploading complete! The new job will show up shortly at {dashboard_url}
+        f"Uploading complete! The new job will show up shortly at {dashboard_url}"
     )
 
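The `type(run.params.get("metrics", [])) is str` check above pairs with `log_metrics` in `artefacts/cli/__init__.py`: when a scenario's `metrics` entry is a string, it is treated as the name of a JSON file in the run's output path and each key/value pair is logged as a metric. A small self-contained illustration of that shape (the file name and values are made up):

```python
import json

# e.g. scenario params: {"metrics": "metrics.json"}
metrics_file = "metrics.json"
with open(metrics_file, "w") as f:
    json.dump({"distance_to_goal": 0.12, "time_to_goal_s": 34.5}, f)

# What the string branch does, roughly: read the file and log each entry.
with open(metrics_file) as f:
    for name, value in json.load(f).items():
        print(f"log_metric({name!r}, {value})")
```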
artefacts/cli/app_containers.py
CHANGED
@@ -85,14 +85,15 @@ def run(ctx: click.Context, image: str, jobname: str, config: str, with_gui: bool
         image=image,
         project=project,
         jobname=jobname,
-        # Hidden setting primarily useful to Artefacts developers
-        api_url=os.environ.get("ARTEFACTS_API_URL", DEFAULT_API_URL),
         with_gui=with_gui,
+        # Hidden settings primarily useful to Artefacts developers
+        api_url=os.environ.get("ARTEFACTS_API_URL", DEFAULT_API_URL),
+        api_key=os.environ.get("ARTEFACTS_KEY", None),
     )
     container, logs = handler.run(**params)
     if container:
         print(f"Package run complete: Container Id for inspection: {container['Id']}")
     else:
-        print(
+        print("Package run failed:")
         for entry in logs:
             print("\t- " + entry)
artefacts/cli/containers/__init__.py
CHANGED
@@ -2,7 +2,7 @@ from collections.abc import Generator
 import configparser
 import os
 from pathlib import Path
-from typing import Any, Tuple, Union
+from typing import Any, Optional, Tuple, Union
 
 from artefacts.cli.constants import DEFAULT_API_URL
 
@@ -13,13 +13,13 @@ class CMgr:
         Returns the build image ID (e.g. sha256:abcdefghi)
         and an iterator over the build log entries.
         """
-        raise
+        raise NotImplementedError()
 
     def check(self, image: str) -> bool:
         """
         Checks whether a target image exists locally.
         """
-        raise
+        raise NotImplementedError()
 
     def run(
         self,
@@ -34,19 +34,19 @@ class CMgr:
         Returns a container (Any type as depends on the framework)
         and an iterator over the container log entries.
         """
-        raise
+        raise NotImplementedError()
 
-    def
+    def _get_artefacts_api_key(
         self, project: str, path: Union[str, Path] = Path("~/.artefacts").expanduser()
-    ) ->
+    ) -> Optional[str]:
         """
-
+        Get any valid API key to embed in containers.
 
-        1.
-        2. If `path` is not given, check the default
-        3. If `path` is given, check the file directly
+        1. Checks first from the ARTEFACTS_KEY environment variable.
+        2. If `path` is not given, check from the default configuraiton file in the .artefacts folder.
+        3. If `path` is given, check the file directly if a file, or check for a `config` file if a folder.
 
-        When a config file is found, we
+        When a config file is found, we get the API key for the `project`.
 
         `path` set to None is an error, and aborts execution.
         """
@@ -55,7 +55,7 @@ class CMgr:
             "`path` must be a string, a Path object, or excluded from the kwargs"
         )
         if os.environ.get("ARTEFACTS_KEY", None):
-            return
+            return os.environ["ARTEFACTS_KEY"]
         path = Path(path)  # Ensure we have a Path object
         config = configparser.ConfigParser()
         if path.is_dir():
@@ -63,6 +63,6 @@ class CMgr:
         else:
             config.read(path)
         try:
-            return config[project].get("apikey")
+            return config[project].get("apikey")
         except KeyError:
-            return
+            return None
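The lookup above reads an INI-style file with one section per project and an `apikey` entry, either from the default `~/.artefacts` location or from the given `path`. A small self-contained illustration of that layout, with a made-up project name and key:

```python
import configparser

# Example of the expected config layout: one section per project, holding its API key.
sample = """
[sample-org/sample-project]
apikey = 0123456789abcdef
"""

config = configparser.ConfigParser()
config.read_string(sample)
print(config["sample-org/sample-project"].get("apikey"))  # -> 0123456789abcdef
```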
artefacts/cli/containers/docker.py
CHANGED
@@ -12,8 +12,8 @@ from artefacts.cli.utils import ensure_available
 
 ensure_available("docker")
 
-import docker
-from docker import APIClient
+import docker  # noqa: E402
+from docker import APIClient  # noqa: E402
 
 
 class DockerManager(CMgr):
@@ -53,21 +53,35 @@ class DockerManager(CMgr):
         jobname: str = None,
         artefacts_dir: str = Path("~/.artefacts").expanduser(),
         api_url: str = DEFAULT_API_URL,
+        api_key: str = None,
         with_gui: bool = False,
     ) -> Tuple[Any, Generator]:
-
+        """
+        Run an application as an Artefacts-enabled container in a Docker engine
+
+        The arguments are considered straightforward, except the different
+        priorities between `artefacts_dir` and `api_key`:
+        * `api_key` has the highest priority. When specified, `artefacts_dir`
+          is ignored. The container will rely on the key as an environment
+          variable (ARTEFACTS_KEY).
+        * Whenever `api_key` is not provided, the container gets `artefacts_dir`
+          mounted as volume. The directory must contain a valid configuration
+          with the project's key.
+        """
+        env = {
+            "JOB_ID": str(uuid4()),
+            "ARTEFACTS_JOB_NAME": jobname,
+            "ARTEFACTS_API_URL": api_url,
+        }
+
+        env["ARTEFACTS_KEY"] = self._get_artefacts_api_key(project, artefacts_dir)
+        if env["ARTEFACTS_KEY"] is None:
             return None, iter(
                 [
-                    "Missing API key for the project. Does
+                    f"Missing API key for the project. Does `{artefacts_dir}/config` exist and contain your key? Alternatively ARTEFACTS_KEY can be set with the key."
                 ]
             )
         try:
-            env = {
-                "JOB_ID": str(uuid4()),
-                "ARTEFACTS_JOB_NAME": jobname,
-                "ARTEFACTS_API_URL": api_url,
-            }
-
             if platform.system() in ["Darwin", "Windows"]:
                 # Assume we run in Docker Desktop
                 env["DISPLAY"] = "host.docker.internal:0"
@@ -77,24 +91,21 @@ class DockerManager(CMgr):
             if not with_gui:
                 env["QT_QPA_PLATFORM"] = "offscreen"
 
-
-                image,
+            container_conf = dict(
+                image=image,
                 environment=env,
                 detach=False,
-                volumes=["/root/.artefacts"],
                 host_config=self.client.create_host_config(
-                    binds={
-                        artefacts_dir: {
-                            "bind": "/root/.artefacts",
-                            "mode": "ro",
-                        },
-                    },
                     network_mode="host",
                 ),
             )
+
+            container = self.client.create_container(**container_conf)
             self.client.start(container=container.get("Id"))
+
             for entry in self.client.logs(container=container.get("Id"), stream=True):
                 print(entry.decode("utf-8").strip())
+
             return container, iter([])
         except docker.errors.ImageNotFound:
             return None, iter(
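The new docstring above defines the precedence between `api_key` and `artefacts_dir`. A minimal sketch of that documented rule in isolation, with an illustrative helper name and structure that are not the package's implementation:

```python
from pathlib import Path
from typing import Optional


def resolve_artefacts_key(api_key: Optional[str], artefacts_dir: Path) -> dict:
    """Illustrative only: apply the documented precedence for container credentials."""
    if api_key:
        # Highest priority: pass the key directly as ARTEFACTS_KEY; artefacts_dir is ignored.
        return {"env": {"ARTEFACTS_KEY": api_key}, "mounts": []}
    # Otherwise the configuration directory is made available to the container instead.
    return {"env": {}, "mounts": [(str(artefacts_dir), "/root/.artefacts", "ro")]}


print(resolve_artefacts_key("abc123", Path("~/.artefacts").expanduser()))
print(resolve_artefacts_key(None, Path("~/.artefacts").expanduser()))
```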
artefacts/cli/containers/utils.py
CHANGED
@@ -1,8 +1,11 @@
+import sys
+
 from collections.abc import Generator
-import logging
 from typing import Any, Tuple
 
+from artefacts.cli import errors
 from artefacts.cli.containers import CMgr
+from artefacts.cli.logger import logger
 
 
 class ContainerMgr:
@@ -12,12 +15,13 @@ class ContainerMgr:
     }
 
     def __init__(self):
-        self.logger =
+        self.logger = logger
         self.mgr = self._configure()
         if self.mgr is None:
-
-            f"Failed to find supported container stack. Please install and start one
+            self.logger.error(
+                f"Failed to find supported container stack. Please install and start one of {list(self.SUPPORTED_PRIORITISED_ENGINES.values())}, with default settings (custom sockets not supported at this stage)"
             )
+            sys.exit(errors.CONTAINER_ENGINE_NOT_FOUND)
 
     def _configure(self) -> CMgr:
         manager = None
artefacts/cli/errors.py
ADDED
@@ -0,0 +1 @@
+CONTAINER_ENGINE_NOT_FOUND = 1000
artefacts/cli/logger.py
ADDED
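The diff does not show the body of the new module, only its size (10 lines, 242 bytes) and the fact that callers import it as `from artefacts.cli.logger import logger`, replacing the removed `logging.basicConfig(level=logging.INFO)` call in `__init__.py`. A plausible sketch, stated purely as an assumption rather than the actual file content:

```python
# Hypothetical reconstruction only: the real logger.py is not shown in this diff.
import logging

logging.basicConfig(level=logging.INFO)

# Shared logger used across the CLI modules.
logger = logging.getLogger("artefacts")
logger.setLevel(logging.INFO)
```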
artefacts/cli/parameters.py
CHANGED
@@ -17,7 +17,7 @@ def iter_grid(grid_spec: dict) -> Iterable[dict]:
     items = sorted(grid_spec.items())
     keys, values = zip(*items)
     # Make sure single values are converted to lists
-    values = [x if type(x)
+    values = [x if type(x) is list else [x] for x in values]
     for v in product(*values):
         params = dict(zip(keys, v))
         yield params
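From the surrounding lines, `iter_grid` takes a parameter grid, wraps scalar values into single-element lists, and yields one dict per combination. A small usage illustration with made-up parameter names, repeating the shown logic so the example is self-contained:

```python
from itertools import product
from typing import Iterable


def iter_grid(grid_spec: dict) -> Iterable[dict]:
    # Same logic as the function above, reproduced for a runnable example.
    items = sorted(grid_spec.items())
    keys, values = zip(*items)
    values = [x if type(x) is list else [x] for x in values]
    for v in product(*values):
        yield dict(zip(keys, v))


for params in iter_grid({"speed": [0.5, 1.0], "goal": "kitchen"}):
    print(params)
# {'goal': 'kitchen', 'speed': 0.5}
# {'goal': 'kitchen', 'speed': 1.0}
```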
artefacts/cli/ros1.py
CHANGED
@@ -50,17 +50,17 @@ def generate_rosbag_args(scenario: dict) -> str:
             return topics
         else:
             logging.warning(
-
+                "[warning in generate_rosbag_args] rosbag_record asks for 'subscriptions' but they are not specified. Falling back to default: no rosbag will be recorded"
             )
             return "none"
     else:
-        assert (
-
-        )
+        assert type(rosbag_record) is list, (
+            "rosbag_record supports 'all', 'none', 'subscriptions' or a list of strings interpreted as a list of ROS topics, regex supported"
+        )
         for e in rosbag_record:
-            assert (
-
-            )
+            assert type(e) is str, (
+                "Elements of the rosbag_record list must only be strings. They are interpreted as a list of ROS topics, regex supported"
+            )
         return f"--regex {' '.join(rosbag_record)}"
 
 
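The list branch above joins the topic patterns into a single `--regex` argument. For instance, under the shown behaviour a scenario whose `rosbag_record` is a list would produce (topic names made up for illustration):

```python
# Mirrors the list branch shown above: topics are joined into one --regex argument.
rosbag_record = ["/cmd_vel", "/tf.*"]           # hypothetical scenario setting
rosbag_args = f"--regex {' '.join(rosbag_record)}"
print(rosbag_args)                              # --regex /cmd_vel /tf.*
```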
artefacts/cli/ros2.py
CHANGED
@@ -114,9 +114,9 @@ def run_ros2_tests(run):
         try:
             last_value = bag.get_last_message(metric)[1].data
             run.log_metric(metric, last_value)
-        except KeyError
+        except KeyError:
             print(f"Metric {metric} not found in rosbag, skipping.")
-        except TypeError or IndexError
+        except TypeError or IndexError:
             print(
                 f"Metric {metric} not found. Is it being published?. Skipping."
             )
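One detail worth noting in the hunk above: the added colon makes the clause parse, but `except TypeError or IndexError:` evaluates the expression `TypeError or IndexError` to `TypeError`, so only `TypeError` is caught. The conventional way to catch either exception is a tuple, shown here as a general Python illustration rather than a change made in the package:

```python
def last_item(seq):
    return seq[-1]

try:
    last_item([])                      # raises IndexError
except (TypeError, IndexError):        # the tuple form catches both exception types;
    print("missing value, skipping")   # `except TypeError or IndexError:` catches only TypeError
```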
artefacts/cli/utils.py
CHANGED
artefacts/cli/utils_ros.py
CHANGED
@@ -49,7 +49,7 @@ def parse_tests_results(file):
 
     except Exception as e:
         print(f"[Exception in parse_tests_results] {e}")
-        print(
+        print("Test result xml could not be loaded, marking success as False")
         results = [
             {
                 "suite": "unittest.suite.TestSuite",
artefacts/cli/version.py
CHANGED
{artefacts_cli-0.6.17.dist-info → artefacts_cli-0.6.19.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.2
 Name: artefacts_cli
-Version: 0.6.17
+Version: 0.6.19
 Author-email: FD <fabian@artefacts.com>, AGC <alejandro@artefacts.com>, TN <tomo@artefacts.com>, EP <eric@artefacts.com>
 Project-URL: Homepage, https://github.com/art-e-fact/artefacts-client
 Project-URL: Bug Tracker, https://github.com/art-e-fact/artefacts-client/issues
@@ -17,26 +17,25 @@ Requires-Dist: mcap
 Requires-Dist: mcap-ros2-support
 Requires-Dist: PyYAML>=6.0
 Requires-Dist: requests>=2.27.1
-Requires-Dist: setuptools-scm
-Requires-Dist: setuptools>=74
 Provides-Extra: dev
 Requires-Dist: awscli; extra == "dev"
 Requires-Dist: build; extra == "dev"
 Requires-Dist: docker; extra == "dev"
 Requires-Dist: lark; extra == "dev"
-Requires-Dist: pyre-check; extra == "dev"
-Requires-Dist: pytest; extra == "dev"
-Requires-Dist: pytest-cov; extra == "dev"
-Requires-Dist: pytest-env; extra == "dev"
-Requires-Dist: pytest-mock; extra == "dev"
-Requires-Dist: ruff; extra == "dev"
-Requires-Dist: twine; extra == "dev"
 Requires-Dist: mkdocs-click==0.8.0; extra == "dev"
 Requires-Dist: mkdocs-material==8.5.6; extra == "dev"
 Requires-Dist: mkdocs-mermaid2-plugin==0.6.0; extra == "dev"
 Requires-Dist: mkdocs==1.4.2; extra == "dev"
+Requires-Dist: numpy; extra == "dev"
 Requires-Dist: pre-commit; extra == "dev"
+Requires-Dist: pyre-check; extra == "dev"
+Requires-Dist: pytest; extra == "dev"
+Requires-Dist: pytest-cov; extra == "dev"
+Requires-Dist: pytest-env; extra == "dev"
+Requires-Dist: pytest-mock; extra == "dev"
 Requires-Dist: python-markdown-math; extra == "dev"
+Requires-Dist: ruff>=0.9.2; extra == "dev"
+Requires-Dist: twine; extra == "dev"
 
 # Artefacts CLI
 
artefacts_cli-0.6.19.dist-info/RECORD
ADDED
@@ -0,0 +1,23 @@
+artefacts/cli/__init__.py,sha256=pt8OK66hMeQUxT9iLcvzYIIjFGrPS63ecWo8hS0T2qQ,11980
+artefacts/cli/app.py,sha256=fHqq4N_JdSO8jgN4uc0puLSSJk_NU5srbztveipbqLw,22221
+artefacts/cli/app_containers.py,sha256=dsyzN8UzGNwxkhV8BsFK7Sz9EOL6Se3YpeiUgzC2qic,3099
+artefacts/cli/bagparser.py,sha256=FE_QaztC9pg4hQzTjGSdyve6mzZbHJbyqa3wqvZSbxE,3702
+artefacts/cli/constants.py,sha256=bvsVDwqkAc49IZN7j6k6IL6EG87bECHd_VINtKJqbv8,320
+artefacts/cli/errors.py,sha256=BiCRo3IwVjtEotaFtmwsGTZiX-TRE69KqLrEQItLsag,34
+artefacts/cli/logger.py,sha256=MP8WDImHA3BKVsn55BMWtGP5-aCmXl5ViVPtIo3jKk4,242
+artefacts/cli/other.py,sha256=7NvzlspvG0zF7sryR-QznwdLupXLln1BKWxHB9VuEcc,1160
+artefacts/cli/parameters.py,sha256=msf2aG-tmw0ahxwrPpB2W6KqdMj5A-nw9DPG9flkHTg,788
+artefacts/cli/ros1.py,sha256=rKepZckAuy5O_qraF2CW5GiTmTZHar7LRD4pvESy6T0,9622
+artefacts/cli/ros2.py,sha256=9Ax_WQIOV_cohKz3H1eo1LnWiahiaqxO8r99doMmhEc,4466
+artefacts/cli/utils.py,sha256=bqADil7Aqvg-ci0244e-yf8G9KvIkYeWGNc_jMn6qv0,3151
+artefacts/cli/utils_ros.py,sha256=3EFoMrzBdlhLc-wAL3mmS5sSw_pACkurYhssKHqYJsI,2089
+artefacts/cli/version.py,sha256=ixVV8MU63NBV9smkuqL3bT6LQ9c0jfKN5D9k7wenwns,413
+artefacts/cli/containers/__init__.py,sha256=K0efkJXNCqXH-qYBqhCE_8zVUCHbVmeuKH-y_fE8s4M,2254
+artefacts/cli/containers/docker.py,sha256=fsGTzpj7Sj7ykCBxzaYlIt_so1yfWJ2j6ktxsWjvdvY,4073
+artefacts/cli/containers/utils.py,sha256=bILX0uvazUJq7hoqKk4ztRzI_ZerYs04XQdKdx1ltjk,2002
+artefacts/wrappers/artefacts_ros1_meta.launch,sha256=9tN7_0xLH8jW27KYFerhF3NuWDx2dED3ks_qoGVZAPw,1412
+artefacts_cli-0.6.19.dist-info/METADATA,sha256=Syxim3fegwldEZqVIFHy_4SmLT9z33XT6hVwQ0GqaNo,3035
+artefacts_cli-0.6.19.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+artefacts_cli-0.6.19.dist-info/entry_points.txt,sha256=nlTXRzilNjccbi53FgaRWCQPkG-pv61HRkaCkrKjlec,58
+artefacts_cli-0.6.19.dist-info/top_level.txt,sha256=FdaMV1C9m36MWa-2Stm5xVODv7hss_nRYNwR83j_7ow,10
+artefacts_cli-0.6.19.dist-info/RECORD,,
artefacts_cli-0.6.17.dist-info/RECORD
DELETED
@@ -1,21 +0,0 @@
-artefacts/cli/__init__.py,sha256=--1soc8cqq1JWhz4DuRUvJWgSeDCPGqm8mEmdqtclsI,11644
-artefacts/cli/app.py,sha256=cxVVHD89_eVCHm5fqPq3ANuy6jTPfw3se-FST_7wILY,22093
-artefacts/cli/app_containers.py,sha256=tHdGkxtzjexLhgcx5YXy_OOJsPcBi1jmwKZN01OAcmU,3044
-artefacts/cli/bagparser.py,sha256=FE_QaztC9pg4hQzTjGSdyve6mzZbHJbyqa3wqvZSbxE,3702
-artefacts/cli/constants.py,sha256=bvsVDwqkAc49IZN7j6k6IL6EG87bECHd_VINtKJqbv8,320
-artefacts/cli/other.py,sha256=7NvzlspvG0zF7sryR-QznwdLupXLln1BKWxHB9VuEcc,1160
-artefacts/cli/parameters.py,sha256=MDhrM7ur95wKTLDteqz2f-sLdCPcepi5wk0XjeLo6TU,788
-artefacts/cli/ros1.py,sha256=RbtirCGarD9a0ikfuGK-pdpWYSXfqJhEt4rpA0uFsyU,9625
-artefacts/cli/ros2.py,sha256=YaCi3HRuCdvCTjM0Ftnm-SLgMOy1OneIr0aU7KVTiAM,4476
-artefacts/cli/utils.py,sha256=6yQJRzv-xaLbtcA73Tty9C9QZeV06n5pjXMYl3xsLPE,3156
-artefacts/cli/utils_ros.py,sha256=pYbhAU9fK2VbnWm3nSLBwUMVBzsZuCn10SFZmVW3-Zo,2090
-artefacts/cli/version.py,sha256=q7Xb4Wt4eH2VCIhiAa6149XDeUKQFfphqlwqiTFiurk,413
-artefacts/cli/containers/__init__.py,sha256=y6NVB0efBfRQBlHb71xhRu1hEh7t2bkKNs2vtsLJaO8,2239
-artefacts/cli/containers/docker.py,sha256=A9jqs0WU-g8umJzPpEACi0_DU5N8rglR-V3dmwxDuBA,3507
-artefacts/cli/containers/utils.py,sha256=6Bw6s1ceEnTbWSdE5_1CReNW6RB29VeHsq-BAwcJ_fY,1895
-artefacts/wrappers/artefacts_ros1_meta.launch,sha256=9tN7_0xLH8jW27KYFerhF3NuWDx2dED3ks_qoGVZAPw,1412
-artefacts_cli-0.6.17.dist-info/METADATA,sha256=aiDZPGqyOGAS_pS3XnjIk4IAStv_vS3fUOW3qPyEH-s,3051
-artefacts_cli-0.6.17.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
-artefacts_cli-0.6.17.dist-info/entry_points.txt,sha256=nlTXRzilNjccbi53FgaRWCQPkG-pv61HRkaCkrKjlec,58
-artefacts_cli-0.6.17.dist-info/top_level.txt,sha256=FdaMV1C9m36MWa-2Stm5xVODv7hss_nRYNwR83j_7ow,10
-artefacts_cli-0.6.17.dist-info/RECORD,,
{artefacts_cli-0.6.17.dist-info → artefacts_cli-0.6.19.dist-info}/entry_points.txt
File without changes
{artefacts_cli-0.6.17.dist-info → artefacts_cli-0.6.19.dist-info}/top_level.txt
File without changes