primitive-0.2.1-py3-none-any.whl → primitive-0.2.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
primitive/__about__.py CHANGED
@@ -1,4 +1,4 @@
  # SPDX-FileCopyrightText: 2024-present Dylan Stein <dylan@primitive.tech>
  #
  # SPDX-License-Identifier: MIT
- __version__ = "0.2.1"
+ __version__ = "0.2.3"
primitive/agent/actions.py CHANGED
@@ -1,4 +1,3 @@
- import shutil
  import sys
  from time import sleep

@@ -7,9 +6,8 @@ from loguru import logger
  from primitive.__about__ import __version__
  from primitive.utils.actions import BaseAction

- from ..utils.cache import get_sources_cache
  from ..utils.exceptions import P_CLI_100
- from .runner import AgentRunner
+ from .runner import Runner
  from .uploader import Uploader


@@ -28,9 +26,6 @@ class Agent(BaseAction):
  logger.info(" [*] primitive")
  logger.info(f" [*] Version: {__version__}")

- # Create cache dir if it doesn't exist
- cache_dir = get_sources_cache()
-
  # Create uploader
  uploader = Uploader(primitive=self.primitive)

@@ -51,12 +46,12 @@ class Agent(BaseAction):
  active_reservation_pk = None

  while True:
- logger.debug("Syncing children...")
- self.primitive.hardware._sync_children()
-
  logger.debug("Scanning for files to upload...")
  uploader.scan()

+ logger.debug("Syncing children...")
+ self.primitive.hardware._sync_children()
+
  hardware = self.primitive.hardware.get_own_hardware_details()

  if hardware["activeReservation"]:
@@ -125,29 +120,18 @@ class Agent(BaseAction):
  logger.debug(f"Job Run ID: {job_run['id']}")
  logger.debug(f"Job Name: {job_run['job']['name']}")

- git_repo_full_name = job_run["gitCommit"]["repoFullName"]
- git_ref = job_run["gitCommit"]["sha"]
- logger.debug(
- f"Downloading repository {git_repo_full_name} at ref {git_ref}"
- )
-
- github_access_token = (
- self.primitive.jobs.github_access_token_for_job_run(
- job_run["id"]
- )
+ runner = Runner(
+ primitive=self.primitive,
+ job_run=job_run,
+ max_log_size=500 * 1024,
  )

  try:
- downloaded_git_repository_dir = (
- self.primitive.git.download_git_repository_at_ref(
- git_repo_full_name=git_repo_full_name,
- git_ref=git_ref,
- github_access_token=github_access_token,
- destination=cache_dir,
- )
- )
+ runner.setup()
  except Exception as exception:
- logger.error(f"Error downloading source: {exception}")
+ logger.exception(
+ f"Exception while initializing runner: {exception}"
+ )
  self.primitive.jobs.job_run_update(
  id=job_run["id"],
  status="request_completed",
@@ -155,40 +139,21 @@ class Agent(BaseAction):
  )
  continue

- source_dir = downloaded_git_repository_dir.joinpath(
- job_run["jobSettings"]["rootDirectory"]
- )
-
  try:
- # Initialize Runner
- runner = AgentRunner(
- primitive=self.primitive,
- source_dir=source_dir,
- job_run=job_run,
- max_log_size=500 * 1024,
- )
+ runner.execute()
  except Exception as exception:
- # Log Error
- logger.exception(
- f"Error initializing agent runner: {exception}"
+ logger.exception(f"Exception while executing job: {exception}")
+ self.primitive.jobs.job_run_update(
+ id=job_run["id"],
+ status="request_completed",
+ conclusion="failure",
  )
- else:
- try:
- # Execute job
- runner.execute()
- except Exception as exception:
- # Log Error
- logger.exception(
- f"AgentRunner exception while running executing customer job: {exception}"
- )
- self.primitive.jobs.job_run_update(
- id=job_run["id"],
- status="request_completed",
- conclusion="failure",
- )
  finally:
- # Clean up
- shutil.rmtree(path=downloaded_git_repository_dir)
+ runner.cleanup()
+
+ # NOTE: also run scan here to force upload of artifacts
+ # This should probably eventually be another daemon?
+ uploader.scan()

  sleep(5)
  except KeyboardInterrupt:
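The net effect of the `actions.py` changes: the agent loop no longer clones sources, provisions environments, or deletes checkouts itself; it hands the whole lifecycle to the new `Runner`. A minimal sketch of the resulting control flow (paraphrased, not verbatim package code; `report_failure` is a hypothetical stand-in for the repeated `job_run_update(..., status="request_completed", conclusion="failure")` calls):

```python
# Sketch of the simplified agent loop introduced in 0.2.3. The Runner owns
# setup (clone + config parsing), execution, and cleanup; the agent only
# reports failures and always runs cleanup for jobs that started executing.
runner = Runner(
    primitive=primitive,          # the authenticated client held by the agent
    job_run=job_run,              # the jobRun dict fetched via GraphQL
    max_log_size=500 * 1024,      # rotate the per-job log file at 500 KB
)

try:
    runner.setup()                # download repo, parse .primitive/<slug>.yaml
except Exception:
    report_failure(job_run)       # hypothetical helper; see the diff above
else:
    try:
        runner.execute()          # run each configured task
    except Exception:
        report_failure(job_run)
    finally:
        runner.cleanup()          # detach log sink, store artifacts, rm sources
```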
primitive/agent/runner.py CHANGED
@@ -1,18 +1,16 @@
  import os
- import threading
  import typing
- from enum import IntEnum
+ import re
+ import shutil
+ from typing import Dict, TypedDict, List
+ from abc import abstractmethod
+ from enum import IntEnum, Enum
  from pathlib import Path, PurePath
- from time import sleep
- from typing import Callable, Dict, Iterable, List, Optional, TypedDict
-
- import yaml
  from loguru import logger
-
- from ..utils.cache import get_artifacts_cache, get_logs_cache
- from ..utils.files import find_files_for_extension
- from .process import Process
- from .provision import ProvisionPython
+ import yaml
+ import asyncio
+ from primitive.utils.shell import env_string_to_dict
+ from ..utils.cache import get_artifacts_cache, get_sources_cache, get_logs_cache

  try:
  from yaml import CLoader as Loader
@@ -22,23 +20,21 @@ except ImportError:
  if typing.TYPE_CHECKING:
  import primitive.client

+ ENV_VAR_LOOKUP_START = "_ENV_VAR_LOOKUP_START"
+ ENV_VAR_LOOKUP_END = "_ENV_VAR_LOOKUP_END"

- class Artifact(TypedDict):
- name: str
- extension: str

-
- class JobStep(TypedDict):
- name: str
+ class Task(TypedDict):
+ label: str
  workdir: str
- artifacts: List[Artifact]
+ tags: Dict
  cmd: str


- class JobDescription(TypedDict):
- name: str
- provision: str
- steps: List[JobStep]
+ class JobConfig(TypedDict):
+ requires: List[str]
+ executes: List[Task]
+ stores: List[str]


  # NOTE This must match FailureLevel subclass in JobSettings model
@@ -47,237 +43,273 @@ class FailureLevel(IntEnum):
  WARNING = 2


- class AgentRunner:
+ class LogLevel(Enum):
+ INFO = "INFO"
+ ERROR = "ERROR"
+ WARNING = "WARNING"
+
+
+ class Runner:
  def __init__(
  self,
  primitive: "primitive.client.Primitive",
- source_dir: Path,
  job_run: Dict,
- max_log_size: int = 10 * 1024 * 1024,
- log_to_file: bool = True,
+ max_log_size: int = 10 * 1024 * 1024,  # 10MB
  ) -> None:
  self.primitive = primitive
- self.source_dir = source_dir
- self.workdir = "."
+ self.job = job_run["job"]
  self.job_run = job_run
- self.job_id = job_run["id"]
- self.job_slug = job_run["job"]["slug"]
- self.max_log_size = max_log_size
- self.parse_logs = job_run["jobSettings"]["parseLogs"]
- self.failure_level = job_run["jobSettings"]["failureLevel"]
- self.log_to_file = log_to_file
-
- # Enable and configure logger
+ self.job_settings = job_run["jobSettings"]
+ self.config = None
+ self.source_dir = None
+ self.initial_env = {}
+ self.modified_env = {}
+ self.file_logger = None
+
  logger.enable("primitive")

- if self.log_to_file:
- log_name = f"{self.job_slug}_{self.job_id}_{{time}}.primitive.log"
- logger.add(
- Path(get_logs_cache(self.job_id) / log_name),
- rotation=self.max_log_size,
- format=AgentRunner.log_serializer(),
+ # If max_log_size set to <= 0, disable file logging
+ if max_log_size > 0:
+ log_name = f"{self.job['slug']}_{self.job_run['jobRunNumber']}_{{time}}.primitive.log"
+
+ self.file_logger = logger.add(
+ Path(get_logs_cache(self.job_run["id"]) / log_name),
+ rotation=max_log_size,
+ format=Runner.fmt,
  backtrace=True,
  diagnose=True,
  )

- logger.info(f"Scanning directory for job file {self.job_slug}")
- yaml_file = Path(self.source_dir / ".primitive" / f"{self.job_slug}.yaml")
- yml_file = Path(self.source_dir / ".primitive" / f"{self.job_slug}.yml")
+ def setup(self) -> None:
+ # Attempt to download the job source code
+ git_repo_full_name = self.job_run["gitCommit"]["repoFullName"]
+ git_ref = self.job_run["gitCommit"]["sha"]
+ logger.info(f"Downloading repository {git_repo_full_name} at ref {git_ref}")
+
+ github_access_token = self.primitive.jobs.github_access_token_for_job_run(
+ self.job_run["id"]
+ )
+
+ downloaded_git_repository_dir = (
+ self.primitive.git.download_git_repository_at_ref(
+ git_repo_full_name=git_repo_full_name,
+ git_ref=git_ref,
+ github_access_token=github_access_token,
+ destination=get_sources_cache(),
+ )
+ )
+
+ self.source_dir = downloaded_git_repository_dir.joinpath(
+ self.job_settings["rootDirectory"]
+ )
+
+ # Attempt to parse the job yaml file
+ logger.info(f"Scanning directory for job file {self.job['slug']}")
+ yaml_file = Path(self.source_dir / ".primitive" / f"{self.job['slug']}.yaml")
+ yml_file = Path(self.source_dir / ".primitive" / f"{self.job['slug']}.yml")

  if yaml_file.exists() and yml_file.exists():
  logger.error(
- f"Found two job descriptions with the same slug: {self.job_slug}"
- )
- self.primitive.jobs.job_run_update(
- self.job_id, status="request_completed", conclusion="failure"
+ f"Found two job descriptions with the same slug: {self.job['slug']}"
  )
  raise FileExistsError

- if yaml_file.exists():
- self.job = yaml.load(open(yaml_file, "r"), Loader=Loader)
- elif yml_file.exists():
- self.job = yaml.load(open(yml_file, "r"), Loader=Loader)
+ if yaml_file.exists() or yml_file.exists():
+ logger.info(f"Found job description for {self.job['slug']}")
+ config_file = yaml_file if yaml_file.exists() else yml_file
+ self.config = yaml.load(open(config_file, "r"), Loader=Loader)[
+ self.job["name"]
+ ]
  else:
  logger.error(
- f"No job description with matching slug '{self.job_slug}' found"
- )
- self.primitive.jobs.job_run_update(
- self.job_id, status="request_completed", conclusion="failure"
+ f"No job description with matching slug '{self.job['slug']}' found"
  )
  raise FileNotFoundError

- logger.info(f"Found job description for {self.job_slug}")
-
- @staticmethod
- def log_serializer() -> Callable:
- def fmt(record):
- step = ""
- if "step" in record["extra"]:
- step = record["extra"]["step"]
-
- log = (
- "<green>{time:YYYY-MM-DD HH:mm:ss.SSS!UTC}</green> | "
- "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | "
- "<level>{level}</level> | "
- f"{step} | "
- "<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - "
- "<level>{message}</level>\n"
- )
-
- return log
-
- return fmt
+ # Setup initial process environment
+ self.initial_env = os.environ
+ self.initial_env["PRIMITIVE_GIT_SHA"] = str(self.job_run["gitCommit"]["sha"])
+ self.initial_env["PRIMITIVE_GIT_BRANCH"] = str(
+ self.job_run["gitCommit"]["branch"]
+ )
+ self.initial_env["PRIMITIVE_GIT_REPO"] = str(
+ self.job_run["gitCommit"]["repoFullName"]
+ )

- def name(self) -> str:
- return self.job["name"]
+ def execute(self) -> None:
+ logger.info(f"Executing {self.job['slug']} job")
+ self.primitive.jobs.job_run_update(
+ self.job_run["id"], status="request_in_progress"
+ )
+ self.modified_env = {**self.initial_env}

- def steps(self) -> Iterable[JobStep]:
- for step in self.job["steps"]:
- yield step
+ task_failed = False
+ for task in self.config["executes"]:
+ with logger.contextualize(label=task["label"]):
+ with asyncio.Runner() as async_runner:
+ if task_failed := async_runner.run(self.run_task(task)):
+ break

- def execute(self) -> None:
- logger.info(f"Executing {self.job_slug} job")
- self.primitive.jobs.job_run_update(self.job_id, status="request_in_progress")
+ if not task_failed:
+ self.primitive.jobs.job_run_update(
+ self.job_run["id"], status="request_completed", conclusion="success"
+ )
+ logger.success(f"Completed {self.job['slug']} job")
+
+ async def run_task(self, task: Task) -> bool:
+ for cmd in task["cmd"].strip().split("\n"):
+ args = [
+ "/bin/bash",
+ "-c",
+ f"{cmd} && echo '{ENV_VAR_LOOKUP_START}' && env && echo '{ENV_VAR_LOOKUP_END}'",
+ ]
+
+ process = await asyncio.create_subprocess_exec(
+ *args,
+ env=self.modified_env,
+ cwd=str(Path(self.source_dir / task.get("workdir", ""))),
+ stdout=asyncio.subprocess.PIPE,
+ stderr=asyncio.subprocess.PIPE,
+ )

- # Initialize the environment with the system
- environment = os.environ
+ stdout_failed, stderr_failed, cancelled = await asyncio.gather(
+ self.log_cmd(
+ process=process, stream=process.stdout, tags=task.get("tags", {})
+ ),
+ self.log_cmd(
+ process=process, stream=process.stderr, tags=task.get("tags", {})
+ ),
+ asyncio.create_task(self.monitor_cmd(process=process)),
+ )

- # Add local variables
- environment["PRIMITIVE_GIT_SHA"] = str(self.job_run["gitCommit"]["sha"])
+ returncode = await process.wait()

- if "provision" in self.job:
- logger.info(f"Provisioning for {self.job['provision']} environment")
- environment = self.provision()
+ if cancelled:
+ return True

- if not environment:
- self.primitive.jobs.job_run_update(
- self.job_id, status="request_completed", conclusion="failure"
- )
- logger.error(f"{self.job_slug} concluded with error(s)")
- return
-
- fail_level_detected = False
- total_parsed_levels = {FailureLevel.ERROR: 0, FailureLevel.WARNING: 0}
- for step in self.steps():
- logger.info(f"Beginning step {step['name']}")
-
- with logger.contextualize(step=step["name"]):
- if "workdir" in step:
- self.workdir = step["workdir"]
-
- proc = Process(
- cmd=step["cmd"],
- workdir=Path(self.source_dir / self.workdir),
- env=environment,
+ if returncode > 0 or stdout_failed or stderr_failed:
+ await self.primitive.jobs.ajob_run_update(
+ self.job_run["id"], status="request_completed", conclusion="failure"
  )

- try:
- proc.start()
- except Exception as exception:
- logger.exception(
- f"Error while attempting to run process {exception}"
+ if returncode > 0:
+ logger.error(
+ f"Task {task['label']} failed on '{cmd}' with return code {returncode}"
  )
- self.primitive.jobs.job_run_update(
- self.job_id, status="request_completed", conclusion="failure"
- )
- logger.error(f"{self.job_slug} concluded with error(s)")
- return
-
- def status_check():
- while proc.is_running():
- # Check job status
- status = self.primitive.jobs.get_job_status(self.job_id)
- status_value = status.data["jobRun"]["status"]
-
- # TODO: Should probably use request_cancelled or something
- # once we change it, we'll have to call conclude w/ cancelled status
- if status_value == "completed":
- logger.warning("Job cancelled by user")
- proc.terminate()
- return
-
- sleep(5)
-
- status_thread = threading.Thread(target=status_check)
- status_thread.start()
+ else:
+ logger.error(f"Task {task['label']} failed on '{cmd}'")
+
+ return True
+
+ return False
+
+ async def log_cmd(self, process, stream, tags: Dict = {}) -> bool:
+ failure_detected = False
+ while line := await stream.readline():
+ raw_data = line.decode()
+
+ # handle env vars
+ if ENV_VAR_LOOKUP_START in raw_data:
+ env_vars_string = ""
+ while env_line := await stream.readline():
+ if ENV_VAR_LOOKUP_END in env_line.decode():
+ break
+ env_vars_string += env_line.decode()
+
+ self.modified_env = env_string_to_dict(env_vars_string)
+ continue
+
+ # Handle logging
+ parse_logs = self.job_settings["parseLogs"]
+ parse_stderr = self.job_settings["parseStderr"]
+
+ level = LogLevel.INFO
+ tag = None
+ if (parse_logs and "error" in raw_data.lower()) or (
+ parse_stderr and stream is process.stderr
+ ):
+ level = LogLevel.ERROR
+ elif parse_logs and "warning" in raw_data.lower():
+ level = LogLevel.WARNING
+
+ failure_detected = (
+ level == LogLevel.ERROR
+ and self.job_settings["failureLevel"] >= FailureLevel.ERROR
+ ) or (
+ level == LogLevel.WARNING
+ and self.job_settings["failureLevel"] >= FailureLevel.WARNING
+ )

- returncode = proc.wait()
+ # Tag on the first matching regex in the list
+ for tag_key, regex in tags.items():
+ pattern = re.compile(regex)
+ if pattern.match(raw_data):
+ tag = tag_key
+ break

- logger.debug(
- f"Process {step['name']} finished with return code {returncode}"
- )
+ logger.bind(tag=tag).log(level.value, raw_data.rstrip())

- if proc.errors > 0 and self.failure_level >= FailureLevel.ERROR:
- fail_level_detected = True
+ return failure_detected

- if proc.warnings > 0 and self.failure_level >= FailureLevel.WARNING:
- fail_level_detected = True
+ async def monitor_cmd(self, process) -> bool:
+ while process.returncode is None:
+ status = await self.primitive.jobs.aget_job_status(self.job_run["id"])
+ status_value = status.data["jobRun"]["status"]
+ conclusion_value = status.data["jobRun"]["conclusion"]

- total_parsed_levels[FailureLevel.ERROR] += proc.errors
- total_parsed_levels[FailureLevel.WARNING] += proc.warnings
+ if status_value == "completed" and conclusion_value == "cancelled":
+ logger.warning("Job cancelled by user")
+ try:
+ process.terminate()
+ except ProcessLookupError:
+ pass

- status_thread.join()
+ return True

- self.collect_artifacts(step)
+ await asyncio.sleep(5)

- if returncode > 0:
- self.primitive.jobs.job_run_update(
- self.job_id, status="request_completed", conclusion="failure"
- )
- logger.error(
- f"Step {step['name']} failed with return code {returncode}"
- )
- return
+ return False

- if fail_level_detected and self.parse_logs:
- self.primitive.jobs.job_run_update(
- self.job_id, status="request_completed", conclusion="failure"
- )
+ def cleanup(self) -> None:
+ logger.remove(self.file_logger)

- logger.error(
- (
- f"{self.job_slug} concluded"
- f" with {total_parsed_levels[FailureLevel.ERROR]} error(s)"
- f" and {total_parsed_levels[FailureLevel.WARNING]} warning(s)"
- )
- )
+ if "stores" not in self.config:
  return

- self.primitive.jobs.job_run_update(
- self.job_id, status="request_completed", conclusion="success"
- )
- logger.success(f"Completed {self.job_slug} job")
-
- def provision(self) -> Optional[Dict]:
- match self.job["provision"]:
- case "python":
- requirements_glob = self.source_dir.rglob("requirements.txt")
-
- requirements_path = next(requirements_glob, None)
-
- if not requirements_path:
- logger.error("Unable to locate requirements.txt")
- return None
+ for glob in self.config["stores"]:
+ path = Path(glob)

- prov = ProvisionPython(self.source_dir, requirements_path)
- return prov.create_env()
-
- def collect_artifacts(self, step: JobStep) -> None:
- if "artifacts" not in step:
- return
-
- for artifact in step["artifacts"]:
- files = find_files_for_extension(self.source_dir, artifact["extension"])
+ if path.is_dir():
+ files = [str(f) for f in path.rglob("*") if f.is_file()]
+ else:
+ files = [str(f) for f in Path().glob(glob) if f.is_file()]

  for file in files:
- # Find path relative to source_dir
  relative_path = PurePath(file).relative_to(self.source_dir)
-
- # Construct destination to preserve directory structure
- destination = Path(get_artifacts_cache(self.job_id) / relative_path)
-
- # Create directories if they don't exist
+ destination = Path(
+ get_artifacts_cache(self.job_run["id"]) / relative_path
+ )
  destination.parent.mkdir(parents=True, exist_ok=True)
+ Path(file).replace(destination)
+
+ shutil.rmtree(path=self.source_dir)
+
+ @abstractmethod
+ def fmt(record) -> str:
+ extra = record["extra"]
+ # Delimiters with empty space MUST exist for LogQL pattern matching
+ label = extra.get("label", None)
+ tag = extra.get("tag", None)
+ context = f"{label} | " if label else " | "
+ context += f"{tag} | " if tag else " | "
+
+ log = (
+ "<green>{time:YYYY-MM-DD HH:mm:ss.SSS!UTC}</green> | "
+ "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | "
+ "<level>{level}</level> | "
+ f"{context}"
+ "<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - "
+ "<level>{message}</level>\n"
+ )

- # Move file
- file.rename(destination)
+ return log
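A detail worth calling out in the new `run_task`/`log_cmd` pair: each line of a task's `cmd` runs in its own `/bin/bash -c` invocation, so exported variables would normally be lost between commands. The runner works around this by appending an `env` dump between sentinel markers to every command and re-parsing it into `modified_env` for the next one. A self-contained sketch of the mechanism (the inline `env_string_to_dict` is a simplified stand-in for `primitive.utils.shell.env_string_to_dict`, and `run_with_env_capture` is an illustrative name, not package API):

```python
import asyncio

ENV_VAR_LOOKUP_START = "_ENV_VAR_LOOKUP_START"
ENV_VAR_LOOKUP_END = "_ENV_VAR_LOOKUP_END"


def env_string_to_dict(env_output: str) -> dict:
    # Parse `env` output ("KEY=value" per line) into a dict; simplified
    # stand-in for primitive.utils.shell.env_string_to_dict.
    env = {}
    for line in env_output.splitlines():
        key, sep, value = line.partition("=")
        if sep:
            env[key] = value
    return env


async def run_with_env_capture(cmd: str, env: dict) -> dict:
    # Wrap the command so the child shell prints its final environment
    # between sentinel markers; the caller harvests it for the next command.
    wrapped = (
        f"{cmd} && echo '{ENV_VAR_LOOKUP_START}' "
        f"&& env && echo '{ENV_VAR_LOOKUP_END}'"
    )
    process = await asyncio.create_subprocess_exec(
        "/bin/bash", "-c", wrapped,
        env=env,
        stdout=asyncio.subprocess.PIPE,
    )
    stdout, _ = await process.communicate()
    text = stdout.decode()
    if ENV_VAR_LOOKUP_START not in text:
        return dict(env)  # command failed; && short-circuited the env dump
    start = text.index(ENV_VAR_LOOKUP_START) + len(ENV_VAR_LOOKUP_START)
    end = text.index(ENV_VAR_LOOKUP_END)
    return env_string_to_dict(text[start:end].strip())
```

One trade-off is visible in the diff itself: because the dump is chained with `&&`, a failing command never prints the markers, and `log_cmd` simply carries the previous `modified_env` forward.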
primitive/cli.py CHANGED
@@ -48,7 +48,7 @@ from .reservations.commands import cli as reservations_commands
  @click.version_option(__version__)
  @click.pass_context
  def cli(context, host, yes, debug, json, verbose):
- """primitive - a CLI tool for https://primitive.design"""
+ """primitive - a CLI tool for https://primitive.tech"""
  context.ensure_object(dict)
  context.obj["YES"] = yes
  context.obj["DEBUG"] = debug
primitive/git/actions.py CHANGED
@@ -1,6 +1,6 @@
  from pathlib import Path
  import shutil
- from subprocess import run, CalledProcessError, DEVNULL
+ from subprocess import run, CalledProcessError

  from gql import gql
  from loguru import logger
@@ -38,8 +38,17 @@ class Git(BaseAction):
  )
  source_dir = Path(destination).joinpath(git_repo_full_name.split("/")[-1])

+ # Clone will throw an exception if the directory already exists
+ if source_dir.exists():
+ shutil.rmtree(path=source_dir)
+
  try:
- run(["git", "clone", url, source_dir, "--no-checkout"], check=True)
+ run(
+ ["git", "clone", url, source_dir, "--no-checkout"],
+ check=True,
+ # stdout=DEVNULL,
+ # stderr=DEVNULL,
+ )
  except CalledProcessError:
  raise Exception("Failed to download repository")

@@ -48,8 +57,8 @@ class Git(BaseAction):
  ["git", "checkout", git_ref],
  check=True,
  cwd=source_dir,
- stdout=DEVNULL,
- stderr=DEVNULL,
+ # stdout=DEVNULL,
+ # stderr=DEVNULL,
  )
  except CalledProcessError:
  # Clean up directory if checkout failed
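The behavioral change in `git/actions.py`: clones now land in a persistent sources cache, so a stale checkout has to be removed before cloning again (`git clone` fails if the target directory already exists), and the `DEVNULL` redirections are commented out rather than deleted, which restores git's console output. A minimal sketch of the clone-fresh pattern (`clone_fresh` is a hypothetical name for illustration):

```python
import shutil
from pathlib import Path
from subprocess import CalledProcessError, run


def clone_fresh(url: str, destination: Path, repo_name: str) -> Path:
    # `git clone` refuses to clone into an existing directory, so remove any
    # stale copy first; this makes repeated runs against the same sources
    # cache idempotent.
    source_dir = destination / repo_name
    if source_dir.exists():
        shutil.rmtree(source_dir)
    try:
        run(["git", "clone", url, str(source_dir), "--no-checkout"], check=True)
    except CalledProcessError:
        raise Exception("Failed to download repository")
    return source_dir
```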
primitive/jobs/actions.py CHANGED
@@ -128,6 +128,30 @@ class Jobs(BaseAction):
  )
  return result

+ @guard
+ async def ajob_run_update(
+ self,
+ id: str,
+ status: str = None,
+ conclusion: str = None,
+ file_ids: Optional[List[str]] = [],
+ ):
+ mutation = gql(job_run_update_mutation)
+ input = {"id": id}
+ if status:
+ input["status"] = status
+ if conclusion:
+ input["conclusion"] = conclusion
+ if file_ids and len(file_ids) > 0:
+ input["files"] = file_ids
+ variables = {"input": input}
+
+ async with self.primitive.session as session:
+ result = await session.execute(
+ mutation, variable_values=variables, get_execution_result=True
+ )
+ return result
+
  @guard
  def github_access_token_for_job_run(self, job_run_id: str):
  query = gql(github_app_token_for_job_run_query)
@@ -160,3 +184,14 @@ class Jobs(BaseAction):
  query, variable_values=variables, get_execution_result=True
  )
  return result
+
+ @guard
+ async def aget_job_status(self, id: str):
+ query = gql(job_run_status_query)
+ variables = {"id": id}
+
+ async with self.primitive.session as session:
+ result = await session.execute(
+ query, variable_values=variables, get_execution_result=True
+ )
+ return result
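The new `ajob_run_update` and `aget_job_status` coroutines mirror their synchronous counterparts so the runner's `monitor_cmd` loop can poll and report without blocking the subprocess streams it supervises. A hedged usage sketch (`cancel_aware_poll` is an illustrative name; it assumes an authenticated client object as constructed elsewhere in the CLI):

```python
import asyncio
import typing

if typing.TYPE_CHECKING:
    import primitive.client


async def cancel_aware_poll(
    primitive: "primitive.client.Primitive", job_run_id: str
) -> bool:
    # Poll the backend until the run finishes; return True if the user
    # cancelled it. Mirrors Runner.monitor_cmd in spirit.
    while True:
        result = await primitive.jobs.aget_job_status(job_run_id)
        job_run = result.data["jobRun"]
        if job_run["status"] == "completed":
            return job_run["conclusion"] == "cancelled"
        await asyncio.sleep(5)
```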
primitive/jobs/graphql/fragments.py CHANGED
@@ -13,6 +13,7 @@ job_run_fragment = """
  fragment JobRunFragment on JobRun {
  id
  pk
+ jobRunNumber
  createdAt
  updatedAt
  completedAt
@@ -31,6 +32,7 @@ fragment JobRunFragment on JobRun {
  containerArgs
  rootDirectory
  parseLogs
+ parseStderr
  failureLevel
  }
  gitCommit {
@@ -45,5 +47,6 @@ job_run_status_fragment = """
  fragment JobRunStatusFragment on JobRun {
  id
  status
+ conclusion
  }
  """
primitive-0.2.3.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: primitive
- Version: 0.2.1
+ Version: 0.2.3
  Project-URL: Documentation, https://github.com//primitivecorp/primitive-cli#readme
  Project-URL: Issues, https://github.com//primitivecorp/primitive-cli/issues
  Project-URL: Source, https://github.com//primitivecorp/primitive-cli
primitive-0.2.3.dist-info/RECORD CHANGED
@@ -1,13 +1,11 @@
- primitive/__about__.py,sha256=35gFZI1DLl6m1EtBovM9NL6lrmA-BCOe4IxZ2XQyJu0,129
+ primitive/__about__.py,sha256=8woIYDZ0kangkX7EciKlVF5rI46fZIDsaNCx1uPNmDM,129
  primitive/__init__.py,sha256=bwKdgggKNVssJFVPfKSxqFMz4IxSr54WWbmiZqTMPNI,106
- primitive/cli.py,sha256=CiI60bG3UZyNFuLTpchr0KeJRG5SALj455Ob11CegGE,2412
+ primitive/cli.py,sha256=58fn6ayVSC1f4hLKx3FUNT9CkuPLva8dFQg0_YUwpio,2410
  primitive/client.py,sha256=PPyIQRvKKSqCF9RRF5mJJ4Vqqolpzy1YXqffNLKIvAA,2390
  primitive/agent/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- primitive/agent/actions.py,sha256=vNe46YvSD5RXZaOa0pXD6b5T9V3pG23TUq-U3c4j_Xg,8487
+ primitive/agent/actions.py,sha256=5Lmqxsf24eH6BDEesbJbn1Xj-8ifVlQ_Y3QR8xpkFtM,6869
  primitive/agent/commands.py,sha256=-dVDilELfkGfbZB7qfEPs77Dm1oT62qJj4tsIk4KoxI,254
- primitive/agent/process.py,sha256=32eoj0W1-LG-9xxeHia-jk9jTah1cnmjCYnvczgXYGU,3538
- primitive/agent/provision.py,sha256=rmwnro1K5F8mwtd45XAq7RVQmpDWnbBCQ8X_qgWhm3M,1546
- primitive/agent/runner.py,sha256=nuRtwEHl41Ic2BM8WhAwuJ2bCrtt7GxjIa9JSw9WTF8,9631
+ primitive/agent/runner.py,sha256=S4fFBnbezUNKd4ma4lMNgIiqXaDF_ro6ga_GzqBj3no,10655
  primitive/agent/uploader.py,sha256=OkgwXhWKoECOJnW_ZmpzmUS_cpb-orC_uebNcmf5byw,2948
  primitive/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  primitive/auth/actions.py,sha256=MPsG9LcKcOPwA7gZ9Ewk0PZJhTQvIrGfODdz4GxSzgA,999
@@ -31,7 +29,7 @@ primitive/files/graphql/fragments.py,sha256=II6WHZjzSqX4IELwdiWokqHTKvDq6mMHF5gp
  primitive/files/graphql/mutations.py,sha256=Da_e6WSp-fsCYVE9A6SGkIQy9WDzjeQycNyHEn7vJqE,935
  primitive/files/graphql/queries.py,sha256=_ky-IRz928sKeSJuqaggTPxV4CGgmho3OyaAFu1z7nw,397
  primitive/git/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- primitive/git/actions.py,sha256=7mOhpujYYXGxjtpjsxH-mle-O1WXv1Vvx0bD6LUDGxY,1745
+ primitive/git/actions.py,sha256=O17HX3eg8yAKzBX_5cMy49qDopU9cxx_5tuOhUAAjsU,2003
  primitive/git/commands.py,sha256=sCeSjkRgSEjCEsB5seXgB_h6xfk0KpvMvzMKoRfUbRA,1177
  primitive/git/graphql/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  primitive/git/graphql/queries.py,sha256=I1HGlqBb1lHIAWVSsC8tVY9JdsQ8DJVqs4nqSTcL30M,98
@@ -48,10 +46,10 @@ primitive/hardware/graphql/fragments.py,sha256=kI6qnTNjaEaUr-C6eD55COphtueVYbYOW
  primitive/hardware/graphql/mutations.py,sha256=_4Hkbfik9Ron4T-meulu6T-9FR_BZjyPNwn745MPksU,1484
  primitive/hardware/graphql/queries.py,sha256=I86uLuOSjHSph11Y5MVCYko5Js7hoiEZ-cEoPTc4J-k,1392
  primitive/jobs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- primitive/jobs/actions.py,sha256=CtyO-Z9614TgIoXJJX1QGsoll0fgpBIjG9PJH5JwCQs,4901
+ primitive/jobs/actions.py,sha256=gnJVlCEiu0O98UG8EGAGlOrGNNQCDWXQBHX8UoSZH84,5959
  primitive/jobs/commands.py,sha256=MxPCkBEYW_eLNqgCRYeyj7ZcLOFAWfpVZlqDR2Y_S0o,830
  primitive/jobs/graphql/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- primitive/jobs/graphql/fragments.py,sha256=GZ_rVc_pc3MfC8EhCY_X9rjeNUdiwEytdqrknHYWO-E,573
+ primitive/jobs/graphql/fragments.py,sha256=1_ZttT7dx36KDC3DClJz9M8LMpsPwXySBygHSiUEcGg,619
  primitive/jobs/graphql/mutations.py,sha256=8ASvCmwQh7cMeeiykOdYaYVryG8FRIuVF6v_J8JJZuw,219
  primitive/jobs/graphql/queries.py,sha256=BrU_GnLjK0bTAmWsLSmGEUea7EM8MqTKxN1Qp6sSjwc,1597
  primitive/organizations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -86,15 +84,12 @@ primitive/utils/cache.py,sha256=FHGmVWYLJFQOazpXXcEwI0YJEZbdkgG39nOLdOv6VNk,1575
  primitive/utils/chunk_size.py,sha256=PAuVuirUTA9oRXyjo1c6MWxo31WVBRkWMuWw-AS58Bw,2914
  primitive/utils/config.py,sha256=DlFM5Nglo22WPtbpZSVtH7NX-PTMaKYlcrUE7GPRG4c,1058
  primitive/utils/exceptions.py,sha256=DrYHTcCAJGC7cCUwOx_FmdlVLWRdpzvDvpLb82heppE,311
- primitive/utils/files.py,sha256=QUa7c4t2PNvKOtyndLAxQMGvDM4cBftSeFh28xprVbM,752
- primitive/utils/git.py,sha256=1qNOu8X-33CavmrD580BmrFhD_WVO9PGWHUUboXJR_g,663
  primitive/utils/memory_size.py,sha256=4xfha21kW82nFvOTtDFx9Jk2ZQoEhkfXii-PGNTpIUk,3058
  primitive/utils/printer.py,sha256=f1XUpqi5dkTL3GWvYRUGlSwtj2IxU1q745T4Fxo7Tn4,370
  primitive/utils/shell.py,sha256=vpjr2Y7UQGYOvPGa6_RYXPPjqScfa9k7kT3tugF9h4Y,1837
  primitive/utils/text.py,sha256=XiESMnlhjQ534xE2hMNf08WehE1SKaYFRNih0MmnK0k,829
- primitive/utils/verible.py,sha256=Zb5NUISvcaIgEvgCDBWr-GCoceMa79Tcwvr5Wl9lfnA,2252
- primitive-0.2.1.dist-info/METADATA,sha256=SkWObx5lg3EFazkPqt6KrYiVItNl7l8IW5FVW5Fkow4,3669
- primitive-0.2.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- primitive-0.2.1.dist-info/entry_points.txt,sha256=p1K8DMCWka5FqLlqP1sPek5Uovy9jq8u51gUsP-z334,48
- primitive-0.2.1.dist-info/licenses/LICENSE.txt,sha256=B8kmQMJ2sxYygjCLBk770uacaMci4mPSoJJ8WoDBY_c,1098
- primitive-0.2.1.dist-info/RECORD,,
+ primitive-0.2.3.dist-info/METADATA,sha256=lt9QA5sglw8QZVEiT5OaO3agzpJDYF1qFeMww-5Spp0,3669
+ primitive-0.2.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ primitive-0.2.3.dist-info/entry_points.txt,sha256=p1K8DMCWka5FqLlqP1sPek5Uovy9jq8u51gUsP-z334,48
+ primitive-0.2.3.dist-info/licenses/LICENSE.txt,sha256=B8kmQMJ2sxYygjCLBk770uacaMci4mPSoJJ8WoDBY_c,1098
+ primitive-0.2.3.dist-info/RECORD,,
primitive/agent/process.py DELETED
@@ -1,125 +0,0 @@
- from typing import List
- from pathlib import Path
- from subprocess import Popen, PIPE
- import shlex
- import glob
- import selectors
- from loguru import logger
- from abc import abstractmethod
-
-
- class Process:
- def __init__(
- self,
- cmd,
- env,
- workdir: str = ".",
- ):
- self.cmd = Process.expand_glob_in_cmd(
- cmd_parts=shlex.split(cmd), workdir=workdir
- )
- self.env = env
- self.workdir = workdir
- self.process = None
- self.stdout_thread = None
- self.stderr_thread = None
- self._errors = 0
- self._warnings = 0
-
- def start(self):
- # Start the process
- self.sel = selectors.DefaultSelector()
- self.process = Popen(
- self.cmd,
- env=self.env,
- cwd=self.workdir,
- stdout=PIPE,
- stderr=PIPE,
- text=True,
- )
-
- self.sel.register(self.process.stdout, selectors.EVENT_READ)
- self.sel.register(self.process.stderr, selectors.EVENT_READ)
-
- def log(self):
- for key, _ in self.sel.select():
- data = key.fileobj.readline()
- if not data:
- continue
-
- if key.fileobj is self.process.stdout:
- raw_data = data.rstrip()
- if "error" in raw_data.lower():
- logger.error(raw_data)
- self._errors += 1
- elif "warning" in raw_data.lower():
- logger.warning(raw_data)
- self._warnings += 1
- else:
- logger.info(raw_data)
- elif key.fileobj is self.process.stderr:
- logger.error(data.rstrip())
- self._errors += 1
-
- def wait(self):
- while True:
- self.log()
- if not self.is_running():
- break
-
- return self.finish()
-
- def run(self):
- """Start and wait for the process."""
- self.start()
- return self.wait()
-
- def is_running(self):
- """Check if the process is still running."""
- return self.process and self.process.poll() is None
-
- def finish(self):
- """Make sure that logging finishes"""
- if self.process:
- self.sel.unregister(self.process.stdout)
- self.sel.unregister(self.process.stderr)
- self.process.stdout.close()
- self.process.stderr.close()
-
- return self.process.poll()
-
- def terminate(self):
- """Terminate the process."""
- if self.process:
- self.process.terminate()
-
- def kill(self):
- """Kill the process."""
- if self.process:
- self.process.kill()
-
- @abstractmethod
- def expand_glob_in_cmd(cmd_parts: List[str], workdir: Path):
- # Characters that indicate a glob pattern
- glob_chars = {"*", "?", "[", "]", "{", "}"}
- expanded_cmd = []
- for part in cmd_parts:
- if any(c in part for c in glob_chars):
- matches = glob.glob(str(workdir / part))
- if matches:
- expanded_cmd.extend(
- [str(Path(match).relative_to(workdir)) for match in matches]
- )
- else:
- expanded_cmd.append(part)
- else:
- expanded_cmd.append(part)
- return expanded_cmd
-
- @property
- def errors(self) -> int:
- return self._errors
-
- @property
- def warnings(self) -> int:
- return self._warnings
1
- import sys
2
- from subprocess import Popen, PIPE
3
- from pathlib import Path
4
- from typing import Dict
5
- from ..utils.shell import env_string_to_dict
6
-
7
-
8
- class ProvisionPython:
9
- def __init__(self, source_dir: Path, requirements_path: Path):
10
- self.source_dir = source_dir
11
- self.requirements_path = requirements_path
12
-
13
- def create_env(self) -> Dict:
14
- cmd = f"{sys.executable} -m ensurepip"
15
- proc = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True, text=True)
16
- proc.wait()
17
-
18
- cmd = f"{sys.executable} -m pip install virtualenv"
19
- proc = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True, text=True)
20
- proc.wait()
21
-
22
- cmd = f"{sys.executable} -m virtualenv venv"
23
- proc = Popen(
24
- cmd, cwd=self.source_dir, stdout=PIPE, stderr=PIPE, shell=True, text=True
25
- )
26
- proc.wait()
27
-
28
- cmd = "source venv/bin/activate && env"
29
- proc = Popen(
30
- cmd, cwd=self.source_dir, stdout=PIPE, stderr=PIPE, shell=True, text=True
31
- )
32
- proc.wait()
33
-
34
- # Read the output and decode it
35
- output, _ = proc.communicate()
36
-
37
- # Split the output into lines and parse it into a dictionary
38
- env_vars = env_string_to_dict(output)
39
-
40
- cmd = f"python -m pip install -r {self.requirements_path}"
41
- proc = Popen(
42
- cmd,
43
- cwd=self.source_dir,
44
- env=env_vars,
45
- stdout=PIPE,
46
- stderr=PIPE,
47
- shell=True,
48
- text=True,
49
- )
50
- proc.wait()
51
-
52
- return env_vars
primitive/utils/files.py DELETED
@@ -1,26 +0,0 @@
- from typing import List, Tuple
- from pathlib import Path
- from loguru import logger
- import os
-
-
- def find_files_for_extension(source: Path, extensions: Tuple[str]) -> List[Path]:
- matching_files = []
- logger.debug(f"Looking for files at {source} with extensions {extensions}")
-
- has_walk = getattr(source, "walk", None)
-
- if has_walk:
- files = source.walk()
- else:
- files = os.walk(source)
-
- for dirpath, dirnames, filenames in files:
- for filename in filenames:
- if filename.endswith(extensions):
- matching_files.append(Path(dirpath).joinpath(filename))
-
- logger.debug(
- f"Found {len(matching_files)} following files that match: {matching_files}"
- )
- return matching_files
primitive/utils/git.py DELETED
@@ -1,15 +0,0 @@
- import os
- from loguru import logger
-
-
- def download_source(github_access_token, git_repository, git_ref) -> None:
- # Download code to current directory
- logger.debug(f"Downloading source code from {git_repository} {git_ref}")
- url = f"https://api.github.com/repos/{git_repository}/tarball/{git_ref}"
- # TODO: switch to subprocess.run or subprocess.Popen
- result = os.system(
- f"curl -s -L -H 'Accept: application/vnd.github+json' -H 'Authorization: Bearer {github_access_token}' -H 'X-GitHub-Api-Version: 2022-11-28' {url} | tar zx --strip-components 1 -C ."
- )
-
- if result != 0:
- raise Exception("Failed to import repository.")
primitive/utils/verible.py DELETED
@@ -1,57 +0,0 @@
- import tarfile
-
- import requests
- from loguru import logger
-
- from .cache import get_deps_cache
- from .shell import add_path_to_shell
-
- VERIBLE_MAC_OS_LINK = "https://github.com/chipsalliance/verible/releases/download/v0.0-3752-g8b64887e/verible-v0.0-3752-g8b64887e-macOS.tar.gz"
- VERIBLE_WINDOWS_64_OS_LINK = "https://github.com/chipsalliance/verible/releases/download/v0.0-3752-g8b64887e/verible-v0.0-3752-g8b64887e-win64.zip"
- VERIBLE_LINUX_X86_64_OS_LINK = "https://github.com/chipsalliance/verible/releases/download/v0.0-3752-g8b64887e/verible-v0.0-3752-g8b64887e-linux-static-x86_64.tar.gz"
- VERIBLE_LINUX_ARM64_LINK = "https://github.com/chipsalliance/verible/releases/download/v0.0-3752-g8b64887e/verible-v0.0-3752-g8b64887e-linux-static-arm64.tar.gz"
-
-
- def install_verible(system_info: dict) -> str:
- url = None
- if system_info.get("os_family") == "Darwin":
- url = VERIBLE_MAC_OS_LINK
- elif system_info.get("os_family") == "Windows":
- url = VERIBLE_WINDOWS_64_OS_LINK
- elif system_info.get("processor") == "x86_64":
- url = VERIBLE_LINUX_X86_64_OS_LINK
- elif system_info.get("processor") == "arm":
- url = VERIBLE_LINUX_X86_64_OS_LINK
-
- deps_cache = get_deps_cache()
-
- verible_dir_name = url.split("/")[-1].split(".tar.gz")[0]
- file_download_path = deps_cache / f"{verible_dir_name}.tar.gz"
-
- logger.debug("Downloading verible")
- response = requests.get(url, stream=True)
- if response.status_code == 200:
- with open(file_download_path, "wb") as file:
- file.write(response.raw.read())
- else:
- raise Exception(
- f"Failed to download verible. {response.status_code}. {response.text}"
- )
-
- logger.debug("Untaring verible")
- with tarfile.open(file_download_path) as tar:
- tar.extractall(deps_cache)
-
- logger.debug("Deleting tar.gz artifact")
- file_download_path.unlink()
-
- unpacked_verible_dir_name = verible_dir_name
- if "linux" in unpacked_verible_dir_name:
- unpacked_verible_dir_name = unpacked_verible_dir_name.split("-linux")[0]
-
- verible_bin = deps_cache.joinpath(unpacked_verible_dir_name).joinpath("bin")
-
- logger.debug("Adding verible to PATH")
- add_path_to_shell(verible_bin)
-
- return verible_bin