primitive-0.1.39-py3-none-any.whl → primitive-0.1.41-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
primitive/__about__.py CHANGED
@@ -1,4 +1,4 @@
  # SPDX-FileCopyrightText: 2024-present Dylan Stein <dylan@primitive.tech>
  #
  # SPDX-License-Identifier: MIT
- __version__ = "0.1.39"
+ __version__ = "0.1.41"
primitive/agent/actions.py CHANGED
@@ -1,18 +1,12 @@
  import sys
  import shutil
- from pathlib import Path
  from time import sleep
  from primitive.utils.actions import BaseAction
  from loguru import logger
  from primitive.__about__ import __version__
- import yaml
- from ..utils.yaml import generate_script_from_yaml
  from ..utils.cache import get_sources_cache
-
- try:
-     from yaml import CLoader as Loader
- except ImportError:
-     from yaml import Loader
+ from .runner import AgentRunner
+ from .uploader import Uploader


  class Agent(BaseAction):
@@ -26,6 +20,9 @@ class Agent(BaseAction):
          # Create cache dir if it doesnt exist
          cache_dir = get_sources_cache()

+         # Create uploader
+         uploader = Uploader(primitive=self.primitive)
+
          # self.primitive.hardware.update_hardware_system_info()
          try:
              self.primitive.hardware.check_in_http(is_available=True, is_online=True)
@@ -38,6 +35,9 @@ class Agent(BaseAction):
          active_reservation_pk = None

          while True:
+             logger.debug("Scanning for files to upload...")
+             uploader.scan()
+
              hardware = self.primitive.hardware.get_own_hardware_details()
              if hardware["activeReservation"]:
                  if (
@@ -119,81 +119,15 @@ class Agent(BaseAction):
                          job_run["jobSettings"]["rootDirectory"]
                      )

-                 cmd = ("make",)
-                 if containerArgs := job_run["jobSettings"]["containerArgs"]:
-                     cmd = tuple(containerArgs.split(" "))
-
-                 # Load config and generate bash script
-                 yaml_config_path = Path(source_dir / "primitive.yaml")
-                 run_script_path = None
-                 if yaml_config_path.exists() and yaml_config_path.is_file():
-                     yaml_config = yaml.load(
-                         open(yaml_config_path, "r"), Loader=Loader
-                     )
+                 runner = AgentRunner(
+                     self.primitive,
+                     source_dir=source_dir,
+                     job_id=job_run["id"],
+                     job_slug=job_run["job"]["slug"],
+                 )

-                     job_slug = job_run["job"]["slug"]
-                     if job_slug in yaml_config:
-                         run_script_path = generate_script_from_yaml(
-                             yaml_config,
-                             slug=job_slug,
-                             destination=source_dir,
-                         )
-                         cmd = (
-                             "/bin/bash",
-                             str(run_script_path.resolve()),
-                         )
-
-                 match job_run["job"]["slug"]:
-                     case "lint":
-                         logger.debug("Executing Lint Job")
-
-                         self.primitive.jobs.job_run_update(
-                             job_run["id"], status="request_in_progress"
-                         )
-
-                         result, message = self.primitive.lint.execute(
-                             source=source_dir
-                         )
-                         if result:
-                             conclusion = "success"
-                         else:
-                             conclusion = "failure"
-                         self.primitive.jobs.job_run_update(
-                             job_run["id"],
-                             status="request_completed",
-                             conclusion=conclusion,
-                             stdout=message,
-                         )
-
-                         logger.debug("Lint Job Completed")
-                     case "sim":
-                         logger.debug("Executing Sim Job")
-
-                         self.primitive.jobs.job_run_update(
-                             job_run["id"], status="request_in_progress"
-                         )
-
-                         result, message = self.primitive.sim.execute(
-                             source=source_dir, cmd=cmd
-                         )
-
-                         # Attempt artifact collection
-                         self.primitive.sim.collect_artifacts(
-                             source=source_dir, job_run_id=job_run["id"]
-                         )
-
-                         if result:
-                             conclusion = "success"
-                         else:
-                             conclusion = "failure"
-                         self.primitive.jobs.job_run_update(
-                             job_run["id"],
-                             status="request_completed",
-                             conclusion=conclusion,
-                             stdout=message,
-                         )
-
-                         logger.debug("Sim Job Completed")
+                 # Execute job
+                 runner.execute()

                  # Clean up
                  shutil.rmtree(path=downloaded_git_repository_dir)
primitive/agent/process.py ADDED
@@ -0,0 +1,84 @@
+ from subprocess import Popen, PIPE
+ import threading
+ import shlex
+ from loguru import logger
+
+
+ class Process:
+     def __init__(
+         self,
+         cmd,
+         workdir: str = ".",
+     ):
+         self.cmd = shlex.split(cmd)
+         self.workdir = workdir
+         self.process = None
+         self.stdout_thread = None
+         self.stderr_thread = None
+         self._errors = 0
+
+     def start(self):
+         # Start the process
+         self.process = Popen(
+             self.cmd, cwd=self.workdir, stdout=PIPE, stderr=PIPE, text=True
+         )
+
+         # Function to read and log output from a pipe
+         def log_output(pipe, level):
+             for line in iter(pipe.readline, ""):
+                 if line:
+                     logger.log(level, line.rstrip())
+                     if level == "ERROR":
+                         self._errors += 1
+
+             pipe.close()
+
+         # Create threads for stdout and stderr
+         self.stdout_thread = threading.Thread(
+             target=log_output, args=(self.process.stdout, "INFO")
+         )
+         self.stderr_thread = threading.Thread(
+             target=log_output, args=(self.process.stderr, "ERROR")
+         )
+
+         # Start the threads
+         self.stdout_thread.start()
+         self.stderr_thread.start()
+
+     def wait(self):
+         if self.process:
+             # Wait for the process to complete
+             self.process.wait()
+             # Wait for the threads to finish reading output
+             self.stdout_thread.join()
+             self.stderr_thread.join()
+
+     def run(self):
+         """Start and wait for the process."""
+         self.start()
+         self.wait()
+
+     def is_running(self):
+         """Check if the process is still running."""
+         return self.process and self.process.poll() is None
+
+     def finish(self):
+         """Make sure that logging finishes"""
+         self.stderr_thread.join()
+         self.stderr_thread.join()
+
+         return self.process.poll()
+
+     def terminate(self):
+         """Terminate the process."""
+         if self.process:
+             self.process.terminate()
+
+     def kill(self):
+         """Kill the process."""
+         if self.process:
+             self.process.kill()
+
+     @property
+     def errors(self) -> int:
+         return self._errors
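
For context, the new AgentRunner (added in the next file) drives this Process helper roughly as follows. This is an illustrative sketch only, not part of the diff; the command string and workdir are hypothetical.

    # Illustrative sketch: how runner.py uses Process (cmd and workdir are hypothetical).
    from time import sleep
    from primitive.agent.process import Process

    proc = Process("make test", workdir=".")     # cmd is split with shlex.split() internally
    proc.start()                                 # spawn subprocess + stdout/stderr log threads
    while proc.is_running():                     # poll while the step runs
        sleep(1)
    returncode = proc.finish()                   # join logging threads, return the exit code
    failed = returncode != 0 or proc.errors > 0  # errors counts lines logged at ERROR level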
primitive/agent/runner.py ADDED
@@ -0,0 +1,160 @@
+ import yaml
+ import sys
+ import typing
+ from time import sleep
+ from typing import TypedDict, Iterable, List
+ from pathlib import Path, PurePath
+ from loguru import logger
+ from .process import Process
+ from ..utils.cache import get_artifacts_cache
+ from ..utils.files import find_files_for_extension
+
+ try:
+     from yaml import CLoader as Loader
+ except ImportError:
+     from yaml import Loader
+
+ if typing.TYPE_CHECKING:
+     import primitive.client
+
+
+ class Artifact(TypedDict):
+     name: str
+     extension: str
+
+
+ class JobStep(TypedDict):
+     name: str
+     workdir: str
+     artifacts: List[Artifact]
+     cmd: str
+
+
+ class JobDescription(TypedDict):
+     name: str
+     steps: List[JobStep]
+
+
+ class AgentRunner:
+     def __init__(
+         self,
+         primitive: "primitive.client.Primitive",
+         source_dir: Path,
+         job_id: str,
+         job_slug: str,
+         max_log_size: int = 10 * 1024 * 1024,
+     ) -> None:
+         self.primitive = primitive
+         self.source_dir = source_dir
+         self.job_id = job_id
+         self.job_slug = job_slug
+         self.max_log_size = max_log_size
+         self.artifacts_dir = get_artifacts_cache(self.job_id)
+
+         logger.enable("primitive")
+         self.logger_handle = logger.add(
+             Path(self.artifacts_dir / "runner_{time}.log"),
+             rotation=self.max_log_size, # Rotate when the log file reaches 10MB
+         )
+
+         logger.info(f"Scanning directory for job {self.job_slug}")
+
+         # Look for job based on slug
+         yaml_file = Path(self.source_dir / ".primitive" / f"{self.job_slug}.yaml")
+         yml_file = Path(self.source_dir / ".primitive" / f"{self.job_slug}.yml")
+
+         if yaml_file.exists() and yml_file.exists():
+             logger.error(
+                 f"Found two job descriptions with the same slug: {self.job_slug}"
+             )
+             sys.exit(1)
+
+         if yaml_file.exists():
+             self.job = yaml.load(open(yaml_file, "r"), Loader=Loader)
+         elif yml_file.exists():
+             self.job = yaml.load(open(yml_file, "r"), Loader=Loader)
+         else:
+             logger.error(
+                 f"No job description with matching slug '{self.job_slug}' found"
+             )
+             sys.exit(1)
+
+         logger.info(f"Found job description for {self.job_slug}")
+
+     def name(self) -> str:
+         return self.job["name"]
+
+     def steps(self) -> Iterable[JobStep]:
+         for step in self.job["steps"]:
+             yield step
+
+     def execute(self) -> None:
+         logger.info(f"Executing {self.job_slug} job")
+         self.primitive.jobs.job_run_update(self.job_id, status="request_in_progress")
+
+         conclusion = None
+         total_errors = 0
+         for step in self.steps():
+             logger.info(f"Beginning step {step["name"]}")
+
+             # Define step proc
+             proc = Process(step["cmd"], workdir=Path(self.source_dir / step["workdir"]))
+
+             # Try to start
+             try:
+                 proc.start()
+             except Exception as e:
+                 logger.error(f"Error while attempting to run command {e}")
+                 conclusion = "failure"
+                 break
+
+             # Check for updates to status while running
+             while proc.is_running():
+                 status = self.primitive.jobs.get_job_status(self.job_id)
+
+                 logger.info(f"Step status: {status}")
+
+                 sleep(5)
+
+             result = proc.finish()
+             total_errors += proc.errors
+
+             # Collect artifacts
+             self.collect_artifacts(step)
+
+             # Check if we have a good result
+             if not result:
+                 conclusion = "failure"
+                 break
+
+         if not conclusion and total_errors == 0:
+             conclusion = "success"
+         else:
+             conclusion = "failure"
+
+         self.primitive.jobs.job_run_update(
+             self.job_id, status="request_completed", conclusion=conclusion
+         )
+
+         logger.info(f"Completed {self.job_slug} job")
+         logger.remove(self.logger_handle)
+
+     def collect_artifacts(self, step: JobStep) -> None:
+         # str(PurePath(file_path).relative_to(Path(source))
+
+         # Search each artifact type
+         for artifact in step["artifacts"]:
+             files = find_files_for_extension(self.source_dir, artifact["extension"])
+
+             for file in files:
+                 # Find path relative to source_dir
+                 relative_path = PurePath(file).relative_to(self.source_dir)
+
+                 # Construct destination to preserve directory structure
+                 destination = Path(self.artifacts_dir / relative_path)
+
+                 # Create directories if they don't exist
+                 destination.parent.mkdir(parents=True, exist_ok=True)
+
+                 # Move file
+                 file.rename(destination)
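
Based on the TypedDicts above, the job description loaded from .primitive/<job_slug>.yaml is expected to deserialize into a structure like the following. This is a hypothetical example added for clarity; every name, command, and extension in it is illustrative, not taken from the package.

    # Hypothetical example: the dict yaml.load() should yield for .primitive/<job_slug>.yaml.
    job = {
        "name": "example",
        "steps": [
            {
                "name": "build",
                "workdir": ".",
                "cmd": "make build",  # run through Process (shlex-split)
                "artifacts": [
                    {"name": "waves", "extension": "vcd"},  # gathered by collect_artifacts()
                ],
            },
        ],
    }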
primitive/agent/uploader.py ADDED
@@ -0,0 +1,46 @@
+ import typing
+ import shutil
+ from pathlib import Path, PurePath
+ from ..utils.cache import get_artifacts_cache
+
+ if typing.TYPE_CHECKING:
+     import primitive.client
+
+
+ class Uploader:
+     def __init__(
+         self,
+         primitive: "primitive.client.Primitive",
+     ):
+         self.primitive = primitive
+
+     def upload_file(self, path: Path, prefix: str) -> str:
+         file_upload_response = self.primitive.files.file_upload(path, key_prefix=prefix)
+         return file_upload_response.json()["data"]["fileUpload"]["id"]
+
+     def scan(self) -> None:
+         # Scan artifacts directory
+         artifacts_dir = get_artifacts_cache()
+
+         subdirs = [
+             job_cache for job_cache in artifacts_dir.iterdir() if job_cache.is_dir()
+         ]
+
+         for job_cache in subdirs:
+             job_run_id = job_cache.name
+             files = [file for file in job_cache.rglob("*") if file.is_file()]
+
+             file_ids = []
+             for file in files:
+                 file_ids.append(
+                     self.upload_file(
+                         file,
+                         prefix=str(PurePath(file).relative_to(job_cache.parent).parent),
+                     )
+                 )
+
+             # Update job run
+             self.primitive.jobs.job_run_update(id=job_run_id, file_ids=file_ids)
+
+             # Clean up job cache
+             shutil.rmtree(path=job_cache)
primitive/auth/actions.py CHANGED
@@ -1,12 +1,13 @@
  from gql import gql

  from ..utils.config import read_config_file, update_config_file
-
+ from ..utils.auth import guard

  from primitive.utils.actions import BaseAction


  class Auth(BaseAction):
+     @guard
      def whoami(self):
          query = gql(
              """
primitive/client.py CHANGED
@@ -1,7 +1,6 @@
  import sys
  from .auth.actions import Auth
  from .projects.actions import Projects
- from .graphql.sdk import create_session
  from .utils.config import read_config_file
  from .files.actions import Files
  from .sim.actions import Sim
@@ -28,6 +27,7 @@ class Primitive:
          transport: str = None,
      ) -> None:
          self.host = host
+         self.session = None
          self.DEBUG = DEBUG
          self.JSON = JSON

@@ -42,20 +42,16 @@ class Primitive:
              diagnose=True,
          )

-         host = self.host
-         fingerprint = None
+         # Generate full or partial host config
          if not token and not transport:
-             self.get_host_config()
-             token = self.host_config.get("token")
-             transport = self.host_config.get("transport")
-             fingerprint = self.host_config.get("fingerprint")
+             # Attempt to build host config from file
+             try:
+                 self.get_host_config()
+             except KeyError:
+                 self.host_config = {}
          else:
              self.host_config = {"username": "", "token": token, "transport": transport}

-         self.session = create_session(
-             host=self.host, token=token, transport=transport, fingerprint=fingerprint
-         )
-
          self.auth: Auth = Auth(self)
          self.organizations: Organizations = Organizations(self)
          self.projects: Projects = Projects(self)
primitive/files/actions.py CHANGED
@@ -1,12 +1,13 @@
  from pathlib import Path
  from gql import gql
  from primitive.graphql.sdk import create_requests_session
-
+ from ..utils.auth import guard

  from primitive.utils.actions import BaseAction


  class Files(BaseAction):
+     @guard
      def trace_create(
          self,
          file_id: str,
@@ -41,6 +42,7 @@ class Files(BaseAction):
          result = self.primitive.session.execute(mutation, variable_values=variables)
          return result

+     @guard
      def file_upload(self, path: Path, is_public: bool = False, key_prefix: str = ""):
          file_path = str(path.resolve())
          if path.exists() is False:
primitive/git/actions.py CHANGED
@@ -1,10 +1,12 @@
  from pathlib import Path
  from primitive.utils.actions import BaseAction
+ from ..utils.auth import guard
  from loguru import logger
  import os


  class Git(BaseAction):
+     @guard
      def get_github_access_token(self) -> str:
          query = """
          query githubAppToken{
primitive/hardware/actions.py CHANGED
@@ -11,6 +11,7 @@ from primitive.utils.memory_size import MemorySize
  from gql import gql
  from aiohttp import client_exceptions
  from ..utils.config import update_config_file
+ from ..utils.auth import guard

  import typing

@@ -265,6 +266,7 @@ class Hardware(BaseAction):
          system_info["gpu_config"] = self._get_gpu_config()
          return system_info

+     @guard
      def register(self):
          system_info = self.get_system_info()
          mutation = gql(
@@ -310,6 +312,7 @@ class Hardware(BaseAction):
          self.check_in_http(is_healthy=True)
          return True

+     @guard
      def update_hardware_system_info(self):
          """
          Updates hardware system information and returns the GraphQL response.
@@ -366,6 +369,7 @@ class Hardware(BaseAction):

          return result

+     @guard
      def check_in_http(
          self,
          is_healthy: bool = True,
@@ -454,6 +458,7 @@ class Hardware(BaseAction):
              logger.error(message)
              raise exception

+     @guard
      def get_hardware_list(self, fingerprint: Optional[str] = None):
          query = gql(
              """
primitive/jobs/actions.py CHANGED
@@ -3,9 +3,11 @@ from gql import gql


  from primitive.utils.actions import BaseAction
+ from ..utils.auth import guard


  class Jobs(BaseAction):
+     @guard
      def get_jobs(
          self,
          organization_id: Optional[str] = None,
@@ -117,7 +119,6 @@ fragment JobRunFragment on JobRun {
    startedAt
    status
    conclusion
-   stdout
    job {
      id
      pk
@@ -196,6 +197,7 @@ query jobRuns(
          result = self.primitive.session.execute(query, variable_values=variables)
          return result

+     @guard
      def get_job_run(self, id: str):
          query = gql(
              """
@@ -208,7 +210,6 @@ query jobRuns(
    startedAt
    status
    conclusion
-   stdout
    job {
      id
      pk
@@ -239,12 +240,12 @@ query jobRuns(
          result = self.primitive.session.execute(query, variable_values=variables)
          return result

+     @guard
      def job_run_update(
          self,
          id: str,
          status: str = None,
          conclusion: str = None,
-         stdout: str = None,
          file_ids: Optional[List[str]] = [],
      ):
          mutation = gql(
@@ -267,12 +268,11 @@ query jobRuns(
              input["conclusion"] = conclusion
          if file_ids and len(file_ids) > 0:
              input["files"] = file_ids
-         if stdout:
-             input["stdout"] = stdout
          variables = {"input": input}
          result = self.primitive.session.execute(mutation, variable_values=variables)
          return result

+     @guard
      def github_access_token_for_job_run(self, job_run_id: str):
          query = gql(
              """
@@ -309,3 +309,23 @@ query ghAppTokenForJobRun($jobRunId: GlobalID!) {
              raise ValueError("job_slug or job_id is required")

          self.get_jobs(slug=job_slug)
+
+     @guard
+     def get_job_status(self, id: str):
+         query = gql(
+             """
+ fragment JobRunFragment on JobRun {
+   id
+   status
+ }
+
+ query jobRun($id: GlobalID!) {
+   jobRun(id: $id) {
+     ...JobRunFragment
+   }
+ }
+ """
+         )
+         variables = {"id": id}
+         result = self.primitive.session.execute(query, variable_values=variables)
+         return result
primitive/organizations/actions.py CHANGED
@@ -3,9 +3,11 @@ from gql import gql


  from primitive.utils.actions import BaseAction
+ from ..utils.auth import guard


  class Organizations(BaseAction):
+     @guard
      def get_organizations(
          self,
          organization_id: Optional[str] = None,
primitive/projects/actions.py CHANGED
@@ -2,9 +2,11 @@ from gql import gql

  from typing import Optional
  from primitive.utils.actions import BaseAction
+ from ..utils.auth import guard


  class Projects(BaseAction):
+     @guard
      def get_projects(
          self,
          organization_id: Optional[str] = None,
primitive/utils/auth.py ADDED
@@ -0,0 +1,27 @@
+ from ..graphql.sdk import create_session
+ import sys
+
+
+ def guard(func):
+     def wrapper(self, *args, **kwargs):
+         if self.primitive.session is None:
+             token = self.primitive.host_config.get("token")
+             transport = self.primitive.host_config.get("transport")
+             fingerprint = self.primitive.host_config.get("fingerprint")
+
+             if not token or not transport:
+                 print(
+                     "CLI is not configured. Run primitive config to add an auth token."
+                 )
+                 sys.exit(1)
+
+             self.primitive.session = create_session(
+                 host=self.primitive.host,
+                 token=token,
+                 transport=transport,
+                 fingerprint=fingerprint,
+             )
+
+         return func(self, *args, **kwargs)
+
+     return wrapper
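
In effect, this moves GraphQL session creation out of Primitive.__init__ (see the client.py hunk above) and into the first guarded call. A rough sketch of the resulting flow, with a hypothetical host value:

    # Rough sketch only: the session is now created lazily by @guard.
    # The host value below is hypothetical.
    from primitive.client import Primitive

    client = Primitive(host="api.example.com")  # no session is built at construction time
    client.auth.whoami()                        # @guard builds client.session from host_config here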
primitive/utils/cache.py CHANGED
@@ -30,6 +30,20 @@ def get_sources_cache() -> Path:
      return sources_dir


+ def get_artifacts_cache(cache_id: str = None) -> Path:
+     cache_dir = get_cache_dir()
+
+     artifacts_dir = cache_dir / "artifacts"
+
+     if cache_id:
+         artifacts_dir = artifacts_dir / cache_id
+
+     if not artifacts_dir.exists():
+         artifacts_dir.mkdir(parents=True, exist_ok=True)
+
+     return artifacts_dir
+
+
  def get_deps_cache() -> Path:
      cache_dir = get_cache_dir()

{primitive-0.1.39.dist-info → primitive-0.1.41.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: primitive
- Version: 0.1.39
+ Version: 0.1.41
  Project-URL: Documentation, https://github.com//primitivecorp/primitive-cli#readme
  Project-URL: Issues, https://github.com//primitivecorp/primitive-cli/issues
  Project-URL: Source, https://github.com//primitivecorp/primitive-cli
{primitive-0.1.39.dist-info → primitive-0.1.41.dist-info}/RECORD RENAMED
@@ -1,39 +1,43 @@
- primitive/__about__.py,sha256=_BMdl-E4C928TL5DMNPwXGZXZshwzVn9FoCKsCYkjoM,130
+ primitive/__about__.py,sha256=PsY-PpGOI_DmkKtzrfzfMfczkk5f26JZN38IjwLvkE8,130
  primitive/__init__.py,sha256=bwKdgggKNVssJFVPfKSxqFMz4IxSr54WWbmiZqTMPNI,106
  primitive/cli.py,sha256=VQPSewC6ouGdEG9W1gllawGJTydpOY0Lzg7LURXcqQg,2374
- primitive/client.py,sha256=SFPG4H2wJao8euGdnYp-l7dk_fDpWeVn2aT2WNJUAqo,2370
- primitive/agent/actions.py,sha256=cxavCfRG76KJ6M-FVwg3htMbJLz16gUjy30h-vsJ7A0,8468
+ primitive/client.py,sha256=vSJkifx450czuLvu0f2o-viSCC0p2f1UicA-2P5cJAw,2188
+ primitive/agent/actions.py,sha256=d_garPm3rl5uMEu-g681yMYEnBqfUDeEPeeKl_VEd64,5489
  primitive/agent/commands.py,sha256=-dVDilELfkGfbZB7qfEPs77Dm1oT62qJj4tsIk4KoxI,254
+ primitive/agent/process.py,sha256=hYjjZL1DoUUVDJmORewYfLhWptLX_4e47DVW1i0PrKw,2252
+ primitive/agent/runner.py,sha256=w_HlIUTIwRsXJCsZT4u5P5XsapXRn8hLOQ1SAGSQzFw,4797
+ primitive/agent/uploader.py,sha256=ngbynmQzxKVNMp7VWkTWW7vZIN_rnnRzYC6OAL21n1k,1378
  primitive/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- primitive/auth/actions.py,sha256=N2bGcwXNsB89pzs66gF9A5_WzUScY5fhfOyWixqo2y8,1054
+ primitive/auth/actions.py,sha256=2vZEC3LLAXj6eBBmt_2OEDEcBIb3uLkkNjgbZTIaQsY,1095
  primitive/auth/commands.py,sha256=JahUq0E2e7Xa-FX1WEUv7TgM6ieDvNH4VwRRtxAW7HE,2340
  primitive/daemons/actions.py,sha256=Nt3yNtbBhen0jK4sRsH_N7AP3UBuyL48VaUhtC7wYq8,2015
  primitive/daemons/commands.py,sha256=-Muh-6ib4uAVtPn_67AcMrDwuCwYlCnRQozCi2Xurmk,1726
  primitive/daemons/launch_agents.py,sha256=qovt32gwpjGDd82z_SY5EGCUjaUyNA49pZFajZsw3eE,4796
  primitive/daemons/launch_service.py,sha256=FPB9qKEjhllRfEpct0ng2L9lpIaGJbQwn1JdFT8uBA8,5600
- primitive/files/actions.py,sha256=f4JN3QFB2WXw-0JpnE-4-movnqtvXIpCrGd_9pdkeW4,2624
+ primitive/files/actions.py,sha256=2X1_rq8A7X1Nf0q9xyAI9ziMkow_VKARPfdnsAoZh9w,2676
  primitive/files/commands.py,sha256=DDizo3xJnU3KLUBTMeeM72viVpnJinLwxs84tmqKhqo,810
  primitive/git/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- primitive/git/actions.py,sha256=fepcl5529w_hsaC6fBw9f-QHeyqNjGXz8HI5ebzbZMs,1386
+ primitive/git/actions.py,sha256=OTuGRXfjop9u1LtNaBen87mEU6ROeoa_MDgBbB2l6ig,1428
  primitive/git/commands.py,sha256=64B2STTOn0dwVDmJHqEwekmIqKMfSyBBFwKg29Wt8Aw,1230
  primitive/graphql/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  primitive/graphql/sdk.py,sha256=BhCGmDtc4sNnH8CxbQSJyFwOZ-ZSqMtjsxMB3JRBhPw,1456
- primitive/hardware/actions.py,sha256=JJXEeW35QzVGcLN4ym5gYZwY71hxLzM1GYPXWaObEts,18893
+ primitive/hardware/actions.py,sha256=QwVElFCrGbA27qagapHRxVpf0vkpKu6bNcqCQyYpQkY,18968
  primitive/hardware/commands.py,sha256=QE7LLeFdfOqlvz3JwdwJJRZAY3fHI1zB9kYmmDajpq0,1477
- primitive/jobs/actions.py,sha256=LpUJN-GOiDSehQANfV_F86eRRKPZ2ew9skKdnlalXU4,7734
+ primitive/jobs/actions.py,sha256=xg5-ALJEAtculnUIp-avYYJ5hONkl9fWFL83orAs6IQ,8194
  primitive/jobs/commands.py,sha256=MxPCkBEYW_eLNqgCRYeyj7ZcLOFAWfpVZlqDR2Y_S0o,830
  primitive/lint/actions.py,sha256=tWsrht1dowGprcZjEUtjCJzozEQmh9sv2_C2__YHIOI,2825
  primitive/lint/commands.py,sha256=3CZvkOEMpJspJWmaQzA5bpPKx0_VCijQIXA9l-eTnZE,487
- primitive/organizations/actions.py,sha256=e0V4E1UK1IcBJsWWH6alHYUmArhzPrBqZ8WkHPIcLq0,2268
+ primitive/organizations/actions.py,sha256=fNVR0qx9opzMpzGn6lHBy5BqLhIwid1ZepxdtpelZwo,2310
  primitive/organizations/commands.py,sha256=_dwgVEJCqMa5VgB_7P1wLPFc0AuT1p9dtyR9JRr4kpw,487
  primitive/projects/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- primitive/projects/actions.py,sha256=xhebDUMN9DXWvngWJyJkiijghbZwffy-JIPSsOg8agE,2061
+ primitive/projects/actions.py,sha256=wkQRMbMWRca0wnMKv6Cqh4t6BRCwIorS5ZpS7rMlSbU,2103
  primitive/projects/commands.py,sha256=Fqqgpi4cm6zOgkHK--0F0hiiIj32BmgZ-h1MydmWwdE,464
  primitive/sim/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  primitive/sim/actions.py,sha256=MkhiAH5QvF2jbnpgWL29HeSrTYiSW0UJpql2wZ4q18w,4900
  primitive/sim/commands.py,sha256=8PaOfL1MO6qxTn7mNVRnBU1X2wa3gk_mlbAhBW6MnI0,591
  primitive/utils/actions.py,sha256=HOFrmM3-0A_A3NS84MqrZ6JmQEiiPSoDqEeuu6b_qfQ,196
- primitive/utils/cache.py,sha256=rzMhqDnP8QIzRidF7iyYA5rBGRLEu0tjX0POX0E1xFw,975
+ primitive/utils/auth.py,sha256=TtJKTR6tLmNrtWbOjJI-KJh4ZSJ1uG7ApE9GcY63m00,836
+ primitive/utils/cache.py,sha256=hDVpEL2TePrWOH6q7Me_Oi-DH_viFrRxrVta-z4wBhM,1295
  primitive/utils/config.py,sha256=DlFM5Nglo22WPtbpZSVtH7NX-PTMaKYlcrUE7GPRG4c,1058
  primitive/utils/files.py,sha256=Yv__bQes3YIlzhOT9kVxtYhoA5CmUjPSvphl9PZ41k4,867
  primitive/utils/git.py,sha256=1qNOu8X-33CavmrD580BmrFhD_WVO9PGWHUUboXJR_g,663
@@ -41,9 +45,8 @@ primitive/utils/memory_size.py,sha256=4xfha21kW82nFvOTtDFx9Jk2ZQoEhkfXii-PGNTpIU
  primitive/utils/printer.py,sha256=f1XUpqi5dkTL3GWvYRUGlSwtj2IxU1q745T4Fxo7Tn4,370
  primitive/utils/shell.py,sha256=-7UjQaBqSGHzEEyX8pNjeYFFP0P3lVnDV0OkgPz1qHU,1050
  primitive/utils/verible.py,sha256=r7c_hfqvL0UicMmIzK3Cy_BfZI1ZpcfBeLqKEWFWqJo,2252
- primitive/utils/yaml.py,sha256=4UP_9MXHoNb9_SCeUDm9xqYg9sHltqpVhNgsY6GNfb8,527
- primitive-0.1.39.dist-info/METADATA,sha256=kANNdscAcTCkcY2ZAAnLS-ovI3wQwUhAEIvUZu9gYtc,3782
- primitive-0.1.39.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
- primitive-0.1.39.dist-info/entry_points.txt,sha256=p1K8DMCWka5FqLlqP1sPek5Uovy9jq8u51gUsP-z334,48
- primitive-0.1.39.dist-info/licenses/LICENSE.txt,sha256=B8kmQMJ2sxYygjCLBk770uacaMci4mPSoJJ8WoDBY_c,1098
- primitive-0.1.39.dist-info/RECORD,,
+ primitive-0.1.41.dist-info/METADATA,sha256=GKvfSp-prhQ5_ZpKOR2_NOcBy587P_PdJLGUVi8N89U,3782
+ primitive-0.1.41.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
+ primitive-0.1.41.dist-info/entry_points.txt,sha256=p1K8DMCWka5FqLlqP1sPek5Uovy9jq8u51gUsP-z334,48
+ primitive-0.1.41.dist-info/licenses/LICENSE.txt,sha256=B8kmQMJ2sxYygjCLBk770uacaMci4mPSoJJ8WoDBY_c,1098
+ primitive-0.1.41.dist-info/RECORD,,
primitive/utils/yaml.py DELETED
@@ -1,23 +0,0 @@
1
- from pathlib import Path
2
-
3
-
4
- def generate_script_from_yaml(yaml_config: dict, slug: str, destination: Path) -> None:
5
- commands_blocks = []
6
- if steps := yaml_config[slug]["steps"]:
7
- for step in steps:
8
- commands_blocks.append(step["run"])
9
-
10
- script = f"""
11
- #!/bin/bash
12
-
13
- {"".join(commands_blocks)}
14
- """
15
-
16
- output_path = Path(destination / "run.sh")
17
- with open(output_path, "w") as f:
18
- f.write(script)
19
-
20
- # Apply execute file permissions
21
- output_path.chmod(0o744)
22
-
23
- return output_path