primitive-0.1.21-py3-none-any.whl → primitive-0.1.23-py3-none-any.whl

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registry.
primitive/__about__.py CHANGED
@@ -1,4 +1,4 @@
  # SPDX-FileCopyrightText: 2024-present Dylan Stein <dylan@steins.studio>
  #
  # SPDX-License-Identifier: MIT
- __version__ = "0.1.21"
+ __version__ = "0.1.23"
primitive/agent/actions.py CHANGED
@@ -1,11 +1,36 @@
+ import platform
  import sys
+ from pathlib import Path
  from time import sleep
  from primitive.utils.actions import BaseAction
  from loguru import logger
  from primitive.__about__ import __version__
+ import yaml
+ from ..utils.yaml import generate_script_from_yaml
+
+ try:
+     from yaml import CLoader as Loader
+ except ImportError:
+     from yaml import Loader


  class Agent(BaseAction):
+     def set_cache_dir(self):
+         os_family = platform.system()
+
+         if os_family == "Darwin":
+             self.cache_dir = Path(
+                 Path.home() / "Library" / "Caches" / "tech.primitive.agent"
+             )
+         elif os_family == "Linux":
+             self.cache_dir = Path(Path.home() / ".cache" / "primitive")
+         elif os_family == "Windows":
+             raise NotImplementedError("Windows is not currently supported.")
+             self.cache_dir = None
+
+         if not self.cache_dir.exists():
+             self.cache_dir.mkdir(parents=True, exist_ok=True)
+
      def execute(
          self,
      ):
@@ -13,6 +38,9 @@ class Agent(BaseAction):
          logger.info(" [*] primitive")
          logger.info(f" [*] Version: {__version__}")

+         # Create cache dir if it doesnt exist
+         self.set_cache_dir()
+
          # self.primitive.hardware.update_hardware_system_info()
          try:
              self.primitive.hardware.check_in_http(is_available=True, is_online=True)
@@ -21,15 +49,40 @@ class Agent(BaseAction):
              sys.exit(1)

          try:
+             active_reservation_id = None
+             active_reservation_pk = None
+
              while True:
                  hardware = self.primitive.hardware.get_own_hardware_details()
+                 if hardware["activeReservation"]:
+                     if (
+                         hardware["activeReservation"]["id"] != active_reservation_id
+                         or hardware["activeReservation"]["pk"] != active_reservation_pk
+                     ):
+                         logger.warning("New reservation for this hardware.")
+                         active_reservation_id = hardware["activeReservation"]["id"]
+                         active_reservation_pk = hardware["activeReservation"]["pk"]
+                         logger.debug("Active Reservation:")
+                         logger.debug(f"Node ID: {active_reservation_id}")
+                         logger.debug(f"PK: {active_reservation_pk}")
+                 else:
+                     if (
+                         hardware["activeReservation"] is None
+                         and active_reservation_id is not None
+                         and hardware["isAvailable"]
+                     ):
+                         logger.debug("Previous Reservation is Complete:")
+                         logger.debug(f"Node ID: {active_reservation_id}")
+                         logger.debug(f"PK: {active_reservation_pk}")
+                         active_reservation_id = None
+                         active_reservation_pk = None

-                 active_reservation_id = None
-                 if hardware.get("activeReservation"):
-                     active_reservation_id = hardware["activeReservation"]["id"]
                  if not active_reservation_id:
-                     logger.debug("No active reservation found")
-                     sleep(5)
+                     sleep_amount = 5
+                     logger.debug(
+                         f"No active reservation found... [sleeping {sleep_amount} seconds]"
+                     )
+                     sleep(sleep_amount)
                      continue

                  job_runs_data = self.primitive.jobs.get_job_runs(
@@ -40,6 +93,14 @@ class Agent(BaseAction):
                      edge["node"] for edge in job_runs_data["jobRuns"]["edges"]
                  ]

+                 if not pending_job_runs:
+                     sleep_amount = 5
+                     logger.debug(
+                         f"Waiting for Job Runs... [sleeping {sleep_amount} seconds]"
+                     )
+                     sleep(sleep_amount)
+                     continue
+
                  for job_run in pending_job_runs:
                      logger.debug("Found pending Job Run")
                      logger.debug(f"Job Run ID: {job_run['id']}")
@@ -62,6 +123,7 @@ class Agent(BaseAction):
                              git_repo_full_name=git_repo_full_name,
                              git_ref=git_ref,
                              github_access_token=github_access_token,
+                             destination=self.cache_dir,
                          )
                      )

@@ -73,6 +135,23 @@ class Agent(BaseAction):
                      if containerArgs := job_run["jobSettings"]["containerArgs"]:
                          cmd = tuple(containerArgs.split(" "))

+                     # Load config and generate bash script
+                     yaml_config_path = Path(source_dir / "primitive.yaml")
+                     run_script_path = None
+                     if yaml_config_path.exists() and yaml_config_path.is_file():
+                         yaml_config = yaml.load(
+                             open(yaml_config_path, "r"), Loader=Loader
+                         )
+                         run_script_path = generate_script_from_yaml(
+                             yaml_config,
+                             slug=job_run["job"]["slug"],
+                             destination=source_dir,
+                         )
+                         cmd = (
+                             "/bin/bash",
+                             str(run_script_path.resolve()),
+                         )
+
                      match job_run["job"]["slug"]:
                          case "lint":
                              logger.debug("Executing Lint Job")
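As an aside, the primitive.yaml handling added above expects a mapping keyed by the job slug, where each step carries a run block that generate_script_from_yaml turns into a bash script. A minimal sketch of that flow; the "test" slug, the commands, and the /tmp destination are illustrative assumptions, not values shipped with the package:

    from pathlib import Path
    from primitive.utils.yaml import generate_script_from_yaml

    # Hypothetical equivalent of a small primitive.yaml after parsing.
    config = {"test": {"steps": [{"run": "make build\n"}, {"run": "make test\n"}]}}

    # Writes an executable run.sh into the destination and returns its path,
    # mirroring Agent.execute(), which passes destination=source_dir and then
    # builds cmd = ("/bin/bash", str(run_script_path.resolve())).
    script_path = generate_script_from_yaml(config, slug="test", destination=Path("/tmp"))
    print(script_path)  # /tmp/run.sh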
primitive/hardware/actions.py CHANGED
@@ -464,6 +464,10 @@ fragment HardwareFragment on Hardware {
    slug
    createdAt
    updatedAt
+   isAvailable
+   isOnline
+   isQuarantined
+   isHealthy
    capabilities {
      id
      pk
primitive/sim/actions.py CHANGED
@@ -1,15 +1,13 @@
- from gql import gql
- from pathlib import Path
+ from pathlib import Path, PurePath
  from primitive.utils.actions import BaseAction
  from loguru import logger
  import subprocess
- from typing import Tuple, List
+ from typing import Tuple
  from ..utils.files import find_files_for_extension
  import os
  from .vcd import TokenKind, tokenize
  import io
  from collections import defaultdict
- import urllib
  import json


@@ -53,23 +51,23 @@ class Sim(BaseAction):
      def collect_artifacts(
          self, source: Path, job_run_id: str, organization_id: str
      ) -> None:
-         file_ids = []
-
-         # Look for VCD artifacts
+         # Split VCD artifacts
          files = find_files_for_extension(source, ".vcd")
          for file in files:
-             trace_file_ids = self.generate_timeseries(
+             self.split_vcd(
                  path=file, job_run_id=job_run_id, organization_id=organization_id
              )
-             file_ids.extend(trace_file_ids)

          logger.debug("Uploading additional artifacts...")
-         files = find_files_for_extension(source, (".xml", ".vcd", ".log", ".history"))
+         # TODO: Figure out how to track ".log", ".history" files w/ analog stuff involved
+         file_ids = []
+         files = find_files_for_extension(source, (".xml", ".vcd", ".json"))
          for file_path in files:
              try:
                  file_ids.append(
                      self.upload_file(
-                         file_path, prefix=f"{job_run_id}/{str(file_path.parent)}"
+                         file_path,
+                         prefix=f"{job_run_id}/{str(PurePath(file_path).relative_to(Path(source)).parent)}",
                      )
                  )
              except FileNotFoundError:
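The new upload prefix strips the local source directory, so artifacts are keyed by their path relative to the collected tree rather than by an absolute host path. A quick illustration with made-up paths:

    from pathlib import Path, PurePath

    source = Path("/tmp/job-src")
    file_path = Path("/tmp/job-src/sim/waves/top.vcd")
    job_run_id = "abc123"

    # Same expression as the new upload_file() call above.
    prefix = f"{job_run_id}/{str(PurePath(file_path).relative_to(Path(source)).parent)}"
    print(prefix)  # abc123/sim/waves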
@@ -82,119 +80,71 @@ class Sim(BaseAction):
          )
          logger.success(job_run_update_response)

-     def generate_timeseries(
-         self, path: Path, job_run_id: str, organization_id: str
-     ) -> List[str]:
+     def split_vcd(self, path: Path, job_run_id: str, organization_id: str) -> None:
          logger.debug("Parsing VCD file...")
          with open(path, "rb") as f:
              tokens = tokenize(io.BytesIO(f.read()))

          metadata = defaultdict(dict)
-         traces = defaultdict(list)
-         timescale_unit = "s"
-         timescale_magnitude = 1
-         active_module: str = ""
-         time: int = 0
+         header = defaultdict(dict)
+         data = defaultdict(list)
+
+         active_scope = header
+         previous_scope = None
+
+         current_time = 0

          for token in tokens:
              match token.kind:
                  case TokenKind.TIMESCALE:
-                     timescale_unit = token.data.unit.value
-                     timescale_magnitude = token.data.magnitude.value
+                     metadata["timescaleUnit"] = token.data.unit.value
+                     metadata["timescaleMagnitude"] = token.data.magnitude.value
                  case TokenKind.SCOPE:
-                     active_module = token.data.ident
-                 case TokenKind.CHANGE_TIME:
-                     time = int(token.data)
+                     scope_type = str(token.data.type_)
+                     scope_ident = token.data.ident
+                     key = f"{scope_type}:{scope_ident}"
+                     active_scope[key] = {}
+
+                     previous_scope = active_scope
+                     active_scope = active_scope[key]
+                 case TokenKind.UPSCOPE:
+                     active_scope = previous_scope
                  case TokenKind.VAR:
-                     var = {
+                     active_scope[token.data.id_code] = {
                          "id_code": token.data.id_code,
-                         "module": active_module,
                          "var_type": str(token.data.type_),
                          "var_size": token.data.size,
                          "reference": token.data.reference,
                          "bit_index": str(token.data.bit_index),
                      }
-                     metadata[token.data.id_code] = var
+                 case TokenKind.CHANGE_TIME:
+                     current_time = int(token.data)
                  case TokenKind.CHANGE_SCALAR:
-                     traces[token.data.id_code].append(
-                         (str(time), str(token.data.value))
+                     data[token.data.id_code].append(
+                         (str(current_time), str(token.data.value))
                      )
                  case TokenKind.CHANGE_VECTOR:
-                     traces[token.data.id_code].append(
-                         (str(time), str(token.data.value))
+                     data[token.data.id_code].append(
+                         (str(current_time), str(token.data.value))
                      )

          # Add traces and write files
-         logger.debug("Uploading traces...")
-         trace_file_ids = []
-         for id_code, timeseries in traces.items():
+         logger.debug("Writing traces...")

-             def hashed(id_code):
-                 return urllib.parse.quote_plus(id_code, safe="")
+         # Find name of file for json dumps
+         file_name = path.name.split(".")[0]

-             file_path = path.parent / f"{hashed(id_code)}.vcd.json"
-             with open(file_path, "w") as f:
-                 f.write(json.dumps(timeseries))
+         # Write metadata file
+         metadata_path = path.parent / f"{file_name}.metadata.vcd.json"
+         with open(metadata_path, "w") as f:
+             f.write(json.dumps(metadata))

-             trace_file_id = self.upload_file(
-                 file_path, prefix=f"{job_run_id}/{str(file_path.parent)}"
-             )
-             trace_file_ids.append(trace_file_id)
-
-             self.trace_create(
-                 id_code=id_code,
-                 module=metadata[id_code]["module"],
-                 var_type=metadata[id_code]["var_type"],
-                 var_size=metadata[id_code]["var_size"],
-                 reference=metadata[id_code]["reference"],
-                 bit_index=metadata[id_code]["bit_index"],
-                 timescale_unit=timescale_unit,
-                 timescale_magnitude=timescale_magnitude,
-                 organization=organization_id,
-                 file=trace_file_id,
-                 job_run=job_run_id,
-             )
+         # Write header file
+         header_path = path.parent / f"{file_name}.header.vcd.json"
+         with open(header_path, "w") as f:
+             f.write(json.dumps(metadata))

-         return trace_file_ids
-
-     def trace_create(
-         self,
-         id_code: str,
-         module: str,
-         var_type: str,
-         var_size: int,
-         reference: str,
-         bit_index: str,
-         timescale_unit: str,
-         timescale_magnitude: int,
-         organization: str,
-         file: str,
-         job_run: str,
-     ):
-         mutation = gql(
-             """
-             mutation createTrace($input: TraceCreateInput!) {
-                 traceCreate(input: $input) {
-                     ... on Trace {
-                         id
-                     }
-                 }
-             }
-             """
-         )
-         input = {
-             "idCode": id_code,
-             "module": module,
-             "varType": var_type,
-             "varSize": var_size,
-             "reference": reference,
-             "bitIndex": bit_index,
-             "timescaleUnit": timescale_unit,
-             "timescaleMagnitude": timescale_magnitude,
-             "organization": organization,
-             "file": file,
-             "jobRun": job_run,
-         }
-         variables = {"input": input}
-         result = self.primitive.session.execute(mutation, variable_values=variables)
-         return result
+         # Write data file
+         data_path = path.parent / f"{file_name}.data.vcd.json"
+         with open(data_path, "w") as f:
+             f.write(json.dumps(metadata))
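The rewritten split_vcd no longer uploads per-signal traces; it dumps JSON files next to the source VCD, which the widened (".xml", ".vcd", ".json") filter in collect_artifacts then picks up. For a hypothetical input path the filenames work out as:

    from pathlib import Path

    path = Path("build/top.vcd")          # made-up VCD location
    file_name = path.name.split(".")[0]   # "top"

    print(path.parent / f"{file_name}.metadata.vcd.json")  # build/top.metadata.vcd.json
    print(path.parent / f"{file_name}.header.vcd.json")    # build/top.header.vcd.json
    print(path.parent / f"{file_name}.data.vcd.json")      # build/top.data.vcd.json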
primitive/utils/yaml.py ADDED
@@ -0,0 +1,23 @@
+ from pathlib import Path
+
+
+ def generate_script_from_yaml(yaml_config: dict, slug: str, destination: Path) -> None:
+     commands_blocks = []
+     if steps := yaml_config[slug]["steps"]:
+         for step in steps:
+             commands_blocks.append(step["run"])
+
+     script = f"""
+ #!/bin/bash
+
+ {"".join(commands_blocks)}
+ """
+
+     output_path = Path(destination / "run.sh")
+     with open(output_path, "w") as f:
+         f.write(script)
+
+     # Apply execute file permissions
+     output_path.chmod(0o744)
+
+     return output_path
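Worth noting when writing configs for this helper: the run blocks are concatenated with "".join(), so each entry should end with its own newline (for example a YAML block scalar) to keep commands on separate lines in the generated script. A tiny illustration with made-up commands:

    # Each run block carries its own trailing newline.
    commands_blocks = ["make build\n", "make test\n"]
    print("".join(commands_blocks))
    # make build
    # make test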
primitive-0.1.21.dist-info/METADATA → primitive-0.1.23.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: primitive
- Version: 0.1.21
+ Version: 0.1.23
  Project-URL: Documentation, https://github.com//primitivecorp/primitive-cli#readme
  Project-URL: Issues, https://github.com//primitivecorp/primitive-cli/issues
  Project-URL: Source, https://github.com//primitivecorp/primitive-cli
@@ -23,6 +23,7 @@ Requires-Dist: gql[all]
  Requires-Dist: ipdb
  Requires-Dist: loguru
  Requires-Dist: pyright
+ Requires-Dist: pyyaml
  Requires-Dist: ruff
  Description-Content-Type: text/markdown

primitive-0.1.21.dist-info/RECORD → primitive-0.1.23.dist-info/RECORD RENAMED
@@ -1,8 +1,8 @@
- primitive/__about__.py,sha256=JiIZ_OJnMBk7JQxWOZ-Ln2mvyV5v9BfpDzkeFWd4wQ0,129
+ primitive/__about__.py,sha256=t_yxgDK5E5ZXzNOixMhkLwEWmOymxLER3wGKqe2Kdg4,129
  primitive/__init__.py,sha256=bwKdgggKNVssJFVPfKSxqFMz4IxSr54WWbmiZqTMPNI,106
  primitive/cli.py,sha256=VQPSewC6ouGdEG9W1gllawGJTydpOY0Lzg7LURXcqQg,2374
  primitive/client.py,sha256=SFPG4H2wJao8euGdnYp-l7dk_fDpWeVn2aT2WNJUAqo,2370
- primitive/agent/actions.py,sha256=CFb44aKPG2IF4c2Jqb0sBj4iA7VCtojcaGHgWdrtLYE,5407
+ primitive/agent/actions.py,sha256=ZGnkjieQ6i61HhZuBt8TYjA6CgEJ5R4a6kEUqWIRSMM,8744
  primitive/agent/commands.py,sha256=-dVDilELfkGfbZB7qfEPs77Dm1oT62qJj4tsIk4KoxI,254
  primitive/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  primitive/auth/actions.py,sha256=N2bGcwXNsB89pzs66gF9A5_WzUScY5fhfOyWixqo2y8,1054
@@ -18,7 +18,7 @@ primitive/git/actions.py,sha256=fepcl5529w_hsaC6fBw9f-QHeyqNjGXz8HI5ebzbZMs,1386
  primitive/git/commands.py,sha256=64B2STTOn0dwVDmJHqEwekmIqKMfSyBBFwKg29Wt8Aw,1230
  primitive/graphql/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  primitive/graphql/sdk.py,sha256=BhCGmDtc4sNnH8CxbQSJyFwOZ-ZSqMtjsxMB3JRBhPw,1456
- primitive/hardware/actions.py,sha256=Ea3_2E3F_3WapV60g_mOIcpXhadoknwihR7slXyUWtk,18840
+ primitive/hardware/actions.py,sha256=JJXEeW35QzVGcLN4ym5gYZwY71hxLzM1GYPXWaObEts,18893
  primitive/hardware/commands.py,sha256=QE7LLeFdfOqlvz3JwdwJJRZAY3fHI1zB9kYmmDajpq0,1477
  primitive/jobs/actions.py,sha256=1Mc-bg4nCd5qiKC-hPODveTPZwwo0Kztl5BuidLr-Sc,7718
  primitive/jobs/commands.py,sha256=MxPCkBEYW_eLNqgCRYeyj7ZcLOFAWfpVZlqDR2Y_S0o,830
@@ -30,7 +30,7 @@ primitive/projects/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuF
  primitive/projects/actions.py,sha256=xhebDUMN9DXWvngWJyJkiijghbZwffy-JIPSsOg8agE,2061
  primitive/projects/commands.py,sha256=Fqqgpi4cm6zOgkHK--0F0hiiIj32BmgZ-h1MydmWwdE,464
  primitive/sim/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- primitive/sim/actions.py,sha256=vmhiFH8WhQgVEeIHG9r2Gxb5ZZMdk19JYiz5r2F21cg,6864
+ primitive/sim/actions.py,sha256=VT3PgMt_lC-zz_nx9kI8vWMclsOOkVaJmrYegQu31AM,5513
  primitive/sim/commands.py,sha256=8PaOfL1MO6qxTn7mNVRnBU1X2wa3gk_mlbAhBW6MnI0,591
  primitive/sim/vcd.py,sha256=mAbGnKWM0qzIUMkuSmO0p3sU25kOqbl31mvCsDSrXeM,22221
  primitive/utils/actions.py,sha256=HOFrmM3-0A_A3NS84MqrZ6JmQEiiPSoDqEeuu6b_qfQ,196
@@ -41,8 +41,9 @@ primitive/utils/memory_size.py,sha256=4xfha21kW82nFvOTtDFx9Jk2ZQoEhkfXii-PGNTpIU
  primitive/utils/printer.py,sha256=f1XUpqi5dkTL3GWvYRUGlSwtj2IxU1q745T4Fxo7Tn4,370
  primitive/utils/shell.py,sha256=-7UjQaBqSGHzEEyX8pNjeYFFP0P3lVnDV0OkgPz1qHU,1050
  primitive/utils/verible.py,sha256=QYczN1IvxODfj4jeq0nqjFuF0Oi0Zdx-Q32ySOJgcw8,2205
- primitive-0.1.21.dist-info/METADATA,sha256=MherRu1tZf7ePM5Inx7ebfvihZOrYKJlxMgSYQrTt80,1818
- primitive-0.1.21.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
- primitive-0.1.21.dist-info/entry_points.txt,sha256=p1K8DMCWka5FqLlqP1sPek5Uovy9jq8u51gUsP-z334,48
- primitive-0.1.21.dist-info/licenses/LICENSE.txt,sha256=B8kmQMJ2sxYygjCLBk770uacaMci4mPSoJJ8WoDBY_c,1098
- primitive-0.1.21.dist-info/RECORD,,
+ primitive/utils/yaml.py,sha256=4UP_9MXHoNb9_SCeUDm9xqYg9sHltqpVhNgsY6GNfb8,527
+ primitive-0.1.23.dist-info/METADATA,sha256=HxzprzDhPdTYFNxtEACxa57jBgn-ZJcGNfQtELXfKuE,1840
+ primitive-0.1.23.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
+ primitive-0.1.23.dist-info/entry_points.txt,sha256=p1K8DMCWka5FqLlqP1sPek5Uovy9jq8u51gUsP-z334,48
+ primitive-0.1.23.dist-info/licenses/LICENSE.txt,sha256=B8kmQMJ2sxYygjCLBk770uacaMci4mPSoJJ8WoDBY_c,1098
+ primitive-0.1.23.dist-info/RECORD,,