primitive 0.1.22__py3-none-any.whl → 0.1.24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
primitive/__about__.py CHANGED
@@ -1,4 +1,4 @@
 # SPDX-FileCopyrightText: 2024-present Dylan Stein <dylan@steins.studio>
 #
 # SPDX-License-Identifier: MIT
-__version__ = "0.1.22"
+__version__ = "0.1.24"
primitive/agent/actions.py CHANGED
@@ -1,11 +1,36 @@
+import platform
 import sys
+from pathlib import Path
 from time import sleep
 from primitive.utils.actions import BaseAction
 from loguru import logger
 from primitive.__about__ import __version__
+import yaml
+from ..utils.yaml import generate_script_from_yaml
+
+try:
+    from yaml import CLoader as Loader
+except ImportError:
+    from yaml import Loader
 
 
 class Agent(BaseAction):
+    def set_cache_dir(self):
+        os_family = platform.system()
+
+        if os_family == "Darwin":
+            self.cache_dir = Path(
+                Path.home() / "Library" / "Caches" / "tech.primitive.agent"
+            )
+        elif os_family == "Linux":
+            self.cache_dir = Path(Path.home() / ".cache" / "primitive")
+        elif os_family == "Windows":
+            raise NotImplementedError("Windows is not currently supported.")
+            self.cache_dir = None
+
+        if not self.cache_dir.exists():
+            self.cache_dir.mkdir(parents=True, exist_ok=True)
+
     def execute(
         self,
     ):
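The new `set_cache_dir` resolves a per-OS cache location and creates it on first use. A minimal standalone sketch of the same resolution logic, for illustration only (the `XDG_CACHE_HOME` fallback is an assumption here, not shipped behavior):

```python
import os
import platform
from pathlib import Path


def resolve_cache_dir() -> Path:
    """Mirror the agent's cache-dir selection: macOS and Linux only."""
    os_family = platform.system()
    if os_family == "Darwin":
        cache_dir = Path.home() / "Library" / "Caches" / "tech.primitive.agent"
    elif os_family == "Linux":
        # Assumption: honor XDG_CACHE_HOME; the shipped code hardcodes ~/.cache.
        xdg = os.environ.get("XDG_CACHE_HOME")
        cache_dir = (Path(xdg) if xdg else Path.home() / ".cache") / "primitive"
    else:
        raise NotImplementedError(f"{os_family} is not currently supported.")
    cache_dir.mkdir(parents=True, exist_ok=True)
    return cache_dir


print(resolve_cache_dir())  # e.g. ~/Library/Caches/tech.primitive.agent on macOS
```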
@@ -13,6 +38,9 @@ class Agent(BaseAction):
         logger.info(" [*] primitive")
         logger.info(f" [*] Version: {__version__}")
 
+        # Create cache dir if it doesn't exist
+        self.set_cache_dir()
+
         # self.primitive.hardware.update_hardware_system_info()
         try:
             self.primitive.hardware.check_in_http(is_available=True, is_online=True)
@@ -95,6 +123,7 @@ class Agent(BaseAction):
                     git_repo_full_name=git_repo_full_name,
                     git_ref=git_ref,
                     github_access_token=github_access_token,
+                    destination=self.cache_dir,
                 )
             )
 
@@ -106,6 +135,23 @@ class Agent(BaseAction):
                 if containerArgs := job_run["jobSettings"]["containerArgs"]:
                     cmd = tuple(containerArgs.split(" "))
 
+                # Load config and generate bash script
+                yaml_config_path = Path(source_dir / "primitive.yaml")
+                run_script_path = None
+                if yaml_config_path.exists() and yaml_config_path.is_file():
+                    yaml_config = yaml.load(
+                        open(yaml_config_path, "r"), Loader=Loader
+                    )
+                    run_script_path = generate_script_from_yaml(
+                        yaml_config,
+                        slug=job_run["job"]["slug"],
+                        destination=source_dir,
+                    )
+                    cmd = (
+                        "/bin/bash",
+                        str(run_script_path.resolve()),
+                    )
+
                 match job_run["job"]["slug"]:
                     case "lint":
                         logger.debug("Executing Lint Job")
@@ -142,9 +188,7 @@ class Agent(BaseAction):
 
                 # Attempt artifact collection
                 self.primitive.sim.collect_artifacts(
-                    source=source_dir,
-                    job_run_id=job_run["id"],
-                    organization_id=job_run["organization"]["id"],
+                    source=source_dir, job_run_id=job_run["id"]
                 )
 
                 if result:
primitive/sim/actions.py CHANGED
@@ -1,16 +1,15 @@
-from gql import gql
-from pathlib import Path
+from pathlib import Path, PurePath
 from primitive.utils.actions import BaseAction
 from loguru import logger
 import subprocess
-from typing import Tuple, List
+from typing import Tuple
 from ..utils.files import find_files_for_extension
 import os
 from .vcd import TokenKind, tokenize
 import io
 from collections import defaultdict
-import urllib
 import json
+import xml.etree.ElementTree as ET
 
 
 class Sim(BaseAction):
@@ -50,26 +49,27 @@ class Sim(BaseAction):
         file_upload_response = self.primitive.files.file_upload(path, key_prefix=prefix)
         return file_upload_response.json()["data"]["fileUpload"]["id"]
 
-    def collect_artifacts(
-        self, source: Path, job_run_id: str, organization_id: str
-    ) -> None:
-        file_ids = []
-
-        # Look for VCD artifacts
+    def collect_artifacts(self, source: Path, job_run_id: str) -> None:
+        # Parse VCD artifacts
         files = find_files_for_extension(source, ".vcd")
         for file in files:
-            trace_file_ids = self.generate_timeseries(
-                path=file, job_run_id=job_run_id, organization_id=organization_id
-            )
-            file_ids.extend(trace_file_ids)
+            self.parse_vcd(path=file)
+
+        # Parse XML artifacts
+        files = find_files_for_extension(source, ".xml")
+        for file in files:
+            self.parse_xml(path=file)
 
         logger.debug("Uploading additional artifacts...")
-        files = find_files_for_extension(source, (".xml", ".vcd", ".log", ".history"))
+        # TODO: Figure out how to track ".log", ".history" files w/ analog stuff involved
+        file_ids = []
+        files = find_files_for_extension(source, (".xml", ".vcd", ".json"))
         for file_path in files:
             try:
                 file_ids.append(
                     self.upload_file(
-                        file_path, prefix=f"{job_run_id}/{str(file_path.parent)}"
+                        file_path,
+                        prefix=f"{job_run_id}/{str(PurePath(file_path).relative_to(Path(source)).parent)}",
                     )
                 )
             except FileNotFoundError:
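The reworked prefix keys uploads relative to the job's source checkout instead of embedding the file's absolute parent path. A small sketch with hypothetical paths:

```python
from pathlib import Path, PurePath

source = Path("/var/cache/primitive/job-src")
file_path = source / "sim" / "results" / "waves.vcd"
job_run_id = "abc123"

# Old prefix embedded the absolute parent directory:
old_prefix = f"{job_run_id}/{str(file_path.parent)}"
print(old_prefix)  # abc123//var/cache/primitive/job-src/sim/results

# New prefix is relative to the source directory:
relative_parent = PurePath(file_path).relative_to(Path(source)).parent
new_prefix = f"{job_run_id}/{str(relative_parent)}"
print(new_prefix)  # abc123/sim/results
```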
@@ -82,119 +82,115 @@ class Sim(BaseAction):
         )
         logger.success(job_run_update_response)
 
-    def generate_timeseries(
-        self, path: Path, job_run_id: str, organization_id: str
-    ) -> List[str]:
+    def parse_xml(self, path: Path) -> None:
+        results = ET.parse(path)
+        testsuites = results.getroot()
+
+        parsed_results = {}
+        testsuites_name = testsuites.attrib["name"]
+        parsed_results[testsuites_name] = {}
+
+        for testsuite in testsuites.findall("testsuite"):
+            testsuite_name = testsuite.attrib["name"]
+            parsed_results[testsuites_name][testsuite_name] = {
+                "properties": {},
+                "testcases": {},
+            }
+            props = parsed_results[testsuites_name][testsuite_name]["properties"]
+            testcases = parsed_results[testsuites_name][testsuite_name]["testcases"]
+
+            for prop in testsuite.findall("property"):
+                props[prop.attrib["name"]] = prop.attrib["value"]
+
+            for testcase in testsuite.findall("testcase"):
+                testcases[testcase.attrib["name"]] = {
+                    attr_key: attr_val for attr_key, attr_val in testcase.attrib.items()
+                }
+
+                failures = testcase.findall("failure")
+
+                if len(failures) > 0:
+                    for failure in failures:
+                        testcases[testcase.attrib["name"]]["status"] = {
+                            "conclusion": "failure",
+                            "message": failure.attrib["message"],
+                        }
+                else:
+                    testcases[testcase.attrib["name"]]["status"] = {
+                        "conclusion": "success",
+                        "message": "",
+                    }
+
+        # Write parsed file
+        data_path = path.parent / f"{path.name}.json"
+        with open(data_path, "w") as f:
+            f.write(json.dumps(parsed_results))
+
+    def parse_vcd(self, path: Path) -> None:
         logger.debug("Parsing VCD file...")
         with open(path, "rb") as f:
             tokens = tokenize(io.BytesIO(f.read()))
 
         metadata = defaultdict(dict)
-        traces = defaultdict(list)
-        timescale_unit = "s"
-        timescale_magnitude = 1
-        active_module: str = ""
-        time: int = 0
+        header = defaultdict(dict)
+        data = defaultdict(list)
+
+        active_scope = header
+        previous_scope = None
+
+        current_time = 0
 
         for token in tokens:
             match token.kind:
                 case TokenKind.TIMESCALE:
-                    timescale_unit = token.data.unit.value
-                    timescale_magnitude = token.data.magnitude.value
+                    metadata["timescaleUnit"] = token.data.unit.value
+                    metadata["timescaleMagnitude"] = token.data.magnitude.value
                 case TokenKind.SCOPE:
-                    active_module = token.data.ident
-                case TokenKind.CHANGE_TIME:
-                    time = int(token.data)
+                    scope_type = str(token.data.type_)
+                    scope_ident = token.data.ident
+                    key = f"{scope_type}:{scope_ident}"
+                    active_scope[key] = {}
+
+                    previous_scope = active_scope
+                    active_scope = active_scope[key]
+                case TokenKind.UPSCOPE:
+                    active_scope = previous_scope
                 case TokenKind.VAR:
-                    var = {
+                    active_scope[token.data.id_code] = {
                         "id_code": token.data.id_code,
-                        "module": active_module,
                         "var_type": str(token.data.type_),
                         "var_size": token.data.size,
                         "reference": token.data.reference,
                         "bit_index": str(token.data.bit_index),
                     }
-                    metadata[token.data.id_code] = var
+                case TokenKind.CHANGE_TIME:
+                    current_time = int(token.data)
                 case TokenKind.CHANGE_SCALAR:
-                    traces[token.data.id_code].append(
-                        (str(time), str(token.data.value))
+                    data[token.data.id_code].append(
+                        (str(current_time), str(token.data.value))
                     )
                 case TokenKind.CHANGE_VECTOR:
-                    traces[token.data.id_code].append(
-                        (str(time), str(token.data.value))
+                    data[token.data.id_code].append(
+                        (str(current_time), str(token.data.value))
                     )
 
         # Add traces and write files
-        logger.debug("Uploading traces...")
-        trace_file_ids = []
-        for id_code, timeseries in traces.items():
+        logger.debug("Writing traces...")
 
-            def hashed(id_code):
-                return urllib.parse.quote_plus(id_code, safe="")
+        # Find name of file for json dumps
+        file_name = path.name.split(".")[0]
 
-            file_path = path.parent / f"{hashed(id_code)}.vcd.json"
-            with open(file_path, "w") as f:
-                f.write(json.dumps(timeseries))
+        # Write metadata file
+        metadata_path = path.parent / f"{file_name}.metadata.vcd.json"
+        with open(metadata_path, "w") as f:
+            f.write(json.dumps(metadata))
 
-            trace_file_id = self.upload_file(
-                file_path, prefix=f"{job_run_id}/{str(file_path.parent)}"
-            )
-            trace_file_ids.append(trace_file_id)
-
-            self.trace_create(
-                id_code=id_code,
-                module=metadata[id_code]["module"],
-                var_type=metadata[id_code]["var_type"],
-                var_size=metadata[id_code]["var_size"],
-                reference=metadata[id_code]["reference"],
-                bit_index=metadata[id_code]["bit_index"],
-                timescale_unit=timescale_unit,
-                timescale_magnitude=timescale_magnitude,
-                organization=organization_id,
-                file=trace_file_id,
-                job_run=job_run_id,
-            )
+        # Write header file
+        header_path = path.parent / f"{file_name}.header.vcd.json"
+        with open(header_path, "w") as f:
+            f.write(json.dumps(header))
 
-        return trace_file_ids
-
-    def trace_create(
-        self,
-        id_code: str,
-        module: str,
-        var_type: str,
-        var_size: int,
-        reference: str,
-        bit_index: str,
-        timescale_unit: str,
-        timescale_magnitude: int,
-        organization: str,
-        file: str,
-        job_run: str,
-    ):
-        mutation = gql(
-            """
-            mutation createTrace($input: TraceCreateInput!) {
-                traceCreate(input: $input) {
-                    ... on Trace {
-                        id
-                    }
-                }
-            }
-            """
-        )
-        input = {
-            "idCode": id_code,
-            "module": module,
-            "varType": var_type,
-            "varSize": var_size,
-            "reference": reference,
-            "bitIndex": bit_index,
-            "timescaleUnit": timescale_unit,
-            "timescaleMagnitude": timescale_magnitude,
-            "organization": organization,
-            "file": file,
-            "jobRun": job_run,
-        }
-        variables = {"input": input}
-        result = self.primitive.session.execute(mutation, variable_values=variables)
-        return result
+        # Write data file
+        data_path = path.parent / f"{file_name}.data.vcd.json"
+        with open(data_path, "w") as f:
+            f.write(json.dumps(data))
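`parse_vcd` now builds a nested scope tree: `$scope` tokens descend into a child dict and `$upscope` returns via `previous_scope`, which can only restore one level. A sketch of a stack-based variant that handles arbitrarily deep nesting; the two-tuple tokens are simplified stand-ins, not the shipped `primitive.sim.vcd` token types:

```python
def build_scope_tree(tokens) -> dict:
    """Walk (kind, data) pairs and nest scopes with an explicit parent stack."""
    header: dict = {}
    active_scope = header
    scope_stack = []  # parents of active_scope, deepest last

    for kind, data in tokens:
        if kind == "SCOPE":
            key = f"module:{data}"
            active_scope[key] = {}
            scope_stack.append(active_scope)
            active_scope = active_scope[key]
        elif kind == "UPSCOPE":
            active_scope = scope_stack.pop()
        elif kind == "VAR":
            active_scope[data] = {"reference": data}
    return header


tokens = [("SCOPE", "top"), ("SCOPE", "alu"), ("VAR", "clk"),
          ("UPSCOPE", None), ("UPSCOPE", None)]
print(build_scope_tree(tokens))
# {'module:top': {'module:alu': {'clk': {'reference': 'clk'}}}}
```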
primitive/utils/yaml.py ADDED
@@ -0,0 +1,23 @@
+from pathlib import Path
+
+
+def generate_script_from_yaml(yaml_config: dict, slug: str, destination: Path) -> Path:
+    commands_blocks = []
+    if steps := yaml_config[slug]["steps"]:
+        for step in steps:
+            commands_blocks.append(step["run"])
+
+    script = f"""
+#!/bin/bash
+
+{"".join(commands_blocks)}
+"""
+
+    output_path = Path(destination / "run.sh")
+    with open(output_path, "w") as f:
+        f.write(script)
+
+    # Apply execute file permissions
+    output_path.chmod(0o744)
+
+    return output_path
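A hypothetical usage of the new helper; the config dict mirrors the shape the agent loads from `primitive.yaml`:

```python
from pathlib import Path

from primitive.utils.yaml import generate_script_from_yaml

yaml_config = {"build": {"steps": [{"run": "make build"}]}}

script_path = generate_script_from_yaml(
    yaml_config, slug="build", destination=Path("/tmp")
)
print(script_path)              # /tmp/run.sh (mode 0o744)
print(script_path.read_text())  # bash script containing "make build"
```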
primitive-0.1.24.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: primitive
-Version: 0.1.22
+Version: 0.1.24
 Project-URL: Documentation, https://github.com//primitivecorp/primitive-cli#readme
 Project-URL: Issues, https://github.com//primitivecorp/primitive-cli/issues
 Project-URL: Source, https://github.com//primitivecorp/primitive-cli
@@ -23,6 +23,7 @@ Requires-Dist: gql[all]
 Requires-Dist: ipdb
 Requires-Dist: loguru
 Requires-Dist: pyright
+Requires-Dist: pyyaml
 Requires-Dist: ruff
 Description-Content-Type: text/markdown
 
primitive-0.1.24.dist-info/RECORD CHANGED
@@ -1,8 +1,8 @@
-primitive/__about__.py,sha256=GLbyQLa84sxg9eRnmnWkVVauDFJZS0FGa5ocDebd_3g,129
+primitive/__about__.py,sha256=_w49f95cSqBz80O_5aXINkQBIbfMdD--GZTies4DMl0,129
 primitive/__init__.py,sha256=bwKdgggKNVssJFVPfKSxqFMz4IxSr54WWbmiZqTMPNI,106
 primitive/cli.py,sha256=VQPSewC6ouGdEG9W1gllawGJTydpOY0Lzg7LURXcqQg,2374
 primitive/client.py,sha256=SFPG4H2wJao8euGdnYp-l7dk_fDpWeVn2aT2WNJUAqo,2370
-primitive/agent/actions.py,sha256=uTT3Ke95BTL9haAHFhPJPasZEDlqDuC2uEF03Omap7E,7020
+primitive/agent/actions.py,sha256=fAuQmd5Z8FX3w1AiwLd4a_PPuBynzmdiWRmWA9ftn-8,8632
 primitive/agent/commands.py,sha256=-dVDilELfkGfbZB7qfEPs77Dm1oT62qJj4tsIk4KoxI,254
 primitive/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 primitive/auth/actions.py,sha256=N2bGcwXNsB89pzs66gF9A5_WzUScY5fhfOyWixqo2y8,1054
@@ -30,7 +30,7 @@ primitive/projects/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuF
 primitive/projects/actions.py,sha256=xhebDUMN9DXWvngWJyJkiijghbZwffy-JIPSsOg8agE,2061
 primitive/projects/commands.py,sha256=Fqqgpi4cm6zOgkHK--0F0hiiIj32BmgZ-h1MydmWwdE,464
 primitive/sim/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-primitive/sim/actions.py,sha256=vmhiFH8WhQgVEeIHG9r2Gxb5ZZMdk19JYiz5r2F21cg,6864
+primitive/sim/actions.py,sha256=OywgMR2-ErgL3Q5YsOMoD-UxeUkZHrMoJYxxQAl5sss,7283
 primitive/sim/commands.py,sha256=8PaOfL1MO6qxTn7mNVRnBU1X2wa3gk_mlbAhBW6MnI0,591
 primitive/sim/vcd.py,sha256=mAbGnKWM0qzIUMkuSmO0p3sU25kOqbl31mvCsDSrXeM,22221
 primitive/utils/actions.py,sha256=HOFrmM3-0A_A3NS84MqrZ6JmQEiiPSoDqEeuu6b_qfQ,196
@@ -41,8 +41,9 @@ primitive/utils/memory_size.py,sha256=4xfha21kW82nFvOTtDFx9Jk2ZQoEhkfXii-PGNTpIU
 primitive/utils/printer.py,sha256=f1XUpqi5dkTL3GWvYRUGlSwtj2IxU1q745T4Fxo7Tn4,370
 primitive/utils/shell.py,sha256=-7UjQaBqSGHzEEyX8pNjeYFFP0P3lVnDV0OkgPz1qHU,1050
 primitive/utils/verible.py,sha256=QYczN1IvxODfj4jeq0nqjFuF0Oi0Zdx-Q32ySOJgcw8,2205
-primitive-0.1.22.dist-info/METADATA,sha256=O1FnkxZf7VZ8cu575Na1xlA2-6z6esA-sqgWp-0sWtc,1818
-primitive-0.1.22.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
-primitive-0.1.22.dist-info/entry_points.txt,sha256=p1K8DMCWka5FqLlqP1sPek5Uovy9jq8u51gUsP-z334,48
-primitive-0.1.22.dist-info/licenses/LICENSE.txt,sha256=B8kmQMJ2sxYygjCLBk770uacaMci4mPSoJJ8WoDBY_c,1098
-primitive-0.1.22.dist-info/RECORD,,
+primitive/utils/yaml.py,sha256=4UP_9MXHoNb9_SCeUDm9xqYg9sHltqpVhNgsY6GNfb8,527
+primitive-0.1.24.dist-info/METADATA,sha256=VCnsBIbMsGhGTQzilsu0KRcSVB81L-4bW1w8KyyQSg8,1840
+primitive-0.1.24.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
+primitive-0.1.24.dist-info/entry_points.txt,sha256=p1K8DMCWka5FqLlqP1sPek5Uovy9jq8u51gUsP-z334,48
+primitive-0.1.24.dist-info/licenses/LICENSE.txt,sha256=B8kmQMJ2sxYygjCLBk770uacaMci4mPSoJJ8WoDBY_c,1098
+primitive-0.1.24.dist-info/RECORD,,