primitive 0.1.22__tar.gz → 0.1.23__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. {primitive-0.1.22 → primitive-0.1.23}/PKG-INFO +2 -1
  2. {primitive-0.1.22 → primitive-0.1.23}/pyproject.toml +8 -24
  3. {primitive-0.1.22 → primitive-0.1.23}/requirements.txt +1 -6
  4. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/__about__.py +1 -1
  5. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/agent/actions.py +46 -0
  6. primitive-0.1.23/src/primitive/sim/actions.py +150 -0
  7. primitive-0.1.23/src/primitive/utils/yaml.py +23 -0
  8. primitive-0.1.22/src/primitive/sim/actions.py +0 -200
  9. {primitive-0.1.22 → primitive-0.1.23}/.git-hooks/pre-commit +0 -0
  10. {primitive-0.1.22 → primitive-0.1.23}/.gitattributes +0 -0
  11. {primitive-0.1.22 → primitive-0.1.23}/.github/workflows/lint.yml +0 -0
  12. {primitive-0.1.22 → primitive-0.1.23}/.github/workflows/publish.yml +0 -0
  13. {primitive-0.1.22 → primitive-0.1.23}/.gitignore +0 -0
  14. {primitive-0.1.22 → primitive-0.1.23}/.vscode/settings.json +0 -0
  15. {primitive-0.1.22 → primitive-0.1.23}/LICENSE.txt +0 -0
  16. {primitive-0.1.22 → primitive-0.1.23}/Makefile +0 -0
  17. {primitive-0.1.22 → primitive-0.1.23}/README.md +0 -0
  18. {primitive-0.1.22 → primitive-0.1.23}/linux setup.md +0 -0
  19. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/__init__.py +0 -0
  20. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/agent/commands.py +0 -0
  21. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/auth/__init__.py +0 -0
  22. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/auth/actions.py +0 -0
  23. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/auth/commands.py +0 -0
  24. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/cli.py +0 -0
  25. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/client.py +0 -0
  26. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/daemons/actions.py +0 -0
  27. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/daemons/commands.py +0 -0
  28. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/daemons/launch_agents.py +0 -0
  29. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/daemons/launch_service.py +0 -0
  30. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/files/actions.py +0 -0
  31. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/files/commands.py +0 -0
  32. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/git/__init__.py +0 -0
  33. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/git/actions.py +0 -0
  34. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/git/commands.py +0 -0
  35. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/graphql/__init__.py +0 -0
  36. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/graphql/sdk.py +0 -0
  37. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/hardware/actions.py +0 -0
  38. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/hardware/commands.py +0 -0
  39. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/jobs/actions.py +0 -0
  40. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/jobs/commands.py +0 -0
  41. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/lint/actions.py +0 -0
  42. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/lint/commands.py +0 -0
  43. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/organizations/actions.py +0 -0
  44. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/organizations/commands.py +0 -0
  45. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/projects/__init__.py +0 -0
  46. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/projects/actions.py +0 -0
  47. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/projects/commands.py +0 -0
  48. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/sim/__init__.py +0 -0
  49. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/sim/commands.py +0 -0
  50. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/sim/vcd.py +0 -0
  51. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/utils/actions.py +0 -0
  52. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/utils/config.py +0 -0
  53. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/utils/files.py +0 -0
  54. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/utils/git.py +0 -0
  55. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/utils/memory_size.py +0 -0
  56. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/utils/printer.py +0 -0
  57. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/utils/shell.py +0 -0
  58. {primitive-0.1.22 → primitive-0.1.23}/src/primitive/utils/verible.py +0 -0
  59. {primitive-0.1.22 → primitive-0.1.23}/tests/__init__.py +0 -0
{primitive-0.1.22 → primitive-0.1.23}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: primitive
- Version: 0.1.22
+ Version: 0.1.23
  Project-URL: Documentation, https://github.com//primitivecorp/primitive-cli#readme
  Project-URL: Issues, https://github.com//primitivecorp/primitive-cli/issues
  Project-URL: Source, https://github.com//primitivecorp/primitive-cli
@@ -23,6 +23,7 @@ Requires-Dist: gql[all]
  Requires-Dist: ipdb
  Requires-Dist: loguru
  Requires-Dist: pyright
+ Requires-Dist: pyyaml
  Requires-Dist: ruff
  Description-Content-Type: text/markdown

{primitive-0.1.22 → primitive-0.1.23}/pyproject.toml
@@ -32,7 +32,8 @@ dependencies = [
  "pyright",
  "click",
  "gql[all]",
- "loguru"
+ "loguru",
+ "pyyaml",
  ]

  [project.urls]
@@ -47,29 +48,18 @@ primitive = "primitive.cli:cli"
  path = "src/primitive/__about__.py"

  [tool.hatch.envs.default]
- dependencies = [
-   "coverage[toml]>=6.5",
-   "pytest",
- ]
+ dependencies = ["coverage[toml]>=6.5", "pytest"]
  [tool.hatch.envs.default.scripts]
  test = "pytest {args:tests}"
  test-cov = "coverage run -m pytest {args:tests}"
- cov-report = [
-   "- coverage combine",
-   "coverage report",
- ]
- cov = [
-   "test-cov",
-   "cov-report",
- ]
+ cov-report = ["- coverage combine", "coverage report"]
+ cov = ["test-cov", "cov-report"]

  [[tool.hatch.envs.all.matrix]]
  python = ["3.8", "3.9", "3.10", "3.11", "3.12"]

  [tool.hatch.envs.types]
- dependencies = [
-   "mypy>=1.0.0",
- ]
+ dependencies = ["mypy>=1.0.0"]
  [tool.hatch.envs.types.scripts]
  check = "mypy --install-types --non-interactive {args:src/primitive tests}"

@@ -77,17 +67,11 @@ check = "mypy --install-types --non-interactive {args:src/primitive tests}"
  source_pkgs = ["primitive", "tests"]
  branch = true
  parallel = true
- omit = [
-   "src/primitive/__about__.py",
- ]
+ omit = ["src/primitive/__about__.py"]

  [tool.coverage.paths]
  primitive = ["src/primitive", "*/primitive/src/primitive"]
  tests = ["tests", "*/primitive/tests"]

  [tool.coverage.report]
- exclude_lines = [
-   "no cov",
-   "if __name__ == .__main__.:",
-   "if TYPE_CHECKING:",
- ]
+ exclude_lines = ["no cov", "if __name__ == .__main__.:", "if TYPE_CHECKING:"]
{primitive-0.1.22 → primitive-0.1.23}/requirements.txt
@@ -26,7 +26,6 @@ certifi==2024.7.4
  charset-normalizer==3.3.2
  # via requests
  click==8.1.7
- # via primitive (pyproject.toml)
  decorator==5.1.1
  # via
  #   ipdb
@@ -38,7 +37,6 @@ frozenlist==1.4.1
  #   aiohttp
  #   aiosignal
  gql==3.5.0
- # via primitive (pyproject.toml)
  graphql-core==3.2.3
  # via gql
  h11==0.14.0
@@ -54,7 +52,6 @@ idna==3.7
  #   requests
  #   yarl
  ipdb==0.13.13
- # via primitive (pyproject.toml)
  ipython==8.26.0
  # via ipdb
  jedi==0.19.1
@@ -62,7 +59,6 @@ jedi==0.19.1
  jmespath==1.0.1
  # via botocore
  loguru==0.7.2
- # via primitive (pyproject.toml)
  matplotlib-inline==0.1.7
  # via ipython
  multidict==6.0.5
@@ -84,9 +80,9 @@ pure-eval==0.2.3
  pygments==2.18.0
  # via ipython
  pyright==1.1.376
- # via primitive (pyproject.toml)
  python-dateutil==2.9.0.post0
  # via botocore
+ pyyaml==6.0.2
  requests==2.32.3
  # via
  #   gql
@@ -94,7 +90,6 @@ requests==2.32.3
  requests-toolbelt==1.0.0
  # via gql
  ruff==0.6.1
- # via primitive (pyproject.toml)
  six==1.16.0
  # via
  #   asttokens
{primitive-0.1.22 → primitive-0.1.23}/src/primitive/__about__.py
@@ -1,4 +1,4 @@
  # SPDX-FileCopyrightText: 2024-present Dylan Stein <dylan@steins.studio>
  #
  # SPDX-License-Identifier: MIT
- __version__ = "0.1.22"
+ __version__ = "0.1.23"
{primitive-0.1.22 → primitive-0.1.23}/src/primitive/agent/actions.py
@@ -1,11 +1,36 @@
+ import platform
  import sys
+ from pathlib import Path
  from time import sleep
  from primitive.utils.actions import BaseAction
  from loguru import logger
  from primitive.__about__ import __version__
+ import yaml
+ from ..utils.yaml import generate_script_from_yaml
+
+ try:
+     from yaml import CLoader as Loader
+ except ImportError:
+     from yaml import Loader


  class Agent(BaseAction):
+     def set_cache_dir(self):
+         os_family = platform.system()
+
+         if os_family == "Darwin":
+             self.cache_dir = Path(
+                 Path.home() / "Library" / "Caches" / "tech.primitive.agent"
+             )
+         elif os_family == "Linux":
+             self.cache_dir = Path(Path.home() / ".cache" / "primitive")
+         elif os_family == "Windows":
+             raise NotImplementedError("Windows is not currently supported.")
+             self.cache_dir = None
+
+         if not self.cache_dir.exists():
+             self.cache_dir.mkdir(parents=True, exist_ok=True)
+
      def execute(
          self,
      ):
@@ -13,6 +38,9 @@ class Agent(BaseAction):
          logger.info(" [*] primitive")
          logger.info(f" [*] Version: {__version__}")

+         # Create cache dir if it doesnt exist
+         self.set_cache_dir()
+
          # self.primitive.hardware.update_hardware_system_info()
          try:
              self.primitive.hardware.check_in_http(is_available=True, is_online=True)
@@ -95,6 +123,7 @@
                  git_repo_full_name=git_repo_full_name,
                  git_ref=git_ref,
                  github_access_token=github_access_token,
+                 destination=self.cache_dir,
              )
          )

@@ -106,6 +135,23 @@
          if containerArgs := job_run["jobSettings"]["containerArgs"]:
              cmd = tuple(containerArgs.split(" "))

+         # Load config and generate bash script
+         yaml_config_path = Path(source_dir / "primitive.yaml")
+         run_script_path = None
+         if yaml_config_path.exists() and yaml_config_path.is_file():
+             yaml_config = yaml.load(
+                 open(yaml_config_path, "r"), Loader=Loader
+             )
+             run_script_path = generate_script_from_yaml(
+                 yaml_config,
+                 slug=job_run["job"]["slug"],
+                 destination=source_dir,
+             )
+             cmd = (
+                 "/bin/bash",
+                 str(run_script_path.resolve()),
+             )
+
          match job_run["job"]["slug"]:
              case "lint":
                  logger.debug("Executing Lint Job")
primitive-0.1.23/src/primitive/sim/actions.py
@@ -0,0 +1,150 @@
+ from pathlib import Path, PurePath
+ from primitive.utils.actions import BaseAction
+ from loguru import logger
+ import subprocess
+ from typing import Tuple
+ from ..utils.files import find_files_for_extension
+ import os
+ from .vcd import TokenKind, tokenize
+ import io
+ from collections import defaultdict
+ import json
+
+
+ class Sim(BaseAction):
+     def execute(
+         self, source: Path = Path.cwd(), cmd: Tuple[str] = ["make"]
+     ) -> Tuple[bool, str]:
+         logger.debug(f"Starting simulation run for source: {source}")
+
+         os.chdir(source)
+         logger.debug(f"Changed to {source}, starting sim run")
+         try:
+             result = subprocess.run(cmd, capture_output=True, text=True, env=os.environ)
+         except FileNotFoundError:
+             message = f"Did not find {cmd}"
+             logger.error(message)
+             return False, message
+
+         logger.debug("Sim run complete.")
+
+         message = ""
+         if result.stderr:
+             logger.error("\n" + result.stderr)
+         if result.stdout:
+             logger.info("\n" + result.stdout)
+             message = "See above logs for sim output."
+
+         if result.returncode != 0:
+             if not self.primitive.DEBUG:
+                 message = result.stderr
+             return False, message
+         else:
+             message = "Sim run successful."
+
+         return True, message
+
+     def upload_file(self, path: Path, prefix: str) -> str:
+         file_upload_response = self.primitive.files.file_upload(path, key_prefix=prefix)
+         return file_upload_response.json()["data"]["fileUpload"]["id"]
+
+     def collect_artifacts(
+         self, source: Path, job_run_id: str, organization_id: str
+     ) -> None:
+         # Split VCD artifacts
+         files = find_files_for_extension(source, ".vcd")
+         for file in files:
+             self.split_vcd(
+                 path=file, job_run_id=job_run_id, organization_id=organization_id
+             )
+
+         logger.debug("Uploading additional artifacts...")
+         # TODO: Figure out how to track ".log", ".history" files w/ analog stuff involved
+         file_ids = []
+         files = find_files_for_extension(source, (".xml", ".vcd", ".json"))
+         for file_path in files:
+             try:
+                 file_ids.append(
+                     self.upload_file(
+                         file_path,
+                         prefix=f"{job_run_id}/{str(PurePath(file_path).relative_to(Path(source)).parent)}",
+                     )
+                 )
+             except FileNotFoundError:
+                 logger.warning(f"{file_path} not found...")
+
+         logger.debug("Updating job run...")
+         if len(file_ids) > 0:
+             job_run_update_response = self.primitive.jobs.job_run_update(
+                 id=job_run_id, file_ids=file_ids
+             )
+             logger.success(job_run_update_response)
+
+     def split_vcd(self, path: Path, job_run_id: str, organization_id: str) -> None:
+         logger.debug("Parsing VCD file...")
+         with open(path, "rb") as f:
+             tokens = tokenize(io.BytesIO(f.read()))
+
+         metadata = defaultdict(dict)
+         header = defaultdict(dict)
+         data = defaultdict(list)
+
+         active_scope = header
+         previous_scope = None
+
+         current_time = 0
+
+         for token in tokens:
+             match token.kind:
+                 case TokenKind.TIMESCALE:
+                     metadata["timescaleUnit"] = token.data.unit.value
+                     metadata["timescaleMagnitude"] = token.data.magnitude.value
+                 case TokenKind.SCOPE:
+                     scope_type = str(token.data.type_)
+                     scope_ident = token.data.ident
+                     key = f"{scope_type}:{scope_ident}"
+                     active_scope[key] = {}
+
+                     previous_scope = active_scope
+                     active_scope = active_scope[key]
+                 case TokenKind.UPSCOPE:
+                     active_scope = previous_scope
+                 case TokenKind.VAR:
+                     active_scope[token.data.id_code] = {
+                         "id_code": token.data.id_code,
+                         "var_type": str(token.data.type_),
+                         "var_size": token.data.size,
+                         "reference": token.data.reference,
+                         "bit_index": str(token.data.bit_index),
+                     }
+                 case TokenKind.CHANGE_TIME:
+                     current_time = int(token.data)
+                 case TokenKind.CHANGE_SCALAR:
+                     data[token.data.id_code].append(
+                         (str(current_time), str(token.data.value))
+                     )
+                 case TokenKind.CHANGE_VECTOR:
+                     data[token.data.id_code].append(
+                         (str(current_time), str(token.data.value))
+                     )
+
+         # Add traces and write files
+         logger.debug("Writing traces...")
+
+         # Find name of file for json dumps
+         file_name = path.name.split(".")[0]
+
+         # Write metadata file
+         metadata_path = path.parent / f"{file_name}.metadata.vcd.json"
+         with open(metadata_path, "w") as f:
+             f.write(json.dumps(metadata))
+
+         # Write header file
+         header_path = path.parent / f"{file_name}.header.vcd.json"
+         with open(header_path, "w") as f:
+             f.write(json.dumps(metadata))
+
+         # Write data file
+         data_path = path.parent / f"{file_name}.data.vcd.json"
+         with open(data_path, "w") as f:
+             f.write(json.dumps(metadata))
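For each .vcd artifact it finds, split_vcd writes three sibling JSON files named after the VCD file's stem. A small sketch of the naming only, using a hypothetical input path:

from pathlib import Path

# Hypothetical VCD artifact; mirrors the naming logic in split_vcd above.
vcd_path = Path("build/waves/top.vcd")
file_name = vcd_path.name.split(".")[0]  # "top"

for kind in ("metadata", "header", "data"):
    print(vcd_path.parent / f"{file_name}.{kind}.vcd.json")
# build/waves/top.metadata.vcd.json
# build/waves/top.header.vcd.json
# build/waves/top.data.vcd.json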
primitive-0.1.23/src/primitive/utils/yaml.py
@@ -0,0 +1,23 @@
+ from pathlib import Path
+
+
+ def generate_script_from_yaml(yaml_config: dict, slug: str, destination: Path) -> None:
+     commands_blocks = []
+     if steps := yaml_config[slug]["steps"]:
+         for step in steps:
+             commands_blocks.append(step["run"])
+
+     script = f"""
+ #!/bin/bash
+
+ {"".join(commands_blocks)}
+ """
+
+     output_path = Path(destination / "run.sh")
+     with open(output_path, "w") as f:
+         f.write(script)
+
+     # Apply execute file permissions
+     output_path.chmod(0o744)
+
+     return output_path
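A short usage sketch for the new helper, assuming primitive 0.1.23 is installed; the slug, commands, and temporary destination are illustrative:

from pathlib import Path
from tempfile import TemporaryDirectory

from primitive.utils.yaml import generate_script_from_yaml

# Each "run" string is concatenated directly into the script body, so the
# trailing newlines here keep the commands on separate lines.
config = {"sim": {"steps": [{"run": "make build\n"}, {"run": "make test\n"}]}}

with TemporaryDirectory() as tmp:
    script_path = generate_script_from_yaml(config, slug="sim", destination=Path(tmp))
    # Writes <tmp>/run.sh containing a #!/bin/bash line followed by the
    # concatenated commands, and marks it executable (0o744).
    print(script_path.read_text())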
primitive-0.1.22/src/primitive/sim/actions.py
@@ -1,200 +0,0 @@
- from gql import gql
- from pathlib import Path
- from primitive.utils.actions import BaseAction
- from loguru import logger
- import subprocess
- from typing import Tuple, List
- from ..utils.files import find_files_for_extension
- import os
- from .vcd import TokenKind, tokenize
- import io
- from collections import defaultdict
- import urllib
- import json
-
-
- class Sim(BaseAction):
-     def execute(
-         self, source: Path = Path.cwd(), cmd: Tuple[str] = ["make"]
-     ) -> Tuple[bool, str]:
-         logger.debug(f"Starting simulation run for source: {source}")
-
-         os.chdir(source)
-         logger.debug(f"Changed to {source}, starting sim run")
-         try:
-             result = subprocess.run(cmd, capture_output=True, text=True, env=os.environ)
-         except FileNotFoundError:
-             message = f"Did not find {cmd}"
-             logger.error(message)
-             return False, message
-
-         logger.debug("Sim run complete.")
-
-         message = ""
-         if result.stderr:
-             logger.error("\n" + result.stderr)
-         if result.stdout:
-             logger.info("\n" + result.stdout)
-             message = "See above logs for sim output."
-
-         if result.returncode != 0:
-             if not self.primitive.DEBUG:
-                 message = result.stderr
-             return False, message
-         else:
-             message = "Sim run successful."
-
-         return True, message
-
-     def upload_file(self, path: Path, prefix: str) -> str:
-         file_upload_response = self.primitive.files.file_upload(path, key_prefix=prefix)
-         return file_upload_response.json()["data"]["fileUpload"]["id"]
-
-     def collect_artifacts(
-         self, source: Path, job_run_id: str, organization_id: str
-     ) -> None:
-         file_ids = []
-
-         # Look for VCD artifacts
-         files = find_files_for_extension(source, ".vcd")
-         for file in files:
-             trace_file_ids = self.generate_timeseries(
-                 path=file, job_run_id=job_run_id, organization_id=organization_id
-             )
-             file_ids.extend(trace_file_ids)
-
-         logger.debug("Uploading additional artifacts...")
-         files = find_files_for_extension(source, (".xml", ".vcd", ".log", ".history"))
-         for file_path in files:
-             try:
-                 file_ids.append(
-                     self.upload_file(
-                         file_path, prefix=f"{job_run_id}/{str(file_path.parent)}"
-                     )
-                 )
-             except FileNotFoundError:
-                 logger.warning(f"{file_path} not found...")
-
-         logger.debug("Updating job run...")
-         if len(file_ids) > 0:
-             job_run_update_response = self.primitive.jobs.job_run_update(
-                 id=job_run_id, file_ids=file_ids
-             )
-             logger.success(job_run_update_response)
-
-     def generate_timeseries(
-         self, path: Path, job_run_id: str, organization_id: str
-     ) -> List[str]:
-         logger.debug("Parsing VCD file...")
-         with open(path, "rb") as f:
-             tokens = tokenize(io.BytesIO(f.read()))
-
-         metadata = defaultdict(dict)
-         traces = defaultdict(list)
-         timescale_unit = "s"
-         timescale_magnitude = 1
-         active_module: str = ""
-         time: int = 0
-
-         for token in tokens:
-             match token.kind:
-                 case TokenKind.TIMESCALE:
-                     timescale_unit = token.data.unit.value
-                     timescale_magnitude = token.data.magnitude.value
-                 case TokenKind.SCOPE:
-                     active_module = token.data.ident
-                 case TokenKind.CHANGE_TIME:
-                     time = int(token.data)
-                 case TokenKind.VAR:
-                     var = {
-                         "id_code": token.data.id_code,
-                         "module": active_module,
-                         "var_type": str(token.data.type_),
-                         "var_size": token.data.size,
-                         "reference": token.data.reference,
-                         "bit_index": str(token.data.bit_index),
-                     }
-                     metadata[token.data.id_code] = var
-                 case TokenKind.CHANGE_SCALAR:
-                     traces[token.data.id_code].append(
-                         (str(time), str(token.data.value))
-                     )
-                 case TokenKind.CHANGE_VECTOR:
-                     traces[token.data.id_code].append(
-                         (str(time), str(token.data.value))
-                     )
-
-         # Add traces and write files
-         logger.debug("Uploading traces...")
-         trace_file_ids = []
-         for id_code, timeseries in traces.items():
-
-             def hashed(id_code):
-                 return urllib.parse.quote_plus(id_code, safe="")
-
-             file_path = path.parent / f"{hashed(id_code)}.vcd.json"
-             with open(file_path, "w") as f:
-                 f.write(json.dumps(timeseries))
-
-             trace_file_id = self.upload_file(
-                 file_path, prefix=f"{job_run_id}/{str(file_path.parent)}"
-             )
-             trace_file_ids.append(trace_file_id)
-
-             self.trace_create(
-                 id_code=id_code,
-                 module=metadata[id_code]["module"],
-                 var_type=metadata[id_code]["var_type"],
-                 var_size=metadata[id_code]["var_size"],
-                 reference=metadata[id_code]["reference"],
-                 bit_index=metadata[id_code]["bit_index"],
-                 timescale_unit=timescale_unit,
-                 timescale_magnitude=timescale_magnitude,
-                 organization=organization_id,
-                 file=trace_file_id,
-                 job_run=job_run_id,
-             )
-
-         return trace_file_ids
-
-     def trace_create(
-         self,
-         id_code: str,
-         module: str,
-         var_type: str,
-         var_size: int,
-         reference: str,
-         bit_index: str,
-         timescale_unit: str,
-         timescale_magnitude: int,
-         organization: str,
-         file: str,
-         job_run: str,
-     ):
-         mutation = gql(
-             """
-             mutation createTrace($input: TraceCreateInput!) {
-                 traceCreate(input: $input) {
-                     ... on Trace {
-                         id
-                     }
-                 }
-             }
-             """
-         )
-         input = {
-             "idCode": id_code,
-             "module": module,
-             "varType": var_type,
-             "varSize": var_size,
-             "reference": reference,
-             "bitIndex": bit_index,
-             "timescaleUnit": timescale_unit,
-             "timescaleMagnitude": timescale_magnitude,
-             "organization": organization,
-             "file": file,
-             "jobRun": job_run,
-         }
-         variables = {"input": input}
-         result = self.primitive.session.execute(mutation, variable_values=variables)
-         return result