siliconcompiler 0.31.1__py3-none-any.whl → 0.32.1__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as published to a supported public registry. It is provided for informational purposes only.
Files changed (46)
  1. siliconcompiler/_metadata.py +1 -1
  2. siliconcompiler/apps/_common.py +23 -6
  3. siliconcompiler/apps/sc.py +1 -0
  4. siliconcompiler/apps/sc_dashboard.py +7 -1
  5. siliconcompiler/apps/sc_show.py +6 -0
  6. siliconcompiler/apps/utils/summarize.py +1 -1
  7. siliconcompiler/core.py +37 -42
  8. siliconcompiler/flows/_common.py +10 -4
  9. siliconcompiler/fpgas/lattice_ice40.py +6 -16
  10. siliconcompiler/package/__init__.py +18 -61
  11. siliconcompiler/package/git.py +4 -1
  12. siliconcompiler/package/github.py +124 -0
  13. siliconcompiler/package/https.py +12 -2
  14. siliconcompiler/report/dashboard/components/__init__.py +18 -7
  15. siliconcompiler/report/dashboard/components/flowgraph.py +3 -0
  16. siliconcompiler/report/dashboard/utils/__init__.py +5 -2
  17. siliconcompiler/report/report.py +6 -6
  18. siliconcompiler/report/utils.py +3 -0
  19. siliconcompiler/scheduler/run_node.py +4 -1
  20. siliconcompiler/schema/schema_obj.py +3 -2
  21. siliconcompiler/schema/utils.py +0 -3
  22. siliconcompiler/targets/fpgaflow_demo.py +0 -2
  23. siliconcompiler/tools/openroad/_apr.py +15 -5
  24. siliconcompiler/tools/openroad/scripts/common/reports.tcl +10 -0
  25. siliconcompiler/tools/openroad/scripts/common/write_images.tcl +27 -0
  26. siliconcompiler/tools/slang/__init__.py +123 -33
  27. siliconcompiler/tools/slang/elaborate.py +123 -18
  28. siliconcompiler/tools/slang/lint.py +20 -10
  29. siliconcompiler/tools/surelog/__init__.py +17 -4
  30. siliconcompiler/toolscripts/_tools.json +3 -3
  31. siliconcompiler/toolscripts/ubuntu24/install-icarus.sh +2 -1
  32. siliconcompiler/toolscripts/ubuntu24/install-netgen.sh +1 -1
  33. siliconcompiler/units.py +10 -7
  34. siliconcompiler/use.py +5 -2
  35. siliconcompiler/utils/__init__.py +5 -14
  36. {siliconcompiler-0.31.1.dist-info → siliconcompiler-0.32.1.dist-info}/METADATA +3 -6
  37. {siliconcompiler-0.31.1.dist-info → siliconcompiler-0.32.1.dist-info}/RECORD +41 -45
  38. {siliconcompiler-0.31.1.dist-info → siliconcompiler-0.32.1.dist-info}/WHEEL +1 -1
  39. {siliconcompiler-0.31.1.dist-info → siliconcompiler-0.32.1.dist-info}/entry_points.txt +1 -0
  40. siliconcompiler/fpgas/vpr_example.py +0 -116
  41. siliconcompiler/toolscripts/rhel8/install-ghdl.sh +0 -25
  42. siliconcompiler/toolscripts/rhel8/install-yosys-moosic.sh +0 -17
  43. siliconcompiler/toolscripts/rhel8/install-yosys-slang.sh +0 -22
  44. siliconcompiler/toolscripts/rhel8/install-yosys.sh +0 -23
  45. {siliconcompiler-0.31.1.dist-info → siliconcompiler-0.32.1.dist-info}/LICENSE +0 -0
  46. {siliconcompiler-0.31.1.dist-info → siliconcompiler-0.32.1.dist-info}/top_level.txt +0 -0
siliconcompiler/_metadata.py CHANGED
@@ -1,5 +1,5 @@
  # Version number following semver standard.
- version = '0.31.1'
+ version = '0.32.1'

  # Default server address for remote runs, if unspecified.
  default_server = 'https://server.siliconcompiler.com'
siliconcompiler/apps/_common.py CHANGED
@@ -98,11 +98,28 @@ def pick_manifest(chip, src_file=None):
  if chip.get('option', 'jobname') not in all_manifests[chip.design]:
  jobname = list(all_manifests[chip.design].keys())[0]

+ step, index = chip.get('arg', 'step'), chip.get('arg', 'index')
+ if step and not index:
+ all_nodes = list(all_manifests[chip.design][jobname].keys())
+ try:
+ all_nodes.remove((None, None))
+ except ValueError:
+ pass
+ for found_step, found_index in sorted(all_nodes):
+ if found_step == step:
+ index = found_index
+ if index is None:
+ index = '0'
+ if step and index:
+ if (step, index) in all_manifests[chip.design][jobname]:
+ return all_manifests[chip.design][jobname][(step, index)]
+ else:
+ chip.logger.error(f'{step}{index} is not a valid node.')
+ return None
+
  if (None, None) in all_manifests[chip.design][jobname]:
- manifest = all_manifests[chip.design][jobname][None, None]
- else:
- # pick newest manifest
- manifest = list(sorted(all_manifests[chip.design][jobname].values(),
- key=lambda file: os.stat(file).st_ctime))[-1]
+ return all_manifests[chip.design][jobname][None, None]

- return manifest
+ # pick newest manifest
+ return list(sorted(all_manifests[chip.design][jobname].values(),
+ key=lambda file: os.stat(file).st_ctime))[-1]
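For readers skimming the hunk above, here is a rough standalone sketch of the node-selection order the reworked pick_manifest() now follows. The select_manifest() helper and its manifests argument are hypothetical illustrations, not part of the package; only the ordering mirrors the diff.

    import os

    def select_manifest(manifests, step=None, index=None):
        # manifests: dict keyed by (step, index); (None, None) holds the job-level manifest
        if step and not index:
            nodes = [node for node in sorted(manifests) if node != (None, None)]
            for found_step, found_index in nodes:
                if found_step == step:
                    index = found_index  # keep the last matching index, as in the diff
            if index is None:
                index = '0'
        if step and index:
            # the app logs an error and returns None when the node does not exist
            return manifests.get((step, index))
        if (None, None) in manifests:
            return manifests[(None, None)]
        # otherwise fall back to the newest manifest file on disk
        return sorted(manifests.values(), key=lambda f: os.stat(f).st_ctime)[-1]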
siliconcompiler/apps/sc.py CHANGED
@@ -99,6 +99,7 @@ def main():

  # Print Job Summary
  chip.summary()
+ chip.snapshot()
  except SiliconCompilerError:
  return 1
  except Exception as e:
siliconcompiler/apps/sc_dashboard.py CHANGED
@@ -63,6 +63,8 @@ To include another chip object to compare to:
  if manifest:
  chip.logger.info(f'Loading manifest: {manifest}')
  chip.read_manifest(manifest)
+ else:
+ manifest = chip.get('option', 'cfg')

  # Error checking
  design = chip.get('design')
@@ -70,6 +72,10 @@ To include another chip object to compare to:
  chip.logger.error('Design not loaded')
  return 1

+ if not manifest:
+ chip.logger.error('Unable to determine job manifest')
+ return 2
+
  graph_chips = []
  if switches['graph_cfg']:
  for i, name_and_file_path in enumerate(switches['graph_cfg']):
@@ -86,7 +92,7 @@ To include another chip object to compare to:
  raise ValueError(('graph_cfg accepts a max of 2 values, you supplied'
  f' {args} in "-graph_cfg {name_and_file_path}"'))
  if not os.path.isfile(file_path):
- raise ValueError(f'not a valid file path : {file_path}')
+ raise ValueError(f'not a valid file path: {file_path}')
  graph_chip = siliconcompiler.core.Chip(design='')
  graph_chip.read_manifest(file_path)
  graph_chips.append({
siliconcompiler/apps/sc_show.py CHANGED
@@ -111,6 +111,8 @@ def main():
  if manifest:
  chip.logger.info(f'Loading manifest: {manifest}')
  chip.read_manifest(manifest)
+ else:
+ manifest = chip.get('option', 'cfg')

  # Error checking
  design = chip.get('design')
@@ -120,6 +122,10 @@ def main():
  '-cfg, -design, and/or inputs.')
  return 1

+ if not manifest:
+ chip.logger.error('Unable to determine job manifest')
+ return 2
+
  # Read in file
  if filename:
  chip.logger.info(f"Displaying {filename}")
siliconcompiler/apps/utils/summarize.py CHANGED
@@ -37,7 +37,7 @@ def main():
  return 1

  # Print Job Summary
- chip.summary(generate_image=False, generate_html=False)
+ chip.summary()

  return 0
siliconcompiler/core.py CHANGED
@@ -26,7 +26,6 @@ from siliconcompiler import _metadata
  from siliconcompiler import NodeStatus, SiliconCompilerError
  from siliconcompiler.report import _show_summary_table
  from siliconcompiler.report import _generate_summary_image, _open_summary_image
- from siliconcompiler.report import _generate_html_report, _open_html_report
  from siliconcompiler.report import Dashboard
  from siliconcompiler import package as sc_package
  import glob
@@ -211,6 +210,8 @@ class Chip:

  self.logger.addHandler(file_handler)

+ return file_handler
+
  ###########################################################################
  def _init_logger(self, step=None, index=None, in_run=False):

@@ -2889,7 +2890,7 @@ class Chip:
  return self._dash

  ###########################################################################
- def summary(self, show_all_indices=False, generate_image=True, generate_html=False):
+ def summary(self, show_all_indices=False):
  '''
  Prints a summary of the compilation manifest.

@@ -2902,13 +2903,6 @@ class Chip:
  show_all_indices (bool): If True, displays metrics for all indices
  of each step. If False, displays metrics only for winning
  indices.
- generate_image (bool): If True, generates a summary image featuring
- a layout screenshot and a subset of metrics. Requires that the
- current job has an ending node that generated a PNG file.
- generate_html (bool): If True, generates an HTML report featuring a
- metrics summary table and manifest tree view. The report will
- include a layout screenshot if the current job has an ending node
- that generated a PNG file.

  Examples:
  >>> chip.summary()
@@ -2921,36 +2915,36 @@

  _show_summary_table(self, flow, nodes_to_execute, show_all_indices=show_all_indices)

- # Create a report for the Chip object which can be viewed in a web browser.
- # Place report files in the build's root directory.
- work_dir = self.getworkdir()
- if os.path.isdir(work_dir):
- # Mark file paths where the reports can be found if they were generated.
- results_img = os.path.join(work_dir, f'{self.design}.png')
- results_html = os.path.join(work_dir, 'report.html')
-
- for path in (results_img, results_html):
- if os.path.exists(path):
- os.remove(path)
-
- if generate_image:
- _generate_summary_image(self, results_img)
-
- if generate_html:
- _generate_html_report(self, flow, nodes_to_execute, results_html)
-
- # dashboard does not generate any data
- self.logger.info(f'Dashboard at "sc-dashboard -cfg {work_dir}/{self.design}.pkg.json"')
-
- # Try to open the results and layout only if '-nodisplay' is not set.
- # Priority: PNG > HTML > dashboard.
- if not self.get('option', 'nodisplay'):
- if os.path.isfile(results_img):
- _open_summary_image(results_img)
- elif os.path.isfile(results_html):
- _open_html_report(self, results_html)
- else:
- self.dashboard(wait=False)
+ # dashboard does not generate any data
+ self.logger.info('Dashboard at "sc-dashboard '
+ f'-cfg {self.getworkdir()}/{self.design}.pkg.json"')
+
+ ###########################################################################
+ def snapshot(self, path=None, display=True):
+ '''
+ Creates a snapshot image of the job
+
+ Args:
+ path (str): Path to generate the image at, if not provided will default to
+ <job>/<design>.png
+ display (bool): If True, will open the image for viewing. If :keypath:`option,nodisplay`
+ is True, this argument will be ignored.
+
+ Examples:
+ >>> chip.snapshot()
+ Creates a snapshot image in the default location
+ '''
+
+ if not path:
+ path = os.path.join(self.getworkdir(), f'{self.design}.png')
+
+ if os.path.exists(path):
+ os.remove(path)
+
+ _generate_summary_image(self, path)
+
+ if os.path.isfile(path) and not self.get('option', 'nodisplay') and display:
+ _open_summary_image(path)

  ###########################################################################
  def clock(self, pin, period, jitter=0, mode='global'):
@@ -3245,9 +3239,10 @@ class Chip:
  if sc_step and sc_index:
  search_nodes.append((sc_step, sc_index))
  elif sc_step:
- for check_step, check_index in nodes_to_execute(self, flow):
- if sc_step == check_step:
- search_nodes.append((check_step, check_index))
+ if flow is not None:
+ for check_step, check_index in nodes_to_execute(self, flow):
+ if sc_step == check_step:
+ search_nodes.append((check_step, check_index))
  else:
  if flow is not None:
  for nodes in _get_flowgraph_execution_order(self,
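A minimal usage sketch of the summary/snapshot split introduced above, based only on the new summary() and snapshot() signatures in this diff; the 'heartbeat' design name and output path are placeholders, and a completed run is assumed.

    import siliconcompiler

    chip = siliconcompiler.Chip('heartbeat')   # placeholder design, assumes a finished job
    chip.summary()                             # metrics table plus the sc-dashboard hint only
    chip.snapshot()                            # writes <jobdir>/heartbeat.png, opens it unless -nodisplay is set
    chip.snapshot(path='snap/final.png',       # custom output location
                  display=False)               # skip opening an image viewer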
siliconcompiler/flows/_common.py CHANGED
@@ -1,4 +1,5 @@
  from siliconcompiler.tools.surelog import parse as surelog_parse
+ from siliconcompiler.tools.slang import elaborate as slang_preprocess
  from siliconcompiler.tools.chisel import convert as chisel_convert
  from siliconcompiler.tools.bambu import convert as bambu_convert
  from siliconcompiler.tools.bluespec import convert as bluespec_convert
@@ -7,6 +8,8 @@ from siliconcompiler.tools.sv2v import convert as sv2v_convert

  from siliconcompiler.tools.builtin import concatenate

+ from siliconcompiler.tools.slang import has_pyslang
+

  def _make_docs(chip):
  from siliconcompiler.targets import freepdk45_demo
@@ -20,9 +23,12 @@ def _make_docs(chip):
  chip.use(freepdk45_demo)


- def __get_frontends(allow_system_verilog):
+ def __get_frontends(allow_system_verilog, use_surelog=False):
+ parser = surelog_parse
+ if not use_surelog and has_pyslang():
+ parser = slang_preprocess
  systemverilog_frontend = [
- ('import.verilog', surelog_parse)
+ ('import.verilog', parser)
  ]
  if not allow_system_verilog:
  systemverilog_frontend.append(('import.convert', sv2v_convert))
@@ -36,7 +42,7 @@ def __get_frontends(allow_system_verilog):
  }


- def setup_multiple_frontends(flow, allow_system_verilog=False):
+ def setup_multiple_frontends(flow, allow_system_verilog=False, use_surelog=False):
  '''
  Sets of multiple frontends if different frontends are required.

@@ -45,7 +51,7 @@ def setup_multiple_frontends(flow, allow_system_verilog=False):

  concat_nodes = []
  flowname = flow.design
- for _, pipe in __get_frontends(allow_system_verilog).items():
+ for _, pipe in __get_frontends(allow_system_verilog, use_surelog=use_surelog).items():
  prev_step = None
  for step, task in pipe:
  flow.node(flowname, step, task)
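A short sketch of the parser selection added above: slang elaboration is preferred for the Verilog import step whenever pyslang is available, unless the caller asks for Surelog. The pick_verilog_parser() helper is illustrative only and not part of the package.

    from siliconcompiler.tools.slang import has_pyslang, elaborate as slang_preprocess
    from siliconcompiler.tools.surelog import parse as surelog_parse

    def pick_verilog_parser(use_surelog=False):
        # mirrors __get_frontends(): fall back to surelog when pyslang is missing
        if not use_surelog and has_pyslang():
            return slang_preprocess
        return surelog_parse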
siliconcompiler/fpgas/lattice_ice40.py CHANGED
@@ -1,5 +1,4 @@
- import siliconcompiler
- from siliconcompiler.utils import register_sc_data_source
+ from siliconcompiler import Chip, FPGA


  ####################################################
@@ -13,22 +12,13 @@ def setup():
  yosys + nextpnr
  '''

- vendor = 'lattice'
-
- lut_size = '4'
-

  all_fpgas = []
- all_part_names = [
- "ice40up5k-sg48",
- ]
-
- for part_name in all_part_names:
- fpga = siliconcompiler.FPGA(part_name, package='siliconcompiler_data')
- register_sc_data_source(fpga)
+ for part_name in ("ice40up5k-sg48",):
+ fpga = FPGA(part_name)

- fpga.set('fpga', part_name, 'vendor', vendor)
- fpga.set('fpga', part_name, 'lutsize', lut_size)
+ fpga.set('fpga', part_name, 'vendor', 'lattice')
+ fpga.set('fpga', part_name, 'lutsize', 4)
  all_fpgas.append(fpga)

@@ -37,5 +27,5 @@ def setup():
  #########################
  if __name__ == "__main__":
- for fpga in setup(siliconcompiler.Chip('<fpga>')):
+ for fpga in setup(Chip('<fpga>')):
  fpga.write_manifest(f'{fpga.design}.json')
siliconcompiler/package/__init__.py CHANGED
@@ -10,9 +10,6 @@ import functools
  import time
  from pathlib import Path

- from github import Github
- import github.Auth
-
  from siliconcompiler.utils import get_plugins


@@ -41,11 +38,11 @@ def get_download_cache_path(chip, package, ref):
  os.path.join(cache_path, f'{package}-{ref}.lock')


- def _file_path_resolver(chip, package, path, ref, url):
+ def _file_path_resolver(chip, package, path, ref, url, fetch):
  return os.path.abspath(path.replace('file://', ''))


- def _python_path_resolver(chip, package, path, ref, url):
+ def _python_path_resolver(chip, package, path, ref, url, fetch):
  return path_from_python(chip, url.netloc)


@@ -66,7 +63,7 @@ def _get_path_resolver(path):
  raise ValueError(f"{path} is not supported")


- def _path(chip, package):
+ def _path(chip, package, fetch):
  # Initially try retrieving data source from schema
  data = {}
  data['path'] = chip.get('package', 'source', package, 'path')
@@ -84,22 +81,23 @@

  path_resolver, url = _get_path_resolver(data['path'])

- return path_resolver(chip, package, data['path'], data['ref'], url)
+ return path_resolver(chip, package, data['path'], data['ref'], url, fetch)


- def path(chip, package):
+ def path(chip, package, fetch=True):
  """
  Compute data source data path
  Additionally cache data source data if possible
  Parameters:
  package (str): Name of the data source
+ fetch (bool): Flag to indicate that the path should be fetched
  Returns:
  path: Location of data source on the local system
  """

  if package not in chip._packages:
  changed = False
- data_path = _path(chip, package)
+ data_path = _path(chip, package, fetch)

  if isinstance(data_path, tuple) and len(data_path) == 2:
  data_path, changed = data_path
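A hedged sketch of the new fetch flag on package.path(), as declared in the hunk above: with fetch=False only the local cache location is computed, without downloading. The 'siliconcompiler_data' source name appears elsewhere in this release and is assumed to already be registered on the chip.

    from siliconcompiler import Chip
    from siliconcompiler import package as sc_package

    chip = Chip('demo')  # placeholder design
    # Default behavior (fetch=True) downloads/caches the data source as before;
    # fetch=False only resolves where the data would live locally.
    cache_dir = sc_package.path(chip, 'siliconcompiler_data', fetch=False)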
@@ -218,58 +216,6 @@ def register_python_data_source(chip,
  ref=ref)


- def register_private_github_data_source(chip,
- package_name,
- repository,
- release,
- artifact):
- gh = Github(auth=github.Auth.Token(__get_github_auth_token(package_name)))
- repo = gh.get_repo(repository)
-
- if not release:
- release = repo.get_latest_release().tag_name
-
- url = None
- for repo_release in repo.get_releases():
- if repo_release.tag_name == release:
- for asset in repo_release.assets:
- if asset.name == artifact:
- url = asset.url
-
- if not url:
- raise ValueError(f'Unable to find release asset: {repository}/{release}/{artifact}')
-
- chip.register_source(
- package_name,
- path=url,
- ref=release)
-
-
- def __get_github_auth_token(package_name):
- token_name = package_name.upper()
- for tok in ('#', '$', '&', '-', '=', '!', '/'):
- token_name = token_name.replace(tok, '')
-
- search_env = (
- f'GITHUB_{token_name}_TOKEN',
- 'GITHUB_TOKEN',
- 'GIT_TOKEN'
- )
-
- token = None
- for env in search_env:
- token = os.environ.get(env, None)
-
- if token:
- break
-
- if not token:
- raise ValueError('Unable to determine authorization token for GitHub, '
- f'please set one of the following environmental variables: {search_env}')
-
- return token
-
-
  @functools.lru_cache(maxsize=1)
  def __get_python_module_mapping():
  mapping = {}
@@ -297,3 +243,14 @@ def __get_python_module_mapping():
  mapping.setdefault(module, []).append(dist_name)

  return mapping
+
+
+ def register_private_github_data_source(chip,
+ package_name,
+ repository,
+ release,
+ artifact):
+ chip.register_source(
+ package_name,
+ path=f"github+private://{repository}/{release}/{artifact}",
+ ref=release)
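A minimal sketch of what the relocated register_private_github_data_source() now records, per the added lines above; the repository, release, and artifact names are hypothetical.

    from siliconcompiler import Chip
    from siliconcompiler.package import register_private_github_data_source

    chip = Chip('demo')  # placeholder design
    register_private_github_data_source(chip,
                                         'example_ip',
                                         'example-org/example-ip',
                                         'v1.0',
                                         'ip.tar.gz')
    # Equivalent to registering the source directly:
    # chip.register_source('example_ip',
    #                      path='github+private://example-org/example-ip/v1.0/ip.tar.gz',
    #                      ref='v1.0')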
siliconcompiler/package/git.py CHANGED
@@ -15,9 +15,12 @@ def get_resolver(url):
  return None


- def git_resolver(chip, package, path, ref, url):
+ def git_resolver(chip, package, path, ref, url, fetch):
  data_path, data_path_lock = get_download_cache_path(chip, package, ref)

+ if not fetch:
+ return data_path, False
+
  # Acquire lock
  data_lock = InterProcessLock(data_path_lock)
  aquire_data_lock(data_path, data_lock)
siliconcompiler/package/github.py ADDED
@@ -0,0 +1,124 @@
+ import os
+ from fasteners import InterProcessLock
+ from github import Github, Auth
+ from github.GithubException import UnknownObjectException
+ from urllib.parse import urlparse
+ from siliconcompiler.package import get_download_cache_path
+ from siliconcompiler.package import aquire_data_lock, release_data_lock
+ from siliconcompiler.package.https import _http_resolver
+
+
+ def get_resolver(url):
+ if url.scheme in ("github",):
+ return github_any_resolver
+ if url.scheme in ("github+private",):
+ return github_private_resolver
+ return None
+
+
+ def github_any_resolver(chip, package, path, ref, url, fetch):
+ data_path, data_path_lock = get_download_cache_path(chip, package, ref)
+
+ if not fetch:
+ return data_path, False
+
+ # Acquire lock
+ data_lock = InterProcessLock(data_path_lock)
+ aquire_data_lock(data_path, data_lock)
+
+ if os.path.exists(data_path):
+ release_data_lock(data_lock)
+ return data_path, False
+
+ try:
+ return _github_resolver(chip, package, path, ref, url, data_lock)
+ except UnknownObjectException:
+ return github_private_resolver(chip, package, path, ref, url, fetch, data_lock=data_lock)
+
+
+ def github_private_resolver(chip, package, path, ref, url, fetch, data_lock=None):
+ data_path, data_path_lock = get_download_cache_path(chip, package, ref)
+
+ if not fetch:
+ return data_path, False
+
+ if not data_lock:
+ # Acquire lock
+ data_lock = InterProcessLock(data_path_lock)
+ aquire_data_lock(data_path, data_lock)
+
+ if os.path.exists(data_path):
+ release_data_lock(data_lock)
+ return data_path, False
+
+ gh = Github(auth=Auth.Token(__get_github_auth_token(package)))
+
+ return _github_resolver(chip, package, path, ref, url, data_lock, gh=gh)
+
+
+ def _github_resolver(chip, package, path, ref, url, data_lock, gh=None):
+ if not gh:
+ gh = Github()
+
+ url_parts = (url.netloc, *url.path.split("/")[1:])
+
+ if len(url_parts) != 4:
+ raise ValueError(
+ f"{path} is not in the proper form: <owner>/<repository>/<version>/<artifact>")
+
+ repository = "/".join(url_parts[0:2])
+ release = url_parts[2]
+ artifact = url_parts[3]
+
+ release_url = __get_release_url(gh, repository, release, artifact)
+
+ return _http_resolver(chip, package, release_url, ref, urlparse(release_url), data_lock)
+
+
+ def __get_release_url(gh, repository, release, artifact):
+ if artifact == f"{release}.zip":
+ return f"https://github.com/{repository}/archive/refs/tags/{release}.zip"
+ if artifact == f"{release}.tar.gz":
+ return f"https://github.com/{repository}/archive/refs/tags/{release}.tar.gz"
+
+ repo = gh.get_repo(repository)
+
+ if not release:
+ release = repo.get_latest_release().tag_name
+
+ url = None
+ for repo_release in repo.get_releases():
+ if repo_release.tag_name == release:
+ for asset in repo_release.assets:
+ if asset.name == artifact:
+ url = asset.url
+
+ if not url:
+ raise ValueError(f'Unable to find release asset: {repository}/{release}/{artifact}')
+
+ return url
+
+
+ def __get_github_auth_token(package_name):
+ token_name = package_name.upper()
+ for tok in ('#', '$', '&', '-', '=', '!', '/'):
+ token_name = token_name.replace(tok, '')
+
+ search_env = (
+ f'GITHUB_{token_name}_TOKEN',
+ 'GITHUB_TOKEN',
+ 'GIT_TOKEN'
+ )
+
+ token = None
+ for env in search_env:
+ token = os.environ.get(env, None)
+
+ if token:
+ break
+
+ if not token:
+ raise ValueError('Unable to determine authorization token for GitHub, '
+ f'please set one of the following environmental variables: {search_env}')
+
+ return token
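A hedged sketch of how a data source would point at the new resolvers; the owner, repository, tag, and asset names are hypothetical. Paths must follow the <owner>/<repository>/<version>/<artifact> form checked in _github_resolver(), and private sources need a token in GITHUB_<PACKAGE>_TOKEN, GITHUB_TOKEN, or GIT_TOKEN (searched in that order by __get_github_auth_token()).

    from siliconcompiler import Chip

    chip = Chip('demo')  # placeholder design
    # github:// tries anonymous access first and falls back to the private resolver.
    chip.register_source('example_ip',
                         path='github://example-org/example-ip/v2.3/example-ip.tar.gz',
                         ref='v2.3')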
siliconcompiler/package/https.py CHANGED
@@ -21,16 +21,26 @@ def get_resolver(url):
  return None


- def http_resolver(chip, package, path, ref, url):
+ def http_resolver(chip, package, path, ref, url, fetch):
  data_path, data_path_lock = get_download_cache_path(chip, package, ref)

- if os.path.exists(data_path):
+ if not fetch:
  return data_path, False

  # Acquire lock
  data_lock = InterProcessLock(data_path_lock)
  aquire_data_lock(data_path, data_lock)

+ if os.path.exists(data_path):
+ release_data_lock(data_lock)
+ return data_path, False
+
+ return _http_resolver(chip, package, path, ref, url, data_lock)
+
+
+ def _http_resolver(chip, package, path, ref, url, data_lock):
+ data_path, _ = get_download_cache_path(chip, package, ref)
+
  extract_from_url(chip, package, path, ref, url, data_path)

  release_data_lock(data_lock)
siliconcompiler/report/dashboard/components/__init__.py CHANGED
@@ -9,6 +9,7 @@ import streamlit_antd_components as sac

  from PIL import Image

+ import siliconcompiler
  from siliconcompiler import __version__ as sc_version
  from siliconcompiler import utils
  from siliconcompiler.report import report
@@ -31,7 +32,7 @@ SC_MENU = {
  "Report a Bug":
  '''https://github.com/siliconcompiler/siliconcompiler/issues''',
  "About": "\n\n".join(SC_ABOUT)}
- SC_DATA_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..', 'data'))
+ SC_DATA_ROOT = os.path.abspath(os.path.join(os.path.dirname(siliconcompiler.__file__), 'data'))
  SC_LOGO_PATH = os.path.join(SC_DATA_ROOT, 'logo.png')
  SC_FONT_PATH = os.path.join(SC_DATA_ROOT, 'RobotoMono', 'RobotoMono-Regular.ttf')

@@ -433,7 +434,7 @@ def node_file_tree_viewer(chip, step, index):
  lookup = {}
  tree_items = []

- file_metrics = report.get_metrics_source(chip, step, index)
+ metrics_source, file_metrics = report.get_metrics_source(chip, step, index)
  work_dir = chip.getworkdir(step=step, index=index)

  def make_item(file):
@@ -446,12 +447,22 @@ def node_file_tree_viewer(chip, step, index):

  check_file = os.path.relpath(file['value'], work_dir)
  if check_file in file_metrics:
- for metric in file_metrics[check_file]:
- if len(item.tag) < 5:
- item.tag.append(sac.Tag(metric, color='green'))
- else:
- item.tag.append(sac.Tag('metrics...', color='geekblue'))
+ metrics = set(file_metrics[check_file])
+ primary_source = set()
+ if check_file in metrics_source:
+ primary_source = set(metrics_source[check_file])
+ metrics = metrics - primary_source
+
+ for color, metric_set in (('blue', primary_source), ('green', metrics)):
+ if len(item.tag) >= 5:
  break
+
+ for metric in metric_set:
+ if len(item.tag) < 5:
+ item.tag.append(sac.Tag(metric, color=color))
+ else:
+ item.tag.append(sac.Tag('metrics...', color='geekblue'))
+ break
  item.tooltip = "metrics: " + ", ".join(file_metrics[check_file])

  if 'children' in file: