siliconcompiler 0.31.0__py3-none-any.whl → 0.32.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. siliconcompiler/_metadata.py +1 -1
  2. siliconcompiler/apps/sc.py +1 -0
  3. siliconcompiler/apps/sc_install.py +19 -1
  4. siliconcompiler/apps/utils/summarize.py +1 -1
  5. siliconcompiler/core.py +33 -39
  6. siliconcompiler/flows/_common.py +10 -4
  7. siliconcompiler/{package.py → package/__init__.py} +64 -177
  8. siliconcompiler/package/git.py +84 -0
  9. siliconcompiler/package/https.py +97 -0
  10. siliconcompiler/report/dashboard/components/__init__.py +16 -6
  11. siliconcompiler/report/report.py +6 -6
  12. siliconcompiler/scheduler/__init__.py +19 -10
  13. siliconcompiler/scheduler/docker_runner.py +3 -3
  14. siliconcompiler/scheduler/run_node.py +6 -3
  15. siliconcompiler/schema/schema_obj.py +7 -11
  16. siliconcompiler/templates/tcl/manifest.tcl.j2 +1 -1
  17. siliconcompiler/tools/_common/tcl/sc_pin_constraints.tcl +3 -5
  18. siliconcompiler/tools/genfasm/genfasm.py +1 -1
  19. siliconcompiler/tools/openroad/_apr.py +15 -5
  20. siliconcompiler/tools/openroad/rdlroute.py +4 -0
  21. siliconcompiler/tools/openroad/scripts/apr/sc_init_floorplan.tcl +3 -3
  22. siliconcompiler/tools/openroad/scripts/common/reports.tcl +10 -0
  23. siliconcompiler/tools/openroad/scripts/common/write_images.tcl +27 -0
  24. siliconcompiler/tools/openroad/scripts/sc_rdlroute.tcl +3 -13
  25. siliconcompiler/tools/slang/__init__.py +123 -33
  26. siliconcompiler/tools/slang/elaborate.py +123 -18
  27. siliconcompiler/tools/slang/lint.py +20 -10
  28. siliconcompiler/tools/surelog/__init__.py +17 -4
  29. siliconcompiler/tools/vpr/vpr.py +86 -6
  30. siliconcompiler/toolscripts/_tools.json +4 -4
  31. siliconcompiler/units.py +10 -7
  32. siliconcompiler/use.py +5 -2
  33. {siliconcompiler-0.31.0.dist-info → siliconcompiler-0.32.0.dist-info}/METADATA +17 -22
  34. {siliconcompiler-0.31.0.dist-info → siliconcompiler-0.32.0.dist-info}/RECORD +38 -36
  35. {siliconcompiler-0.31.0.dist-info → siliconcompiler-0.32.0.dist-info}/WHEEL +1 -1
  36. {siliconcompiler-0.31.0.dist-info → siliconcompiler-0.32.0.dist-info}/entry_points.txt +4 -0
  37. {siliconcompiler-0.31.0.dist-info → siliconcompiler-0.32.0.dist-info}/LICENSE +0 -0
  38. {siliconcompiler-0.31.0.dist-info → siliconcompiler-0.32.0.dist-info}/top_level.txt +0 -0
siliconcompiler/_metadata.py CHANGED
@@ -1,5 +1,5 @@
 # Version number following semver standard.
-version = '0.31.0'
+version = '0.32.0'
 
 # Default server address for remote runs, if unspecified.
 default_server = 'https://server.siliconcompiler.com'
siliconcompiler/apps/sc.py CHANGED
@@ -99,6 +99,7 @@ def main():
 
         # Print Job Summary
         chip.summary()
+        chip.snapshot()
     except SiliconCompilerError:
         return 1
     except Exception as e:
siliconcompiler/apps/sc_install.py CHANGED
@@ -101,6 +101,19 @@ def print_machine_info():
     print("Scripts: ", _get_tool_script_dir())
 
 
+def __print_summary(successful, failed):
+    max_len = 64
+    print("#"*max_len)
+    if successful:
+        msg = f"Installed: {', '.join(sorted(successful))}"
+        print(f"# {msg}")
+
+    if failed:
+        msg = f"Failed to install: {failed}"
+        print(f"# {msg}")
+    print("#"*max_len)
+
+
 def _get_tool_script_dir():
     return Path(siliconcompiler.__file__).parent / "toolscripts"
 
@@ -242,6 +255,7 @@ To system debugging information (this should only be used to debug):
             args.tool.extend(tool_groups[group])
 
     tools_handled = set()
+    tools_completed = set()
    for tool in args.tool:
        if tool in tools_handled:
            continue
@@ -250,9 +264,14 @@ To system debugging information (this should only be used to debug):
            show_tool(tool, tools[tool])
        else:
            if not install_tool(tool, tools[tool], args.build_dir, args.prefix):
+                __print_summary(tools_completed, tool)
                return 1
+            else:
+                tools_completed.add(tool)
 
    if not args.show:
+        __print_summary(tools_completed, None)
+
        msgs = []
        for env, path in (
                ("PATH", "bin"),
@@ -262,7 +281,6 @@ To system debugging information (this should only be used to debug):
                os.path.expandvars(os.path.expanduser(p))
                for p in os.getenv(env, "").split(":")
            ]
-            print(envs)
            if check_path not in envs:
                msgs.extend([
                    "",
siliconcompiler/apps/utils/summarize.py CHANGED
@@ -37,7 +37,7 @@ def main():
         return 1
 
     # Print Job Summary
-    chip.summary(generate_image=False, generate_html=False)
+    chip.summary()
 
     return 0
 
siliconcompiler/core.py CHANGED
@@ -26,7 +26,6 @@ from siliconcompiler import _metadata
 from siliconcompiler import NodeStatus, SiliconCompilerError
 from siliconcompiler.report import _show_summary_table
 from siliconcompiler.report import _generate_summary_image, _open_summary_image
-from siliconcompiler.report import _generate_html_report, _open_html_report
 from siliconcompiler.report import Dashboard
 from siliconcompiler import package as sc_package
 import glob
@@ -211,6 +210,8 @@ class Chip:
 
         self.logger.addHandler(file_handler)
 
+        return file_handler
+
     ###########################################################################
     def _init_logger(self, step=None, index=None, in_run=False):
 
@@ -2889,7 +2890,7 @@
         return self._dash
 
     ###########################################################################
-    def summary(self, show_all_indices=False, generate_image=True, generate_html=False):
+    def summary(self, show_all_indices=False):
         '''
         Prints a summary of the compilation manifest.
 
@@ -2902,13 +2903,6 @@
             show_all_indices (bool): If True, displays metrics for all indices
                 of each step. If False, displays metrics only for winning
                 indices.
-            generate_image (bool): If True, generates a summary image featuring
-                a layout screenshot and a subset of metrics. Requires that the
-                current job has an ending node that generated a PNG file.
-            generate_html (bool): If True, generates an HTML report featuring a
-                metrics summary table and manifest tree view. The report will
-                include a layout screenshot if the current job has an ending node
-                that generated a PNG file.
 
         Examples:
             >>> chip.summary()
@@ -2921,36 +2915,36 @@
 
         _show_summary_table(self, flow, nodes_to_execute, show_all_indices=show_all_indices)
 
-        # Create a report for the Chip object which can be viewed in a web browser.
-        # Place report files in the build's root directory.
-        work_dir = self.getworkdir()
-        if os.path.isdir(work_dir):
-            # Mark file paths where the reports can be found if they were generated.
-            results_img = os.path.join(work_dir, f'{self.design}.png')
-            results_html = os.path.join(work_dir, 'report.html')
-
-            for path in (results_img, results_html):
-                if os.path.exists(path):
-                    os.remove(path)
-
-            if generate_image:
-                _generate_summary_image(self, results_img)
-
-            if generate_html:
-                _generate_html_report(self, flow, nodes_to_execute, results_html)
-
-            # dashboard does not generate any data
-            self.logger.info(f'Dashboard at "sc-dashboard -cfg {work_dir}/{self.design}.pkg.json"')
-
-            # Try to open the results and layout only if '-nodisplay' is not set.
-            # Priority: PNG > HTML > dashboard.
-            if not self.get('option', 'nodisplay'):
-                if os.path.isfile(results_img):
-                    _open_summary_image(results_img)
-                elif os.path.isfile(results_html):
-                    _open_html_report(self, results_html)
-                else:
-                    self._dashboard(wait=False)
+        # dashboard does not generate any data
+        self.logger.info('Dashboard at "sc-dashboard '
+                         f'-cfg {self.getworkdir()}/{self.design}.pkg.json"')
+
+    ###########################################################################
+    def snapshot(self, path=None, display=True):
+        '''
+        Creates a snapshot image of the job
+
+        Args:
+            path (str): Path to generate the image at, if not provided will default to
+                <job>/<design>.png
+            display (bool): If True, will open the image for viewing. If :keypath:`option,nodisplay`
+                is True, this argument will be ignored.
+
+        Examples:
+            >>> chip.snapshot()
+            Creates a snapshot image in the default location
+        '''
+
+        if not path:
+            path = os.path.join(self.getworkdir(), f'{self.design}.png')
+
+        if os.path.exists(path):
+            os.remove(path)
+
+        _generate_summary_image(self, path)
+
+        if os.path.isfile(path) and not self.get('option', 'nodisplay') and display:
+            _open_summary_image(path)
 
     ###########################################################################
     def clock(self, pin, period, jitter=0, mode='global'):
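
Note: image generation has moved out of summary() and into the new snapshot() method, which is why apps/sc.py now calls both and apps/utils/summarize.py dropped its keyword arguments; HTML report generation appears to be removed outright in this release rather than relocated. A minimal sketch of the 0.32.0 calling pattern, assuming a chip that has already completed run() (the design name and output path below are placeholders):

    import siliconcompiler

    chip = siliconcompiler.Chip('heartbeat')   # hypothetical design name
    # ... target setup, design inputs, chip.run() ...

    # summary() now only prints the metrics table and the sc-dashboard hint.
    chip.summary()

    # snapshot() produces the PNG that summary(generate_image=True) used to emit.
    # Default location is <workdir>/<design>.png; display=False (or the
    # [option, nodisplay] setting) keeps it from opening an image viewer.
    chip.snapshot(path='build/heartbeat.png', display=False)
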
siliconcompiler/flows/_common.py CHANGED
@@ -1,4 +1,5 @@
 from siliconcompiler.tools.surelog import parse as surelog_parse
+from siliconcompiler.tools.slang import elaborate as slang_preprocess
 from siliconcompiler.tools.chisel import convert as chisel_convert
 from siliconcompiler.tools.bambu import convert as bambu_convert
 from siliconcompiler.tools.bluespec import convert as bluespec_convert
@@ -7,6 +8,8 @@ from siliconcompiler.tools.sv2v import convert as sv2v_convert
 
 from siliconcompiler.tools.builtin import concatenate
 
+from siliconcompiler.tools.slang import has_pyslang
+
 
 def _make_docs(chip):
     from siliconcompiler.targets import freepdk45_demo
@@ -20,9 +23,12 @@ def _make_docs(chip):
     chip.use(freepdk45_demo)
 
 
-def __get_frontends(allow_system_verilog):
+def __get_frontends(allow_system_verilog, use_surelog=False):
+    parser = surelog_parse
+    if not use_surelog and has_pyslang():
+        parser = slang_preprocess
     systemverilog_frontend = [
-        ('import.verilog', surelog_parse)
+        ('import.verilog', parser)
     ]
     if not allow_system_verilog:
         systemverilog_frontend.append(('import.convert', sv2v_convert))
@@ -36,7 +42,7 @@ def __get_frontends(allow_system_verilog):
     }
 
 
-def setup_multiple_frontends(flow, allow_system_verilog=False):
+def setup_multiple_frontends(flow, allow_system_verilog=False, use_surelog=False):
     '''
     Sets of multiple frontends if different frontends are required.
 
@@ -45,7 +51,7 @@ def setup_multiple_frontends(flow, allow_system_verilog=False):
 
     concat_nodes = []
     flowname = flow.design
-    for _, pipe in __get_frontends(allow_system_verilog).items():
+    for _, pipe in __get_frontends(allow_system_verilog, use_surelog=use_surelog).items():
         prev_step = None
         for step, task in pipe:
             flow.node(flowname, step, task)
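
Note: the import.verilog task is now bound at flow-setup time: slang's elaborate task when pyslang is importable (per has_pyslang()), Surelog otherwise, with the new use_surelog flag forcing the previous behavior. A sketch of both choices, assuming the Flow object that the built-in flows pass into this helper (the flow names are placeholders):

    from siliconcompiler import Flow
    from siliconcompiler.flows._common import setup_multiple_frontends

    # Default: use slang's elaborate when pyslang is installed, else Surelog.
    auto_flow = Flow('autofrontend')            # hypothetical flow name
    setup_multiple_frontends(auto_flow, allow_system_verilog=True)

    # Opt out: keep the pre-0.32 Surelog frontend even if pyslang is present.
    surelog_flow = Flow('surelogfrontend')      # hypothetical flow name
    setup_multiple_frontends(surelog_flow, allow_system_verilog=True, use_surelog=True)
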
siliconcompiler/{package.py → package/__init__.py} RENAMED
@@ -1,25 +1,20 @@
 import os
-import requests
-import tarfile
-import zipfile
-from git import Repo, GitCommandError
 from urllib.parse import urlparse
 import importlib
-import shutil
 import re
 from siliconcompiler import SiliconCompilerError
 from siliconcompiler.utils import default_cache_dir, _resolve_env_vars
 import json
 from importlib.metadata import distributions, distribution
 import functools
-import fasteners
 import time
 from pathlib import Path
-from io import BytesIO
 
 from github import Github
 import github.Auth
 
+from siliconcompiler.utils import get_plugins
+
 
 def get_cache_path(chip):
     cache_path = chip.get('option', 'cachedir')
@@ -33,10 +28,45 @@ def get_cache_path(chip):
     return cache_path
 
 
-def _path(chip, package, download_handler):
-    if package in chip._packages:
-        return chip._packages[package]
+def get_download_cache_path(chip, package, ref):
+    cache_path = get_cache_path(chip)
+    if not os.path.exists(cache_path):
+        os.makedirs(cache_path, exist_ok=True)
+
+    if ref is None:
+        raise SiliconCompilerError(f'Reference is required for cached data: {package}', chip=chip)
+
+    return \
+        os.path.join(cache_path, f'{package}-{ref}'), \
+        os.path.join(cache_path, f'{package}-{ref}.lock')
+
+
+def _file_path_resolver(chip, package, path, ref, url, fetch):
+    return os.path.abspath(path.replace('file://', ''))
+
+
+def _python_path_resolver(chip, package, path, ref, url, fetch):
+    return path_from_python(chip, url.netloc)
 
+
+def _get_path_resolver(path):
+    url = urlparse(path)
+
+    for resolver in get_plugins("path_resolver"):
+        func = resolver(url)
+        if func:
+            return func, url
+
+    if url.scheme == "file":
+        return _file_path_resolver, url
+
+    if url.scheme == "python":
+        return _python_path_resolver, url
+
+    raise ValueError(f"{path} is not supported")
+
+
+def _path(chip, package, fetch):
     # Initially try retrieving data source from schema
     data = {}
     data['path'] = chip.get('package', 'source', package, 'path')
@@ -48,94 +78,45 @@ def _path(chip, package, download_handler):
 
     data['path'] = _resolve_env_vars(chip, data['path'], None, None)
 
-    url = urlparse(data['path'])
-
-    # check network drive for package data source
-    if data['path'].startswith('file://') or os.path.exists(data['path']):
-        path = os.path.abspath(data['path'].replace('file://', ''))
-        chip.logger.info(f'Found {package} data at {path}')
-        chip._packages[package] = path
-        return path
-    elif data['path'].startswith('python://'):
-        path = path_from_python(chip, url.netloc)
-        chip.logger.info(f'Found {package} data at {path}')
-        chip._packages[package] = path
-        return path
-
-    # location of the python package
-    cache_path = get_cache_path(chip)
-    if not os.path.exists(cache_path):
-        os.makedirs(cache_path, exist_ok=True)
-    project_id = f'{package}-{data.get("ref")}'
-    if url.scheme not in ['git', 'git+https', 'https', 'git+ssh', 'ssh'] or not project_id:
-        raise SiliconCompilerError(
-            f'Could not find data path in package {package}: {data["path"]}',
-            chip=chip)
-
-    data_path = os.path.join(cache_path, project_id)
-
-    if download_handler:
-        download_handler(chip,
-                         package,
-                         data,
-                         url,
-                         data_path,
-                         os.path.join(cache_path, f'{project_id}.lock'))
-
-    if os.path.exists(data_path):
-        if package not in chip._packages:
-            chip.logger.info(f'Saved {package} data to {data_path}')
-        chip._packages[package] = data_path
-        return data_path
+    if os.path.exists(data['path']):
+        # Path is already a path
+        return os.path.abspath(data['path'])
 
-    raise SiliconCompilerError(f'Extracting {package} data to {data_path} failed',
-                               chip=chip)
+    path_resolver, url = _get_path_resolver(data['path'])
 
+    return path_resolver(chip, package, data['path'], data['ref'], url, fetch)
 
-def path(chip, package):
+
+def path(chip, package, fetch=True):
     """
     Compute data source data path
     Additionally cache data source data if possible
     Parameters:
         package (str): Name of the data source
+        fetch (bool): Flag to indicate that the path should be fetched
     Returns:
         path: Location of data source on the local system
     """
 
-    return _path(chip, package, __download_data)
-
+    if package not in chip._packages:
+        changed = False
+        data_path = _path(chip, package, fetch)
 
-def __download_data(chip, package, data, url, data_path, data_path_lock):
-    data_lock = fasteners.InterProcessLock(data_path_lock)
+        if isinstance(data_path, tuple) and len(data_path) == 2:
+            data_path, changed = data_path
 
-    _aquire_data_lock(data_path, data_lock)
+        if os.path.exists(data_path):
+            if package not in chip._packages and changed:
+                chip.logger.info(f'Saved {package} data to {data_path}')
+            else:
+                chip.logger.info(f'Found {package} data at {data_path}')
 
-    # check cached package data source
-    if os.path.exists(data_path):
-        chip.logger.info(f'Found cached {package} data at {data_path}')
-        if url.scheme in ['git', 'git+https', 'ssh', 'git+ssh']:
-            try:
-                repo = Repo(data_path)
-                if repo.untracked_files or repo.index.diff("HEAD"):
-                    chip.logger.warning('The repo of the cached data is dirty.')
-                    _release_data_lock(data_lock)
-                    chip._packages[package] = data_path
-                    return
-            except GitCommandError:
-                chip.logger.warning('Deleting corrupted cache data.')
-                shutil.rmtree(path)
-        else:
-            _release_data_lock(data_lock)
             chip._packages[package] = data_path
-        return
-
-    # download package data source
-    if url.scheme in ['git', 'git+https', 'ssh', 'git+ssh']:
-        clone_synchronized(chip, package, data, data_path)
-    elif url.scheme == 'https':
-        extract_from_url(chip, package, data, data_path)
+        else:
+            raise SiliconCompilerError(f'Unable to locate {package} data in {data_path}',
+                                       chip=chip)
 
-    _release_data_lock(data_lock)
+    return chip._packages[package]
 
 
 def __get_filebased_lock(data_lock):
@@ -143,7 +124,7 @@ def __get_filebased_lock(data_lock):
     return Path(f'{base}.sc_lock')
 
 
-def _aquire_data_lock(data_path, data_lock):
+def aquire_data_lock(data_path, data_lock):
     # Wait a maximum of 10 minutes for other processes to finish
     max_seconds = 10 * 60
     try:
@@ -168,7 +149,7 @@ def _aquire_data_lock(data_path, data_lock):
             'please delete it.')
 
 
-def _release_data_lock(data_lock):
+def release_data_lock(data_lock):
     # Check if file based locking method was used
     lock_file = __get_filebased_lock(data_lock)
     if lock_file.exists():
@@ -178,100 +159,6 @@ def _release_data_lock(data_lock):
     data_lock.release()
 
 
-def clone_synchronized(chip, package, data, data_path):
-    url = urlparse(data['path'])
-    try:
-        clone_from_git(chip, package, data, data_path)
-    except GitCommandError as e:
-        if 'Permission denied' in repr(e):
-            if url.scheme in ['ssh', 'git+ssh']:
-                chip.logger.error('Failed to authenticate. Please setup your git ssh.')
-            elif url.scheme in ['git', 'git+https']:
-                chip.logger.error('Failed to authenticate. Please use a token or ssh.')
-        else:
-            chip.logger.error(str(e))
-
-
-def clone_from_git(chip, package, data, repo_path):
-    url = urlparse(data['path'])
-    if url.scheme in ['git', 'git+https'] and url.username:
-        chip.logger.warning('Your token is in the data source path and will be stored in the '
-                            'schema. If you do not want this set the env variable GIT_TOKEN '
-                            'or use ssh for authentication.')
-    if url.scheme in ['git+ssh', 'ssh']:
-        chip.logger.info(f'Cloning {package} data from {url.netloc}:{url.path[1:]}')
-        # Git requires the format git@github.com:org/repo instead of git@github.com/org/repo
-        repo = Repo.clone_from(f'{url.netloc}:{url.path[1:]}',
-                               repo_path,
-                               recurse_submodules=True)
-    else:
-        if os.environ.get('GIT_TOKEN') and not url.username:
-            url = url._replace(netloc=f'{os.environ.get("GIT_TOKEN")}@{url.hostname}')
-        url = url._replace(scheme='https')
-        chip.logger.info(f'Cloning {package} data from {url.geturl()}')
-        repo = Repo.clone_from(url.geturl(), repo_path, recurse_submodules=True)
-    chip.logger.info(f'Checking out {data["ref"]}')
-    repo.git.checkout(data["ref"])
-    for submodule in repo.submodules:
-        submodule.update(init=True)
-
-
-def extract_from_url(chip, package, data, data_path):
-    url = urlparse(data['path'])
-    data_url = data.get('path')
-    headers = {}
-    if os.environ.get('GIT_TOKEN') or url.username:
-        headers['Authorization'] = f'token {os.environ.get("GIT_TOKEN") or url.username}'
-    if "github" in data_url:
-        headers['Accept'] = 'application/octet-stream'
-    data_url = data['path']
-    if data_url.endswith('/'):
-        data_url = f"{data_url}{data['ref']}.tar.gz"
-    chip.logger.info(f'Downloading {package} data from {data_url}')
-    response = requests.get(data_url, stream=True, headers=headers)
-    if not response.ok:
-        raise SiliconCompilerError(f'Failed to download {package} data source.', chip=chip)
-
-    fileobj = BytesIO(response.content)
-    try:
-        with tarfile.open(fileobj=fileobj, mode='r|gz') as tar_ref:
-            tar_ref.extractall(path=data_path)
-    except tarfile.ReadError:
-        fileobj.seek(0)
-        # Try as zip
-        with zipfile.ZipFile(fileobj) as zip_ref:
-            zip_ref.extractall(path=data_path)
-
-    if 'github' in url.netloc and len(os.listdir(data_path)) == 1:
-        # Github inserts one folder at the highest level of the tar file
-        # this compensates for this behavior
-        gh_url = urlparse(data_url)
-
-        repo = gh_url.path.split('/')[2]
-
-        ref = gh_url.path.split('/')[-1]
-        if repo.endswith('.git'):
-            ref = data['ref']
-        elif ref.endswith('.tar.gz'):
-            ref = ref[0:-7]
-        elif ref.endswith('.tgz'):
-            ref = ref[0:-4]
-        else:
-            ref = ref.split('.')[0]
-
-        if ref.startswith('v'):
-            ref = ref[1:]
-
-        github_folder = f"{repo}-{ref}"
-
-        if github_folder in os.listdir(data_path):
-            # This moves all files one level up
-            git_path = os.path.join(data_path, github_folder)
-            for data_file in os.listdir(git_path):
-                shutil.move(os.path.join(git_path, data_file), data_path)
-            os.removedirs(git_path)
-
-
 def path_from_python(chip, python_package, append_path=None):
     try:
         module = importlib.import_module(python_package)
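
Note: path resolution now dispatches on the URL scheme through _get_path_resolver(), and path() gained a fetch flag plus per-chip caching in chip._packages. A minimal usage sketch, assuming a data source registered under the same schema keypaths that _path() reads above (the package name, URL and ref are placeholders):

    import siliconcompiler
    from siliconcompiler import package as sc_package

    chip = siliconcompiler.Chip('demo')   # hypothetical design name
    chip.set('package', 'source', 'mylib', 'path',
             'git+https://github.com/example/mylib.git')
    chip.set('package', 'source', 'mylib', 'ref', 'v1.0')

    # Resolves the source via the git resolver, cloning into the cache
    # directory (<cachedir>/mylib-v1.0) on first use.
    local = sc_package.path(chip, 'mylib')

    # fetch=False skips the download and only succeeds if the data is already
    # cached locally; otherwise path() raises SiliconCompilerError.
    cached = sc_package.path(chip, 'mylib', fetch=False)
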
siliconcompiler/package/git.py ADDED
@@ -0,0 +1,84 @@
+import shutil
+
+import os.path
+
+from git import Repo, GitCommandError
+from fasteners import InterProcessLock
+
+from siliconcompiler.package import get_download_cache_path
+from siliconcompiler.package import aquire_data_lock, release_data_lock
+
+
+def get_resolver(url):
+    if url.scheme in ("git", "git+https", "git+ssh", "ssh"):
+        return git_resolver
+    return None
+
+
+def git_resolver(chip, package, path, ref, url, fetch):
+    data_path, data_path_lock = get_download_cache_path(chip, package, ref)
+
+    if not fetch:
+        return data_path, False
+
+    # Acquire lock
+    data_lock = InterProcessLock(data_path_lock)
+    aquire_data_lock(data_path, data_lock)
+
+    if os.path.exists(data_path):
+        try:
+            repo = Repo(data_path)
+            if repo.untracked_files or repo.index.diff("HEAD"):
+                chip.logger.warning('The repo of the cached data is dirty.')
+                release_data_lock(data_lock)
+                return data_path, False
+        except GitCommandError:
+            chip.logger.warning('Deleting corrupted cache data.')
+            shutil.rmtree(data_path)
+
+    clone_synchronized(chip, package, path, ref, url, data_path)
+
+    release_data_lock(data_lock)
+
+    return data_path, True
+
+
+def clone_synchronized(chip, package, path, ref, url, data_path):
+    try:
+        clone_from_git(chip, package, path, ref, url, data_path)
+    except GitCommandError as e:
+        if 'Permission denied' in repr(e):
+            if url.scheme in ['ssh', 'git+ssh']:
+                chip.logger.error('Failed to authenticate. Please setup your git ssh.')
+            elif url.scheme in ['git', 'git+https']:
+                chip.logger.error('Failed to authenticate. Please use a token or ssh.')
+        else:
+            chip.logger.error(str(e))
+
+
+def clone_from_git(chip, package, path, ref, url, data_path):
+    if url.scheme in ['git', 'git+https'] and url.username:
+        chip.logger.warning('Your token is in the data source path and will be stored in the '
+                            'schema. If you do not want this set the env variable GIT_TOKEN '
+                            'or use ssh for authentication.')
+    if url.scheme in ['git+ssh']:
+        chip.logger.info(f'Cloning {package} data from {url.netloc}:{url.path[1:]}')
+        # Git requires the format git@github.com:org/repo instead of git@github.com/org/repo
+        repo = Repo.clone_from(f'{url.netloc}/{url.path[1:]}',
+                               data_path,
+                               recurse_submodules=True)
+    elif url.scheme in ['ssh']:
+        chip.logger.info(f'Cloning {package} data from {path}')
+        repo = Repo.clone_from(path,
+                               data_path,
+                               recurse_submodules=True)
+    else:
+        if os.environ.get('GIT_TOKEN') and not url.username:
+            url = url._replace(netloc=f'{os.environ.get("GIT_TOKEN")}@{url.hostname}')
+        url = url._replace(scheme='https')
+        chip.logger.info(f'Cloning {package} data from {url.geturl()}')
+        repo = Repo.clone_from(url.geturl(), data_path, recurse_submodules=True)
+    chip.logger.info(f'Checking out {ref}')
+    repo.git.checkout(ref)
+    for submodule in repo.submodules:
+        submodule.update(init=True)
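
Note: git.py doubles as a reference implementation of the new "path_resolver" plugin contract probed by _get_path_resolver() via get_plugins("path_resolver"): a plugin exposes get_resolver(url), which returns a resolver callable for schemes it handles (or None), and the resolver itself takes (chip, package, path, ref, url, fetch) and returns either a plain path or a (path, changed) tuple. Below is a hedged sketch of a third-party resolver following that contract; the s3 scheme is hypothetical, the actual download step is stubbed out, and the mechanism by which get_plugins() discovers such plugins is not shown in this diff:

    # hypothetical_s3_resolver.py -- illustrative sketch only
    import os

    from fasteners import InterProcessLock

    from siliconcompiler.package import get_download_cache_path
    from siliconcompiler.package import aquire_data_lock, release_data_lock


    def get_resolver(url):
        # Called for every registered "path_resolver" plugin; return a resolver
        # callable for schemes we handle, None otherwise.
        if url.scheme == "s3":          # hypothetical scheme
            return s3_resolver
        return None


    def s3_resolver(chip, package, path, ref, url, fetch):
        data_path, data_path_lock = get_download_cache_path(chip, package, ref)

        # With fetch=False, only report where the data would be cached.
        if not fetch:
            return data_path, False

        data_lock = InterProcessLock(data_path_lock)
        aquire_data_lock(data_path, data_lock)

        changed = False
        if not os.path.exists(data_path):
            os.makedirs(data_path, exist_ok=True)
            # A real plugin would download s3://<bucket>/<key> at <ref> into
            # data_path here; the transfer itself is out of scope for this diff.
            changed = True

        release_data_lock(data_lock)

        # package.path() unpacks a (path, changed) tuple, as with git_resolver.
        return data_path, changed
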