siliconcompiler 0.34.1__py3-none-any.whl → 0.34.2__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only.
- siliconcompiler/__init__.py +14 -2
- siliconcompiler/_metadata.py +1 -1
- siliconcompiler/apps/sc_show.py +1 -1
- siliconcompiler/constraints/__init__.py +17 -0
- siliconcompiler/constraints/asic_component.py +378 -0
- siliconcompiler/constraints/asic_floorplan.py +449 -0
- siliconcompiler/constraints/asic_pins.py +489 -0
- siliconcompiler/constraints/asic_timing.py +517 -0
- siliconcompiler/core.py +3 -3
- siliconcompiler/dependencyschema.py +10 -174
- siliconcompiler/design.py +235 -118
- siliconcompiler/flowgraph.py +27 -14
- siliconcompiler/library.py +133 -0
- siliconcompiler/metric.py +94 -72
- siliconcompiler/metrics/__init__.py +7 -0
- siliconcompiler/metrics/asic.py +245 -0
- siliconcompiler/metrics/fpga.py +220 -0
- siliconcompiler/package/__init__.py +138 -35
- siliconcompiler/package/github.py +6 -10
- siliconcompiler/packageschema.py +256 -12
- siliconcompiler/pathschema.py +226 -0
- siliconcompiler/project.py +459 -0
- siliconcompiler/scheduler/docker.py +2 -3
- siliconcompiler/scheduler/run_node.py +2 -1
- siliconcompiler/scheduler/scheduler.py +4 -13
- siliconcompiler/scheduler/schedulernode.py +25 -17
- siliconcompiler/scheduler/taskscheduler.py +2 -1
- siliconcompiler/schema/__init__.py +0 -2
- siliconcompiler/schema/baseschema.py +147 -24
- siliconcompiler/schema/editableschema.py +14 -6
- siliconcompiler/schema/journal.py +23 -15
- siliconcompiler/schema/namedschema.py +6 -4
- siliconcompiler/schema/parameter.py +34 -19
- siliconcompiler/schema/parametertype.py +2 -0
- siliconcompiler/schema/parametervalue.py +198 -15
- siliconcompiler/schema/schema_cfg.py +18 -14
- siliconcompiler/schema_obj.py +5 -3
- siliconcompiler/tool.py +199 -10
- siliconcompiler/toolscripts/_tools.json +4 -4
- {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.2.dist-info}/METADATA +3 -3
- {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.2.dist-info}/RECORD +45 -35
- siliconcompiler/schema/packageschema.py +0 -101
- {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.2.dist-info}/WHEEL +0 -0
- {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.2.dist-info}/entry_points.txt +0 -0
- {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.2.dist-info}/licenses/LICENSE +0 -0
- {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.2.dist-info}/top_level.txt +0 -0
siliconcompiler/metrics/fpga.py

@@ -0,0 +1,220 @@
+from siliconcompiler import MetricSchema
+
+from siliconcompiler.schema import EditableSchema, Parameter, Scope, PerNode
+from siliconcompiler.schema.utils import trim
+
+
+class FPGAMetricsSchema(MetricSchema):
+    def __init__(self):
+        super().__init__()
+
+        schema = EditableSchema(self)
+
+        for item, description in [
+                ('unconstrained', 'unconstrained timing paths')]:
+            schema.insert(
+                item,
+                Parameter(
+                    'int',
+                    scope=Scope.JOB,
+                    shorthelp=f"Metric: total {item}",
+                    switch=f"-metric_{item} 'step index <int>'",
+                    example=[
+                        f"cli: -metric_{item} 'dfm 0 0'",
+                        f"api: chip.set('metric', '{item}', 0, step='dfm', index=0)"],
+                    pernode=PerNode.REQUIRED,
+                    help=trim(f"""Metric tracking the total number of {description} on a
+                    per step and index basis.""")))
+
+        for item, description in [
+                ('luts', 'FPGA LUTs used'),
+                ('dsps', 'FPGA DSP slices used'),
+                ('brams', 'FPGA BRAM tiles used')]:
+            schema.insert(
+                item,
+                Parameter(
+                    'int',
+                    scope=Scope.JOB,
+                    shorthelp=f"Metric: {description}",
+                    switch=f"-metric_{item} 'step index <int>'",
+                    example=[
+                        f"cli: -metric_{item} 'place 0 100'",
+                        f"api: chip.set('metric', '{item}', 100, step='place', index=0)"],
+                    pernode=PerNode.REQUIRED,
+                    help=trim(f"""
+                    Metric tracking the total {description} used by the design as reported
+                    by the implementation tool. There is no standardized definition
+                    for this metric across vendors, so metric comparisons can
+                    generally only be done between runs on identical tools and
+                    device families.""")))
+
+        schema.insert(
+            'utilization',
+            Parameter(
+                'float',
+                unit='%',
+                scope=Scope.JOB,
+                shorthelp="Metric: area utilization",
+                switch="-metric_utilization 'step index <float>'",
+                example=[
+                    "cli: -metric_utilization 'place 0 50.00'",
+                    "api: chip.set('metric', 'utilization', 50.00, step='place', index=0)"],
+                pernode=PerNode.REQUIRED,
+                help=trim("""
+                Metric tracking the area utilization of the design calculated as
+                100 * (cellarea/totalarea).""")))
+
+        schema.insert(
+            'logicdepth',
+            Parameter(
+                'int',
+                scope=Scope.JOB,
+                shorthelp="Metric: logic depth",
+                switch="-metric_logicdepth 'step index <int>'",
+                example=[
+                    "cli: -metric_logicdepth 'place 0 8'",
+                    "api: chip.set('metric', 'logicdepth', 8, step='place', index=0)"],
+                pernode=PerNode.REQUIRED,
+                help=trim("""
+                Metric tracking the logic depth of the design. This is determined
+                by the number of logic gates between the start of the critital timing
+                path to the end of the path.""")))
+
+        for item, description in [
+                ('peakpower', 'worst case total peak power'),
+                ('averagepower', 'average workload power'),
+                ('leakagepower', 'leakage power with rails active but without any dynamic '
+                                 'switching activity')]:
+            schema.insert(
+                item,
+                Parameter(
+                    'float',
+                    unit='mw',
+                    scope=Scope.JOB,
+                    shorthelp=f"Metric: {item}",
+                    switch=f"-metric_{item} 'step index <float>'",
+                    example=[
+                        f"cli: -metric_{item} 'place 0 0.01'",
+                        f"api: chip.set('metric', '{item}', 0.01, step='place', index=0)"],
+                    pernode=PerNode.REQUIRED,
+                    help=trim(f"""
+                    Metric tracking the {description} of the design specified on a per step
+                    and index basis. Power metric depend heavily on the method
+                    being used for extraction: dynamic vs static, workload
+                    specification (vcd vs saif), power models, process/voltage/temperature.
+                    The power {item} metric tries to capture the data that would
+                    usually be reflected inside a datasheet given the appropriate
+                    footnote conditions.""")))
+
+        for item, description in [
+                ('holdpaths', 'hold'),
+                ('setuppaths', 'setup')]:
+            schema.insert(
+                item,
+                Parameter(
+                    'int',
+                    scope=Scope.JOB,
+                    shorthelp=f"Metric: {item}",
+                    switch=f"-metric_{item} 'step index <int>'",
+                    example=[
+                        f"cli: -metric_{item} 'place 0 10'",
+                        f"api: chip.set('metric', '{item}', 10, step='place', index=0)"],
+                    pernode=PerNode.REQUIRED,
+                    help=trim(f"""
+                    Metric tracking the total number of timing paths violating {description}
+                    constraints.""")))
+
+        for item, description in [
+                ('holdslack', 'worst hold slack (positive or negative)'),
+                ('holdwns', 'worst negative hold slack (positive values truncated to zero)'),
+                ('holdtns', 'total negative hold slack (TNS)'),
+                ('holdskew', 'hold clock skew'),
+                ('setupslack', 'worst setup slack (positive or negative)'),
+                ('setupwns', 'worst negative setup slack (positive values truncated to zero)'),
+                ('setuptns', 'total negative setup slack (TNS)'),
+                ('setupskew', 'setup clock skew')]:
+            schema.insert(
+                item,
+                Parameter(
+                    'float',
+                    unit='ns',
+                    scope=Scope.JOB,
+                    shorthelp=f"Metric: {item}",
+                    switch=f"-metric_{item} 'step index <float>'",
+                    example=[
+                        f"cli: -metric_{item} 'place 0 0.01'",
+                        f"api: chip.set('metric', '{item}', 0.01, step='place', index=0)"],
+                    pernode=PerNode.REQUIRED,
+                    help=trim(f"""
+                    Metric tracking the {description} on a per step and index basis.""")))
+
+        for item, description in [
+                ('fmax', 'maximum clock frequency')]:
+            schema.insert(
+                item,
+                Parameter(
+                    'float',
+                    unit='Hz',
+                    scope=Scope.JOB,
+                    shorthelp=f"Metric: {item}",
+                    switch=f"-metric_{item} 'step index <float>'",
+                    example=[
+                        f"cli: -metric_{item} 'place 0 100e6'",
+                        f"api: chip.set('metric', '{item}', 100e6, step='place', index=0)"],
+                    pernode=PerNode.REQUIRED,
+                    help=trim(f"""
+                    Metric tracking the {description} on a per step and index basis.""")))
+
+        for item, description in [
+                ('macros', 'macros'),
+                ('cells', 'cell instances'),
+                ('registers', 'register instances'),
+                ('pins', 'pins'),
+                ('nets', 'nets')]:
+            schema.insert(
+                item,
+                Parameter(
+                    'int',
+                    scope=Scope.JOB,
+                    shorthelp=f"Metric: {item}",
+                    switch=f"-metric_{item} 'step index <int>'",
+                    example=[
+                        f"cli: -metric_{item} 'place 0 100'",
+                        f"api: chip.set('metric', '{item}', 50, step='place', index=0)"],
+                    pernode=PerNode.REQUIRED,
+                    help=trim(f"""
+                    Metric tracking the total number of {description} in the design
+                    on a per step and index basis.""")))
+
+        schema.insert(
+            'wirelength',
+            Parameter(
+                'float',
+                scope=Scope.JOB,
+                shorthelp="Metric: wirelength",
+                switch="-metric_wirelength 'step index <float>'",
+                example=[
+                    "cli: -metric_wirelength 'place 0 100.0'",
+                    "api: chip.set('metric', 'wirelength', 50.0, step='place', index=0)"],
+                pernode=PerNode.REQUIRED,
+                help=trim("""
+                Metric tracking the total wirelength of the design on a per step
+                and index basis.""")))
+
+        schema.insert(
+            'overflow',
+            Parameter(
+                'int',
+                scope=Scope.JOB,
+                shorthelp="Metric: overflow",
+                switch="-metric_overflow 'step index <int>'",
+                example=[
+                    "cli: -metric_overflow 'place 0 0'",
+                    "api: chip.set('metric', 'overflow', 50, step='place', index=0)"],
+                pernode=PerNode.REQUIRED,
+                help=trim("""
+                Metric tracking the total number of overflow tracks for the routing
+                on per step and index basis. Any non-zero number suggests an over
+                congested design. To analyze where the congestion is occurring
+                inspect the router log files for detailed per metal overflow
+                reporting and open up the design to find routing hotspots.""")))
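The new file registers every FPGA metric as a per-node `Parameter`. For orientation, here is a hedged sketch of storing and reading one of these values; the `set()`/`get()` step/index keywords mirror the `example` strings embedded above, but the exact schema API is an assumption, not verified against the released wheel.

```python
# Hedged sketch: write and read back an FPGA metric for one flowgraph node.
# The set()/get() signature is assumed from the example strings in the diff.
from siliconcompiler.metrics.fpga import FPGAMetricsSchema

metrics = FPGAMetricsSchema()
metrics.set('luts', 3200, step='place', index=0)         # pernode=REQUIRED -> step/index needed
metrics.set('utilization', 47.5, step='place', index=0)
print(metrics.get('luts', step='place', index=0))         # -> 3200
```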
siliconcompiler/package/__init__.py

@@ -1,12 +1,15 @@
 import contextlib
 import functools
+import hashlib
 import importlib
 import json
 import logging
 import os
+import random
 import re
 import time
 import threading
+import uuid
 
 import os.path
 
@@ -50,13 +53,16 @@ class Resolver:
     _RESOLVERS_LOCK = threading.Lock()
     _RESOLVERS = {}
 
+    __CACHE_LOCK = threading.Lock()
+    __CACHE = {}
+
     def __init__(self, name, root, source, reference=None):
         self.__name = name
         self.__root = root
         self.__source = source
         self.__reference = reference
         self.__changed = False
-        self.
+        self.__cacheid = None
 
         if self.__root and hasattr(self.__root, "logger"):
             self.__logger = self.__root.logger.getChild(f"resolver-{self.name}")
@@ -126,34 +132,80 @@ class Resolver:
         return self.urlparse.netloc
 
     @property
-    def changed(self):
+    def changed(self) -> bool:
         change = self.__changed
         self.__changed = False
         return change
 
+    @property
+    def cache_id(self) -> str:
+        if self.__cacheid is None:
+            hash = hashlib.sha1()
+            hash.update(self.__source.encode())
+            if self.__reference:
+                hash.update(self.__reference.encode())
+            else:
+                hash.update("".encode())
+
+            self.__cacheid = hash.hexdigest()
+        return self.__cacheid
+
     def set_changed(self):
         self.__changed = True
 
-    def set_cache(self, cache):
-        self.__cache = cache
-
     def resolve(self):
         raise NotImplementedError("child class must implement this")
 
+    @staticmethod
+    def __get_root_id(root):
+        STORAGE = "__Resolver_cache_id"
+        if not getattr(root, STORAGE, None):
+            setattr(root, STORAGE, uuid.uuid4().hex)
+        return getattr(root, STORAGE)
+
+    @staticmethod
+    def get_cache(root, name: str = None):
+        with Resolver.__CACHE_LOCK:
+            root_id = Resolver.__get_root_id(root)
+            if root_id not in Resolver.__CACHE:
+                Resolver.__CACHE[root_id] = {}
+
+            if name:
+                return Resolver.__CACHE[root_id].get(name, None)
+
+            return Resolver.__CACHE[root_id].copy()
+
+    @staticmethod
+    def set_cache(root, name: str, path: str):
+        with Resolver.__CACHE_LOCK:
+            root_id = Resolver.__get_root_id(root)
+            if root_id not in Resolver.__CACHE:
+                Resolver.__CACHE[root_id] = {}
+            Resolver.__CACHE[root_id][name] = path
+
+    @staticmethod
+    def reset_cache(root):
+        with Resolver.__CACHE_LOCK:
+            root_id = Resolver.__get_root_id(root)
+            if root_id in Resolver.__CACHE:
+                del Resolver.__CACHE[root_id]
+
     def get_path(self):
-
-
+        cache_path = Resolver.get_cache(self.__root, self.cache_id)
+        if cache_path:
+            return cache_path
 
         path = self.resolve()
         if not os.path.exists(path):
             raise FileNotFoundError(f"Unable to locate {self.name} at {path}")
 
-        if self.changed
+        if self.changed:
             self.logger.info(f'Saved {self.name} data to {path}')
         else:
             self.logger.info(f'Found {self.name} data at {path}')
-
-
+
+        Resolver.set_cache(self.__root, self.cache_id, path)
+        return path
 
     def __resolve_env(self, path):
         env_save = os.environ.copy()
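This hunk replaces the old per-instance `set_cache()` with a class-level, lock-protected cache, keyed first by the root object (stamped with a uuid) and then by `cache_id`, a SHA-1 over the source plus optional reference. A hedged sketch of the resulting static API; the import path is inferred from the file layout, and the root object below is a stand-in:

```python
# Hedged sketch of the new class-level Resolver cache (names/paths illustrative).
from siliconcompiler.package import Resolver


class Root:
    pass  # any object works; Resolver stamps it with a uuid-based id


root = Root()
Resolver.set_cache(root, "deadbeef", "/tmp/sc_cache/lambdalib")  # cache_id -> resolved path
print(Resolver.get_cache(root, "deadbeef"))   # "/tmp/sc_cache/lambdalib"
print(Resolver.get_cache(root))               # copy of the whole mapping for this root
Resolver.reset_cache(root)                    # forget everything cached for this root
```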
@@ -198,6 +250,7 @@ class RemoteResolver(Resolver):
         if not root:
             return Path(default_path)
 
+        path = None
         if root.valid('option', 'cachedir'):
             path = root.get('option', 'cachedir')
             if path:
@@ -249,32 +302,51 @@ class RemoteResolver(Resolver):
         return RemoteResolver._CACHE_LOCKS[self.name]
 
     @contextlib.contextmanager
-    def
+    def __thread_lock(self):
         lock = self.thread_lock()
         lock_acquired = False
         try:
-
-
-
-
-
-
-
-
-
-
-            if max_seconds == 0:
-                raise RuntimeError(f'Failed to access {self.cache_path}. '
-                                   f'Lock {sc_data_path_lock} still exists.')
-            time.sleep(1)
-            max_seconds -= 1
-            sc_data_path_lock.touch()
-            lock_acquired = True
-            if lock_acquired:
-                yield
+            timeout = self.timeout
+            while timeout > 0:
+                if lock.acquire_lock(timeout=1):
+                    lock_acquired = True
+                    break
+                sleep_time = random.randint(1, max(1, int(timeout / 10)))
+                timeout -= sleep_time + 1
+                time.sleep(sleep_time)
+            if lock_acquired:
+                yield
         finally:
             if lock.locked():
                 lock.release()
+
+            if not lock_acquired:
+                raise RuntimeError(f'Failed to access {self.cache_path}. '
+                                   f'Another thread is currently holding the lock.')
+
+    @contextlib.contextmanager
+    def __file_lock(self):
+        data_path_lock = InterProcessLock(self.lock_file)
+        lock_acquired = False
+        sc_data_path_lock = None
+        try:
+            try:
+                lock_acquired = data_path_lock.acquire(timeout=self.timeout)
+            except (OSError, RuntimeError):
+                if not lock_acquired:
+                    sc_data_path_lock = Path(self.sc_lock_file)
+                    max_seconds = self.timeout
+                    while sc_data_path_lock.exists():
+                        if max_seconds == 0:
+                            raise RuntimeError(f'Failed to access {self.cache_path}. '
+                                               f'Lock {sc_data_path_lock} still exists.')
+                        time.sleep(1)
+                        max_seconds -= 1
+                    sc_data_path_lock.touch()
+                    lock_acquired = True
+            if lock_acquired:
+                yield
+        finally:
             if lock_acquired:
                 if data_path_lock.acquired:
                     data_path_lock.release()
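The new `__thread_lock()` retries acquisition with a randomized back-off instead of polling a lock file once per second. Below is a self-contained illustration of the same retry pattern using a plain `threading.Lock`; the names and numbers are illustrative only and not part of siliconcompiler:

```python
# Stand-alone illustration of the acquire-with-random-backoff loop used in
# __thread_lock(); same control flow, generic standard-library lock.
import random
import threading
import time


def acquire_with_backoff(lock: threading.Lock, timeout: int) -> bool:
    acquired = False
    while timeout > 0:
        if lock.acquire(timeout=1):        # short blocking attempt
            acquired = True
            break
        sleep_time = random.randint(1, max(1, int(timeout / 10)))
        timeout -= sleep_time + 1          # account for the sleep plus the attempt
        time.sleep(sleep_time)
    return acquired


lock = threading.Lock()
print(acquire_with_backoff(lock, timeout=10))   # True on an uncontended lock
```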
@@ -286,6 +358,12 @@ class RemoteResolver(Resolver):
                 f'{self.lock_file} is still locked, if this is a mistake, '
                 'please delete it.')
 
+    @contextlib.contextmanager
+    def lock(self):
+        with self.__thread_lock():
+            with self.__file_lock():
+                yield
+
     def resolve_remote(self):
         raise NotImplementedError("child class must implement this")
 
@@ -324,11 +402,8 @@ class FileResolver(Resolver):
 
     @property
     def urlpath(self):
-
-
-            return parse.netloc
-        else:
-            return parse.path
+        # Rebuild URL and remove scheme prefix
+        return self.urlparse.geturl()[7:]
 
     def resolve(self):
         return os.path.abspath(self.urlpath)
|
|
|
412
487
|
path=path,
|
|
413
488
|
ref=ref)
|
|
414
489
|
|
|
490
|
+
@staticmethod
|
|
491
|
+
def set_dataroot(root,
|
|
492
|
+
package_name,
|
|
493
|
+
python_module,
|
|
494
|
+
alternative_path,
|
|
495
|
+
alternative_ref=None,
|
|
496
|
+
python_module_path_append=None):
|
|
497
|
+
'''
|
|
498
|
+
Helper function to register a python module as data source with an alternative in case
|
|
499
|
+
the module is not installed in an editable state
|
|
500
|
+
'''
|
|
501
|
+
# check if installed in an editable state
|
|
502
|
+
if PythonPathResolver.is_python_module_editable(python_module):
|
|
503
|
+
if python_module_path_append:
|
|
504
|
+
path = PythonPathResolver(
|
|
505
|
+
python_module, root, f"python://{python_module}").resolve()
|
|
506
|
+
path = os.path.abspath(os.path.join(path, python_module_path_append))
|
|
507
|
+
else:
|
|
508
|
+
path = f"python://{python_module}"
|
|
509
|
+
ref = None
|
|
510
|
+
else:
|
|
511
|
+
path = alternative_path
|
|
512
|
+
ref = alternative_ref
|
|
513
|
+
|
|
514
|
+
root.set_dataroot(name=package_name,
|
|
515
|
+
path=path,
|
|
516
|
+
tag=ref)
|
|
517
|
+
|
|
415
518
|
def resolve(self):
|
|
416
519
|
module = importlib.import_module(self.urlpath)
|
|
417
520
|
python_path = os.path.dirname(module.__file__)
|
|
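A hedged sketch of calling the new helper; the stand-in root object, the package name, the git URL, and the tag below are illustrative assumptions rather than values taken from the diff:

```python
# Illustrative only: prefer a local editable install as the data root, fall back
# to a pinned git reference otherwise. DemoRoot stands in for a schema object.
from siliconcompiler.package import PythonPathResolver


class DemoRoot:
    """Stand-in for an object that provides set_dataroot(name=, path=, tag=)."""
    def set_dataroot(self, name, path, tag=None):
        print(f"registered dataroot {name!r}: path={path!r}, tag={tag!r}")


PythonPathResolver.set_dataroot(
    DemoRoot(),
    package_name="scdemo",                    # hypothetical names/URLs below
    python_module="siliconcompiler",
    alternative_path="git+https://github.com/siliconcompiler/siliconcompiler",
    alternative_ref="v0.34.2")
```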
siliconcompiler/package/github.py

@@ -53,17 +53,13 @@ class GithubResolver(HTTPResolver):
         if not release:
             release = repo.get_latest_release().tag_name
 
-
-
-
-
-
-            url = asset.url
+        repo_release = repo.get_release(release)
+        if repo_release:
+            for asset in repo_release.assets:
+                if asset.name == artifact:
+                    return asset.url
 
-
-        raise ValueError(f'Unable to find release asset: {repository}/{release}/{artifact}')
-
-        return url
+        raise ValueError(f'Unable to find release asset: {repository}/{release}/{artifact}')
 
     def __get_gh_auth(self):
         token_name = self.name.upper()