siliconcompiler 0.35.0__py3-none-any.whl → 0.35.1__py3-none-any.whl
This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
- siliconcompiler/_metadata.py +1 -1
- siliconcompiler/apps/_common.py +3 -2
- siliconcompiler/apps/sc_dashboard.py +3 -1
- siliconcompiler/apps/sc_install.py +149 -37
- siliconcompiler/apps/smake.py +9 -3
- siliconcompiler/checklist.py +3 -3
- siliconcompiler/data/demo_fpga/z1000_yosys_config.json +24 -0
- siliconcompiler/design.py +51 -45
- siliconcompiler/flowgraph.py +2 -2
- siliconcompiler/library.py +23 -12
- siliconcompiler/package/__init__.py +77 -49
- siliconcompiler/package/git.py +11 -6
- siliconcompiler/package/github.py +11 -6
- siliconcompiler/package/https.py +6 -4
- siliconcompiler/pdk.py +23 -16
- siliconcompiler/scheduler/scheduler.py +30 -22
- siliconcompiler/scheduler/schedulernode.py +60 -50
- siliconcompiler/scheduler/taskscheduler.py +52 -32
- siliconcompiler/schema/baseschema.py +88 -69
- siliconcompiler/schema/docs/schemagen.py +4 -3
- siliconcompiler/schema/editableschema.py +5 -5
- siliconcompiler/schema/journal.py +19 -13
- siliconcompiler/schema/namedschema.py +16 -10
- siliconcompiler/schema/parameter.py +64 -37
- siliconcompiler/schema/parametervalue.py +126 -80
- siliconcompiler/schema/safeschema.py +16 -7
- siliconcompiler/schema/utils.py +3 -1
- siliconcompiler/schema_support/cmdlineschema.py +9 -9
- siliconcompiler/schema_support/dependencyschema.py +12 -7
- siliconcompiler/schema_support/filesetschema.py +15 -10
- siliconcompiler/schema_support/metric.py +29 -17
- siliconcompiler/schema_support/packageschema.py +2 -2
- siliconcompiler/schema_support/pathschema.py +30 -18
- siliconcompiler/schema_support/record.py +30 -23
- siliconcompiler/tool.py +265 -210
- siliconcompiler/tools/opensta/timing.py +13 -0
- siliconcompiler/tools/yosys/syn_fpga.py +3 -2
- siliconcompiler/toolscripts/_tools.json +3 -3
- siliconcompiler/utils/__init__.py +23 -16
- siliconcompiler/utils/curation.py +11 -5
- siliconcompiler/utils/multiprocessing.py +16 -14
- siliconcompiler/utils/paths.py +24 -12
- siliconcompiler/utils/units.py +16 -12
- {siliconcompiler-0.35.0.dist-info → siliconcompiler-0.35.1.dist-info}/METADATA +3 -4
- {siliconcompiler-0.35.0.dist-info → siliconcompiler-0.35.1.dist-info}/RECORD +49 -48
- {siliconcompiler-0.35.0.dist-info → siliconcompiler-0.35.1.dist-info}/entry_points.txt +4 -3
- {siliconcompiler-0.35.0.dist-info → siliconcompiler-0.35.1.dist-info}/WHEEL +0 -0
- {siliconcompiler-0.35.0.dist-info → siliconcompiler-0.35.1.dist-info}/licenses/LICENSE +0 -0
- {siliconcompiler-0.35.0.dist-info → siliconcompiler-0.35.1.dist-info}/top_level.txt +0 -0
siliconcompiler/package/__init__.py
CHANGED

@@ -22,6 +22,8 @@ import uuid
 
 import os.path
 
+from typing import Optional, List, Dict, Type, Union, TYPE_CHECKING, ClassVar
+
 from fasteners import InterProcessLock
 from importlib.metadata import distributions, distribution
 from pathlib import Path
@@ -29,6 +31,11 @@ from urllib import parse as url_parse
 
 from siliconcompiler.utils import get_plugins
 
+if TYPE_CHECKING:
+    from siliconcompiler.project import Project
+    from siliconcompiler.schema_support.pathschema import PathSchema
+    from siliconcompiler.schema import BaseSchema
+
 
 class Resolver:
     """
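The new typing imports are wrapped in an `if TYPE_CHECKING:` block, so they are only evaluated by static type checkers and never at runtime; this keeps the annotations below from creating circular imports, at the cost of writing them as string forward references. A minimal sketch of the pattern, with an illustrative module name rather than anything from this package:

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Imported for annotations only; never executed at runtime,
        # so a circular import between the two modules is avoided.
        from mypkg.project import Project  # hypothetical module

    def describe(root: "Project") -> str:
        # The quoted annotation is resolved lazily by the type checker.
        return f"root object: {root!r}"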
@@ -45,13 +52,17 @@ class Resolver:
         source (str): The URI or path specifying the data source.
         reference (str): A version, commit hash, or tag for remote sources.
     """
-    _RESOLVERS_LOCK = threading.Lock()
-    _RESOLVERS = {}
+    _RESOLVERS_LOCK: ClassVar[threading.Lock] = threading.Lock()
+    _RESOLVERS: ClassVar[Dict[str, Type["Resolver"]]] = {}
+    __STORAGE: str = "__Resolver_cache_id"
 
-    __CACHE_LOCK = threading.Lock()
-    __CACHE = {}
+    __CACHE_LOCK: ClassVar[threading.Lock] = threading.Lock()
+    __CACHE: ClassVar[Dict[str, Dict[str, str]]] = {}
 
-    def __init__(self, name
+    def __init__(self, name: str,
+                 root: Optional[Union["Project", "BaseSchema"]],
+                 source: str,
+                 reference: Optional[str] = None):
         """
         Initializes the Resolver.
         """
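Annotating the shared registries with `typing.ClassVar` documents that they belong to the class rather than to individual instances, so a type checker will flag accidental per-instance assignment. A short, self-contained sketch of the idea using placeholder names:

    import threading
    from typing import ClassVar, Dict, Type

    class Registry:
        # Shared by every instance; assigning these through `self` would be flagged.
        _lock: ClassVar[threading.Lock] = threading.Lock()
        _entries: ClassVar[Dict[str, Type["Registry"]]] = {}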
@@ -68,7 +79,7 @@ class Resolver:
         self.__logger = logging.getLogger(f"resolver-{self.name}")
 
     @staticmethod
-    def populate_resolvers():
+    def populate_resolvers() -> None:
         """
         Scans for and registers all available resolver plugins.
 
@@ -90,7 +101,7 @@ class Resolver:
             Resolver._RESOLVERS.update(resolver())
 
     @staticmethod
-    def find_resolver(source):
+    def find_resolver(source: str) -> Type["Resolver"]:
         """
         Finds the appropriate resolver class for a given source URI.
 
@@ -122,7 +133,7 @@ class Resolver:
         return self.__name
 
     @property
-    def root(self):
+    def root(self) -> Optional[Union["Project", "BaseSchema"]]:
         """The root object (e.g., Project) providing context."""
         return self.__root
 
@@ -137,7 +148,7 @@ class Resolver:
         return self.__source
 
     @property
-    def reference(self) -> str:
+    def reference(self) -> Union[None, str]:
         """A version, commit hash, or tag for the source."""
         return self.__reference
 
@@ -185,7 +196,7 @@ class Resolver:
         """Marks the resolved data as having been changed."""
         self.__changed = True
 
-    def resolve(self):
+    def resolve(self) -> Union[Path, str]:
         """
         Abstract method to perform the actual data resolution.
 
@@ -195,15 +206,15 @@ class Resolver:
         raise NotImplementedError("child class must implement this")
 
     @staticmethod
-    def __get_root_id(root):
+    def __get_root_id(root: Union["Project", "BaseSchema"]) -> str:
         """Generates or retrieves a unique ID for a root object."""
-
-
-
-        return getattr(root, STORAGE)
+        if not getattr(root, Resolver.__STORAGE, None):
+            setattr(root, Resolver.__STORAGE, uuid.uuid4().hex)
+        return getattr(root, Resolver.__STORAGE)
 
     @staticmethod
-    def get_cache(root, name: str = None)
+    def get_cache(root: Optional[Union["Project", "BaseSchema"]], name: Optional[str] = None) \
+            -> Union[None, str, Dict[str, str]]:
         """
         Gets a cached path for a given root object and resolver name.
 
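The reworked `__get_root_id` tags the root object with a random identifier the first time it is seen and reuses that identifier afterwards, giving each root its own cache bucket. A minimal standalone sketch of the same pattern, with illustrative names:

    import uuid

    _ATTR = "_cache_id"    # attribute used to tag the root object
    _cache: dict = {}      # per-root cache buckets

    def root_id(root) -> str:
        # Tag the object on first use, then always return the same id.
        if not getattr(root, _ATTR, None):
            setattr(root, _ATTR, uuid.uuid4().hex)
        return getattr(root, _ATTR)

    class Root:
        pass

    r = Root()
    assert root_id(r) == root_id(r)   # stable for the same object
    _cache.setdefault(root_id(r), {})["demo"] = "/tmp/data"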
@@ -215,6 +226,9 @@ class Resolver:
         Returns:
             str or dict or None: The cached path, a copy of the cache, or None.
         """
+        if root is None:
+            return None
+
         with Resolver.__CACHE_LOCK:
             root_id = Resolver.__get_root_id(root)
             if root_id not in Resolver.__CACHE:
@@ -226,7 +240,9 @@ class Resolver:
             return Resolver.__CACHE[root_id].copy()
 
     @staticmethod
-    def set_cache(root
+    def set_cache(root: Optional[Union["Project", "BaseSchema"]],
+                  name: str,
+                  path: Union[Path, str]) -> None:
         """
         Sets a cached path for a given root object and resolver name.
 
@@ -235,26 +251,32 @@ class Resolver:
             name (str): The name of the resolver cache to set.
             path (str): The path to cache.
         """
+        if root is None:
+            return
+
         with Resolver.__CACHE_LOCK:
             root_id = Resolver.__get_root_id(root)
             if root_id not in Resolver.__CACHE:
                 Resolver.__CACHE[root_id] = {}
-            Resolver.__CACHE[root_id][name] = path
+            Resolver.__CACHE[root_id][name] = str(path)
 
     @staticmethod
-    def reset_cache(root):
+    def reset_cache(root: Optional[Union["Project", "BaseSchema"]]) -> None:
         """
         Resets the entire cache for a given root object.
 
         Args:
             root: The root object whose cache will be cleared.
         """
+        if root is None:
+            return
+
         with Resolver.__CACHE_LOCK:
             root_id = Resolver.__get_root_id(root)
             if root_id in Resolver.__CACHE:
                 del Resolver.__CACHE[root_id]
 
-    def get_path(self):
+    def get_path(self) -> str:
         """
         Resolves the data source and returns its local path.
 
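`get_cache`, `set_cache`, and `reset_cache` now return early when no root object is supplied, and stored paths are normalised with `str(path)` so Path objects and strings behave the same. The effect, sketched with a hypothetical module-level cache rather than the class internals:

    from pathlib import Path
    from typing import Optional

    _cache: dict = {}

    def set_cache(root, name: str, path) -> None:
        if root is None:              # nothing to key the cache on: skip quietly
            return
        _cache.setdefault(id(root), {})[name] = str(path)   # store str, not Path

    def get_cache(root, name: str) -> Optional[str]:
        if root is None:
            return None
        return _cache.get(id(root), {}).get(name)

    set_cache(None, "data", Path("/tmp/x"))   # no-op instead of raising
    assert get_cache(None, "data") is None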
@@ -267,7 +289,7 @@ class Resolver:
         Raises:
             FileNotFoundError: If the resolved path does not exist.
         """
-        cache_path = Resolver.get_cache(self.__root, self.cache_id)
+        cache_path: Optional[str] = Resolver.get_cache(self.__root, self.cache_id)
         if cache_path:
             return cache_path
 
@@ -281,9 +303,9 @@ class Resolver:
         self.logger.info(f'Found {self.name} data at {path}')
 
         Resolver.set_cache(self.__root, self.cache_id, path)
-        return path
+        return str(path)
 
-    def __resolve_env(self, path):
+    def __resolve_env(self, path: str) -> str:
         """Expands environment variables and user home directory in a path."""
         env_save = os.environ.copy()
 
@@ -313,26 +335,29 @@ class RemoteResolver(Resolver):
     _CACHE_LOCKS = {}
     _CACHE_LOCK = threading.Lock()
 
-    def __init__(self, name
+    def __init__(self, name: str,
+                 root: Optional[Union["Project", "BaseSchema"]],
+                 source: str,
+                 reference: Optional[str] = None):
         if reference is None:
             raise ValueError(f'A reference (e.g., version, commit) is required for {name}')
 
         super().__init__(name, root, source, reference)
 
         # Wait a maximum of 10 minutes for other processes to finish
-        self.__max_lock_wait = 60 * 10
+        self.__max_lock_wait: int = 60 * 10
 
     @property
-    def timeout(self):
+    def timeout(self) -> int:
         """The maximum time in seconds to wait for a lock."""
         return self.__max_lock_wait
 
-    def set_timeout(self, value):
+    def set_timeout(self, value: int) -> None:
         """Sets the maximum time in seconds to wait for a lock."""
         self.__max_lock_wait = value
 
     @staticmethod
-    def determine_cache_dir(root) -> Path:
+    def determine_cache_dir(root: Optional[Union["Project", "BaseSchema"]]) -> Path:
         """
         Determines the directory for the on-disk cache.
 
@@ -401,7 +426,7 @@ class RemoteResolver(Resolver):
 
         return self.cache_dir / f"{self.cache_name}.sc_lock"
 
-    def thread_lock(self):
+    def thread_lock(self) -> threading.Lock:
         """Gets a threading.Lock specific to this resolver instance."""
         with RemoteResolver._CACHE_LOCK:
             if self.name not in RemoteResolver._CACHE_LOCKS:
@@ -479,11 +504,11 @@ class RemoteResolver(Resolver):
         with self.__file_lock():
             yield
 
-    def resolve_remote(self):
+    def resolve_remote(self) -> None:
         """Abstract method to fetch the remote data."""
         raise NotImplementedError("child class must implement this")
 
-    def check_cache(self):
+    def check_cache(self) -> bool:
         """
         Abstract method to check if the on-disk cache is valid.
 
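`resolve_remote` and `check_cache` stay abstract: concrete resolvers are expected to implement both, with `check_cache` reporting whether the on-disk copy is usable and `resolve_remote` performing the fetch and returning nothing. A bare-bones sketch of that contract, assuming nothing beyond what the signatures state:

    import os.path

    class ExampleResolver:          # stand-in for a RemoteResolver subclass
        def __init__(self, cache_path: str, url: str) -> None:
            self.cache_path = cache_path
            self.url = url

        def check_cache(self) -> bool:
            # True means the cached copy is usable and no download is needed.
            return os.path.exists(self.cache_path)

        def resolve_remote(self) -> None:
            # Fetch self.url into self.cache_path; nothing is returned.
            raise NotImplementedError("download logic goes here")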
@@ -492,7 +517,7 @@ class RemoteResolver(Resolver):
         """
         raise NotImplementedError("child class must implement this")
 
-    def resolve(self) -> Path:
+    def resolve(self) -> Union[str, Path]:
         """
         Resolves the remote data, using the on-disk cache if possible.
 
@@ -531,7 +556,7 @@ class FileResolver(Resolver):
     It normalizes the source string to a `file://` URI.
     """
 
-    def __init__(self, name, root, source, reference=None):
+    def __init__(self, name: str, root: "Project", source: str, reference: Optional[str] = None):
         if source.startswith("file://"):
             source = source[7:]
         if source[0] != "$" and not os.path.isabs(source):
@@ -540,14 +565,17 @@ class FileResolver(Resolver):
         super().__init__(name, root, f"file://{source}", None)
 
     @property
-    def urlpath(self):
+    def urlpath(self) -> str:
         """The absolute file path, stripped of the 'file://' prefix."""
         # Rebuild URL and remove scheme prefix
         return self.urlparse.geturl()[7:]
 
-    def resolve(self):
+    def resolve(self) -> str:
         """Returns the absolute path to the file."""
-
+        path = self.urlpath
+        if path and path[0] == "$":
+            return path
+        return os.path.abspath(path)
 
 
 class PythonPathResolver(Resolver):
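`FileResolver.resolve` now returns sources that start with `$` untouched, so environment-variable references are preserved for later expansion, while plain paths are still made absolute. Illustrative behaviour of that rule in isolation:

    import os.path

    def resolve(path: str) -> str:
        # Leave "$VAR/..." style sources alone; absolutise everything else.
        if path and path[0] == "$":
            return path
        return os.path.abspath(path)

    print(resolve("$PDK_ROOT/models"))   # printed unchanged
    print(resolve("models"))             # printed as an absolute path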
@@ -559,12 +587,12 @@ class PythonPathResolver(Resolver):
     determine if a package is installed in "editable" mode.
     """
 
-    def __init__(self, name, root, source, reference=None):
+    def __init__(self, name: str, root: "Project", source: str, reference: Optional[str] = None):
         super().__init__(name, root, source, None)
 
     @staticmethod
     @functools.lru_cache(maxsize=1)
-    def get_python_module_mapping():
+    def get_python_module_mapping() -> Dict[str, List[str]]:
         """
         Creates a mapping from importable module names to their distribution names.
 
@@ -592,13 +620,13 @@ class PythonPathResolver(Resolver):
 
         provides = dist.read_text('top_level.txt')
         if provides:
-            for module in
+            for module in provides.split():
                 mapping.setdefault(module, []).append(dist_name)
 
         return mapping
 
     @staticmethod
-    def is_python_module_editable(module_name):
+    def is_python_module_editable(module_name: str) -> bool:
         """
         Checks if a Python module is installed in "editable" mode.
 
@@ -630,12 +658,12 @@ class PythonPathResolver(Resolver):
         return is_editable
 
     @staticmethod
-    def set_dataroot(root,
-                     package_name,
-                     python_module,
-                     alternative_path,
-                     alternative_ref=None,
-                     python_module_path_append=None):
+    def set_dataroot(root: "PathSchema",
+                     package_name: str,
+                     python_module: str,
+                     alternative_path: str,
+                     alternative_ref: Optional[str] = None,
+                     python_module_path_append: Optional[str] = None):
         """
         Helper to conditionally set a dataroot to a Python module or a fallback path.
         """
@@ -652,7 +680,7 @@ class PythonPathResolver(Resolver):
 
         root.set_dataroot(package_name, path=path, tag=ref)
 
-    def resolve(self):
+    def resolve(self) -> str:
         """
         Resolves the path to the specified Python module.
 
@@ -672,10 +700,10 @@ class KeyPathResolver(Resolver):
     `find_files` method of the root project object to locate the corresponding file.
     """
 
-    def __init__(self, name, root, source, reference=None):
+    def __init__(self, name: str, root: "Project", source: str, reference: Optional[str] = None):
         super().__init__(name, root, source, None)
 
-    def resolve(self):
+    def resolve(self) -> str:
         """
         Resolves the path by looking up the keypath in the project schema.
 
siliconcompiler/package/git.py
CHANGED
@@ -8,11 +8,16 @@ branches, tags, or commit hashes), and managing the cached repository's state.
 import shutil
 import os.path
 
+from typing import Dict, Type, Optional, Union, TYPE_CHECKING
+
 from git import Repo, GitCommandError
 from siliconcompiler.package import RemoteResolver
 
+if TYPE_CHECKING:
+    from siliconcompiler.project import Project
+
 
-def get_resolver():
+def get_resolver() -> Dict[str, Type["GitResolver"]]:
     """
     Returns a dictionary mapping Git-related URI schemes to the GitResolver class.
 
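The annotated `get_resolver` hooks all return the same shape: a mapping from URI scheme to the resolver class that handles it, which the plugin loader merges into `Resolver._RESOLVERS`. A sketch of that return shape; the scheme keys below are examples only, not the keys defined by the package:

    from typing import Dict, Type

    class GitResolver:               # placeholder for the real class
        pass

    def get_resolver() -> Dict[str, Type[GitResolver]]:
        return {
            "git": GitResolver,      # illustrative scheme names
            "git+https": GitResolver,
        }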
@@ -40,13 +45,13 @@ class GitResolver(RemoteResolver):
     for SSH-based URLs.
     """
 
-    def __init__(self, name, root, source, reference=None):
+    def __init__(self, name: str, root: "Project", source: str, reference: Optional[str] = None):
         """
         Initializes the GitResolver.
         """
         super().__init__(name, root, source, reference)
 
-    def check_cache(self):
+    def check_cache(self) -> bool:
         """
         Checks if a valid, clean Git repository exists at the cache path.
 
@@ -69,7 +74,7 @@ class GitResolver(RemoteResolver):
                 return False
         return False
 
-    def __get_token_env(self):
+    def __get_token_env(self) -> Union[None, str]:
         """
         Searches for a Git authentication token in predefined environment variables.
 
@@ -99,7 +104,7 @@ class GitResolver(RemoteResolver):
         return None
 
     @property
-    def git_path(self):
+    def git_path(self) -> str:
         """
         Constructs the final Git URL for cloning.
 
@@ -123,7 +128,7 @@ class GitResolver(RemoteResolver):
             url = url._replace(scheme='https')
         return url.geturl()
 
-    def resolve_remote(self):
+    def resolve_remote(self) -> None:
         """
         Fetches the remote repository and checks out the specified reference.
 
siliconcompiler/package/github.py
CHANGED

@@ -6,13 +6,18 @@ release assets from public or private GitHub repositories.
 """
 import os
 
+from typing import Dict, Type, Optional, Tuple, TYPE_CHECKING
+
 from github import Github, Auth
 from github.GithubException import UnknownObjectException
 
 from siliconcompiler.package.https import HTTPResolver
 
+if TYPE_CHECKING:
+    from siliconcompiler.project import Project
+
 
-def get_resolver():
+def get_resolver() -> Dict[str, Type["GithubResolver"]]:
     """
     Returns a dictionary mapping GitHub URI schemes to the GithubResolver class.
 
@@ -43,7 +48,7 @@ class GithubResolver(HTTPResolver):
     a GitHub token must be provided via environment variables.
     """
 
-    def __init__(self, name, root, source, reference=None):
+    def __init__(self, name: str, root: "Project", source: str, reference: Optional[str] = None):
         """
         Initializes the GithubResolver.
         """
@@ -55,7 +60,7 @@ class GithubResolver(HTTPResolver):
                 "github://<owner>/<repository>/<version>/<artifact>")
 
     @property
-    def gh_path(self):
+    def gh_path(self) -> Tuple[str, ...]:
         """
         Parses the source URL into its constituent GitHub parts.
 
@@ -65,7 +70,7 @@ class GithubResolver(HTTPResolver):
         return self.urlpath, *self.urlparse.path.split("/")[1:]
 
     @property
-    def download_url(self):
+    def download_url(self) -> str:
         """
         Determines the direct download URL for the GitHub release asset.
 
@@ -93,7 +98,7 @@ class GithubResolver(HTTPResolver):
             self.logger.info("Could not find public release, trying private.")
             return self.__get_release_url(repository, release, artifact, private=True)
 
-    def __get_release_url(self, repository, release, artifact, private: bool):
+    def __get_release_url(self, repository: str, release: str, artifact: str, private: bool) -> str:
         """
         Uses the GitHub API to find the download URL for a specific release asset.
 
@@ -133,7 +138,7 @@ class GithubResolver(HTTPResolver):
 
         raise ValueError(f'Unable to find release asset: {repository}/{release}/{artifact}')
 
-    def __get_gh_auth(self):
+    def __get_gh_auth(self) -> str:
         """
         Searches for a GitHub authentication token in predefined environment variables.
 
siliconcompiler/package/https.py
CHANGED
@@ -11,13 +11,15 @@ import zipfile
 
 import os.path
 
+from typing import Dict, Type
+
 from io import BytesIO
 from urllib.parse import urlparse
 
 from siliconcompiler.package import RemoteResolver
 
 
-def get_resolver():
+def get_resolver() -> Dict[str, Type["HTTPResolver"]]:
     """
     Returns a dictionary mapping HTTP schemes to the HTTPResolver class.
 
@@ -43,7 +45,7 @@ class HTTPResolver(RemoteResolver):
     of archives downloaded from GitHub.
     """
 
-    def check_cache(self):
+    def check_cache(self) -> bool:
         """
         Checks if the data has already been cached.
 
@@ -56,7 +58,7 @@ class HTTPResolver(RemoteResolver):
         return os.path.exists(self.cache_path)
 
     @property
-    def download_url(self):
+    def download_url(self) -> str:
         """
         Constructs the final download URL.
 
@@ -71,7 +73,7 @@ class HTTPResolver(RemoteResolver):
             data_url = f"{data_url}{self.reference}.tar.gz"
         return data_url
 
-    def resolve_remote(self):
+    def resolve_remote(self) -> None:
         """
         Fetches the remote archive, unpacks it, and stores it in the cache.
 
siliconcompiler/pdk.py
CHANGED
@@ -1,6 +1,6 @@
 import math
 
-from typing import Tuple
+from typing import Tuple, Optional, Union, List
 
 from siliconcompiler.schema import EditableSchema, Parameter, Scope, BaseSchema
 from siliconcompiler.schema.utils import trim
@@ -20,7 +20,7 @@ class PDK(ToolLibrarySchema):
     ToolLibrarySchema to provide a standardized way of describing and
     accessing PDK data within the SiliconCompiler framework.
     """
-    def __init__(self, name: str = None):
+    def __init__(self, name: Optional[str] = None):
         """
         Initializes a PDK object.
 
@@ -405,7 +405,7 @@ class PDK(ToolLibrarySchema):
         """
         return self.set("pdk", "edgemargin", margin)
 
-    def set_aprroutinglayers(self, min: str = None, max: str = None):
+    def set_aprroutinglayers(self, min: Optional[str] = None, max: Optional[str] = None):
         """
         Sets the minimum and maximum routing layers for the PDK.
 
@@ -418,7 +418,8 @@ class PDK(ToolLibrarySchema):
         if max:
             self.set("pdk", "maxlayer", max)
 
-    def add_aprtechfileset(self, tool: str, fileset: str = None,
+    def add_aprtechfileset(self, tool: str, fileset: Optional[Union[List[str], str]] = None,
+                           clobber: bool = False):
         """
         Adds a fileset containing APR technology files.
 
@@ -438,7 +439,8 @@ class PDK(ToolLibrarySchema):
         else:
             return self.add("pdk", "aprtechfileset", tool, fileset)
 
-    def add_layermapfileset(self, tool: str, src: str, dst: str,
+    def add_layermapfileset(self, tool: str, src: str, dst: str,
+                            fileset: Optional[Union[List[str], str]] = None,
                             clobber: bool = False):
         """
         Adds a fileset containing layer map files.
@@ -461,7 +463,8 @@ class PDK(ToolLibrarySchema):
         else:
             return self.add("pdk", "layermapfileset", tool, src, dst, fileset)
 
-    def add_displayfileset(self, tool: str, fileset: str = None,
+    def add_displayfileset(self, tool: str, fileset: Optional[Union[List[str], str]] = None,
+                           clobber: bool = False):
         """
         Adds a fileset containing display configuration files.
 
@@ -481,7 +484,8 @@ class PDK(ToolLibrarySchema):
         else:
             return self.add("pdk", "displayfileset", tool, fileset)
 
-    def add_devmodelfileset(self, tool: str, type: str,
+    def add_devmodelfileset(self, tool: str, type: str,
+                            fileset: Optional[Union[List[str], str]] = None,
                             clobber: bool = False):
         """
         Adds a fileset containing device model files.
@@ -503,7 +507,8 @@ class PDK(ToolLibrarySchema):
         else:
             return self.add("pdk", "devmodelfileset", tool, type, fileset)
 
-    def add_pexmodelfileset(self, tool: str, corner: str,
+    def add_pexmodelfileset(self, tool: str, corner: str,
+                            fileset: Optional[Union[List[str], str]] = None,
                             clobber: bool = False):
         """
         Adds a fileset containing parasitic extraction (pex) model files.
@@ -525,7 +530,8 @@ class PDK(ToolLibrarySchema):
         else:
             return self.add("pdk", "pexmodelfileset", tool, corner, fileset)
 
-    def add_runsetfileset(self, type: str, tool: str, name: str,
+    def add_runsetfileset(self, type: str, tool: str, name: str,
+                          fileset: Optional[Union[List[str], str]] = None,
                           clobber: bool = False):
         """
         Adds a fileset containing a runset for a specific verification task.
@@ -548,7 +554,8 @@ class PDK(ToolLibrarySchema):
         else:
             return self.add("pdk", type, "runsetfileset", tool, name, fileset)
 
-    def add_waiverfileset(self, type: str, tool: str, name: str,
+    def add_waiverfileset(self, type: str, tool: str, name: str,
+                          fileset: Optional[Union[List[str], str]] = None,
                          clobber: bool = False):
         """
         Adds a fileset containing waiver files for a specific verification task.
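With `fileset` now annotated as `Optional[Union[List[str], str]]`, the `add_*fileset` helpers accept either one fileset name or a list of names. A hedged usage sketch; the tool and fileset names below are placeholders, and `PDK` is assumed to be importable from the package root:

    from siliconcompiler import PDK   # assumed import location

    pdk = PDK("examplepdk")
    # A single fileset name or a list of names are both accepted.
    pdk.add_aprtechfileset("openroad", fileset="apr.tech")
    pdk.add_displayfileset("klayout", fileset=["display", "extra"])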
@@ -601,7 +608,7 @@ class PDK(ToolLibrarySchema):
         >>> yield = pdk.calc_yield(1500.0)
         # Calculates yield for a 1500 um^2 die.
         '''
-        d0 = self.get('pdk', 'd0')
+        d0: Optional[float] = self.get('pdk', 'd0')
         if d0 is None:
             raise ValueError(f"[{','.join([*self._keypath, 'pdk', 'd0'])}] has not been set")
 
@@ -641,16 +648,16 @@ class PDK(ToolLibrarySchema):
         # Calculates dies per wafer for a 1000x1500 um die.
         '''
         # PDK information
-        wafersize = self.get('pdk', 'wafersize')
+        wafersize: Optional[float] = self.get('pdk', 'wafersize')
 
         if wafersize is None:
             raise ValueError(f"[{','.join([*self._keypath, 'pdk', 'wafersize'])}] has not been set")
 
-        edgemargin = self.get('pdk', 'edgemargin')
+        edgemargin: Optional[float] = self.get('pdk', 'edgemargin')
         if edgemargin is None:
             edgemargin = 0.0
 
-        scribe = self.get('pdk', 'scribe')
+        scribe: Tuple[Optional[float], Optional[float]] = self.get('pdk', 'scribe')
         if scribe:
             hscribe, vscribe = scribe
         else:
@@ -709,13 +716,13 @@ class PDK(ToolLibrarySchema):
 
     def _generate_doc(self, doc,
                       ref_root: str = "",
-                      key_offset: Tuple[str] = None,
+                      key_offset: Optional[Tuple[str, ...]] = None,
                       detailed: bool = True):
         from .schema.docs.utils import build_section
         docs = []
 
         if not key_offset:
-            key_offset =
+            key_offset = ("PDK",)
 
         # Show dataroot
         dataroot = PathSchema._generate_doc(self, doc, ref_root)