siliconcompiler 0.35.2__py3-none-any.whl → 0.35.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86)
  1. siliconcompiler/_metadata.py +1 -1
  2. siliconcompiler/apps/sc_issue.py +18 -2
  3. siliconcompiler/apps/smake.py +106 -100
  4. siliconcompiler/checklist.py +2 -1
  5. siliconcompiler/constraints/asic_component.py +49 -11
  6. siliconcompiler/constraints/asic_floorplan.py +23 -21
  7. siliconcompiler/constraints/asic_pins.py +55 -17
  8. siliconcompiler/constraints/asic_timing.py +53 -22
  9. siliconcompiler/constraints/fpga_timing.py +5 -6
  10. siliconcompiler/data/templates/replay/replay.sh.j2 +27 -14
  11. siliconcompiler/flowgraph.py +418 -129
  12. siliconcompiler/library.py +5 -4
  13. siliconcompiler/package/__init__.py +17 -6
  14. siliconcompiler/package/https.py +10 -5
  15. siliconcompiler/project.py +92 -33
  16. siliconcompiler/remote/client.py +17 -6
  17. siliconcompiler/scheduler/docker.py +24 -25
  18. siliconcompiler/scheduler/scheduler.py +284 -121
  19. siliconcompiler/scheduler/schedulernode.py +196 -90
  20. siliconcompiler/scheduler/slurm.py +113 -29
  21. siliconcompiler/scheduler/taskscheduler.py +0 -7
  22. siliconcompiler/schema/__init__.py +3 -2
  23. siliconcompiler/schema/_metadata.py +1 -1
  24. siliconcompiler/schema/baseschema.py +205 -93
  25. siliconcompiler/schema/editableschema.py +29 -0
  26. siliconcompiler/schema/namedschema.py +21 -13
  27. siliconcompiler/schema/parametervalue.py +14 -2
  28. siliconcompiler/schema/safeschema.py +18 -7
  29. siliconcompiler/schema_support/dependencyschema.py +4 -3
  30. siliconcompiler/schema_support/option.py +82 -1
  31. siliconcompiler/schema_support/pathschema.py +14 -15
  32. siliconcompiler/schema_support/record.py +5 -4
  33. siliconcompiler/targets/asap7_demo.py +4 -1
  34. siliconcompiler/tool.py +56 -29
  35. siliconcompiler/tools/builtin/__init__.py +2 -0
  36. siliconcompiler/tools/builtin/filter.py +8 -1
  37. siliconcompiler/tools/builtin/importfiles.py +2 -0
  38. siliconcompiler/tools/klayout/__init__.py +3 -0
  39. siliconcompiler/tools/klayout/scripts/klayout_convert_drc_db.py +1 -0
  40. siliconcompiler/tools/klayout/scripts/klayout_export.py +1 -0
  41. siliconcompiler/tools/klayout/scripts/klayout_operations.py +1 -0
  42. siliconcompiler/tools/klayout/scripts/klayout_show.py +2 -1
  43. siliconcompiler/tools/klayout/scripts/klayout_utils.py +3 -4
  44. siliconcompiler/tools/klayout/show.py +17 -5
  45. siliconcompiler/tools/openroad/__init__.py +27 -1
  46. siliconcompiler/tools/openroad/_apr.py +81 -4
  47. siliconcompiler/tools/openroad/clock_tree_synthesis.py +1 -0
  48. siliconcompiler/tools/openroad/global_placement.py +1 -0
  49. siliconcompiler/tools/openroad/init_floorplan.py +116 -7
  50. siliconcompiler/tools/openroad/power_grid_analysis.py +174 -0
  51. siliconcompiler/tools/openroad/repair_design.py +1 -0
  52. siliconcompiler/tools/openroad/repair_timing.py +1 -0
  53. siliconcompiler/tools/openroad/scripts/apr/preamble.tcl +1 -1
  54. siliconcompiler/tools/openroad/scripts/apr/sc_init_floorplan.tcl +42 -4
  55. siliconcompiler/tools/openroad/scripts/apr/sc_irdrop.tcl +146 -0
  56. siliconcompiler/tools/openroad/scripts/apr/sc_repair_design.tcl +1 -1
  57. siliconcompiler/tools/openroad/scripts/apr/sc_write_data.tcl +4 -6
  58. siliconcompiler/tools/openroad/scripts/common/procs.tcl +1 -1
  59. siliconcompiler/tools/openroad/scripts/common/reports.tcl +1 -1
  60. siliconcompiler/tools/openroad/scripts/rcx/sc_rcx_bench.tcl +2 -4
  61. siliconcompiler/tools/opensta/__init__.py +1 -1
  62. siliconcompiler/tools/opensta/scripts/sc_timing.tcl +17 -12
  63. siliconcompiler/tools/vivado/scripts/sc_bitstream.tcl +11 -0
  64. siliconcompiler/tools/vivado/scripts/sc_place.tcl +11 -0
  65. siliconcompiler/tools/vivado/scripts/sc_route.tcl +11 -0
  66. siliconcompiler/tools/vivado/scripts/sc_syn_fpga.tcl +10 -0
  67. siliconcompiler/tools/vpr/__init__.py +28 -0
  68. siliconcompiler/tools/yosys/prepareLib.py +7 -2
  69. siliconcompiler/tools/yosys/scripts/sc_screenshot.tcl +1 -1
  70. siliconcompiler/tools/yosys/scripts/sc_synth_asic.tcl +40 -4
  71. siliconcompiler/tools/yosys/scripts/sc_synth_fpga.tcl +15 -5
  72. siliconcompiler/tools/yosys/syn_asic.py +62 -2
  73. siliconcompiler/tools/yosys/syn_fpga.py +8 -0
  74. siliconcompiler/toolscripts/_tools.json +6 -6
  75. siliconcompiler/utils/__init__.py +243 -51
  76. siliconcompiler/utils/curation.py +89 -56
  77. siliconcompiler/utils/issue.py +6 -1
  78. siliconcompiler/utils/multiprocessing.py +35 -2
  79. siliconcompiler/utils/paths.py +21 -0
  80. siliconcompiler/utils/settings.py +141 -0
  81. {siliconcompiler-0.35.2.dist-info → siliconcompiler-0.35.4.dist-info}/METADATA +5 -4
  82. {siliconcompiler-0.35.2.dist-info → siliconcompiler-0.35.4.dist-info}/RECORD +86 -83
  83. {siliconcompiler-0.35.2.dist-info → siliconcompiler-0.35.4.dist-info}/WHEEL +0 -0
  84. {siliconcompiler-0.35.2.dist-info → siliconcompiler-0.35.4.dist-info}/entry_points.txt +0 -0
  85. {siliconcompiler-0.35.2.dist-info → siliconcompiler-0.35.4.dist-info}/licenses/LICENSE +0 -0
  86. {siliconcompiler-0.35.2.dist-info → siliconcompiler-0.35.4.dist-info}/top_level.txt +0 -0
siliconcompiler/schema/safeschema.py CHANGED
@@ -7,7 +7,7 @@
 from typing import Dict, Tuple, Optional, Union, List, Set

 from .parameter import Parameter
-from .baseschema import BaseSchema
+from .baseschema import BaseSchema, LazyLoad


 class SafeSchema(BaseSchema):
@@ -35,7 +35,8 @@ class SafeSchema(BaseSchema):

     def _from_dict(self, manifest: Dict,
                    keypath: Union[List[str], Tuple[str, ...]],
-                   version: Optional[Tuple[int, ...]] = None) \
+                   version: Optional[Tuple[int, ...]] = None,
+                   lazyload: LazyLoad = LazyLoad.OFF) \
             -> Tuple[Set[Tuple[str, ...]], Set[Tuple[str, ...]]]:
         if not isinstance(manifest, dict):
             return set(), set()
@@ -43,11 +44,13 @@
         if "__meta__" in manifest:
             del manifest["__meta__"]

+        lazyload = LazyLoad.OFF
+
         for key, data in manifest.items():
             obj = SafeSchema.__is_dict_leaf(data, list(keypath) + [key], version)
             if not obj:
                 obj = SafeSchema()
                 obj._from_dict(data, list(keypath) + [key], version)
-                obj._from_dict(data, list(keypath) + [key], version)
+                obj._from_dict(data, list(keypath) + [key], version=version, lazyload=lazyload)

             if key == "default":
                 self._BaseSchema__default = obj
@@ -59,11 +62,19 @@
     @classmethod
     def from_manifest(cls,
                       filepath: Union[None, str] = None,
-                      cfg: Union[None, Dict] = None) -> "SafeSchema":
+                      cfg: Union[None, Dict] = None,
+                      lazyload: bool = False) -> "SafeSchema":
         if filepath:
             cfg = BaseSchema._read_manifest(filepath)

-        if cfg and "__meta__" in cfg:
-            del cfg["__meta__"]
+        def rm_meta(manifest):
+            if not isinstance(manifest, dict):
+                return
+            if manifest and "__meta__" in manifest:
+                del manifest["__meta__"]
+            for section in manifest.values():
+                rm_meta(section)
+
+        rm_meta(cfg)

-        return super().from_manifest(filepath=None, cfg=cfg)
+        return super().from_manifest(filepath=None, cfg=cfg, lazyload=False)
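
To make the SafeSchema change above concrete: from_manifest now strips "__meta__" entries recursively rather than only at the top level. The standalone sketch below replays that helper on a made-up nested manifest; it is an illustration, not siliconcompiler code, and the manifest contents are invented.

    # Recursive "__meta__" stripping, mirroring the rm_meta helper added above.
    def rm_meta(manifest):
        if not isinstance(manifest, dict):
            return
        if "__meta__" in manifest:
            del manifest["__meta__"]
        for section in manifest.values():
            rm_meta(section)

    cfg = {
        "__meta__": {"note": "top-level metadata"},
        "option": {"__meta__": {"note": "nested"}, "jobname": {"value": "job0"}},
    }
    rm_meta(cfg)
    print(cfg)  # {'option': {'jobname': {'value': 'job0'}}}
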
siliconcompiler/schema_support/dependencyschema.py CHANGED
@@ -2,7 +2,7 @@ import os.path

 from typing import Dict, Union, Tuple, List, Optional, Set

-from siliconcompiler.schema.baseschema import BaseSchema
+from siliconcompiler.schema.baseschema import BaseSchema, LazyLoad
 from siliconcompiler.schema.editableschema import EditableSchema
 from siliconcompiler.schema.parameter import Parameter, Scope
 from siliconcompiler.schema.namedschema import NamedSchema
@@ -33,7 +33,8 @@ class DependencySchema(BaseSchema):

     def _from_dict(self, manifest: Dict,
                    keypath: Union[List[str], Tuple[str, ...]],
-                   version: Optional[Tuple[int, ...]] = None) \
+                   version: Optional[Tuple[int, ...]] = None,
+                   lazyload: LazyLoad = LazyLoad.ON) \
             -> Tuple[Set[Tuple[str, ...]], Set[Tuple[str, ...]]]:
         '''
         Internal helper to load schema from a dictionary manifest.
@@ -50,7 +51,7 @@
             The result of the parent class's _from_dict method.
         '''
         self.set("deps", False, field="lock")
-        ret = super()._from_dict(manifest, keypath, version)
+        ret = super()._from_dict(manifest, keypath, version=version, lazyload=lazyload)
         self.set("deps", True, field="lock")
         return ret

siliconcompiler/schema_support/option.py CHANGED
@@ -1,7 +1,8 @@
-from typing import Union, List, Tuple, Callable, Dict, Optional
+from typing import Union, List, Tuple, Callable, Dict, Optional, Final

 from siliconcompiler.schema import BaseSchema, EditableSchema, Parameter, Scope, PerNode
 from siliconcompiler.schema.utils import trim
+from siliconcompiler.utils.multiprocessing import MPManager


 class SchedulerSchema(BaseSchema):
@@ -427,6 +428,8 @@ class OptionSchema(BaseSchema):
     compiler's behavior, such as flow control, logging, build settings, and
     remote execution. It provides getter and setter methods for each parameter.
     """
+    __OPTIONS: Final[str] = "schema-options"
+
     def __init__(self):
         """Initializes the options schema and defines all its parameters."""
         super().__init__()
@@ -844,6 +847,84 @@

         schema.insert('scheduler', SchedulerSchema())

+        self.__load_defaults()
+
+    def __load_defaults(self) -> None:
+        """Loads and applies settings from the default options file.
+
+        This method reads the configuration file specified by the settings
+        manager. It iterates through the list of option
+        objects in the file.
+
+        For each object, it checks for a "key" and a "value". If the key
+        is recognized (exists in `self.allkeys()`), it attempts to apply
+        the value using `self.set()`.
+
+        Errors during value setting (`ValueError`) are silently ignored.
+        """
+        options = MPManager.get_settings().get_category(OptionSchema.__OPTIONS)
+
+        if not options:
+            return
+
+        allkeys = self.allkeys()
+        for key, value in options.items():
+            if key is None:
+                continue
+
+            key = tuple(key.split(","))
+            if key not in allkeys:
+                continue
+
+            try:
+                self.set(*key, value)
+            except ValueError:
+                pass
+
+    def write_defaults(self) -> None:
+        """Saves all non-default settings to the configuration file.
+
+        This method iterates through all parameters known to the system
+        (via `self.allkeys()`). It compares the current value of each
+        parameter against its default value.
+
+        Any parameter whose current value differs from its default is
+        collected. This list of non-default settings is then
+        serialized as a JSON array to the file specified by
+        `default_options_file()`.
+
+        If all parameters are set to their default values, the list
+        will be empty, and no file will be written.
+        """
+        transientkeys = {
+            # Flow information
+            ("flow",),
+            ("from",),
+            ("to",),
+            ("prune",),
+
+            # Design information
+            ("design",),
+            ("alias",),
+            ("fileset",),
+        }
+
+        settings = MPManager.get_settings()
+        settings.delete(OptionSchema.__OPTIONS)
+
+        for key in self.allkeys():
+            if key in transientkeys:
+                continue
+
+            param: Parameter = self.get(*key, field=None)
+
+            value = param.get()
+            if value != param.default.get():
+                settings.set(OptionSchema.__OPTIONS, ",".join(key), value)
+
+        if settings.get_category(OptionSchema.__OPTIONS):
+            settings.save()
+
     # Getters and Setters
     def get_remote(self) -> bool:
         """Gets the remote processing flag.
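
The __load_defaults/write_defaults docstrings above describe how non-default options are persisted: each keypath is flattened to a comma-joined string under a "schema-options" settings category, then split back into a tuple and validated against allkeys() on load. The sketch below mimics that round-trip with a plain dict standing in for the MPManager settings store; the names and values here are illustrative, not the actual siliconcompiler API.

    # Round-trip of non-default option values keyed by comma-joined keypaths.
    def save_non_defaults(values, defaults):
        """Collect values that differ from their defaults."""
        return {",".join(key): value
                for key, value in values.items()
                if value != defaults[key]}

    def load_non_defaults(category, allkeys):
        """Split stored keys back into keypath tuples, keeping only recognized ones."""
        restored = {}
        for key, value in category.items():
            keypath = tuple(key.split(","))
            if keypath in allkeys:
                restored[keypath] = value
        return restored

    allkeys = {("option", "jobname"), ("option", "quiet")}
    defaults = {("option", "jobname"): "job0", ("option", "quiet"): False}
    values = {("option", "jobname"): "job0", ("option", "quiet"): True}

    stored = save_non_defaults(values, defaults)   # {'option,quiet': True}
    print(load_non_defaults(stored, allkeys))      # {('option', 'quiet'): True}
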
siliconcompiler/schema_support/pathschema.py CHANGED
@@ -11,7 +11,7 @@ from siliconcompiler.schema.parameter import Parameter, Scope
 from siliconcompiler.schema.utils import trim

 from siliconcompiler.package import Resolver
-from siliconcompiler.utils.paths import collectiondir
+from siliconcompiler.utils.paths import collectiondir, cwdirsafe


 class PathSchemaBase(BaseSchema):
@@ -51,14 +51,12 @@
             the schema.
         """
         schema_root = self._parent(root=True)
-        cwd = getattr(schema_root, "_Project__cwd", os.getcwd())
-        collection_dir = collectiondir(schema_root)

         return super()._find_files(*keypath,
                                    missing_ok=missing_ok,
                                    step=step, index=index,
-                                   collection_dir=collection_dir,
-                                   cwd=cwd)
+                                   collection_dir=collectiondir(schema_root),
+                                   cwd=cwdirsafe(schema_root))

     def check_filepaths(self, ignore_keys: Optional[List[Tuple[str, ...]]] = None) -> bool:
         '''
@@ -71,17 +69,15 @@
             True if all file paths are valid, otherwise False.
         '''
         schema_root = self._parent(root=True)
-        cwd = getattr(schema_root, "_Project__cwd", os.getcwd())
         logger = getattr(schema_root,
                          "logger",
                          logging.getLogger("siliconcompiler.check_filepaths"))
-        collection_dir = collectiondir(schema_root)

         return super()._check_filepaths(
             ignore_keys=ignore_keys,
             logger=logger,
-            collection_dir=collection_dir,
-            cwd=cwd)
+            collection_dir=collectiondir(schema_root),
+            cwd=cwdirsafe(schema_root))

     def hash_files(self, *keypath: str,
                    update: bool = True,
@@ -126,11 +122,9 @@
         Computes, stores, and returns hashes of files in :keypath:`input, rtl, verilog`.
         '''
         schema_root = self._parent(root=True)
-        cwd = getattr(schema_root, "_Project__cwd", os.getcwd())
         logger = getattr(schema_root,
                          "logger",
                          logging.getLogger("siliconcompiler.hash_files"))
-        collection_dir = collectiondir(schema_root)

         if verbose:
             logger.info(f"Computing hash value for [{','.join([*self._keypath, *keypath])}]")
@@ -138,8 +132,8 @@
         hashes = super()._hash_files(*keypath,
                                      missing_ok=missing_ok,
                                      step=step, index=index,
-                                     collection_dir=collection_dir,
-                                     cwd=cwd)
+                                     collection_dir=collectiondir(schema_root),
+                                     cwd=cwdirsafe(schema_root))

         if check:
             check_hashes = self.get(*keypath, field="filehash", step=step, index=index)
@@ -392,7 +386,8 @@ class PathSchema(PathSchemaBase):
         resolver = Resolver.find_resolver(path)
         return resolver(name, schema._parent(root=True), path, tag).get_path()

-    def _find_files_dataroot_resolvers(self) -> Dict[str, Union[str, Callable]]:
+    def _find_files_dataroot_resolvers(self, resolvers: bool = False) \
+            -> Dict[str, Union[str, Callable]]:
         """
         Returns a dictionary of path resolvers data directory handling for find_files

@@ -410,7 +405,11 @@
             path = BaseSchema.get(schema, "dataroot", dataroot, "path")
             tag = BaseSchema.get(schema, "dataroot", dataroot, "tag")
             resolver = Resolver.find_resolver(path)
-            resolver_map[dataroot] = resolver(dataroot, schema_root, path, tag).get_path
+            resolver_obj = resolver(dataroot, schema_root, path, tag)
+            if resolvers:
+                resolver_map[dataroot] = resolver_obj
+            else:
+                resolver_map[dataroot] = resolver_obj.get_path
         return resolver_map

     @contextlib.contextmanager
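
The pathschema hunks above replace the repeated getattr(schema_root, "_Project__cwd", os.getcwd()) pattern with a new cwdirsafe() helper from siliconcompiler.utils.paths. Its implementation is not shown in this diff; the stand-in below is only a guess at the fallback behavior, based on the code it replaces.

    # Hypothetical stand-in for cwdirsafe(); the real helper lives in
    # siliconcompiler/utils/paths.py and is not part of the hunks shown here.
    import os

    def cwdirsafe(schema_root):
        return getattr(schema_root, "_Project__cwd", None) or os.getcwd()

    class FakeRoot:
        _Project__cwd = "/tmp/myproject"

    print(cwdirsafe(FakeRoot()))  # /tmp/myproject
    print(cwdirsafe(object()))    # falls back to the current working directory
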
siliconcompiler/schema_support/record.py CHANGED
@@ -12,7 +12,7 @@ from typing import Dict, Union, List, Optional, Set, Tuple
 from datetime import datetime, timezone
 from enum import Enum

-from siliconcompiler.schema import BaseSchema
+from siliconcompiler.schema import BaseSchema, LazyLoad
 from siliconcompiler.schema import EditableSchema, Parameter, PerNode, Scope
 from siliconcompiler.schema.utils import trim

@@ -53,7 +53,8 @@ class RecordSchema(BaseSchema):

     def _from_dict(self, manifest: Dict,
                    keypath: Union[List[str], Tuple[str, ...]],
-                   version: Optional[Tuple[int, ...]] = None) \
+                   version: Optional[Tuple[int, ...]] = None,
+                   lazyload: LazyLoad = LazyLoad.ON) \
             -> Tuple[Set[Tuple[str, ...]], Set[Tuple[str, ...]]]:
         """
         Constructs a schema from a dictionary.
@@ -66,10 +67,10 @@
         Returns:
             dict: The constructed dictionary.
         """
-        ret = super()._from_dict(manifest, keypath, version)
+        ret = super()._from_dict(manifest, keypath, version=version, lazyload=lazyload)

         # Correct for change specification
-        if version and version < (0, 50, 4):
+        if not lazyload.is_enforced and version and version < (0, 50, 4):
             for timekey in RecordTime:
                 start_param = self.get(timekey.value, field=None)
                 for value, step, index in start_param.getvalues():
siliconcompiler/targets/asap7_demo.py CHANGED
@@ -3,7 +3,9 @@ from siliconcompiler import ASIC
 from siliconcompiler.flows import asicflow, synflow

 from lambdapdk.asap7.libs.asap7sc7p5t import ASAP7SC7p5RVT, ASAP7SC7p5SLVT, ASAP7SC7p5LVT
-from lambdapdk.asap7.libs.fakeram7 import FakeRAM7Lambdalib_SinglePort, FakeRAM7Lambdalib_DoublePort
+from lambdapdk.asap7.libs.fakeram7 import FakeRAM7Lambdalib_SinglePort, \
+    FakeRAM7Lambdalib_DoublePort, \
+    FakeRAM7Lambdalib_TrueDoublePort
 from lambdapdk.asap7.libs.fakeio7 import FakeIO7Lambdalib_IO


@@ -101,4 +103,5 @@ def asap7_demo(
     # for demonstration and academic purposes.
     FakeRAM7Lambdalib_SinglePort.alias(project)
     FakeRAM7Lambdalib_DoublePort.alias(project)
+    FakeRAM7Lambdalib_TrueDoublePort.alias(project)
     FakeIO7Lambdalib_IO.alias(project)
siliconcompiler/tool.py CHANGED
@@ -35,7 +35,7 @@ from packaging.specifiers import SpecifierSet, InvalidSpecifier
 from typing import List, Dict, Tuple, Union, Optional, Set, TextIO, Type, TypeVar, TYPE_CHECKING
 from pathlib import Path

-from siliconcompiler.schema import BaseSchema, NamedSchema, Journal, DocsSchema
+from siliconcompiler.schema import BaseSchema, NamedSchema, Journal, DocsSchema, LazyLoad
 from siliconcompiler.schema import EditableSchema, Parameter, PerNode, Scope
 from siliconcompiler.schema.parametertype import NodeType
 from siliconcompiler.schema.utils import trim
@@ -130,7 +130,8 @@
         r"^\s*" + __parse_version_check_str + r"\s*$",
         re.VERBOSE | re.IGNORECASE)

-    __POLL_INTERVAL: float = 0.1
+    __IO_POLL_INTERVAL: float = 0.1
+    __MEM_POLL_INTERVAL: float = 0.5
     __MEMORY_WARN_LIMIT: int = 90

     def __init__(self):
@@ -147,13 +148,14 @@


     def _from_dict(self, manifest: Dict,
                    keypath: Union[List[str], Tuple[str, ...]],
-                   version: Optional[Tuple[int, ...]] = None) \
+                   version: Optional[Tuple[int, ...]] = None,
+                   lazyload: LazyLoad = LazyLoad.ON) \
             -> Tuple[Set[Tuple[str, ...]], Set[Tuple[str, ...]]]:
         """
         Populates the schema from a dictionary, dynamically adding 'var'
         parameters found in the manifest that are not already defined.
         """
-        if "var" in manifest:
+        if not lazyload.is_enforced and "var" in manifest:
             # Collect existing and manifest var keys
             var_keys = [k[0] for k in self.allkeys("var")]
             manifest_keys = set(manifest["var"].keys())
@@ -171,7 +173,7 @@
             if not manifest["var"]:
                 del manifest["var"]

-        return super()._from_dict(manifest, keypath, version)
+        return super()._from_dict(manifest, keypath, version=version, lazyload=lazyload)

     @contextlib.contextmanager
     def runtime(self, node: "SchedulerNode",
@@ -414,7 +416,7 @@
         cmdlist.extend(veropt)

         self.logger.debug(f'Running {self.tool()}/{self.task()} version check: '
-                          f'{" ".join(cmdlist)}')
+                          f'{shlex.join(cmdlist)}')

         proc = subprocess.run(cmdlist,
                               stdin=subprocess.DEVNULL,
@@ -813,6 +815,10 @@
         schema = root.copy()

         for keypath in root.allkeys():
+            if keypath[0] == "history":
+                # Ignore history as this is not relevant to the task
+                continue
+
             paramtype: str = schema.get(*keypath, field='type')
             if 'file' not in paramtype and 'dir' not in paramtype:
                 continue
@@ -895,6 +901,37 @@
                              f'{timeout} seconds. Terminating...')
             terminate_process(proc.pid, timeout=timeout)

+    def __collect_memory(self, pid) -> Optional[int]:
+        try:
+            pproc = psutil.Process(pid)
+            proc_mem_bytes = pproc.memory_full_info().uss
+            for child in pproc.children(recursive=True):
+                proc_mem_bytes += child.memory_full_info().uss
+            return proc_mem_bytes
+        except psutil.Error:
+            # Process may have already terminated or been killed.
+            # Retain existing memory usage statistics in this case.
+            pass
+        except PermissionError:
+            # OS is preventing access to this information so it cannot
+            # be collected
+            pass
+        return None
+
+    def __check_memory_limit(self, warn_limit: int) -> int:
+        try:
+            memory_usage = psutil.virtual_memory()
+            if memory_usage.percent > warn_limit:
+                self.logger.warning(
+                    'Current system memory usage is '
+                    f'{memory_usage.percent:.1f}%')
+                return int(memory_usage.percent + 1)
+        except psutil.Error:
+            pass
+        except PermissionError:
+            pass
+        return warn_limit
+
     def run_task(self,
                  workdir: str,
                  quiet: bool,
@@ -1029,39 +1066,29 @@
             raise TaskError(f"Unable to start {exe}: {str(e)}")

         memory_warn_limit = Task.__MEMORY_WARN_LIMIT
+        next_collection = None
         try:
             while proc.poll() is None:
+                curr_time = time.time()
+
                 # Monitor subprocess memory usage
-                try:
-                    pproc = psutil.Process(proc.pid)
-                    proc_mem_bytes = pproc.memory_full_info().uss
-                    for child in pproc.children(recursive=True):
-                        proc_mem_bytes += child.memory_full_info().uss
-                    max_mem_bytes = max(max_mem_bytes, proc_mem_bytes)
-
-                    memory_usage = psutil.virtual_memory()
-                    if memory_usage.percent > memory_warn_limit:
-                        self.logger.warning(
-                            'Current system memory usage is '
-                            f'{memory_usage.percent:.1f}%')
-                        memory_warn_limit = int(memory_usage.percent + 1)
-                except psutil.Error:
-                    # Process may have already terminated or been killed.
-                    # Retain existing memory usage statistics in this case.
-                    pass
-                except PermissionError:
-                    # OS is preventing access to this information so it cannot
-                    # be collected
-                    pass
+                if next_collection is None or \
+                        next_collection <= curr_time:
+                    proc_mem_bytes = self.__collect_memory(proc.pid)
+                    if proc_mem_bytes is not None:
+                        max_mem_bytes = max(max_mem_bytes, proc_mem_bytes)
+                    next_collection = curr_time + Task.__MEM_POLL_INTERVAL
+
+                memory_warn_limit = self.__check_memory_limit(memory_warn_limit)

                 read_stdio(stdout_reader, stderr_reader)

                 # Check for timeout
-                duration = time.time() - cpu_start
+                duration = curr_time - cpu_start
                 if timeout is not None and duration > timeout:
                     raise TaskTimeout(timeout=duration)

-                time.sleep(Task.__POLL_INTERVAL)
+                time.sleep(Task.__IO_POLL_INTERVAL)

         except KeyboardInterrupt:
             self.logger.info("Received ctrl-c.")
siliconcompiler/tools/builtin/__init__.py CHANGED
@@ -21,6 +21,8 @@ class BuiltinTask(Task):

         self._set_io_files()

+        self.set_threads(1)
+
     def _set_io_files(self):
         files = sorted(list(self.get_files_from_input_nodes().keys()))
         self.add_input_file(files)
siliconcompiler/tools/builtin/filter.py CHANGED
@@ -50,14 +50,21 @@
     def setup(self):
         super().setup()

+        self.set_threads(1)
+
+        flow = self.project.get("flowgraph", self.project.option.get_flow(), field="schema")
+        graph_node = flow.get_graph_node(self.step, self.index)
+
         if self.get("var", "keep"):
             self.add_required_key("var", "keep")
+        elif graph_node.get_args():
+            self.add_required_key(graph_node, "args")

         files = sorted(list(self.get_files_from_input_nodes().keys()))
         if not files:
             raise ValueError("task receives no files")

-        filters: List[str] = self.get("var", "keep")
+        filters: List[str] = self.get("var", "keep") or graph_node.get_args()
         if not filters:
             filters = ["*"]

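
The filter task above now falls back to the flowgraph node's args when no "keep" patterns are set, and defaults to ["*"] when neither is present. The sketch below shows the keep-pattern idea conceptually; fnmatch is an assumption here, since the actual matching code is not part of this hunk.

    # Keep only files matching at least one pattern; "*" keeps everything.
    import fnmatch

    def keep_files(files, patterns=None):
        patterns = patterns or ["*"]
        return [f for f in files if any(fnmatch.fnmatch(f, p) for p in patterns)]

    print(keep_files(["top.gds", "top.def", "top.v"], ["*.gds", "*.def"]))
    # ['top.gds', 'top.def']
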
siliconcompiler/tools/builtin/importfiles.py CHANGED
@@ -92,6 +92,8 @@ class ImportFilesTask(Task):
         """
         super().setup()

+        self.set_threads(1)
+
         if (self.step, self.index) not in self.schema_flow.get_entry_nodes():
             raise ValueError("task must be an entry node")

siliconcompiler/tools/klayout/__init__.py CHANGED
@@ -172,6 +172,7 @@ class KLayoutTask(ASICTask):
         with self.active_dataroot("refdir"):
             self.set_refdir("scripts")

+        self.set_environmentalvariable('PYTHONUNBUFFERED', '1')
         if self.project.get('option', 'nodisplay'):
             # Tells QT to use the offscreen platform if nodisplay is used
             self.set_environmentalvariable('QT_QPA_PLATFORM', 'offscreen')
@@ -185,6 +186,8 @@
         options = super().runtime_options()
         options.extend(['-rd', f'SC_KLAYOUT_ROOT={self.find_files("refdir")[0]}'])
         options.extend(['-rd', f'SC_TOOLS_ROOT={os.path.dirname(os.path.dirname(__file__))}'])
+        options.extend(['-rd',
+                        f'SC_ROOT={os.path.dirname(os.path.dirname(os.path.dirname(__file__)))}'])
         return options

     def post_process(self):
siliconcompiler/tools/klayout/scripts/klayout_convert_drc_db.py CHANGED
@@ -151,6 +151,7 @@ def convert_drc(view, path):
 def main():
     # SC_ROOT provided by CLI
     sys.path.append(SC_KLAYOUT_ROOT)  # noqa: F821
+    sys.path.append(SC_ROOT)  # noqa: F821

     from klayout_utils import get_schema

siliconcompiler/tools/klayout/scripts/klayout_export.py CHANGED
@@ -121,6 +121,7 @@ def main():
     # SC_ROOT provided by CLI
     sys.path.append(SC_KLAYOUT_ROOT)  # noqa: F821
     sys.path.append(SC_TOOLS_ROOT)  # noqa: F821
+    sys.path.append(SC_ROOT)  # noqa: F821

     from klayout_utils import (
         technology,
siliconcompiler/tools/klayout/scripts/klayout_operations.py CHANGED
@@ -326,6 +326,7 @@ if __name__ == "__main__":
     # SC_ROOT provided by CLI
     sys.path.append(SC_KLAYOUT_ROOT)  # noqa: F821
     sys.path.append(SC_TOOLS_ROOT)  # noqa: F821
+    sys.path.append(SC_ROOT)  # noqa: F821

     from klayout_utils import (
         technology,
siliconcompiler/tools/klayout/scripts/klayout_show.py CHANGED
@@ -193,6 +193,7 @@ def main():
     # SC_ROOT provided by CLI, and is only accessible when this is main module
     sys.path.append(SC_KLAYOUT_ROOT)  # noqa: F821
     sys.path.append(SC_TOOLS_ROOT)  # noqa: F821
+    sys.path.append(SC_ROOT)  # noqa: F821

     from klayout_utils import (
         technology,
@@ -250,7 +251,7 @@ def main():
         sc_filename = schema.get('tool', 'klayout', 'task', task, 'var', 'showfilepath',
                                  step=step, index=index)
     else:
-        for ext in (f'{sc_fileext}.gz', sc_fileext):
+        for ext in (f'{sc_fileext}.gz', sc_fileext, 'gds.gz', "gds", 'oas.gz', 'oas'):
             sc_filename = f"inputs/{design}.{ext}"
             if os.path.exists(sc_filename):
                 break
siliconcompiler/tools/klayout/scripts/klayout_utils.py CHANGED
@@ -1,7 +1,6 @@
 import pya
 import json
 import shutil
-import sys
 import os.path


@@ -129,8 +128,9 @@ def technology(design, schema):
             map_file = os.path.abspath(os.path.join(os.path.dirname(tech_file),
                                                     map_file))
         for s in get_streams(schema):
-            if schema.valid('library', sc_pdk, 'layermapfileset', 'klayout', 'def', s):
-                for fileset in schema.get('library', sc_pdk, 'layermapfileset', 'klayout', 'def', s):
+            if schema.valid('library', sc_pdk, 'pdk', 'layermapfileset', 'klayout', 'def', s):
+                for fileset in schema.get('library', sc_pdk, 'pdk', 'layermapfileset', 'klayout',
+                                          'def', s):
                     if schema.valid('library', sc_pdk, "fileset", fileset, "file", "layermap"):
                         map_file = schema.get('library', sc_pdk, "fileset", fileset, "file", "layermap")
                         if map_file:
@@ -170,7 +170,6 @@ def get_write_options(filename, timestamps):


 def get_schema(manifest):
-    sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
     from schema.safeschema import SafeSchema
     return SafeSchema.from_manifest(filepath=manifest)

siliconcompiler/tools/klayout/show.py CHANGED
@@ -15,6 +15,17 @@ class ShowTask(ShowTask, KLayoutTask):

         self.set_script("klayout_show.py")

+        if f"{self.design_topmodule}.gds.gz" in self.get_files_from_input_nodes():
+            self.add_input_file(ext="gds.gz")
+        elif f"{self.design_topmodule}.gds" in self.get_files_from_input_nodes():
+            self.add_input_file(ext="gds")
+        elif f"{self.design_topmodule}.oas.gz" in self.get_files_from_input_nodes():
+            self.add_input_file(ext="oas.gz")
+        elif f"{self.design_topmodule}.oas" in self.get_files_from_input_nodes():
+            self.add_input_file(ext="oas")
+        else:
+            self.add_required_key("var", "showfilepath")
+
         self.add_commandline_option(["-nc", "-rm"], clobber=True)

     def get_supported_show_extentions(self):
@@ -23,8 +34,9 @@ class ShowTask(ShowTask, KLayoutTask):
     def pre_process(self):
         super().pre_process()

-        rel_path = os.path.dirname(self.get("var", "showfilepath"))
-        for ext in ('lyt', 'lyp'):
-            ext_file = os.path.join(rel_path, f'{self.design_topmodule}.{ext}')
-            if ext_file and os.path.exists(ext_file):
-                shutil.copy2(ext_file, f"inputs/{self.design_topmodule}.{ext}")
+        if self.get("var", "showfilepath"):
+            rel_path = os.path.dirname(self.get("var", "showfilepath"))
+            for ext in ('lyt', 'lyp'):
+                ext_file = os.path.join(rel_path, f'{self.design_topmodule}.{ext}')
+                if ext_file and os.path.exists(ext_file):
+                    shutil.copy2(ext_file, f"inputs/{self.design_topmodule}.{ext}")
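
Both klayout_show.py and the show task above now fall back through compressed GDS, GDS, compressed OASIS, and OASIS inputs when no explicit showfilepath is set. A minimal sketch of that lookup order, with an invented top module name:

    # First existing layout file wins, in the same priority order as the diff.
    import os

    def pick_layout(topmodule, indir="inputs"):
        for ext in ("gds.gz", "gds", "oas.gz", "oas"):
            candidate = os.path.join(indir, f"{topmodule}.{ext}")
            if os.path.exists(candidate):
                return candidate
        return None

    print(pick_layout("heartbeat"))  # None unless one of the candidates exists
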