siliconcompiler 0.35.2__py3-none-any.whl → 0.35.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86)
  1. siliconcompiler/_metadata.py +1 -1
  2. siliconcompiler/apps/sc_issue.py +18 -2
  3. siliconcompiler/apps/smake.py +106 -100
  4. siliconcompiler/checklist.py +2 -1
  5. siliconcompiler/constraints/asic_component.py +49 -11
  6. siliconcompiler/constraints/asic_floorplan.py +23 -21
  7. siliconcompiler/constraints/asic_pins.py +55 -17
  8. siliconcompiler/constraints/asic_timing.py +53 -22
  9. siliconcompiler/constraints/fpga_timing.py +5 -6
  10. siliconcompiler/data/templates/replay/replay.sh.j2 +27 -14
  11. siliconcompiler/flowgraph.py +418 -129
  12. siliconcompiler/library.py +5 -4
  13. siliconcompiler/package/__init__.py +17 -6
  14. siliconcompiler/package/https.py +10 -5
  15. siliconcompiler/project.py +92 -33
  16. siliconcompiler/remote/client.py +17 -6
  17. siliconcompiler/scheduler/docker.py +24 -25
  18. siliconcompiler/scheduler/scheduler.py +284 -121
  19. siliconcompiler/scheduler/schedulernode.py +196 -90
  20. siliconcompiler/scheduler/slurm.py +113 -29
  21. siliconcompiler/scheduler/taskscheduler.py +0 -7
  22. siliconcompiler/schema/__init__.py +3 -2
  23. siliconcompiler/schema/_metadata.py +1 -1
  24. siliconcompiler/schema/baseschema.py +205 -93
  25. siliconcompiler/schema/editableschema.py +29 -0
  26. siliconcompiler/schema/namedschema.py +21 -13
  27. siliconcompiler/schema/parametervalue.py +14 -2
  28. siliconcompiler/schema/safeschema.py +18 -7
  29. siliconcompiler/schema_support/dependencyschema.py +4 -3
  30. siliconcompiler/schema_support/option.py +82 -1
  31. siliconcompiler/schema_support/pathschema.py +14 -15
  32. siliconcompiler/schema_support/record.py +5 -4
  33. siliconcompiler/targets/asap7_demo.py +4 -1
  34. siliconcompiler/tool.py +56 -29
  35. siliconcompiler/tools/builtin/__init__.py +2 -0
  36. siliconcompiler/tools/builtin/filter.py +8 -1
  37. siliconcompiler/tools/builtin/importfiles.py +2 -0
  38. siliconcompiler/tools/klayout/__init__.py +3 -0
  39. siliconcompiler/tools/klayout/scripts/klayout_convert_drc_db.py +1 -0
  40. siliconcompiler/tools/klayout/scripts/klayout_export.py +1 -0
  41. siliconcompiler/tools/klayout/scripts/klayout_operations.py +1 -0
  42. siliconcompiler/tools/klayout/scripts/klayout_show.py +2 -1
  43. siliconcompiler/tools/klayout/scripts/klayout_utils.py +3 -4
  44. siliconcompiler/tools/klayout/show.py +17 -5
  45. siliconcompiler/tools/openroad/__init__.py +27 -1
  46. siliconcompiler/tools/openroad/_apr.py +81 -4
  47. siliconcompiler/tools/openroad/clock_tree_synthesis.py +1 -0
  48. siliconcompiler/tools/openroad/global_placement.py +1 -0
  49. siliconcompiler/tools/openroad/init_floorplan.py +116 -7
  50. siliconcompiler/tools/openroad/power_grid_analysis.py +174 -0
  51. siliconcompiler/tools/openroad/repair_design.py +1 -0
  52. siliconcompiler/tools/openroad/repair_timing.py +1 -0
  53. siliconcompiler/tools/openroad/scripts/apr/preamble.tcl +1 -1
  54. siliconcompiler/tools/openroad/scripts/apr/sc_init_floorplan.tcl +42 -4
  55. siliconcompiler/tools/openroad/scripts/apr/sc_irdrop.tcl +146 -0
  56. siliconcompiler/tools/openroad/scripts/apr/sc_repair_design.tcl +1 -1
  57. siliconcompiler/tools/openroad/scripts/apr/sc_write_data.tcl +4 -6
  58. siliconcompiler/tools/openroad/scripts/common/procs.tcl +1 -1
  59. siliconcompiler/tools/openroad/scripts/common/reports.tcl +1 -1
  60. siliconcompiler/tools/openroad/scripts/rcx/sc_rcx_bench.tcl +2 -4
  61. siliconcompiler/tools/opensta/__init__.py +1 -1
  62. siliconcompiler/tools/opensta/scripts/sc_timing.tcl +17 -12
  63. siliconcompiler/tools/vivado/scripts/sc_bitstream.tcl +11 -0
  64. siliconcompiler/tools/vivado/scripts/sc_place.tcl +11 -0
  65. siliconcompiler/tools/vivado/scripts/sc_route.tcl +11 -0
  66. siliconcompiler/tools/vivado/scripts/sc_syn_fpga.tcl +10 -0
  67. siliconcompiler/tools/vpr/__init__.py +28 -0
  68. siliconcompiler/tools/yosys/prepareLib.py +7 -2
  69. siliconcompiler/tools/yosys/scripts/sc_screenshot.tcl +1 -1
  70. siliconcompiler/tools/yosys/scripts/sc_synth_asic.tcl +40 -4
  71. siliconcompiler/tools/yosys/scripts/sc_synth_fpga.tcl +15 -5
  72. siliconcompiler/tools/yosys/syn_asic.py +62 -2
  73. siliconcompiler/tools/yosys/syn_fpga.py +8 -0
  74. siliconcompiler/toolscripts/_tools.json +6 -6
  75. siliconcompiler/utils/__init__.py +243 -51
  76. siliconcompiler/utils/curation.py +89 -56
  77. siliconcompiler/utils/issue.py +6 -1
  78. siliconcompiler/utils/multiprocessing.py +35 -2
  79. siliconcompiler/utils/paths.py +21 -0
  80. siliconcompiler/utils/settings.py +141 -0
  81. {siliconcompiler-0.35.2.dist-info → siliconcompiler-0.35.4.dist-info}/METADATA +5 -4
  82. {siliconcompiler-0.35.2.dist-info → siliconcompiler-0.35.4.dist-info}/RECORD +86 -83
  83. {siliconcompiler-0.35.2.dist-info → siliconcompiler-0.35.4.dist-info}/WHEEL +0 -0
  84. {siliconcompiler-0.35.2.dist-info → siliconcompiler-0.35.4.dist-info}/entry_points.txt +0 -0
  85. {siliconcompiler-0.35.2.dist-info → siliconcompiler-0.35.4.dist-info}/licenses/LICENSE +0 -0
  86. {siliconcompiler-0.35.2.dist-info → siliconcompiler-0.35.4.dist-info}/top_level.txt +0 -0
siliconcompiler/library.py

@@ -7,7 +7,7 @@ from siliconcompiler.schema_support.filesetschema import FileSetSchema
 from siliconcompiler.schema_support.pathschema import PathSchema
 from siliconcompiler.schema import NamedSchema, BaseSchema
 
-from siliconcompiler.schema import EditableSchema, Parameter, Scope, PerNode
+from siliconcompiler.schema import EditableSchema, Parameter, Scope, PerNode, LazyLoad
 from siliconcompiler.schema.utils import trim
 
 
@@ -99,7 +99,8 @@ class ToolLibrarySchema(LibrarySchema):
 
     def _from_dict(self, manifest: Dict,
                    keypath: Union[List[str], Tuple[str, ...]],
-                   version: Optional[Tuple[int, ...]] = None) \
+                   version: Optional[Tuple[int, ...]] = None,
+                   lazyload: LazyLoad = LazyLoad.ON) \
             -> Tuple[Set[Tuple[str, ...]], Set[Tuple[str, ...]]]:
         """
         Constructs a schema from a dictionary.
@@ -112,7 +113,7 @@ class ToolLibrarySchema(LibrarySchema):
         Returns:
             dict: The constructed dictionary.
         """
-        if "tool" in manifest:
+        if not lazyload.is_enforced and "tool" in manifest:
             # collect tool keys
             tool_keys = self.allkeys("tool")
 
@@ -136,7 +137,7 @@ class ToolLibrarySchema(LibrarySchema):
             if not manifest["tool"]:
                 del manifest["tool"]
 
-        return super()._from_dict(manifest, keypath, version)
+        return super()._from_dict(manifest, keypath, version=version, lazyload=lazyload)
 
     def _generate_doc(self, doc,
                       ref_root: str = "",
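
Note: these four hunks thread a new lazyload argument through ToolLibrarySchema._from_dict so the tool-key merge can be skipped entirely when lazy loading is enforced. A minimal sketch of how such a gate behaves, using a simplified stand-in for the real siliconcompiler.schema.LazyLoad enum (only the ON member and the is_enforced property are confirmed by this diff; the other member names are illustrative assumptions):

    from enum import Enum

    class LazyLoad(Enum):
        # Stand-in enum; member names other than ON are assumptions.
        OFF = "off"
        ON = "on"
        ENFORCED = "enforced"

        @property
        def is_enforced(self) -> bool:
            return self is LazyLoad.ENFORCED

    def from_dict(manifest: dict, lazyload: LazyLoad = LazyLoad.ON) -> dict:
        if not lazyload.is_enforced and "tool" in manifest:
            # Eager path: merge per-tool keys into the schema right away.
            pass
        # Under enforced lazy loading the "tool" section is left untouched
        # for a later, on-demand merge.
        return manifest
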
siliconcompiler/package/__init__.py

@@ -16,6 +16,7 @@ import logging
 import os
 import random
 import re
+import shutil
 import time
 import threading
 import uuid
@@ -29,7 +30,8 @@ from importlib.metadata import distributions, distribution
 from pathlib import Path
 from urllib import parse as url_parse
 
-from siliconcompiler.utils import get_plugins
+from siliconcompiler.utils import get_plugins, default_cache_dir
+from siliconcompiler.utils.paths import cwdirsafe
 
 if TYPE_CHECKING:
     from siliconcompiler.project import Project
@@ -370,7 +372,7 @@ class RemoteResolver(Resolver):
         Returns:
            Path: The path to the cache directory.
        """
-        default_path = os.path.join(Path.home(), '.sc', 'cache')
+        default_path = default_cache_dir()
         if not root:
             return Path(default_path)
 
@@ -380,8 +382,7 @@
         if path:
             path = root.find_files('option', 'cachedir', missing_ok=True)
             if not path:
-                path = os.path.join(getattr(root, "_Project__cwd", os.getcwd()),
-                                    root.get('option', 'cachedir'))
+                path = os.path.join(cwdirsafe(root), root.get('option', 'cachedir'))
         if not path:
             path = default_path
 
@@ -543,7 +544,17 @@
         if self.check_cache():
             return self.cache_path
 
-        self.resolve_remote()
+        try:
+            self.resolve_remote()
+        except BaseException as e:
+            # Exception occurred, so need to cleanup
+            try:
+                shutil.rmtree(self.cache_path)
+            except BaseException as cleane:
+                self.logger.error(f"Exception occurred during cleanup: {cleane} "
+                                  f"({cleane.__class__.__name__})")
+            raise e from None
+
         self.set_changed()
         return self.cache_path
 
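
Note: the try/except added around resolve_remote() is a build-or-delete pattern: if the fetch dies partway (including on Ctrl-C, which is why BaseException is caught rather than Exception), the partially populated cache directory is removed so a later run cannot mistake it for a valid entry, and raise e from None re-raises the original failure without extra context chaining. The generic shape of the pattern, as a sketch:

    import shutil
    from pathlib import Path

    def populate(target: Path, build) -> Path:
        # Build into target; delete it on any failure so no half-written
        # directory survives to poison future cache lookups.
        try:
            build(target)  # may raise at any point, even KeyboardInterrupt
        except BaseException:
            shutil.rmtree(target, ignore_errors=True)
            raise
        return target
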
@@ -560,7 +571,7 @@ class FileResolver(Resolver):
         if source.startswith("file://"):
             source = source[7:]
         if source[0] != "$" and not os.path.isabs(source):
-            source = os.path.join(getattr(root, "_Project__cwd", os.getcwd()), source)
+            source = os.path.join(cwdirsafe(root), source)
 
         super().__init__(name, root, f"file://{source}", None)
 
siliconcompiler/package/https.py

@@ -114,11 +114,16 @@ class HTTPResolver(RemoteResolver):
         except tarfile.ReadError:
             fileobj.seek(0)
             try:
-                with zipfile.ZipFile(fileobj) as zip_ref:
-                    zip_ref.extractall(path=self.cache_path)
-            except zipfile.BadZipFile:
-                raise TypeError(f"Could not extract file from {data_url}. "
-                                "File is not a valid tar.gz or zip archive.")
+                with tarfile.open(fileobj=fileobj, mode='r:bz2') as tar_ref:
+                    tar_ref.extractall(path=self.cache_path)
+            except tarfile.ReadError:
+                fileobj.seek(0)
+                try:
+                    with zipfile.ZipFile(fileobj) as zip_ref:
+                        zip_ref.extractall(path=self.cache_path)
+                except zipfile.BadZipFile:
+                    raise TypeError(f"Could not extract file from {data_url}. "
+                                    "File is not a valid tar.gz or zip archive.")
 
         # --- GitHub-specific directory flattening ---
         # GitHub archives often have a single top-level directory like 'repo-v1.0'.
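
Note: HTTPResolver now probes three archive formats in sequence, rewinding the buffer after each failed attempt: tar.gz (just before this hunk), then tar.bz2, then zip. The final error message still says "tar.gz or zip" even though tar.bz2 is now accepted. A condensed sketch of the resulting chain:

    import tarfile
    import zipfile

    def extract(fileobj, dest: str) -> None:
        # Try each tar compression first, rewinding after a failed probe.
        for mode in ('r:gz', 'r:bz2'):
            try:
                with tarfile.open(fileobj=fileobj, mode=mode) as tar_ref:
                    tar_ref.extractall(path=dest)
                return
            except tarfile.ReadError:
                fileobj.seek(0)
        # Last resort: zip; anything else is rejected.
        try:
            with zipfile.ZipFile(fileobj) as zip_ref:
                zip_ref.extractall(path=dest)
        except zipfile.BadZipFile:
            raise TypeError("Not a valid tar.gz, tar.bz2, or zip archive.")
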
siliconcompiler/project.py

@@ -5,10 +5,11 @@ import uuid
 
 import os.path
 
-from typing import Union, List, Tuple, TextIO, Optional
+from typing import Union, List, Tuple, TextIO, Optional, Dict, Set
 
 from siliconcompiler.schema import BaseSchema, NamedSchema, EditableSchema, Parameter, Scope, \
-    __version__ as schema_version
+    __version__ as schema_version, \
+    LazyLoad
 
 from siliconcompiler import Design
 from siliconcompiler import Flowgraph
@@ -107,7 +108,7 @@ class Project(PathSchemaBase, CommandLineSchema, BaseSchema):
             is not intended for external use."""))
 
         schema.insert("checklist", "default", Checklist())
-        schema.insert("library", BaseSchema())
+        schema.insert("library", _ProjectLibrary())
        schema.insert("flowgraph", "default", Flowgraph())
        schema.insert("metric", MetricSchema())
        schema.insert("record", RecordSchema())
@@ -289,26 +290,11 @@ class Project(PathSchemaBase, CommandLineSchema, BaseSchema):
         """
         return Project.__name__
 
-    def __populate_deps(self, obj: DependencySchema = None):
-        """
-        Ensures that all loaded dependencies (like libraries) within the project
-        contain correct internal pointers back to the project's libraries.
-        This is crucial for maintaining a consistent and navigable schema graph.
-
-        Args:
-            obj (DependencySchema, optional): An optional dependency object to
-                reset and populate. If None, all existing library dependencies
-                in the project are processed. Defaults to None.
-        """
-        if obj:
-            obj._reset_deps()
-        dep_map = {name: self.get("library", name, field="schema")
-                   for name in self.getkeys("library")}
-        for obj in dep_map.values():
-            if isinstance(obj, DependencySchema):
-                obj._populate_deps(dep_map)
-
-    def _from_dict(self, manifest, keypath, version=None):
+    def _from_dict(self, manifest: Dict,
+                   keypath: Union[List[str], Tuple[str, ...]],
+                   version: Optional[Tuple[int, ...]] = None,
+                   lazyload: LazyLoad = LazyLoad.ON) \
+            -> Tuple[Set[Tuple[str, ...]], Set[Tuple[str, ...]]]:
         """
         Populates the project's schema from a dictionary representation.
 
@@ -325,15 +311,13 @@ class Project(PathSchemaBase, CommandLineSchema, BaseSchema):
         Returns:
             Any: The result of the superclass's `_from_dict` method.
         """
-        ret = super()._from_dict(manifest, keypath, version)
+        ret = super()._from_dict(manifest, keypath, version=version, lazyload=lazyload)
 
-        # Restore dependencies
-        self.__populate_deps()
-
-        # Preserve logger in history
-        for history in self.getkeys("history"):
-            hist = self.get("history", history, field="schema")
-            hist.__logger = self.__logger
+        if not lazyload.is_enforced:
+            # Preserve logger in history
+            for history in self.getkeys("history"):
+                hist: "Project" = self.get("history", history, field="schema")
+                hist.__logger = self.__logger
 
         return ret
 
@@ -392,7 +376,7 @@ class Project(PathSchemaBase, CommandLineSchema, BaseSchema):
             self.add_dep(dep)
 
         # Rebuild dependencies to ensure instances are correct
-        self.__populate_deps(obj)
+        self.get("library", field="schema")._populate_deps(obj)
 
     def __import_flow(self, flow: Flowgraph):
         """
@@ -410,13 +394,21 @@ class Project(PathSchemaBase, CommandLineSchema, BaseSchema):
             return
 
         edit_schema = EditableSchema(self)
-        edit_schema.insert("flowgraph", flow.name, flow)
 
         # Instantiate tasks
         for task_cls in flow.get_all_tasks():
             task = task_cls()
             if not self.valid("tool", task.tool(), "task", task.task()):
                 edit_schema.insert("tool", task.tool(), "task", task.task(), task)
+            else:
+                existing_task: Task = self.get("tool", task.tool(), "task", task.task(),
+                                               field="schema")
+                if type(existing_task) is not type(task):
+                    raise TypeError(f"Task {task.tool()}/{task.task()} already exists with "
+                                    f"different type {type(existing_task).__name__}, "
+                                    f"imported type is {type(task).__name__}")
+
+        edit_schema.insert("flowgraph", flow.name, flow)
 
     def check_manifest(self) -> bool:
         """
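
Note: the collision check uses type(a) is not type(b) rather than isinstance(), so even a subclass of the registered task class counts as a conflict, and the flowgraph itself is now inserted only after every task has passed the check, so a failed import no longer leaves a half-registered flow behind. A small illustration with hypothetical task classes:

    class SynthTask:
        pass

    class CustomSynthTask(SynthTask):
        pass

    registered, imported = SynthTask(), CustomSynthTask()
    assert isinstance(imported, SynthTask)           # isinstance() would accept it
    assert type(registered) is not type(imported)    # the exact-type check rejects it
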
@@ -664,6 +656,9 @@ class Project(PathSchemaBase, CommandLineSchema, BaseSchema):
         # Pass along manager address
         state["__manager__"] = MPManager._get_manager_address()
 
+        # Pass along logger level
+        state["__loglevel__"] = self.logger.level
+
         return state
 
     def __setstate__(self, state):
@@ -676,6 +671,10 @@ class Project(PathSchemaBase, CommandLineSchema, BaseSchema):
         Args:
             state (dict): The deserialized state of the object.
         """
+        # Retrieve log level
+        loglevel = state["__loglevel__"]
+        del state["__loglevel__"]
+
         # Retrieve manager address
         MPManager._set_manager_address(state["__manager__"])
         del state["__manager__"]
@@ -684,6 +683,7 @@ class Project(PathSchemaBase, CommandLineSchema, BaseSchema):
 
         # Reinitialize logger on restore
         self.__init_logger()
+        self.logger.setLevel(loglevel)
 
         # Restore callbacks
         self.__init_option_callbacks()
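
Note: taken together, the three hunks above preserve logger verbosity across process boundaries. logging.Logger handles are rebuilt rather than pickled, so the numeric level is stashed in the state dict on the way out and re-applied after __init_logger() on the way in. A self-contained sketch of the same pattern:

    import logging
    import pickle

    class Node:
        def __init__(self, level: int = logging.INFO):
            self.logger = logging.getLogger("node")
            self.logger.setLevel(level)

        def __getstate__(self):
            state = self.__dict__.copy()
            state["__loglevel__"] = self.logger.level
            del state["logger"]                      # loggers do not pickle
            return state

        def __setstate__(self, state):
            loglevel = state.pop("__loglevel__")
            self.__dict__.update(state)
            self.logger = logging.getLogger("node")  # rebuild, then restore level
            self.logger.setLevel(loglevel)

    node = pickle.loads(pickle.dumps(Node(logging.DEBUG)))
    assert node.logger.level == logging.DEBUG
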
@@ -1225,6 +1225,7 @@ class Project(PathSchemaBase, CommandLineSchema, BaseSchema):
         # Setup options:
         for option, value in [
                 ("track", False),
+                ("remote", False),
                 ("hash", False),
                 ("nodisplay", False),
                 ("continue", True),
@@ -1276,3 +1277,61 @@ class Lint(Project):
     @classmethod
     def _getdict_type(cls) -> str:
         return Lint.__name__
+
+
+class _ProjectLibrary(BaseSchema):
+    def _from_dict(self, manifest: Dict,
+                   keypath: Union[List[str], Tuple[str, ...]],
+                   version: Optional[Tuple[int, ...]] = None,
+                   lazyload: LazyLoad = LazyLoad.ON) \
+            -> Tuple[Set[Tuple[str, ...]], Set[Tuple[str, ...]]]:
+        """
+        Populates the project's schema from a dictionary representation.
+
+        This method is typically used during deserialization or when loading
+        a project state from a manifest. After loading the data, it ensures
+        that internal dependencies are correctly re-established.
+
+        Args:
+            manifest (dict): The dictionary containing the schema data.
+            keypath (list): The current keypath being processed (used internally
+                for recursive loading).
+            version (str, optional): The schema version of the manifest. Defaults to None.
+
+        Returns:
+            Any: The result of the superclass's `_from_dict` method.
+        """
+        ret = super()._from_dict(manifest, keypath, version=version, lazyload=lazyload)
+
+        if not lazyload.is_enforced:
+            # Restore dependencies
+            self._populate_deps(complete=True)
+
+        return ret
+
+    def _populate_deps(self, obj: Optional[DependencySchema] = None, complete: bool = False):
+        """
+        Ensures that all loaded dependencies (like libraries) within the project
+        contain correct internal pointers back to the project's libraries.
+        This is crucial for maintaining a consistent and navigable schema graph.
+
+        Args:
+            obj (DependencySchema, optional): An optional dependency object to
+                reset and populate. If None, all existing library dependencies
+                in the project are processed. Defaults to None.
+            complete (bool, optional): If True, performs a full reset of all
+                DependencySchema objects before populating dependencies. This
+                ensures a clean state during manifest deserialization. Defaults to False.
+        """
+        if obj:
+            obj._reset_deps()
+        dep_map = {name: self.get(name, field="schema") for name in self.getkeys()}
+
+        if complete:
+            for obj in dep_map.values():
+                if isinstance(obj, DependencySchema):
+                    obj._reset_deps()
+
+        for obj in dep_map.values():
+            if isinstance(obj, DependencySchema):
+                obj._populate_deps(dep_map)
siliconcompiler/remote/client.py

@@ -18,7 +18,8 @@ from siliconcompiler import NodeStatus as SCNodeStatus
 from siliconcompiler._metadata import default_server
 from siliconcompiler.flowgraph import RuntimeFlowgraph
 from siliconcompiler.scheduler import Scheduler
-from siliconcompiler.schema import Journal
+from siliconcompiler.schema import Journal, Parameter
+from siliconcompiler.package import PythonPathResolver, FileResolver, KeyPathResolver
 
 from siliconcompiler.utils.logging import get_console_formatter
 from siliconcompiler.utils.curation import collect
@@ -573,19 +574,29 @@ service, provided by SiliconCompiler, is not intended to process proprietary IP.
 
         # Ensure dataroots with python sources are copied
         for key in self.__project.allkeys():
-            key_type = self.__project.get(*key, field='type')
+            if key[0] == "history":
+                continue
+
+            param: Parameter = self.__project.get(*key, field=None)
+            key_type: str = param.get(field="type")
 
             if 'dir' in key_type or 'file' in key_type:
-                for _, step, index in self.__project.get(*key, field=None).getvalues(
-                        return_defvalue=False):
-                    dataroots = self.__project.get(*key, field='dataroot', step=step, index=index)
+                schema_obj = self.__project.get(*key[:-1], field="schema")
+                dataroot_objs = schema_obj._find_files_dataroot_resolvers(True)
+
+                for value, step, index in param.getvalues():
+                    if not value:
+                        continue
+                    dataroots = param.get(field='dataroot', step=step, index=index)
                     if not isinstance(dataroots, list):
                         dataroots = [dataroots]
                     force_copy = False
                     for dataroot in dataroots:
                         if not dataroot:
                             continue
-                        if dataroot.startswith('python://'):
+                        dataroot_resolver = dataroot_objs.get(dataroot, None)
+                        if isinstance(dataroot_resolver,
+                                      (PythonPathResolver, FileResolver, KeyPathResolver)):
                             force_copy = True
                     if force_copy:
                         self.__project.set(*key, True, field='copy', step=step, index=index)
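
Note: the force-copy decision now keys off the resolver object's type instead of string-matching a 'python://' prefix on the dataroot, which also catches file and key-path sources, presumably because such local sources cannot be re-fetched by the remote server. A sketch of the dispatch, with stand-in classes for the real ones in siliconcompiler.package:

    class Resolver: ...
    class PythonPathResolver(Resolver): ...
    class FileResolver(Resolver): ...
    class KeyPathResolver(Resolver): ...
    class HTTPResolver(Resolver): ...

    # Assumed rationale: local sources must be collected and shipped with
    # the job; remote (e.g. HTTP) sources can be resolved server-side.
    LOCAL_RESOLVERS = (PythonPathResolver, FileResolver, KeyPathResolver)

    def needs_copy(resolver: Resolver) -> bool:
        return isinstance(resolver, LOCAL_RESOLVERS)

    assert needs_copy(FileResolver())
    assert not needs_copy(HTTPResolver())
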
siliconcompiler/scheduler/docker.py

@@ -3,6 +3,8 @@ import os
 import shlex
 import sys
 
+import docker.errors
+
 from pathlib import Path
 
 import siliconcompiler
@@ -11,10 +13,9 @@ from siliconcompiler.package import RemoteResolver
 from siliconcompiler.utils import default_email_credentials_file
 from siliconcompiler.scheduler import SchedulerNode
 from siliconcompiler.utils.logging import SCBlankLoggerFormatter
-from siliconcompiler.utils.curation import collect
 
 
-def get_image(project, step, index):
+def get_image(project, step, index) -> str:
     """Determines the Docker image to use for a given node.
 
     The image is selected based on the following priority:
@@ -32,7 +33,7 @@ def get_image(project, step, index):
     """
     from siliconcompiler import __version__
 
-    queue = project.get('option', 'scheduler', 'queue', step=step, index=index)
+    queue = project.option.scheduler.get_queue(step=step, index=index)
     if queue:
         return queue
 
@@ -161,24 +162,24 @@ class DockerSchedulerNode(SchedulerNode):
         """
         A static pre-processing hook for the Docker scheduler.
 
-        On Windows, this method forces all file/directory parameters to be
-        copied rather than linked, which avoids issues with differing
-        filesystem types between the host and the Linux-based container.
-        It then triggers :meth:`.collect()` to ensure all files are staged.
-
         Args:
             project (Project): The project object to perform pre-processing on.
         """
-        if sys.platform == 'win32':
-            # this avoids the issue of different file system types
-            project.logger.error('Setting copy field to true for docker run on Windows')
-            for key in project.allkeys():
-                if key[0] == 'history':
-                    continue
-                sc_type = project.get(*key, field='type')
-                if 'dir' in sc_type or 'file' in sc_type:
-                    project.set(*key, True, field='copy')
-            collect(project)
+        try:
+            client = docker.from_env()
+            client.version()
+        except (docker.errors.DockerException, docker.errors.APIError):
+            raise RuntimeError('docker is not available or installed on this machine')
+
+    def mark_copy(self) -> bool:
+        if sys.platform != 'win32':
+            return False
+
+        do_collect = False
+        for key in self.get_required_path_keys():
+            self.project.set(*key, True, field='copy')
+            do_collect = True
+        return do_collect
 
     def run(self):
         """
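
Note: the daemon probe moves out of run() and into the static pre-processing hook, so an unusable Docker install fails the whole flow once, up front, instead of erroring inside every node. docker.from_env() only builds a client from the environment; it is the client.version() call that forces a round-trip to the daemon. The probe in isolation:

    import docker
    import docker.errors

    def assert_docker_available() -> None:
        # Raise early if the Docker daemon cannot be reached.
        try:
            client = docker.from_env()
            client.version()  # actual round-trip to the daemon
        except (docker.errors.DockerException, docker.errors.APIError):
            raise RuntimeError('docker is not available or installed on this machine')
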
@@ -196,12 +197,7 @@
         """
         self._init_run_logger()
 
-        try:
-            client = docker.from_env()
-            client.version()
-        except (docker.errors.DockerException, docker.errors.APIError) as e:
-            self.logger.error(f'Unable to connect to docker: {e}')
-            self.halt()
+        client = docker.from_env()
 
         is_windows = sys.platform == 'win32'
 
@@ -233,7 +229,7 @@
             email_file = default_email_credentials_file()
             if is_windows:
                 # Hack to get around manifest merging
-                self.project.set('option', 'cachedir', None)
+                self.project.option.set_cachedir(None)
             cache_dir = '/sc_cache'
             cwd = '/sc_docker'
             builddir = f'{cwd}/build'
@@ -347,3 +343,6 @@
 
         # Restore working directory
         os.chdir(start_cwd)
+
+    def check_required_paths(self) -> bool:
+        return True