siliconcompiler 0.35.3__py3-none-any.whl → 0.35.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. siliconcompiler/_metadata.py +1 -1
  2. siliconcompiler/apps/sc_issue.py +18 -2
  3. siliconcompiler/checklist.py +2 -1
  4. siliconcompiler/constraints/asic_component.py +49 -11
  5. siliconcompiler/constraints/asic_floorplan.py +23 -21
  6. siliconcompiler/constraints/asic_pins.py +55 -17
  7. siliconcompiler/constraints/asic_timing.py +53 -22
  8. siliconcompiler/constraints/fpga_timing.py +5 -6
  9. siliconcompiler/data/templates/replay/replay.sh.j2 +27 -14
  10. siliconcompiler/package/__init__.py +17 -6
  11. siliconcompiler/project.py +9 -1
  12. siliconcompiler/scheduler/docker.py +24 -25
  13. siliconcompiler/scheduler/scheduler.py +82 -68
  14. siliconcompiler/scheduler/schedulernode.py +133 -20
  15. siliconcompiler/scheduler/slurm.py +113 -29
  16. siliconcompiler/scheduler/taskscheduler.py +0 -7
  17. siliconcompiler/schema/editableschema.py +29 -0
  18. siliconcompiler/schema/parametervalue.py +14 -2
  19. siliconcompiler/schema_support/option.py +82 -1
  20. siliconcompiler/schema_support/pathschema.py +7 -13
  21. siliconcompiler/tool.py +47 -25
  22. siliconcompiler/tools/klayout/__init__.py +3 -0
  23. siliconcompiler/tools/klayout/scripts/klayout_convert_drc_db.py +1 -0
  24. siliconcompiler/tools/klayout/scripts/klayout_export.py +1 -0
  25. siliconcompiler/tools/klayout/scripts/klayout_operations.py +1 -0
  26. siliconcompiler/tools/klayout/scripts/klayout_show.py +1 -0
  27. siliconcompiler/tools/klayout/scripts/klayout_utils.py +3 -4
  28. siliconcompiler/tools/openroad/__init__.py +27 -1
  29. siliconcompiler/tools/openroad/_apr.py +81 -4
  30. siliconcompiler/tools/openroad/clock_tree_synthesis.py +1 -0
  31. siliconcompiler/tools/openroad/global_placement.py +1 -0
  32. siliconcompiler/tools/openroad/init_floorplan.py +116 -7
  33. siliconcompiler/tools/openroad/power_grid_analysis.py +174 -0
  34. siliconcompiler/tools/openroad/repair_design.py +1 -0
  35. siliconcompiler/tools/openroad/repair_timing.py +1 -0
  36. siliconcompiler/tools/openroad/scripts/apr/preamble.tcl +1 -1
  37. siliconcompiler/tools/openroad/scripts/apr/sc_init_floorplan.tcl +42 -4
  38. siliconcompiler/tools/openroad/scripts/apr/sc_irdrop.tcl +146 -0
  39. siliconcompiler/tools/openroad/scripts/apr/sc_repair_design.tcl +1 -1
  40. siliconcompiler/tools/openroad/scripts/apr/sc_write_data.tcl +4 -6
  41. siliconcompiler/tools/openroad/scripts/common/procs.tcl +1 -1
  42. siliconcompiler/tools/openroad/scripts/common/reports.tcl +1 -1
  43. siliconcompiler/tools/openroad/scripts/rcx/sc_rcx_bench.tcl +2 -4
  44. siliconcompiler/tools/opensta/__init__.py +1 -1
  45. siliconcompiler/tools/opensta/scripts/sc_timing.tcl +17 -12
  46. siliconcompiler/tools/vivado/scripts/sc_bitstream.tcl +11 -0
  47. siliconcompiler/tools/vivado/scripts/sc_place.tcl +11 -0
  48. siliconcompiler/tools/vivado/scripts/sc_route.tcl +11 -0
  49. siliconcompiler/tools/vivado/scripts/sc_syn_fpga.tcl +10 -0
  50. siliconcompiler/tools/vpr/__init__.py +28 -0
  51. siliconcompiler/tools/yosys/scripts/sc_screenshot.tcl +1 -1
  52. siliconcompiler/tools/yosys/scripts/sc_synth_asic.tcl +40 -4
  53. siliconcompiler/tools/yosys/scripts/sc_synth_fpga.tcl +15 -5
  54. siliconcompiler/tools/yosys/syn_asic.py +42 -0
  55. siliconcompiler/tools/yosys/syn_fpga.py +8 -0
  56. siliconcompiler/toolscripts/_tools.json +6 -6
  57. siliconcompiler/utils/__init__.py +243 -51
  58. siliconcompiler/utils/curation.py +89 -56
  59. siliconcompiler/utils/issue.py +6 -1
  60. siliconcompiler/utils/multiprocessing.py +35 -2
  61. siliconcompiler/utils/paths.py +21 -0
  62. siliconcompiler/utils/settings.py +141 -0
  63. {siliconcompiler-0.35.3.dist-info → siliconcompiler-0.35.4.dist-info}/METADATA +4 -3
  64. {siliconcompiler-0.35.3.dist-info → siliconcompiler-0.35.4.dist-info}/RECORD +68 -65
  65. {siliconcompiler-0.35.3.dist-info → siliconcompiler-0.35.4.dist-info}/WHEEL +0 -0
  66. {siliconcompiler-0.35.3.dist-info → siliconcompiler-0.35.4.dist-info}/entry_points.txt +0 -0
  67. {siliconcompiler-0.35.3.dist-info → siliconcompiler-0.35.4.dist-info}/licenses/LICENSE +0 -0
  68. {siliconcompiler-0.35.3.dist-info → siliconcompiler-0.35.4.dist-info}/top_level.txt +0 -0
siliconcompiler/constraints/asic_timing.py

@@ -1,4 +1,4 @@
-from typing import Union, Set, List, Tuple
+from typing import Union, Set, List, Tuple, Optional
 
 from siliconcompiler.schema import BaseSchema, NamedSchema, EditableSchema, Parameter, \
     PerNode, Scope
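Most of the signature changes in this release follow one pattern: parameters that default to None are now spelled with typing.Optional, which strict type checkers expect (recent releases of mypy reject implicit Optional by default). A minimal standalone sketch of the before/after shape; the function below is illustrative, not part of the package:

    from typing import Optional, Union

    # Old style: `step: str = None` relies on implicit Optional.
    # New style: the None default is reflected in the annotation.
    def example(step: Optional[str] = None, index: Optional[Union[str, int]] = None) -> str:
        return f"{step}/{index}"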
@@ -14,7 +14,7 @@ class ASICTimingScenarioSchema(NamedSchema):
     operating mode, SDC filesets, and timing checks to be performed.
     """
 
-    def __init__(self, name: str = None):
+    def __init__(self, name: Optional[str] = None):
         super().__init__()
         self.set_name(name)
 
@@ -126,7 +126,7 @@ class ASICTimingScenarioSchema(NamedSchema):
     def set_pin_voltage(self,
                         pin: str,
                         voltage: float,
-                        step: str = None, index: Union[str, int] = None):
+                        step: Optional[str] = None, index: Optional[Union[str, int]] = None):
         """
         Sets the voltage for a specified pin.
 
@@ -140,7 +140,8 @@ class ASICTimingScenarioSchema(NamedSchema):
 
     def get_pin_voltage(self,
                         pin: str,
-                        step: str = None, index: Union[str, int] = None) -> float:
+                        step: Optional[str] = None, index: Optional[Union[str, int]] = None) \
+            -> float:
         """
         Gets the voltage of a specified pin.
 
@@ -162,7 +163,7 @@ class ASICTimingScenarioSchema(NamedSchema):
     def add_libcorner(self,
                       libcorner: Union[List[str], str],
                       clobber: bool = False,
-                      step: str = None, index: Union[str, int] = None):
+                      step: Optional[str] = None, index: Optional[Union[str, int]] = None):
         """
         Adds a library corner to the design.
 
@@ -179,8 +180,8 @@ class ASICTimingScenarioSchema(NamedSchema):
         else:
             return self.add("libcorner", libcorner, step=step, index=index)
 
-    def get_libcorner(self,
-                      step: str = None, index: Union[str, int] = None) -> Set[str]:
+    def get_libcorner(self, step: Optional[str] = None, index: Optional[Union[str, int]] = None) \
+            -> Set[str]:
         """
         Gets the set of library corners.
 
@@ -195,7 +196,7 @@ class ASICTimingScenarioSchema(NamedSchema):
 
     def set_pexcorner(self,
                       pexcorner: str,
-                      step: str = None, index: Union[str, int] = None):
+                      step: Optional[str] = None, index: Optional[Union[str, int]] = None):
         """
         Sets the parasitic extraction (PEX) corner for the design.
 
@@ -207,7 +208,7 @@ class ASICTimingScenarioSchema(NamedSchema):
         return self.set("pexcorner", pexcorner, step=step, index=index)
 
     def get_pexcorner(self,
-                      step: str = None, index: Union[str, int] = None) -> str:
+                      step: Optional[str] = None, index: Optional[Union[str, int]] = None) -> str:
         """
         Gets the parasitic extraction (PEX) corner currently set for the design.
 
@@ -222,7 +223,7 @@ class ASICTimingScenarioSchema(NamedSchema):
 
     def set_mode(self,
                  mode: str,
-                 step: str = None, index: Union[str, int] = None):
+                 step: Optional[str] = None, index: Optional[Union[str, int]] = None):
         """
         Sets the operational mode for the design.
 
@@ -234,7 +235,7 @@ class ASICTimingScenarioSchema(NamedSchema):
         return self.set("mode", mode, step=step, index=index)
 
     def get_mode(self,
-                 step: str = None, index: Union[str, int] = None) -> str:
+                 step: Optional[str] = None, index: Optional[Union[str, int]] = None) -> str:
         """
         Gets the operational mode currently set for the design.
 
@@ -249,7 +250,7 @@ class ASICTimingScenarioSchema(NamedSchema):
 
     def set_opcond(self,
                    opcond: str,
-                   step: str = None, index: Union[str, int] = None):
+                   step: Optional[str] = None, index: Optional[Union[str, int]] = None):
         """
         Sets the operating condition for the design.
 
@@ -261,7 +262,7 @@ class ASICTimingScenarioSchema(NamedSchema):
         return self.set("opcond", opcond, step=step, index=index)
 
     def get_opcond(self,
-                   step: str = None, index: Union[str, int] = None) -> str:
+                   step: Optional[str] = None, index: Optional[Union[str, int]] = None) -> str:
         """
         Gets the operating condition currently set for the design.
 
@@ -276,7 +277,7 @@ class ASICTimingScenarioSchema(NamedSchema):
 
     def set_temperature(self,
                         temperature: float,
-                        step: str = None, index: Union[str, int] = None):
+                        step: Optional[str] = None, index: Optional[Union[str, int]] = None):
         """
         Sets the temperature for the design.
 
@@ -287,8 +288,8 @@ class ASICTimingScenarioSchema(NamedSchema):
         """
         return self.set("temperature", temperature, step=step, index=index)
 
-    def get_temperature(self,
-                        step: str = None, index: Union[str, int] = None) -> float:
+    def get_temperature(self, step: Optional[str] = None, index: Optional[Union[str, int]] = None) \
+            -> float:
         """
         Gets the temperature currently set for the design.
 
@@ -305,7 +306,7 @@ class ASICTimingScenarioSchema(NamedSchema):
                        design: Union[Design, str],
                        fileset: str,
                        clobber: bool = False,
-                       step: str = None, index: Union[str, int] = None):
+                       step: Optional[str] = None, index: Optional[Union[str, int]] = None):
         """
         Adds an SDC fileset for a given design.
 
@@ -337,8 +338,8 @@ class ASICTimingScenarioSchema(NamedSchema):
         else:
             return self.add("sdcfileset", (design, fileset), step=step, index=index)
 
-    def get_sdcfileset(self,
-                       step: str = None, index: Union[str, int] = None) -> List[Tuple[str, str]]:
+    def get_sdcfileset(self, step: Optional[str] = None, index: Optional[Union[str, int]] = None) \
+            -> List[Tuple[str, str]]:
         """
         Gets the list of SDC filesets.
 
@@ -354,7 +355,7 @@ class ASICTimingScenarioSchema(NamedSchema):
     def add_check(self,
                   check: Union[List[str], str],
                   clobber: bool = False,
-                  step: str = None, index: Union[str, int] = None):
+                  step: Optional[str] = None, index: Optional[Union[str, int]] = None):
         """
         Adds a check to the design process.
 
@@ -371,7 +372,8 @@ class ASICTimingScenarioSchema(NamedSchema):
         else:
             return self.add("check", check, step=step, index=index)
 
-    def get_check(self, step: str = None, index: Union[str, int] = None) -> Set[str]:
+    def get_check(self, step: Optional[str] = None, index: Optional[Union[str, int]] = None) \
+            -> Set[str]:
         """
         Gets the set of checks configured for the design process.
 
@@ -426,7 +428,7 @@ class ASICTimingConstraintSchema(BaseSchema):
 
         EditableSchema(self).insert(scenario.name, scenario, clobber=True)
 
-    def get_scenario(self, scenario: str = None):
+    def get_scenario(self, scenario: Optional[str] = None):
         """
         Retrieves one or all timing scenarios from the configuration.
 
@@ -490,6 +492,35 @@ class ASICTimingConstraintSchema(BaseSchema):
             self.add_scenario(scenarioobj)
         return scenarioobj
 
+    def copy_scenario(self, scenario: str, name: str, insert: bool = True) \
+            -> ASICTimingScenarioSchema:
+        """
+        Copies an existing timing scenario, renames it, and optionally adds it to the design.
+
+        This method retrieves the scenario identified by ``scenario``, creates a
+        deep copy of it, and renames the copy to ``name``. If ``insert`` is True,
+        the new scenario is immediately added to the configuration.
+
+        Args:
+            scenario (str): The name of the existing scenario to be copied.
+            name (str): The name to assign to the new copied scenario.
+            insert (bool, optional): Whether to add the newly created scenario
+                to the configuration. Defaults to True.
+
+        Returns:
+            ASICTimingScenarioSchema: The newly created copy of the scenario.
+
+        Raises:
+            LookupError: If the source scenario specified by ``scenario`` does not exist.
+        """
+        constraint = EditableSchema(self.get_scenario(scenario)).copy()
+        EditableSchema(constraint).rename(name)
+        if insert:
+            if self.valid(name):
+                raise ValueError(f"{name} already exists")
+            self.add_scenario(constraint)
+        return constraint
+
     def remove_scenario(self, scenario: str) -> bool:
         """
         Removes a timing scenario from the design configuration.
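Usage sketch for the new copy_scenario() API; `timing` stands for an ASICTimingConstraintSchema instance that already holds a scenario named "slow_corner", and all names below are illustrative:

    # Deep-copy an existing scenario, rename it, and insert it (insert defaults to True).
    hot = timing.copy_scenario("slow_corner", "slow_corner_hot")
    hot.set_temperature(125.0)   # edits to the copy leave the original scenario untouched

    # Create a detached copy without adding it to the configuration.
    detached = timing.copy_scenario("slow_corner", "slow_corner_cold", insert=False)

    # Copying onto a name that already exists raises ValueError.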
siliconcompiler/constraints/fpga_timing.py

@@ -1,4 +1,4 @@
-from typing import Union
+from typing import Union, Optional
 
 from siliconcompiler.schema import BaseSchema, NamedSchema, EditableSchema, Parameter, \
     PerNode, Scope
@@ -12,7 +12,7 @@ class FPGATimingScenarioSchema(NamedSchema):
     scenario and operating mode.
     """
 
-    def __init__(self, name: str = None):
+    def __init__(self, name: Optional[str] = None):
         super().__init__()
         self.set_name(name)
 
@@ -31,7 +31,7 @@ class FPGATimingScenarioSchema(NamedSchema):
 
     def set_mode(self,
                  mode: str,
-                 step: str = None, index: Union[str, int] = None):
+                 step: Optional[str] = None, index: Optional[Union[str, int]] = None):
         """
         Sets the operational mode for the design.
 
@@ -42,8 +42,7 @@ class FPGATimingScenarioSchema(NamedSchema):
         """
         return self.set("mode", mode, step=step, index=index)
 
-    def get_mode(self,
-                 step: str = None, index: Union[str, int] = None) -> str:
+    def get_mode(self, step: Optional[str] = None, index: Optional[Union[str, int]] = None) -> str:
         """
         Gets the operational mode currently set for the design.
 
@@ -98,7 +97,7 @@ class FPGATimingConstraintSchema(BaseSchema):
 
         EditableSchema(self).insert(scenario.name, scenario, clobber=True)
 
-    def get_scenario(self, scenario: str = None):
+    def get_scenario(self, scenario: Optional[str] = None):
         """
         Retrieves one or all timing scenarios from the configuration.
 
siliconcompiler/data/templates/replay/replay.sh.j2

@@ -4,10 +4,27 @@ if [ "${BASH_SOURCE[0]}" != "$0" ]; then
     return
 fi
 
+__print_help() {
+    # Print help information for this file
+    echo "Usage: $0"
+    echo "  Options:"
+    echo "    --which             print which executable would be used"
+    echo "    --version           print the version of the executable, if supported"
+    echo "    --directory         print the execution directory"
+    echo "    --command           print the execution command"
+    echo "    --skipcd            do not change directory into replay directory"
+    echo "    --skipexports       do not export environmental variables"
+    echo "    --cmdprefix <cmd>   prefix to add to the replay command, such as gdb"
+    echo "    --cmdarg <args>     prefix to add to the replay command, such as -gui"
+    echo "    --node              execute entire node"
+    echo "    -h,--help           print this help"
+}
+
 # Parse replay arguments
 CD_WORK="{{ work_dir }}"
 PRINT=""
 CMDPREFIX=""
+CMDARGS=""
 SKIPEXPORT=0
 DONODE={{ node_only }}
 while [[ $# -gt 0 ]]; do
@@ -41,27 +58,22 @@ while [[ $# -gt 0 ]]; do
             shift
             shift
             ;;
+        --cmdarg)
+            CMDARGS="$2"
+            shift
+            shift
+            ;;
         --node)
             DONODE=1
             shift
-            shift
             ;;
         -h|--help)
-            echo "Usage: $0"
-            echo "  Options:"
-            echo "    --which             print which executable would be used"
-            echo "    --version           print the version of the executable, if supported"
-            echo "    --directory         print the execution directory"
-            echo "    --command           print the execution command"
-            echo "    --skipcd            do not change directory into replay directory"
-            echo "    --skipexports       do not export environmental variables"
-            echo "    --cmdprefix <cmd>   prefix to add to the replay command, such as dgb"
-            echo "    --node              execute entire node"
-            echo "    -h,--help           print this help"
+            __print_help
             exit 0
             ;;
         *)
             echo "Unknown option $1"
+            __print_help
             exit 1
             ;;
     esac
@@ -105,5 +117,6 @@ python3 -m siliconcompiler.scheduler.run_node \
 {% if cmds|length > 0 %}else
     # Command execution
     $CMDPREFIX \{% for cmd in cmds %}
-    {% if not loop.first %} {% endif %}{{ cmd }}{% if not loop.last %} \{% endif %}{% endfor %}
-    {% endif %}fi
+    {% if not loop.first %} {% endif %}{{ cmd }} \{% endfor %}
+    {% endif %} ${CMDARGS}
+fi
siliconcompiler/package/__init__.py

@@ -16,6 +16,7 @@ import logging
 import os
 import random
 import re
+import shutil
 import time
 import threading
 import uuid
@@ -29,7 +30,8 @@ from importlib.metadata import distributions, distribution
 from pathlib import Path
 from urllib import parse as url_parse
 
-from siliconcompiler.utils import get_plugins
+from siliconcompiler.utils import get_plugins, default_cache_dir
+from siliconcompiler.utils.paths import cwdirsafe
 
 if TYPE_CHECKING:
     from siliconcompiler.project import Project
@@ -370,7 +372,7 @@ class RemoteResolver(Resolver):
         Returns:
             Path: The path to the cache directory.
         """
-        default_path = os.path.join(Path.home(), '.sc', 'cache')
+        default_path = default_cache_dir()
         if not root:
             return Path(default_path)
 
@@ -380,8 +382,7 @@ class RemoteResolver(Resolver):
         if path:
             path = root.find_files('option', 'cachedir', missing_ok=True)
             if not path:
-                path = os.path.join(getattr(root, "_Project__cwd", os.getcwd()),
-                                    root.get('option', 'cachedir'))
+                path = os.path.join(cwdirsafe(root), root.get('option', 'cachedir'))
         if not path:
             path = default_path
 
@@ -543,7 +544,17 @@ class RemoteResolver(Resolver):
         if self.check_cache():
             return self.cache_path
 
-        self.resolve_remote()
+        try:
+            self.resolve_remote()
+        except BaseException as e:
+            # Exception occurred, so need to cleanup
+            try:
+                shutil.rmtree(self.cache_path)
+            except BaseException as cleane:
+                self.logger.error(f"Exception occurred during cleanup: {cleane} "
+                                  f"({cleane.__class__.__name__})")
+            raise e from None
+
         self.set_changed()
         return self.cache_path
 
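The try/except added around resolve_remote() removes whatever was partially written to the cache when a fetch fails, so a half-populated cache directory is not left behind. The same pattern in isolation, as a standalone sketch (the helper and its arguments are illustrative):

    import shutil

    def fetch_into_cache(cache_path, download):
        # `download` is any callable that populates cache_path.
        try:
            download(cache_path)
        except BaseException as err:
            try:
                shutil.rmtree(cache_path)        # drop the partially written directory
            except BaseException as cleanup_err:
                print(f"cleanup failed: {cleanup_err}")
            raise err from None                  # surface the original failure
        return cache_path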
@@ -560,7 +571,7 @@ class FileResolver(Resolver):
         if source.startswith("file://"):
             source = source[7:]
         if source[0] != "$" and not os.path.isabs(source):
-            source = os.path.join(getattr(root, "_Project__cwd", os.getcwd()), source)
+            source = os.path.join(cwdirsafe(root), source)
 
         super().__init__(name, root, f"file://{source}", None)
 
siliconcompiler/project.py

@@ -394,13 +394,21 @@ class Project(PathSchemaBase, CommandLineSchema, BaseSchema):
             return
 
         edit_schema = EditableSchema(self)
-        edit_schema.insert("flowgraph", flow.name, flow)
 
         # Instantiate tasks
         for task_cls in flow.get_all_tasks():
             task = task_cls()
             if not self.valid("tool", task.tool(), "task", task.task()):
                 edit_schema.insert("tool", task.tool(), "task", task.task(), task)
+            else:
+                existing_task: Task = self.get("tool", task.tool(), "task", task.task(),
+                                               field="schema")
+                if type(existing_task) is not type(task):
+                    raise TypeError(f"Task {task.tool()}/{task.task()} already exists with "
+                                    f"different type {type(existing_task).__name__}, "
+                                    f"imported type is {type(task).__name__}")
+
+        edit_schema.insert("flowgraph", flow.name, flow)
 
     def check_manifest(self) -> bool:
         """
siliconcompiler/scheduler/docker.py

@@ -3,6 +3,8 @@ import os
 import shlex
 import sys
 
+import docker.errors
+
 from pathlib import Path
 
 import siliconcompiler
@@ -11,10 +13,9 @@ from siliconcompiler.package import RemoteResolver
 from siliconcompiler.utils import default_email_credentials_file
 from siliconcompiler.scheduler import SchedulerNode
 from siliconcompiler.utils.logging import SCBlankLoggerFormatter
-from siliconcompiler.utils.curation import collect
 
 
-def get_image(project, step, index):
+def get_image(project, step, index) -> str:
     """Determines the Docker image to use for a given node.
 
     The image is selected based on the following priority:
@@ -32,7 +33,7 @@ def get_image(project, step, index):
     """
     from siliconcompiler import __version__
 
-    queue = project.get('option', 'scheduler', 'queue', step=step, index=index)
+    queue = project.option.scheduler.get_queue(step=step, index=index)
     if queue:
         return queue
 
@@ -161,24 +162,24 @@ class DockerSchedulerNode(SchedulerNode):
         """
         A static pre-processing hook for the Docker scheduler.
 
-        On Windows, this method forces all file/directory parameters to be
-        copied rather than linked, which avoids issues with differing
-        filesystem types between the host and the Linux-based container.
-        It then triggers :meth:`.collect()` to ensure all files are staged.
-
         Args:
             project (Project): The project object to perform pre-processing on.
         """
-        if sys.platform == 'win32':
-            # this avoids the issue of different file system types
-            project.logger.error('Setting copy field to true for docker run on Windows')
-            for key in project.allkeys():
-                if key[0] == 'history':
-                    continue
-                sc_type = project.get(*key, field='type')
-                if 'dir' in sc_type or 'file' in sc_type:
-                    project.set(*key, True, field='copy')
-            collect(project)
+        try:
+            client = docker.from_env()
+            client.version()
+        except (docker.errors.DockerException, docker.errors.APIError):
+            raise RuntimeError('docker is not available or installed on this machine')
+
+    def mark_copy(self) -> bool:
+        if sys.platform != 'win32':
+            return False
+
+        do_collect = False
+        for key in self.get_required_path_keys():
+            self.project.set(*key, True, field='copy')
+            do_collect = True
+        return do_collect
 
     def run(self):
         """
@@ -196,12 +197,7 @@ class DockerSchedulerNode(SchedulerNode):
         """
         self._init_run_logger()
 
-        try:
-            client = docker.from_env()
-            client.version()
-        except (docker.errors.DockerException, docker.errors.APIError) as e:
-            self.logger.error(f'Unable to connect to docker: {e}')
-            self.halt()
+        client = docker.from_env()
 
        is_windows = sys.platform == 'win32'
 
@@ -233,7 +229,7 @@ class DockerSchedulerNode(SchedulerNode):
         email_file = default_email_credentials_file()
         if is_windows:
             # Hack to get around manifest merging
-            self.project.set('option', 'cachedir', None)
+            self.project.option.set_cachedir(None)
             cache_dir = '/sc_cache'
             cwd = '/sc_docker'
             builddir = f'{cwd}/build'
@@ -347,3 +343,6 @@ class DockerSchedulerNode(SchedulerNode):
 
         # Restore working directory
         os.chdir(start_cwd)
+
+    def check_required_paths(self) -> bool:
+        return True
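The docker availability probe now runs in the static pre-processing hook rather than inside run(), so an unreachable daemon fails the job before any node is launched. The same probe in isolation, using the docker SDK calls shown above (the helper name is illustrative):

    import docker
    import docker.errors

    def docker_available() -> bool:
        try:
            client = docker.from_env()   # honours DOCKER_HOST and related environment settings
            client.version()             # round-trip to the daemon to confirm it is reachable
        except (docker.errors.DockerException, docker.errors.APIError):
            return False
        return True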