siliconcompiler 0.34.1__py3-none-any.whl → 0.34.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. siliconcompiler/__init__.py +14 -2
  2. siliconcompiler/_metadata.py +1 -1
  3. siliconcompiler/apps/sc_show.py +1 -1
  4. siliconcompiler/constraints/__init__.py +17 -0
  5. siliconcompiler/constraints/asic_component.py +378 -0
  6. siliconcompiler/constraints/asic_floorplan.py +449 -0
  7. siliconcompiler/constraints/asic_pins.py +489 -0
  8. siliconcompiler/constraints/asic_timing.py +517 -0
  9. siliconcompiler/core.py +3 -3
  10. siliconcompiler/dependencyschema.py +10 -174
  11. siliconcompiler/design.py +235 -118
  12. siliconcompiler/flowgraph.py +27 -14
  13. siliconcompiler/library.py +133 -0
  14. siliconcompiler/metric.py +94 -72
  15. siliconcompiler/metrics/__init__.py +7 -0
  16. siliconcompiler/metrics/asic.py +245 -0
  17. siliconcompiler/metrics/fpga.py +220 -0
  18. siliconcompiler/package/__init__.py +138 -35
  19. siliconcompiler/package/github.py +6 -10
  20. siliconcompiler/packageschema.py +256 -12
  21. siliconcompiler/pathschema.py +226 -0
  22. siliconcompiler/project.py +459 -0
  23. siliconcompiler/scheduler/docker.py +2 -3
  24. siliconcompiler/scheduler/run_node.py +2 -1
  25. siliconcompiler/scheduler/scheduler.py +4 -13
  26. siliconcompiler/scheduler/schedulernode.py +25 -17
  27. siliconcompiler/scheduler/taskscheduler.py +2 -1
  28. siliconcompiler/schema/__init__.py +0 -2
  29. siliconcompiler/schema/baseschema.py +147 -24
  30. siliconcompiler/schema/editableschema.py +14 -6
  31. siliconcompiler/schema/journal.py +23 -15
  32. siliconcompiler/schema/namedschema.py +6 -4
  33. siliconcompiler/schema/parameter.py +34 -19
  34. siliconcompiler/schema/parametertype.py +2 -0
  35. siliconcompiler/schema/parametervalue.py +198 -15
  36. siliconcompiler/schema/schema_cfg.py +18 -14
  37. siliconcompiler/schema_obj.py +5 -3
  38. siliconcompiler/tool.py +199 -10
  39. siliconcompiler/toolscripts/_tools.json +4 -4
  40. {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.2.dist-info}/METADATA +3 -3
  41. {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.2.dist-info}/RECORD +45 -35
  42. siliconcompiler/schema/packageschema.py +0 -101
  43. {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.2.dist-info}/WHEEL +0 -0
  44. {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.2.dist-info}/entry_points.txt +0 -0
  45. {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.2.dist-info}/licenses/LICENSE +0 -0
  46. {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.2.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,459 @@
1
+ import logging
2
+ import os
3
+ import sys
4
+ import uuid
5
+
6
+ import os.path
7
+
8
+ from typing import Union, List, Tuple
9
+
10
+ from siliconcompiler.schema import BaseSchema, NamedSchema, EditableSchema, Parameter
11
+
12
+ from siliconcompiler import DesignSchema
13
+ from siliconcompiler import FlowgraphSchema
14
+ from siliconcompiler import RecordSchema
15
+ from siliconcompiler import MetricSchema
16
+ from siliconcompiler import ChecklistSchema
17
+ from siliconcompiler import ToolSchema, TaskSchema
18
+
19
+ from siliconcompiler.pathschema import PathSchemaBase
20
+
21
+ from siliconcompiler.schema.schema_cfg import schema_option_runtime, schema_arg, schema_version
22
+
23
+ from siliconcompiler.scheduler.scheduler import Scheduler
24
+ from siliconcompiler.utils.logging import SCColorLoggerFormatter, SCLoggerFormatter
25
+
26
+
27
class Project(PathSchemaBase, BaseSchema):
    """
    Top-level schema object for a compilation project.

    A Project aggregates design libraries, flowgraphs, tools, metrics,
    records, checklists, and run options into a single schema tree, and
    provides the :meth:`run` entry point that hands the project off to a
    local or remote scheduler.
    """

    def __init__(self, design: Union[DesignSchema, str] = None):
        """
        Initializes the project schema.

        Args:
            design (:class:`DesignSchema` or str): initial design for the
                project; a string only records the design name, while a
                :class:`DesignSchema` object is also imported as a library
                via :meth:`set_design`.
        """
        super().__init__()

        # Initialize schema
        schema = EditableSchema(self)
        schema_version(schema)
        schema_arg(schema)

        # Named schema sections; "default" acts as the template entry that
        # concrete names are cloned from by the schema framework.
        schema.insert("checklist", "default", ChecklistSchema())
        schema.insert("library", "default", DesignSchema())
        schema.insert("flowgraph", "default", FlowgraphSchema())
        schema.insert("metric", MetricSchema())
        schema.insert("record", RecordSchema())
        schema.insert("tool", "default", ToolSchema())

        # Add options
        schema_option_runtime(schema)
        schema.insert("option", "env", "default", Parameter("str"))

        # (src lib, src fileset, dst lib, dst fileset) tuples; see add_alias()
        schema.insert("option", "alias", Parameter("[(str,str,str,str)]"))
        schema.insert("option", "fileset", Parameter("[str]"))
        schema.insert("option", "design", Parameter("str"))

        # Add history
        schema.insert("history", BaseSchema())

        # Init logger
        self.__init_logger()

        # Init fields
        # Capture the launch directory; relative build paths resolve against it.
        self.__cwd = os.getcwd()

        if design:
            if isinstance(design, str):
                self.set("option", "design", design)
            else:
                self.set_design(design)

    def __init_logger(self):
        """
        Creates a per-project child logger with console output.

        A uuid suffix keeps loggers of separate Project instances distinct,
        and propagation is disabled so records are not duplicated by parent
        handlers.
        """
        sc_logger = logging.getLogger("siliconcompiler")
        sc_logger.propagate = False
        self.__logger = sc_logger.getChild(f"project_{uuid.uuid4().hex}")
        self.__logger.propagate = False
        self.__logger.setLevel(logging.INFO)

        self._logger_console = logging.StreamHandler(stream=sys.stdout)
        # Use colorized formatting only when the console supports it
        if SCColorLoggerFormatter.supports_color(sys.stdout):
            self._logger_console.setFormatter(SCColorLoggerFormatter(SCLoggerFormatter()))
        else:
            self._logger_console.setFormatter(SCLoggerFormatter())

        self.__logger.addHandler(self._logger_console)

    @property
    def logger(self) -> logging.Logger:
        """
        Returns the logger for this project
        """
        return self.__logger

    @property
    def name(self) -> str:
        """
        Returns the name of the design, as recorded in ['option', 'design']
        """
        return self.get("option", "design")

    @property
    def design(self) -> DesignSchema:
        """
        Returns the design object

        Raises:
            ValueError: if ['option', 'design'] has not been set
            KeyError: if the named design has not been loaded as a library
        """
        design_name = self.name
        if not design_name:
            raise ValueError("design name is not set")
        if not self.valid("library", design_name):
            raise KeyError(f"{design_name} design has not been loaded")

        return self.get("library", design_name, field="schema")

    @property
    def cwd(self) -> str:
        """
        Returns the working directory for the project
        """
        return self.__cwd

    def add_dep(self, obj):
        """
        Imports a dependency object into the project.

        Args:
            obj (:class:`DesignSchema` or :class:`FlowgraphSchema`): object
                to import

        Raises:
            NotImplementedError: if the object type is not supported
        """
        if isinstance(obj, DesignSchema):
            self.__import_design(obj)
        elif isinstance(obj, FlowgraphSchema):
            self.__import_flow(obj)
        else:
            raise NotImplementedError

    def __import_design(self, design: DesignSchema):
        """
        Inserts a design as a library and recursively imports its dependencies.
        """
        edit_schema = EditableSchema(self)
        edit_schema.insert("library", design.name(), design, clobber=True)

        # Copy dependencies into project
        for dep in design.get_dep():
            self.add_dep(dep)

    def __import_flow(self, flow: FlowgraphSchema):
        """
        Inserts a flowgraph and instantiates the tool/task schemas it uses.
        """
        edit_schema = EditableSchema(self)
        edit_schema.insert("flowgraph", flow.name(), flow, clobber=True)

        # Instantiate tasks
        for task_cls in flow.get_all_tasks():
            task = task_cls()
            # TODO: this is not needed once tool moves
            edit_schema.insert("tool", task.tool(), ToolSchema(), clobber=True)
            edit_schema.insert("tool", task.tool(), "task", task.task(), task, clobber=True)

    def check_manifest(self):
        """
        Validates the project manifest before a run.

        NOTE(review): the checks outlined below are not implemented yet;
        this currently always returns True.
        """
        # Assert design is set
        # Assert fileset is set
        # Assert flow is set

        # Assert design is a library
        # Assert fileset is in design
        # Assert design has topmodule

        # Check that alias libraries exist

        # Check flowgraph
        # Check tasks have classes, cannot check post setup that is a runtime check

        return True

    def run(self, raise_exception=False):
        '''
        Executes tasks in a flowgraph.

        The run function sets up tools and launches runs for every node
        in the flowgraph starting with 'from' steps and ending at 'to' steps.
        From/to are taken from the schema from/to parameters if defined,
        otherwise from/to are defined as the entry/exit steps of the flowgraph.
        Before starting the process, tool modules are loaded and set up for each
        step and index based on the schema eda dictionary settings.
        Once the tools have been set up, the manifest is checked using the
        check_manifest() function and files in the manifest are hashed based
        on the 'hashmode' schema setting.

        Once launched, each process waits for preceding steps to complete,
        as defined by the flowgraph 'inputs' parameter. Once all inputs
        are ready, previous steps are checked for errors before the
        process enters a local working directory and starts to run
        a tool or to execute a built-in Chip function.

        Fatal errors within a step/index process cause all subsequent
        processes to exit before start, returning control to the main
        program which can then exit.

        Args:
            raise_exception (bool): if True, will rethrow errors that the flow raises,
                otherwise will report the error and return False

        Examples:
            >>> run()
            Runs the execution flow defined by the flowgraph dictionary.
        '''
        # Imported here to avoid a circular import at module load time
        from siliconcompiler.remote.client import ClientScheduler

        try:
            if self.get('option', 'remote'):
                scheduler = ClientScheduler(self)
            else:
                scheduler = Scheduler(self)
            scheduler.run()
        except Exception as e:
            if raise_exception:
                raise e
            self.logger.error(str(e))
            return False
        finally:
            pass
            # Update dashboard if running
            # if self._dash:
            #     self._dash.update_manifest()
            #     self._dash.end_of_run()

        return True

    def __getbuilddir(self) -> str:
        """
        Returns the path to the build directory
        """
        builddir = self.get('option', 'builddir')
        if os.path.isabs(builddir):
            return builddir

        # Relative build directories are anchored at the project launch directory
        return os.path.join(self.cwd, builddir)

    def getworkdir(self, step: str = None, index: Union[int, str] = None) -> str:
        """
        Returns absolute path to the work directory for a given step/index,
        if step/index not given, job directory is returned.

        Args:
            step (str): Node step name
            index (str/int): Node index

        Raises:
            ValueError: if the design name has not been set
        """
        if not self.name:
            raise ValueError("name has not been set")

        dirlist = [self.__getbuilddir(),
                   self.name,
                   self.get('option', 'jobname')]

        # Return jobdirectory if no step defined
        # Return index 0 by default
        if step is not None:
            dirlist.append(step)

            if index is None:
                index = '0'

            dirlist.append(str(index))
        return os.path.join(*dirlist)

    def getcollectiondir(self):
        """
        Returns absolute path to collected files directory
        """
        return os.path.join(self.getworkdir(), "sc_collected_files")

    def collect(self, **kwargs):
        # NOTE(review): placeholder, currently a no-op
        pass

    def history(self, job: str) -> "Project":
        '''
        Returns a *mutable* reference to ['history', job] as a Project object.

        Raises:
            KeyError: if job does not currently exist in history

        Args:
            job (str): Name of historical job to return.
        '''

        if job not in self.getkeys("history"):
            raise KeyError(f"{job} is not a valid job")

        return self.get("history", job, field="schema")

    def _record_history(self):
        '''
        Copies the current project into the history
        '''

        job = self.get("option", "jobname")
        proj = self.copy()

        # Remove history from proj so the snapshot does not nest prior history
        EditableSchema(proj).insert("history", BaseSchema(), clobber=True)

        if job in self.getkeys("history"):
            self.logger.warning(f"Overwriting job {job}")

        EditableSchema(self).insert("history", job, proj, clobber=True)

    def __getstate__(self):
        """
        Returns picklable state; logger objects are stripped.
        """
        # Ensure a copy of the state is used
        state = self.__dict__.copy()

        # Remove logger objects since they are not serializable
        del state["_Project__logger"]
        del state["_logger_console"]

        return state

    def __setstate__(self, state):
        """
        Restores state and rebuilds the (non-picklable) logger.
        """
        self.__dict__ = state

        # Reinitialize logger on restore
        self.__init_logger()

    def get_filesets(self) -> List[Tuple[NamedSchema, str]]:
        """
        Returns the filesets selected for this project

        Raises:
            KeyError: if an alias destination library has not been loaded
        """
        # Build alias mapping from the ['option', 'alias'] tuples:
        # (src lib, src fileset) -> (dst library object or None, dst fileset or None)
        alias = {}
        for src_lib, src_fileset, dst_lib, dst_fileset in self.get("option", "alias"):
            if dst_lib:
                if not self.valid("library", dst_lib):
                    raise KeyError(f"{dst_lib} is not a loaded library")
                dst_obj = self.get("library", dst_lib, field="schema")
            else:
                dst_obj = None
            if not dst_fileset:
                dst_fileset = None
            alias[(src_lib, src_fileset)] = (dst_obj, dst_fileset)

        return self.design.get_fileset(self.get("option", "fileset"), alias=alias)

    def get_task(self,
                 tool: str,
                 task: str,
                 step: str = None,
                 index: Union[str, int] = None) -> TaskSchema:
        """
        Returns the task schema for a given tool/task pair, optionally bound
        to a step/index runtime context.

        Args:
            tool (str): tool name
            task (str): task name
            step (str): optional step for runtime binding
            index (str/int): optional index for runtime binding

        Raises:
            KeyError: if the tool/task has not been loaded
        """
        if self.valid("tool", tool, "task", task):
            obj: TaskSchema = self.get("tool", tool, "task", task, field="schema")
            if step or index:
                # NOTE(review): the runtime context is exited before the caller
                # uses the returned object — confirm the task stays bound to
                # step/index after __exit__ runs.
                with obj.runtime(None, step, index) as obj:
                    return obj
            return obj
        raise KeyError(f"{tool}/{task} has not been loaded")

    def set_design(self, design: Union[DesignSchema, str]):
        """
        Set the design for this project

        Args:
            design (:class:`DesignSchema` or str): design object or name
        """
        if isinstance(design, DesignSchema):
            # Import the object as a library, then record its name
            self.add_dep(design)
            design = design.name()
        elif not isinstance(design, str):
            raise TypeError("design must be string or Design object")

        return self.set("option", "design", design)

    def set_flow(self, flow: Union[FlowgraphSchema, str]):
        """
        Set the flow for this project

        Args:
            flow (:class:`FlowgraphSchema` or str): flow object or name
        """
        if isinstance(flow, FlowgraphSchema):
            # Import the object as a flowgraph, then record its name
            self.add_dep(flow)
            flow = flow.name()
        elif not isinstance(flow, str):
            raise TypeError("flow must be string or Flowgraph object")

        return self.set("option", "flow", flow)

    def add_fileset(self, fileset: Union[List[str], str], clobber: bool = False):
        """
        Add a fileset to use in this project

        Args:
            fileset (list of str): name of fileset from the design
            clobber (bool): if True, replace the filesets

        Raises:
            TypeError: if fileset is not a string or collection of strings
            ValueError: if a fileset is not defined in the current design
        """
        if not isinstance(fileset, str):
            if isinstance(fileset, (list, tuple, set)):
                if not all([isinstance(v, str) for v in fileset]):
                    raise TypeError("fileset must be a string")
            else:
                raise TypeError("fileset must be a string")

        # Normalize to a list for uniform handling below
        if isinstance(fileset, str):
            fileset = [fileset]

        for fs in fileset:
            if fs not in self.design.getkeys("fileset"):
                raise ValueError(f"{fs} is not a valid fileset in {self.design.name()}")

        if clobber:
            return self.set("option", "fileset", fileset)
        else:
            return self.add("option", "fileset", fileset)

    def add_alias(self,
                  src_dep: Union[DesignSchema, str],
                  src_fileset: str,
                  alias_dep: Union[DesignSchema, str],
                  alias_fileset: str,
                  clobber: bool = False):
        """
        Add an aliased fileset.

        Args:
            src_dep (:class:`DesignSchema` or str): source design to alias
            src_fileset (str): source fileset to alias
            alias_dep (:class:`DesignSchema` or str): replacement design;
                None or "" records an empty alias target
            alias_fileset (str): replacement fileset
            clobber (bool): overwrite existing values

        Raises:
            KeyError: if a named dependency has not been loaded
            TypeError: if a dependency argument has an unsupported type
            ValueError: if a fileset is not defined in its design
        """

        # Resolve the source dependency to a loaded library object
        if isinstance(src_dep, str):
            if src_dep not in self.getkeys("library"):
                raise KeyError(f"{src_dep} has not been loaded")

            src_dep = self.get("library", src_dep, field="schema")
        if isinstance(src_dep, DesignSchema):
            src_dep_name = src_dep.name()
            if src_dep_name not in self.getkeys("library"):
                raise KeyError(f"{src_dep_name} has not been loaded")
        else:
            raise TypeError("source dep is not a valid type")

        if src_fileset not in src_dep.getkeys("fileset"):
            raise ValueError(f"{src_dep_name} does not have {src_fileset} as a fileset")

        # Normalize the alias target: None / "" means "alias to nothing"
        if alias_dep is None:
            alias_dep = ""

        if isinstance(alias_dep, str):
            if alias_dep == "":
                alias_dep = None
                alias_dep_name = ""
                alias_fileset = ""
            else:
                if alias_dep not in self.getkeys("library"):
                    raise KeyError(f"{alias_dep} has not been loaded")

                alias_dep = self.get("library", alias_dep, field="schema")

        if alias_dep is not None:
            if isinstance(alias_dep, DesignSchema):
                alias_dep_name = alias_dep.name()
                # Import the replacement design if it is not loaded yet
                if alias_dep_name not in self.getkeys("library"):
                    self.add_dep(alias_dep)
            else:
                raise TypeError("alias dep is not a valid type")

            if alias_fileset != "" and alias_fileset not in alias_dep.getkeys("fileset"):
                raise ValueError(f"{alias_dep_name} does not have {alias_fileset} as a fileset")

        alias = (src_dep_name, src_fileset, alias_dep_name, alias_fileset)
        if clobber:
            return self.set("option", "alias", alias)
        else:
            return self.add("option", "alias", alias)
@@ -157,8 +157,7 @@ class DockerSchedulerNode(SchedulerNode):
157
157
  builddir = f'{cwd}/build'
158
158
 
159
159
  local_cfg = os.path.join(start_cwd, 'sc_docker.json')
160
- job = self.chip.get('option', 'jobname')
161
- cfg = f'{builddir}/{self.chip.design}/{job}/{self.step}/{self.index}/sc_docker.json'
160
+ cfg = f'{builddir}/{self.name}/{self.jobname}/{self.step}/{self.index}/sc_docker.json'
162
161
 
163
162
  user = None
164
163
 
@@ -210,7 +209,7 @@ class DockerSchedulerNode(SchedulerNode):
210
209
  volumes=volumes,
211
210
  labels=[
212
211
  "siliconcompiler",
213
- f"sc_node:{self.chip.design}:{self.step}:{self.index}"
212
+ f"sc_node:{self.name}:{self.step}:{self.index}"
214
213
  ],
215
214
  user=user,
216
215
  detach=True,
@@ -7,6 +7,7 @@ import tarfile
7
7
  import os.path
8
8
 
9
9
  from siliconcompiler import Chip, Schema
10
+ from siliconcompiler.package import Resolver
10
11
  from siliconcompiler.scheduler.schedulernode import SchedulerNode
11
12
  from siliconcompiler import __version__
12
13
 
@@ -102,7 +103,7 @@ def main():
102
103
  if args.cachemap:
103
104
  for cachepair in args.cachemap:
104
105
  package, path = cachepair.split(':')
105
- chip.get("package", field="schema")._set_cache(package, path)
106
+ Resolver.set_cache(chip, package, path)
106
107
 
107
108
  # Populate cache
108
109
  for resolver in chip.get('package', field='schema').get_resolvers().values():
@@ -21,7 +21,8 @@ from siliconcompiler.scheduler import send_messages
21
21
  class Scheduler:
22
22
  def __init__(self, chip):
23
23
  self.__chip = chip
24
- self.__logger = self.__chip.logger
24
+ self.__logger = chip.logger
25
+ self.__name = chip.design
25
26
 
26
27
  flow = self.__chip.get("option", "flow")
27
28
  if not flow:
@@ -51,21 +52,11 @@ class Scheduler:
51
52
  to_steps=to_steps,
52
53
  prune_nodes=self.__chip.get('option', 'prune'))
53
54
 
54
- self.__flow_runtime_no_prune = RuntimeFlowgraph(
55
- self.__flow,
56
- from_steps=from_steps,
57
- to_steps=to_steps)
58
-
59
55
  self.__flow_load_runtime = RuntimeFlowgraph(
60
56
  self.__flow,
61
57
  to_steps=from_steps,
62
58
  prune_nodes=prune_nodes)
63
59
 
64
- self.__flow_something = RuntimeFlowgraph(
65
- self.__flow,
66
- from_steps=set([step for step, _ in self.__flow.get_entry_nodes()]),
67
- prune_nodes=prune_nodes)
68
-
69
60
  self.__record = self.__chip.get("record", field="schema")
70
61
  self.__metrics = self.__chip.get("metric", field="schema")
71
62
 
@@ -103,7 +94,7 @@ class Scheduler:
103
94
  self.__chip.schema.record_history()
104
95
 
105
96
  # Record final manifest
106
- filepath = os.path.join(self.__chip.getworkdir(), f"{self.__chip.design}.pkg.json")
97
+ filepath = os.path.join(self.__chip.getworkdir(), f"{self.__name}.pkg.json")
107
98
  self.__chip.write_manifest(filepath)
108
99
 
109
100
  send_messages.send(self.__chip, 'summary', None, None)
@@ -206,7 +197,7 @@ class Scheduler:
206
197
 
207
198
  manifest = os.path.join(self.__chip.getworkdir(step=step, index=index),
208
199
  'outputs',
209
- f'{self.__chip.design}.pkg.json')
200
+ f'{self.__name}.pkg.json')
210
201
  if os.path.exists(manifest):
211
202
  # ensure we setup these nodes again
212
203
  try:
@@ -15,8 +15,7 @@ from siliconcompiler import NodeStatus
15
15
  from siliconcompiler.utils.logging import get_console_formatter, SCInRunLoggerFormatter
16
16
  from siliconcompiler.schema import utils as schema_utils
17
17
 
18
- from siliconcompiler.tools._common import input_file_node_name, record_metric
19
-
18
+ from siliconcompiler.package import Resolver
20
19
  from siliconcompiler.record import RecordTime, RecordTool
21
20
  from siliconcompiler.schema import Journal
22
21
  from siliconcompiler.scheduler import send_messages
@@ -28,7 +27,8 @@ class SchedulerNode:
28
27
  self.__index = index
29
28
  self.__chip = chip
30
29
 
31
- self.__design = self.__chip.design
30
+ self.__name = self.__chip.design
31
+ self.__topmodule = self.__chip.top(step=step, index=index)
32
32
 
33
33
  self.__job = self.__chip.get('option', 'jobname')
34
34
  self.__record_user_info = self.__chip.get("option", "track",
@@ -52,8 +52,8 @@ class SchedulerNode:
52
52
  self.__workdir = self.__chip.getworkdir(jobname=self.__job,
53
53
  step=self.__step, index=self.__index)
54
54
  self.__manifests = {
55
- "input": os.path.join(self.__workdir, "inputs", f"{self.__design}.pkg.json"),
56
- "output": os.path.join(self.__workdir, "outputs", f"{self.__design}.pkg.json")
55
+ "input": os.path.join(self.__workdir, "inputs", f"{self.__name}.pkg.json"),
56
+ "output": os.path.join(self.__workdir, "outputs", f"{self.__name}.pkg.json")
57
57
  }
58
58
  self.__logs = {
59
59
  "sc": os.path.join(self.__workdir, f"sc_{self.__step}_{self.__index}.log"),
@@ -108,8 +108,16 @@ class SchedulerNode:
108
108
  return self.__index
109
109
 
110
110
  @property
111
- def design(self):
112
- return self.__design
111
+ def name(self):
112
+ return self.__name
113
+
114
+ @property
115
+ def topmodule(self):
116
+ return self.__topmodule
117
+
118
+ @property
119
+ def jobname(self):
120
+ return self.__job
113
121
 
114
122
  @property
115
123
  def workdir(self):
@@ -435,11 +443,11 @@ class SchedulerNode:
435
443
  f'{output_dir}')
436
444
 
437
445
  for outfile in os.scandir(output_dir):
438
- if outfile.name == f'{self.__design}.pkg.json':
446
+ if outfile.name == f'{self.__name}.pkg.json':
439
447
  # Dont forward manifest
440
448
  continue
441
449
 
442
- new_name = input_file_node_name(outfile.name, in_step, in_index)
450
+ new_name = self.__task.compute_input_file_node_name(outfile.name, in_step, in_index)
443
451
  if self.__enforce_inputfiles:
444
452
  if outfile.name not in in_files and new_name not in in_files:
445
453
  continue
@@ -608,7 +616,7 @@ class SchedulerNode:
608
616
  journal.stop()
609
617
 
610
618
  if self.__pipe:
611
- self.__pipe.send(self.__chip.get("package", field="schema").get_path_cache())
619
+ self.__pipe.send(Resolver.get_cache(self.__chip))
612
620
 
613
621
  def execute(self):
614
622
  self.logger.info(f'Running in {self.__workdir}')
@@ -628,7 +636,7 @@ class SchedulerNode:
628
636
  required_outputs = set(self.__task.get('output'))
629
637
  in_workdir = self.__chip.getworkdir(step=in_step, index=in_index)
630
638
  for outfile in os.scandir(f"{in_workdir}/outputs"):
631
- if outfile.name == f'{self.__design}.pkg.json':
639
+ if outfile.name == f'{self.__name}.pkg.json':
632
640
  # Dont forward manifest
633
641
  continue
634
642
 
@@ -829,16 +837,16 @@ class SchedulerNode:
829
837
 
830
838
  for metric in ("errors", "warnings"):
831
839
  if metric in matches:
832
- errors = self.__metrics.get(metric, step=self.__step, index=self.__index)
833
- if errors is None:
834
- errors = 0
835
- errors += matches[metric]
840
+ value = self.__metrics.get(metric, step=self.__step, index=self.__index)
841
+ if value is None:
842
+ value = 0
843
+ value += matches[metric]
836
844
 
837
845
  sources = [os.path.basename(self.__logs["exe"])]
838
846
  if self.__task.get('regex', metric):
839
847
  sources.append(f'{self.__step}.{metric}')
840
848
 
841
- record_metric(self.__chip, self.__step, self.__index, metric, errors, sources)
849
+ self.__task.record_metric(metric, value, source_file=sources)
842
850
 
843
851
  def __hash_files_pre_execute(self):
844
852
  for task_key in ('refdir', 'prescript', 'postscript', 'script'):
@@ -936,7 +944,7 @@ class SchedulerNode:
936
944
  schema = Schema.from_manifest(manifest)
937
945
  # delete file as it might be a hard link
938
946
  os.remove(manifest)
939
- schema.set('option', 'jobname', self.__chip.get('option', 'jobname'))
947
+ schema.set('option', 'jobname', self.__job)
940
948
  schema.write_manifest(manifest)
941
949
 
942
950
  def clean_directory(self):
@@ -11,6 +11,7 @@ from siliconcompiler import SiliconCompilerError
11
11
  from siliconcompiler import utils
12
12
  from siliconcompiler.flowgraph import RuntimeFlowgraph
13
13
 
14
+ from siliconcompiler.package import Resolver
14
15
  from siliconcompiler.schema import Journal
15
16
 
16
17
  from siliconcompiler.utils.logging import SCBlankLoggerFormatter
@@ -200,7 +201,7 @@ class TaskScheduler:
200
201
  packages = info["parent_pipe"].recv()
201
202
  if isinstance(packages, dict):
202
203
  for package, path in packages.items():
203
- self.__chip.get("package", field="schema")._set_cache(package, path)
204
+ Resolver.set_cache(self.__chip, package, path)
204
205
  except: # noqa E722
205
206
  pass
206
207
 
@@ -5,7 +5,6 @@ from .editableschema import EditableSchema
5
5
  from .baseschema import BaseSchema
6
6
  from .cmdlineschema import CommandLineSchema
7
7
  from .namedschema import NamedSchema
8
- from .packageschema import PackageSchema
9
8
 
10
9
  from .schema_cfg import SCHEMA_VERSION
11
10
 
@@ -16,7 +15,6 @@ __all__ = [
16
15
  "EditableSchema",
17
16
  "CommandLineSchema",
18
17
  "NamedSchema",
19
- "PackageSchema",
20
18
  "Parameter",
21
19
  "Scope",
22
20
  "PerNode",