dv-flow-mgr 0.0.1.12849118090a1-py3-none-any.whl → 0.0.1.12911707440a1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. {dv_flow_mgr → dv_flow/mgr}/__init__.py +1 -1
  2. {dv_flow_mgr → dv_flow/mgr}/__main__.py +1 -1
  3. dv_flow/mgr/cmds/cmd_run.py +90 -0
  4. {dv_flow_mgr → dv_flow/mgr}/package_def.py +96 -4
  5. dv_flow/mgr/pkg_rgy.py +78 -0
  6. {dv_flow_mgr/tasklib → dv_flow/mgr}/std/fileset.py +3 -3
  7. {dv_flow_mgr/tasklib → dv_flow/mgr}/std/flow.dv +6 -2
  8. {dv_flow_mgr/tasklib → dv_flow/mgr}/std/message.py +1 -1
  9. {dv_flow_mgr/tasklib → dv_flow/mgr}/std/task_fileset.py +5 -5
  10. dv_flow/mgr/std/task_null.py +10 -0
  11. {dv_flow_mgr → dv_flow/mgr}/task.py +11 -4
  12. {dv_flow_mgr → dv_flow/mgr}/task_data.py +18 -2
  13. {dv_flow_mgr → dv_flow/mgr}/task_def.py +2 -1
  14. dv_flow/mgr/task_graph_builder.py +190 -0
  15. dv_flow/mgr/task_graph_runner.py +71 -0
  16. dv_flow/mgr/task_graph_runner_local.py +79 -0
  17. dv_flow/mgr/util.py +19 -0
  18. {dv_flow_mgr-0.0.1.12849118090a1.dist-info → dv_flow_mgr-0.0.1.12911707440a1.dist-info}/METADATA +1 -1
  19. dv_flow_mgr-0.0.1.12911707440a1.dist-info/RECORD +31 -0
  20. dv_flow_mgr-0.0.1.12911707440a1.dist-info/entry_points.txt +2 -0
  21. dv_flow_mgr-0.0.1.12911707440a1.dist-info/top_level.txt +1 -0
  22. dv_flow_mgr/cmds/cmd_run.py +0 -28
  23. dv_flow_mgr/session.py +0 -324
  24. dv_flow_mgr/tasklib/builtin_pkg.py +0 -62
  25. dv_flow_mgr/tasklib/hdl/sim/mti_pkg.py +0 -11
  26. dv_flow_mgr/tasklib/hdl/sim/mti_task_sim_image.py +0 -69
  27. dv_flow_mgr/tasklib/hdl/sim/mti_task_sim_run.py +0 -47
  28. dv_flow_mgr/tasklib/hdl/sim/pkg_hdl_sim.py +0 -8
  29. dv_flow_mgr/tasklib/hdl/sim/task_sim_image.py +0 -16
  30. dv_flow_mgr/tasklib/hdl/sim/vcs_pkg.py +0 -14
  31. dv_flow_mgr/tasklib/hdl/sim/vcs_task_sim_image.py +0 -49
  32. dv_flow_mgr/tasklib/hdl/sim/vcs_task_sim_run.py +0 -45
  33. dv_flow_mgr/tasklib/hdl/sim/vl_task_sim_image.py +0 -96
  34. dv_flow_mgr/tasklib/hdl/sim/vlt_pkg.py +0 -14
  35. dv_flow_mgr/tasklib/hdl/sim/vlt_task_sim_image.py +0 -50
  36. dv_flow_mgr/tasklib/hdl/sim/vlt_task_sim_run.py +0 -45
  37. dv_flow_mgr/tasklib/std/pkg_std.py +0 -15
  38. dv_flow_mgr/tasklib/std/std.dfs +0 -7
  39. dv_flow_mgr/tasklib/std/task_null.py +0 -26
  40. dv_flow_mgr-0.0.1.12849118090a1.dist-info/RECORD +0 -42
  41. dv_flow_mgr-0.0.1.12849118090a1.dist-info/entry_points.txt +0 -2
  42. dv_flow_mgr-0.0.1.12849118090a1.dist-info/top_level.txt +0 -1
  43. {dv_flow_mgr → dv_flow/mgr}/fileset.py +0 -0
  44. {dv_flow_mgr → dv_flow/mgr}/flow.py +0 -0
  45. {dv_flow_mgr → dv_flow/mgr}/fragment_def.py +0 -0
  46. {dv_flow_mgr → dv_flow/mgr}/package.py +0 -0
  47. {dv_flow_mgr → dv_flow/mgr}/package_import_spec.py +0 -0
  48. {dv_flow_mgr → dv_flow/mgr}/parameters.py +0 -0
  49. {dv_flow_mgr → dv_flow/mgr}/share/flow.json +0 -0
  50. {dv_flow_mgr → dv_flow/mgr}/task_memento.py +0 -0
  51. {dv_flow_mgr-0.0.1.12849118090a1.dist-info → dv_flow_mgr-0.0.1.12911707440a1.dist-info}/LICENSE +0 -0
  52. {dv_flow_mgr-0.0.1.12849118090a1.dist-info → dv_flow_mgr-0.0.1.12911707440a1.dist-info}/WHEEL +0 -0
dv_flow_mgr/session.py DELETED
@@ -1,324 +0,0 @@
- #****************************************************************************
- #* session.py
- #*
- #* Copyright 2023 Matthew Ballance and Contributors
- #*
- #* Licensed under the Apache License, Version 2.0 (the "License"); you may
- #* not use this file except in compliance with the License.
- #* You may obtain a copy of the License at:
- #*
- #* http://www.apache.org/licenses/LICENSE-2.0
- #*
- #* Unless required by applicable law or agreed to in writing, software
- #* distributed under the License is distributed on an "AS IS" BASIS,
- #* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- #* See the License for the specific language governing permissions and
- #* limitations under the License.
- #*
- #* Created on:
- #* Author:
- #*
- #****************************************************************************
- import asyncio
- import os
- import yaml
- import dataclasses as dc
- from typing import Any, Callable, Dict, List
- from .fragment_def import FragmentDef
- from .package import Package
- from .package_def import PackageDef, PackageSpec
- from .task import Task, TaskSpec, TaskCtor
-
- @dc.dataclass
- class Session(object):
- """Manages the running of a flow"""
-
- srcdir : str
- rundir : str
-
- # Search path for .dfs files
- package_path : List[str] = dc.field(default_factory=list)
- package : PackageDef = None
- create_subprocess : Callable = asyncio.create_subprocess_exec
- _root_dir : str = None
- _pkg_s : List[Package] = dc.field(default_factory=list)
- _pkg_m : Dict[PackageSpec,Package] = dc.field(default_factory=dict)
- _pkg_spec_s : List[PackageDef] = dc.field(default_factory=list)
- _pkg_def_m : Dict[str,PackageDef] = dc.field(default_factory=dict)
- _pkg_file_m : Dict[str,str] = dc.field(default_factory=dict)
- _pkg_path : List[str] = dc.field(default_factory=list)
- _task_list : List[Task] = dc.field(default_factory=list)
- _task_m : Dict[TaskSpec,Task] = dc.field(default_factory=dict)
- _task_id : int = 0
-
- def __post_init__(self):
- # Add a reference to the built-in 'std' package
- this_dir = os.path.dirname(os.path.abspath(__file__))
- self._pkg_file_m["std"] = os.path.join(this_dir, "tasklib/std/flow.dv")
-
- # from .tasklib.std.pkg_std import PackageStd
- # from .tasklib.hdl.sim.vcs_pkg import VcsPackage
- # from .tasklib.hdl.sim.vlt_pkg import VltPackage
- # from .tasklib.hdl.sim.mti_pkg import MtiPackage
- # self._pkg_m[PackageSpec("std")] = PackageStd("std")
- # self._pkg_m[PackageSpec("hdl.sim.mti")] = MtiPackage("hdl.sim.mti")
- # self._pkg_m[PackageSpec("hdl.sim.vcs")] = VcsPackage("hdl.sim.vcs")
- # self._pkg_m[PackageSpec("hdl.sim.vlt")] = VltPackage("hdl.sim.vlt")
- pass
-
- def load(self):
- if not os.path.isdir(self.srcdir):
- raise Exception("Root directory %s does not exist" % self.srcdir)
-
- if not os.path.isfile(os.path.join(self.srcdir, "flow.dv")):
- raise Exception("No root flow file")
-
- self._root_dir = os.path.dirname(self.srcdir)
- self.package = self._load_package(os.path.join(self.srcdir, "flow.dv"), [])
-
- return self.package
-
- def mkTaskGraph(self, task : str) -> Task:
- self._pkg_s.clear()
- self._task_m.clear()
-
- return self._mkTaskGraph(task, self.rundir)
-
- def _mkTaskGraph(self, task : str, parent_rundir : str) -> Task:
-
- elems = task.split(".")
-
- pkg_name = ".".join(elems[0:-1])
- task_name = elems[-1]
-
- if pkg_name == "":
- if len(self._pkg_spec_s) == 0:
- raise Exception("No package context for %s" % task)
- pkg_spec = self._pkg_spec_s[-1]
- pkg_name = pkg_spec.name
- else:
- pkg_spec = PackageSpec(pkg_name)
-
- rundir = os.path.join(parent_rundir, pkg_name, task_name)
-
- self._pkg_spec_s.append(pkg_spec)
- pkg = self.getPackage(pkg_spec)
-
- self._pkg_s.append(pkg)
-
- ctor_t : TaskCtor = pkg.getTaskCtor(task_name)
-
- depends = []
-
- # The returned task should have all param references resolved
- print("task_ctor=%s" % str(ctor_t.task_ctor), flush=True)
- task = ctor_t.task_ctor(
- name=task_name,
- session=self,
- params=ctor_t.mkParams(),
- depends=depends,
- rundir=rundir,
- srcdir=ctor_t.srcdir)
-
- for i,d in enumerate(task.depend_refs):
- if d in self._task_m.keys():
- task.depends.append(self._task_m[d])
- else:
- print("mkTaskGraph: %s" % d)
- task.depends.append(self._mkTaskGraph(d, parent_rundir))
-
- self._task_m[task.name] = task
-
- self._pkg_s.pop()
- self._pkg_spec_s.pop()
-
- return task
-
- def push_package(self, pkg : Package):
- self._pkg_s.append(pkg)
-
- def pop_package(self, pkg : Package):
- self._pkg_s.pop()
-
- def package(self):
- return self._pkg_s[-1]
-
- def mkTaskId(self, task : 'Task') -> int:
- self._task_id += 1
- # TODO: save task <-> id map for later?
- return self._task_id
-
- async def run(self, task : str) -> 'TaskData':
- impl = self.mkTaskGraph(task)
- return await impl.do_run()
-
- def _load_package(self, root : str, file_s : List[str]) -> PackageDef:
- if root in file_s:
- raise Exception("Recursive file processing @ %s: %s" % (root, ",".join(self._file_s)))
- file_s.append(root)
- ret = None
- with open(root, "r") as fp:
- print("open %s" % root)
- doc = yaml.load(fp, Loader=yaml.FullLoader)
- if "package" not in doc.keys():
- raise Exception("Missing 'package' key in %s" % root)
- pkg = PackageDef(**(doc["package"]))
- pkg.basedir = os.path.dirname(root)
-
- # for t in pkg.tasks:
- # t.basedir = os.path.dirname(root)
-
- if not len(self._pkg_spec_s):
- self._pkg_spec_s.append(PackageSpec(pkg.name))
- else:
- if self._pkg_spec_s[-1].name != pkg.name:
- raise Exception("Package name mismatch: %s != %s" % (self._pkg_spec_s[-1].name, pkg.name))
- else:
- # TODO: merge content
- self._pkg_spec_s.append(PackageSpec(pkg.name))
- self._pkg_def_m[PackageSpec(pkg.name)] = pkg
-
- print("pkg: %s" % str(pkg))
-
- print("fragments: %s" % str(pkg.fragments))
- for spec in pkg.fragments:
- self._load_fragment_spec(pkg, spec, file_s)
-
- self._pkg_spec_s.pop()
- file_s.pop()
-
- return pkg
-
- def _load_fragment_spec(self, pkg : PackageDef, spec : str, file_s : List[str]):
-
- # We're either going to have:
- # - File path
- # - Directory path
-
- if os.path.isfile(os.path.join(pkg.basedir, spec)):
- self._load_fragment_file(pkg, spec, file_s)
- elif os.path.isdir(os.path.join(pkg.basedir, spec)):
- self._load_fragment_dir(pkg, os.path.join(pkg.basedir, spec), file_s)
- else:
- raise Exception("Fragment spec %s not found" % spec)
-
-
- def _load_fragment_dir(self, pkg : PackageDef, dir : str, file_s : List[str]):
-
- for file in os.listdir(dir):
- if os.path.isdir(os.path.join(dir, file)):
- self._load_fragment_dir(pkg, os.path.join(dir, file), file_s)
- elif os.path.isfile(os.path.join(dir, file)) and file == "flow.yaml":
- self._load_fragment_file(pkg, os.path.join(dir, file), file_s)
-
- def _load_fragment_file(self, pkg : PackageDef, file : str, file_s : List[str]):
-
- if file in file_s:
- raise Exception("Recursive file processing @ %s: %s" % (file, ",".join(self._file_s)))
- file_s.append(file)
-
- with open(file, "r") as fp:
- doc = yaml.load(fp, Loader=yaml.FullLoader)
- print("doc: %s" % str(doc), flush=True)
- if "fragment" in doc.keys():
- # Merge the package definition
- frag = FragmentDef(**(doc["fragment"]))
- frag.basedir = os.path.dirname(file)
- pkg.fragment_l.append(frag)
- else:
- print("Warning: file %s is not a fragment" % file)
-
-
-
- def getPackage(self, spec : PackageSpec) -> Package:
- # Obtain the active package definition
- pkg_spec = self._pkg_spec_s[-1]
- pkg_def = self._pkg_def_m[pkg_spec]
-
- print("spec: %s ; _pkg_def_m: %s" % (str(spec), str(self._pkg_def_m.keys())))
-
- # Need a stack to track which package we are currently in
- # Need a map to get a concrete package from a name with parameterization
-
- # Note: _pkg_m needs to be context specific, such that imports from
- # one package don't end up visible in another
- if spec in self._pkg_m.keys():
- pkg = self._pkg_m[spec]
- elif spec in self._pkg_def_m.keys():
- pkg = self._pkg_def_m[spec].mkPackage(self)
- self._pkg_m[spec] = pkg
- else:
- pkg = None
- print("imports: %s" % str(pkg_def.imports))
- for imp in pkg_def.imports:
- print("imp: %s" % str(imp))
- if imp.alias is not None and imp.alias == spec.name:
- # Found the alias name. Just need to get an instance of this package
- tgt_pkg_spec = PackageSpec(imp.name)
- if tgt_pkg_spec in self._pkg_m.keys():
- pkg = self._pkg_m[tgt_pkg_spec]
- elif tgt_pkg_spec in self._pkg_def_m.keys():
- base = self._pkg_def_m[tgt_pkg_spec]
- pkg = base.mkPackage(self, spec.params)
- self._pkg_m[spec] = pkg
- elif imp.path is not None:
- # See if we can load the package
- print("TODO: load referenced package")
- else:
- raise Exception("Import alias %s not found" % imp.name)
- break
- else:
- # Need to compare the spec with the full import spec
- imp_spec = PackageSpec(imp.name)
- # TODO: set parameters
- if imp_spec == spec:
- base = self._pkg_def_m[PackageSpec(spec.name)]
- pkg = base.mkPackage(self, spec.params)
- self._pkg_m[spec] = pkg
- break
-
- if pkg is None:
- # Look in the set of registered packages
- if spec.name in self._pkg_file_m.keys():
- # Load the package
- self._pkg_spec_s.append(spec)
- pkg_def = self._load_package(
- self._pkg_file_m[spec.name],
- [])
-
- self._pkg_spec_s.pop()
-
- # The definition is now in the map, so recurse to create it
- pkg = self.getPackage(spec)
- else:
- # Go search the package path
- pass
-
-
-
- if pkg is None:
- raise Exception("Failed to find package %s from package %s" % (
- spec.name, pkg_def.name))
-
- return pkg
-
- def getTaskCtor(self, spec : TaskSpec, pkg : PackageDef) -> 'TaskCtor':
- spec_e = spec.name.split(".")
- task_name = spec_e[-1]
-
- if len(spec_e) == 1:
- # Just have a task name. Use the current package
- if len(self._pkg_s) == 0:
- raise Exception("No package context for task %s" % spec.name)
- pkg = self._pkg_s[-1]
- else:
- pkg_name = ".".join(spec_e[0:-1])
-
- try:
- pkg = self.getPackage(PackageSpec(pkg_name))
- except Exception as e:
- print("Failed to find package %s while looking for task %s" % (pkg_name, spec.name))
- raise e
-
- return pkg.getTaskCtor(task_name)
-
dv_flow_mgr/tasklib/builtin_pkg.py DELETED
@@ -1,62 +0,0 @@
- # import os
- # import sys
- # import glob
- # import fnmatch
- # import importlib
- # import pydantic.dataclasses as dc
- # from ..package import TaskCtor
- from ..task import Task
- from ..task_data import TaskData
- # from ..task_memento import TaskMemento
- # from typing import List, Tuple
- # import dataclasses as dc
- # from ..package_def import Package
-
- # class TaskPyClass(Task):
- # pyclass : str = None
-
- # async def run(self, input : TaskData) -> TaskData:
-
- # if self.srcdir not in sys.path:
- # sys.path.insert(0, self.srcdir)
-
- # print("sys.path: %s" % str(sys.path), flush=True)
- # idx = self.params.pyclass.rfind('.')
- # modname = self.params.pyclass[:idx]
- # clsname = self.params.pyclass[idx+1:]
-
- # if os.path.isfile(os.path.join(self.basedir, "my_module.py")):
- # print("my_module.py exists", flush=True)
- # else:
- # print("my_module.py does not exist", flush=True)
-
- # if modname in sys.modules.keys():
- # module = sys.modules[modname]
- # else:
- # try:
- # print("modname=%s" % modname, flush=True)
- # module = importlib.import_module(modname)
- # except ModuleNotFoundError as e:
- # print("Module not found: %s syspath=%s" % (str(e), str(sys.path)), flush=True)
- # raise e
-
- # cls = getattr(module, clsname)
-
- # obj = cls(self.name, self.task_id, self.session, self.basedir, srcdir=self.srcdir)
-
- # return await obj.run(input)
-
-
- # class TaskPyClassMemento(TaskMemento):
- # pass
-
- class TaskNull(Task):
- async def run(self, input : TaskData) -> TaskData:
- return input
-
- # @dc.dataclass
- # class PackageBuiltin(Package):
-
- # def __post_init__(self):
- # print("PackageBuiltin::__post_init__", flush=True)
- # self.tasks["PyClass"] = TaskPyClass()
dv_flow_mgr/tasklib/hdl/sim/mti_pkg.py DELETED
@@ -1,11 +0,0 @@
- import dataclasses as dc
- from .pkg_hdl_sim import PackageHdlSim
- from .mti_task_sim_image import TaskMtiSimImageCtor
- from .mti_task_sim_run import TaskMtiSimRunCtor
-
- @dc.dataclass
- class MtiPackage(PackageHdlSim):
- def __post_init__(self):
- self.tasks["SimImage"] = TaskMtiSimImageCtor()
- self.tasks["SimRun"] = TaskMtiSimRunCtor()
-
dv_flow_mgr/tasklib/hdl/sim/mti_task_sim_image.py DELETED
@@ -1,69 +0,0 @@
- import os
- import fnmatch
- import dataclasses as dc
- from ....fileset import FileSet
- from ....package import TaskCtor
- from ....task import Task, TaskParams, TaskCtorT
- from ....task_data import TaskData
- from ....task_memento import TaskMemento
- from .vl_task_sim_image import VlTaskSimImage, VlTaskSimImageParams, VlTaskSimImageMemento
- from typing import List, Tuple
-
- from svdep import FileCollection, TaskCheckUpToDate, TaskBuildFileCollection
-
- @dc.dataclass
- class TaskMtiSimImage(VlTaskSimImage):
-
- def getRefTime(self):
- if os.path.isfile(os.path.join(self.rundir, 'work.d')):
- return os.path.getmtime(os.path.join(self.rundir, 'work.d'))
- else:
- raise Exception("work.d not found (%s)")
-
- async def build(self, files : List[str], incdirs : List[str]):
- if not os.path.isdir(os.path.join(self.rundir, 'work')):
- cmd = ['vlib', 'work']
- proc = await self.session.create_subprocess(*cmd,
- cwd=self.rundir)
- await proc.wait()
- if proc.returncode != 0:
- raise Exception("Questa vlib failed")
-
- cmd = ['vlog', '-sv']
-
- for incdir in incdirs:
- cmd.append('+incdir+%s' % incdir)
-
- cmd.extend(files)
-
- proc = await self.session.create_subprocess(*cmd,
- cwd=self.rundir)
- await proc.wait()
- if proc.returncode != 0:
- raise Exception("Questa compile failed")
-
- cmd = ['vopt', '-o', 'simv_opt']
-
- for top in self.params.top:
- cmd.append(top)
-
- proc = await self.session.create_subprocess(*cmd,
- cwd=self.rundir)
-
- await proc.wait()
-
- with open(os.path.join(self.rundir, 'work.d'), "w") as fp:
- fp.write("\n")
-
- if proc.returncode != 0:
- raise Exception("Questa opt failed")
-
- class TaskMtiSimImageParams(VlTaskSimImageParams):
- pass
-
- class TaskMtiSimImageMemento(VlTaskSimImageMemento):
- pass
-
- class TaskMtiSimImageCtor(TaskCtorT):
- def __init__(self):
- super().__init__(TaskMtiSimImageParams, TaskMtiSimImage)
dv_flow_mgr/tasklib/hdl/sim/mti_task_sim_run.py DELETED
@@ -1,47 +0,0 @@
- import os
- import fnmatch
- import pydantic.dataclasses as dc
- from ....fileset import FileSet
- from ....package import TaskCtor
- from ....task import Task, TaskParams, TaskCtorT
- from ....task_data import TaskData
- from ....task_memento import TaskMemento
- from typing import List, Tuple
-
- class TaskMtiSimRun(Task):
-
- async def run(self, input : TaskData) -> TaskData:
- vl_fileset = input.getFileSets("verilatorBinary")
-
- build_dir = vl_fileset[0].basedir
-
- cmd = [
- 'vsim', '-batch', '-do', 'run -all; quit -f',
- '-work', os.path.join(build_dir, 'work'),
- 'simv_opt'
- ]
-
- fp = open(os.path.join(self.rundir, 'sim.log'), "w")
- proc = await self.session.create_subprocess(*cmd,
- cwd=self.rundir,
- stdout=fp)
-
- await proc.wait()
-
- fp.close()
-
- output = TaskData()
- output.addFileSet(FileSet(src=self.name, type="simRunDir", basedir=self.rundir))
-
- return output
-
- class TaskMtiSimRunParams(TaskParams):
- pass
-
- class TaskMtiSimRunMemento(TaskMemento):
- pass
-
- class TaskMtiSimRunCtor(TaskCtorT):
- def __init__(self):
- super().__init__(TaskMtiSimRunParams, TaskMtiSimRun)
-
dv_flow_mgr/tasklib/hdl/sim/pkg_hdl_sim.py DELETED
@@ -1,8 +0,0 @@
- import dataclasses as dc
- from ....package import Package
-
- @dc.dataclass
- class PackageHdlSim(Package):
- # TODO: parameter to select proper implementation
- pass
-
dv_flow_mgr/tasklib/hdl/sim/task_sim_image.py DELETED
@@ -1,16 +0,0 @@
- import os
- import fnmatch
- import pydantic.dataclasses as dc
- from ....fileset import FileSet
- from ....package import TaskCtor
- from ....task import Task, TaskParams
- from ....task_data import TaskData
- from ....task_memento import TaskMemento
- from typing import List, Tuple
-
- class TaskSimImage(Task):
-
- async def run(self, input : TaskData) -> TaskData:
- return input
-
-
dv_flow_mgr/tasklib/hdl/sim/vcs_pkg.py DELETED
@@ -1,14 +0,0 @@
- import dataclasses as dc
- from .pkg_hdl_sim import PackageHdlSim
- from .vcs_task_sim_image import TaskVcsSimImageCtor
- from .vcs_task_sim_run import TaskVcsSimRunCtor
-
- @dc.dataclass
- class VcsPackage(PackageHdlSim):
-
- def __post_init__(self):
- print("PackageVcs::__post_init__", flush=True)
- self.tasks["SimImage"] = TaskVcsSimImageCtor()
- self.tasks["SimRun"] = TaskVcsSimRunCtor()
- pass
-
dv_flow_mgr/tasklib/hdl/sim/vcs_task_sim_image.py DELETED
@@ -1,49 +0,0 @@
- import os
- import fnmatch
- import dataclasses as dc
- from ....fileset import FileSet
- from ....package import TaskCtor
- from ....task import Task, TaskParams, TaskCtorT
- from ....task_data import TaskData
- from ....task_memento import TaskMemento
- from .vl_task_sim_image import VlTaskSimImage, VlTaskSimImageParams, VlTaskSimImageMemento
- from typing import List, Tuple
-
- from svdep import FileCollection, TaskCheckUpToDate, TaskBuildFileCollection
-
- @dc.dataclass
- class TaskVcsSimImage(VlTaskSimImage):
-
- def getRefTime(self):
- if os.path.isfile(os.path.join(self.rundir, 'simv')):
- return os.path.getmtime(os.path.join(self.rundir, 'simv'))
- else:
- raise Exception
-
- async def build(self, files : List[str], incdirs : List[str]):
- cmd = ['vcs', '-sverilog']
-
- for incdir in incdirs:
- cmd.append('+incdir+%s' % incdir)
-
- cmd.extend(files)
-
- if len(self.params.top):
- cmd.extend(['-top', "+".join(self.params.top)])
-
- proc = await self.session.create_subprocess(*cmd,
- cwd=self.rundir)
- await proc.wait()
-
- if proc.returncode != 0:
- raise Exception("VCS simv failed")
-
- class TaskVcsSimImageParams(VlTaskSimImageParams):
- pass
-
- class TaskVcsSimImageMemento(VlTaskSimImageMemento):
- pass
-
- class TaskVcsSimImageCtor(TaskCtorT):
- def __init__(self):
- super().__init__(TaskVcsSimImageParams, TaskVcsSimImage)
dv_flow_mgr/tasklib/hdl/sim/vcs_task_sim_run.py DELETED
@@ -1,45 +0,0 @@
- import os
- import fnmatch
- import pydantic.dataclasses as dc
- from ....fileset import FileSet
- from ....package import TaskCtor
- from ....task import Task, TaskParams, TaskCtorT
- from ....task_data import TaskData
- from ....task_memento import TaskMemento
- from typing import List, Tuple
-
- class TaskVcsSimRun(Task):
-
- async def run(self, input : TaskData) -> TaskData:
- vl_fileset = input.getFileSets("simDir")
-
- build_dir = vl_fileset[0].basedir
-
- cmd = [
- os.path.join(build_dir, 'simv'),
- ]
-
- fp = open(os.path.join(self.rundir, 'sim.log'), "w")
- proc = await self.session.create_subprocess(*cmd,
- cwd=self.rundir,
- stdout=fp)
-
- await proc.wait()
-
- fp.close()
-
- output = TaskData()
- output.addFileSet(FileSet(src=self.name, type="simRunDir", basedir=self.rundir))
-
- return output
-
- class TaskVcsSimRunParams(TaskParams):
- pass
-
- class TaskVcsSimRunMemento(TaskMemento):
- pass
-
- class TaskVcsSimRunCtor(TaskCtorT):
- def __init__(self):
- super().__init__(TaskVcsSimRunParams, TaskVcsSimRun)
-