dv-flow-mgr 0.0.1.12822558956a1-py3-none-any.whl → 0.0.1.12911707440a1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dv_flow_mgr → dv_flow/mgr}/__init__.py +1 -1
- {dv_flow_mgr → dv_flow/mgr}/__main__.py +1 -1
- dv_flow/mgr/cmds/cmd_run.py +90 -0
- {dv_flow_mgr → dv_flow/mgr}/package.py +0 -20
- dv_flow/mgr/package_def.py +302 -0
- dv_flow/mgr/pkg_rgy.py +78 -0
- dv_flow/mgr/std/fileset.py +68 -0
- dv_flow/mgr/std/flow.dv +30 -0
- dv_flow/mgr/std/message.py +7 -0
- {dv_flow_mgr/tasklib → dv_flow/mgr}/std/task_fileset.py +5 -5
- dv_flow/mgr/std/task_null.py +10 -0
- {dv_flow_mgr → dv_flow/mgr}/task.py +33 -72
- {dv_flow_mgr → dv_flow/mgr}/task_data.py +18 -2
- {dv_flow_mgr → dv_flow/mgr}/task_def.py +5 -2
- dv_flow/mgr/task_graph_builder.py +190 -0
- dv_flow/mgr/task_graph_runner.py +71 -0
- dv_flow/mgr/task_graph_runner_local.py +79 -0
- dv_flow/mgr/util.py +19 -0
- {dv_flow_mgr-0.0.1.12822558956a1.dist-info → dv_flow_mgr-0.0.1.12911707440a1.dist-info}/METADATA +1 -1
- dv_flow_mgr-0.0.1.12911707440a1.dist-info/RECORD +31 -0
- dv_flow_mgr-0.0.1.12911707440a1.dist-info/entry_points.txt +2 -0
- dv_flow_mgr-0.0.1.12911707440a1.dist-info/top_level.txt +1 -0
- dv_flow_mgr/cmds/cmd_run.py +0 -28
- dv_flow_mgr/package_def.py +0 -98
- dv_flow_mgr/session.py +0 -290
- dv_flow_mgr/tasklib/builtin_pkg.py +0 -61
- dv_flow_mgr/tasklib/hdl/sim/mti_pkg.py +0 -11
- dv_flow_mgr/tasklib/hdl/sim/mti_task_sim_image.py +0 -69
- dv_flow_mgr/tasklib/hdl/sim/mti_task_sim_run.py +0 -47
- dv_flow_mgr/tasklib/hdl/sim/pkg_hdl_sim.py +0 -8
- dv_flow_mgr/tasklib/hdl/sim/task_sim_image.py +0 -16
- dv_flow_mgr/tasklib/hdl/sim/vcs_pkg.py +0 -14
- dv_flow_mgr/tasklib/hdl/sim/vcs_task_sim_image.py +0 -49
- dv_flow_mgr/tasklib/hdl/sim/vcs_task_sim_run.py +0 -45
- dv_flow_mgr/tasklib/hdl/sim/vl_task_sim_image.py +0 -96
- dv_flow_mgr/tasklib/hdl/sim/vlt_pkg.py +0 -14
- dv_flow_mgr/tasklib/hdl/sim/vlt_task_sim_image.py +0 -50
- dv_flow_mgr/tasklib/hdl/sim/vlt_task_sim_run.py +0 -45
- dv_flow_mgr/tasklib/std/fileset.py +0 -5
- dv_flow_mgr/tasklib/std/flow.dv +0 -12
- dv_flow_mgr/tasklib/std/pkg_std.py +0 -15
- dv_flow_mgr/tasklib/std/std.dfs +0 -7
- dv_flow_mgr/tasklib/std/task_null.py +0 -26
- dv_flow_mgr-0.0.1.12822558956a1.dist-info/RECORD +0 -41
- dv_flow_mgr-0.0.1.12822558956a1.dist-info/entry_points.txt +0 -2
- dv_flow_mgr-0.0.1.12822558956a1.dist-info/top_level.txt +0 -1
- {dv_flow_mgr → dv_flow/mgr}/fileset.py +0 -0
- {dv_flow_mgr → dv_flow/mgr}/flow.py +0 -0
- {dv_flow_mgr → dv_flow/mgr}/fragment_def.py +0 -0
- {dv_flow_mgr → dv_flow/mgr}/package_import_spec.py +0 -0
- {dv_flow_mgr → dv_flow/mgr}/parameters.py +0 -0
- {dv_flow_mgr → dv_flow/mgr}/share/flow.json +0 -0
- {dv_flow_mgr → dv_flow/mgr}/task_memento.py +0 -0
- {dv_flow_mgr-0.0.1.12822558956a1.dist-info → dv_flow_mgr-0.0.1.12911707440a1.dist-info}/LICENSE +0 -0
- {dv_flow_mgr-0.0.1.12822558956a1.dist-info → dv_flow_mgr-0.0.1.12911707440a1.dist-info}/WHEEL +0 -0
dv_flow_mgr/package_def.py
DELETED
@@ -1,98 +0,0 @@
-#****************************************************************************
-#* package_def.py
-#*
-#* Copyright 2023 Matthew Ballance and Contributors
-#*
-#* Licensed under the Apache License, Version 2.0 (the "License"); you may
-#* not use this file except in compliance with the License.
-#* You may obtain a copy of the License at:
-#*
-#*   http://www.apache.org/licenses/LICENSE-2.0
-#*
-#* Unless required by applicable law or agreed to in writing, software
-#* distributed under the License is distributed on an "AS IS" BASIS,
-#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#* See the License for the specific language governing permissions and
-#* limitations under the License.
-#*
-#* Created on:
-#* Author:
-#*
-#****************************************************************************
-import pydantic.dataclasses as dc
-import json
-from pydantic import BaseModel
-from typing import Any, Dict, List
-from .flow import Flow
-from .fragment_def import FragmentDef
-from .package import Package
-from .package_import_spec import PackageImportSpec, PackageSpec
-from .task import TaskParamCtor
-from .task_def import TaskDef, TaskSpec
-
-
-class PackageDef(BaseModel):
-    name : str
-    params : Dict[str,Any] = dc.Field(default_factory=dict)
-    type : List[PackageSpec] = dc.Field(default_factory=list)
-    tasks : List[TaskDef] = dc.Field(default_factory=list)
-    imports : List[PackageImportSpec] = dc.Field(default_factory=list)
-    fragments: List[str] = dc.Field(default_factory=list)
-
-    fragment_l : List['FragmentDef'] = dc.Field(default_factory=list, exclude=True)
-
-    # import_m : Dict['PackageSpec','Package'] = dc.Field(default_factory=dict)
-
-    basedir : str = None
-
-    def getTask(self, name : str) -> 'TaskDef':
-        for t in self.tasks:
-            if t.name == name:
-                return t
-
-    def mkPackage(self, session, params : Dict[str,Any] = None) -> 'Package':
-        ret = Package(self.name)
-
-        for task in self.tasks:
-            if task.type is not None:
-                # Find package (not package_def) that implements this task
-                # Insert an indirect reference to that tasks's constructor
-
-                # Only call getTaskCtor if the task is in a different package
-                task_t = task.type if isinstance(task.type, TaskSpec) else TaskSpec(task.type)
-                ctor_t = session.getTaskCtor(task_t, self)
-
-                ctor_t = TaskParamCtor(
-                    base=ctor_t,
-                    params=task.params,
-                    basedir=self.basedir,
-                    depend_refs=task.depends)
-            else:
-                # We use the Null task from the std package
-                raise Exception("")
-            ret.tasks[task.name] = ctor_t
-
-        for frag in self.fragment_l:
-            for task in frag.tasks:
-                if task.type is not None:
-                    # Find package (not package_def) that implements this task
-                    # Insert an indirect reference to that tasks's constructor
-
-                    # Only call getTaskCtor if the task is in a different package
-                    task_t = task.type if isinstance(task.type, TaskSpec) else TaskSpec(task.type)
-                    ctor_t = session.getTaskCtor(task_t, self)
-
-                    ctor_t = TaskParamCtor(
-                        base=ctor_t,
-                        params=task.params,
-                        basedir=frag.basedir,
-                        depend_refs=task.depends)
-                else:
-                    # We use the Null task from the std package
-                    raise Exception("")
-                if task.name in ret.tasks:
-                    raise Exception("Task %s already defined" % task.name)
-                ret.tasks[task.name] = ctor_t
-
-        return ret
-
dv_flow_mgr/session.py
DELETED
@@ -1,290 +0,0 @@
-#****************************************************************************
-#* session.py
-#*
-#* Copyright 2023 Matthew Ballance and Contributors
-#*
-#* Licensed under the Apache License, Version 2.0 (the "License"); you may
-#* not use this file except in compliance with the License.
-#* You may obtain a copy of the License at:
-#*
-#*   http://www.apache.org/licenses/LICENSE-2.0
-#*
-#* Unless required by applicable law or agreed to in writing, software
-#* distributed under the License is distributed on an "AS IS" BASIS,
-#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#* See the License for the specific language governing permissions and
-#* limitations under the License.
-#*
-#* Created on:
-#* Author:
-#*
-#****************************************************************************
-import asyncio
-import os
-import yaml
-import dataclasses as dc
-from typing import Any, Callable, Dict, List
-from .fragment_def import FragmentDef
-from .package import Package
-from .package_def import PackageDef, PackageSpec
-from .task import Task,TaskSpec
-
-@dc.dataclass
-class Session(object):
-    """Manages the running of a flow"""
-
-    srcdir : str
-    rundir : str
-
-    # Search path for .dfs files
-    package_path : List[str] = dc.field(default_factory=list)
-    package : PackageDef = None
-    create_subprocess : Callable = asyncio.create_subprocess_exec
-    _root_dir : str = None
-    _pkg_s : List[Package] = dc.field(default_factory=list)
-    _pkg_m : Dict[PackageSpec,Package] = dc.field(default_factory=dict)
-    _pkg_spec_s : List[PackageDef] = dc.field(default_factory=list)
-    _pkg_def_m : Dict[str,PackageDef] = dc.field(default_factory=dict)
-    _task_list : List[Task] = dc.field(default_factory=list)
-    _task_m : Dict[TaskSpec,Task] = dc.field(default_factory=dict)
-    _task_id : int = 0
-
-    def __post_init__(self):
-        from .tasklib.std.pkg_std import PackageStd
-        from .tasklib.hdl.sim.vcs_pkg import VcsPackage
-        from .tasklib.hdl.sim.vlt_pkg import VltPackage
-        from .tasklib.hdl.sim.mti_pkg import MtiPackage
-        self._pkg_m[PackageSpec("std")] = PackageStd("std")
-        self._pkg_m[PackageSpec("hdl.sim.mti")] = MtiPackage("hdl.sim.mti")
-        self._pkg_m[PackageSpec("hdl.sim.vcs")] = VcsPackage("hdl.sim.vcs")
-        self._pkg_m[PackageSpec("hdl.sim.vlt")] = VltPackage("hdl.sim.vlt")
-
-    def load(self, root : str):
-        if not os.path.isdir(root):
-            raise Exception("Root directory %s does not exist" % root)
-
-        if not os.path.isfile(os.path.join(root, "flow.yaml")):
-            raise Exception("No root flow file")
-
-        self._root_dir = os.path.dirname(root)
-        self.package = self._load_package(os.path.join(root, "flow.yaml"), [])
-
-        return self.package
-
-    def mkTaskGraph(self, task : str) -> Task:
-        self._pkg_s.clear()
-        self._task_m.clear()
-
-        return self._mkTaskGraph(task, self.rundir)
-
-    def _mkTaskGraph(self, task : str, parent_rundir : str, params : dict = None) -> Task:
-
-        elems = task.split(".")
-
-        pkg_name = ".".join(elems[0:-1])
-        task_name = elems[-1]
-
-        if pkg_name == "":
-            if len(self._pkg_spec_s) == 0:
-                raise Exception("No package context for %s" % task)
-            pkg_spec = self._pkg_spec_s[-1]
-            pkg_name = pkg_spec.name
-        else:
-            pkg_spec = PackageSpec(pkg_name)
-
-        rundir = os.path.join(parent_rundir, pkg_name, task_name)
-
-        self._pkg_spec_s.append(pkg_spec)
-        pkg = self.getPackage(pkg_spec)
-
-        self._pkg_s.append(pkg)
-
-        #task_def = pkg.getTask(task_name)
-
-        depends = []
-
-        params = pkg.mkTaskParams(task_name)
-
-        task_id = self.mkTaskId(None)
-        # task_name = "%s.%s" % (pkg.name, task_def.name)
-
-        # The returned task should have all param references resolved
-        task = pkg.mkTask(
-            task_name,
-            task_id,
-            self,
-            params,
-            depends)
-        task.rundir = rundir
-
-        for i,d in enumerate(task.depend_refs):
-            if d in self._task_m.keys():
-                task.depends.append(self._task_m[d])
-            else:
-                print("mkTaskGraph: %s" % d)
-                task.depends.append(self._mkTaskGraph(d, parent_rundir))
-
-        self._task_m[task.name] = task
-
-        self._pkg_s.pop()
-        self._pkg_spec_s.pop()
-
-        return task
-
-    def mkTaskId(self, task : 'Task') -> int:
-        self._task_id += 1
-        # TODO: save task <-> id map for later?
-        return self._task_id
-
-    async def run(self, task : str) -> 'TaskData':
-        impl = self.mkTaskGraph(task)
-        return await impl.do_run()
-
-    def _load_package(self, root : str, file_s : List[str]) -> PackageDef:
-        if root in file_s:
-            raise Exception("Recursive file processing @ %s: %s" % (root, ",".join(self._file_s)))
-        file_s.append(root)
-        ret = None
-        with open(root, "r") as fp:
-            print("open %s" % root)
-            doc = yaml.load(fp, Loader=yaml.FullLoader)
-            if "package" not in doc.keys():
-                raise Exception("Missing 'package' key in %s" % root)
-            pkg = PackageDef(**(doc["package"]))
-            pkg.basedir = os.path.dirname(root)
-
-            # for t in pkg.tasks:
-            #     t.basedir = os.path.dirname(root)
-
-        if not len(self._pkg_spec_s):
-            self._pkg_spec_s.append(PackageSpec(pkg.name))
-            self._pkg_def_m[PackageSpec(pkg.name)] = pkg
-        else:
-            if self._pkg_spec_s[0].name != pkg.name:
-                raise Exception("Package name mismatch: %s != %s" % (self._pkg_m[0].name, pkg.name))
-            else:
-                # TODO: merge content
-                self._pkg_spec_s.append(PackageSpec(pkg.name))
-
-        print("pkg: %s" % str(pkg))
-
-        print("fragments: %s" % str(pkg.fragments))
-        for spec in pkg.fragments:
-            self._load_fragment_spec(pkg, spec, file_s)
-
-        self._pkg_spec_s.pop()
-        file_s.pop()
-
-        return pkg
-
-    def _load_fragment_spec(self, pkg : PackageDef, spec : str, file_s : List[str]):
-
-        # We're either going to have:
-        # - File path
-        # - Directory path
-
-        if os.path.isfile(os.path.join(pkg.basedir, spec)):
-            self._load_fragment_file(pkg, spec, file_s)
-        elif os.path.isdir(os.path.join(pkg.basedir, spec)):
-            self._load_fragment_dir(pkg, os.path.join(pkg.basedir, spec), file_s)
-        else:
-            raise Exception("Fragment spec %s not found" % spec)
-
-
-    def _load_fragment_dir(self, pkg : PackageDef, dir : str, file_s : List[str]):
-
-        for file in os.listdir(dir):
-            if os.path.isdir(os.path.join(dir, file)):
-                self._load_fragment_dir(pkg, os.path.join(dir, file), file_s)
-            elif os.path.isfile(os.path.join(dir, file)) and file == "flow.yaml":
-                self._load_fragment_file(pkg, os.path.join(dir, file), file_s)
-
-    def _load_fragment_file(self, pkg : PackageDef, file : str, file_s : List[str]):
-
-        if file in file_s:
-            raise Exception("Recursive file processing @ %s: %s" % (file, ",".join(self._file_s)))
-        file_s.append(file)
-
-        with open(file, "r") as fp:
-            doc = yaml.load(fp, Loader=yaml.FullLoader)
-            print("doc: %s" % str(doc), flush=True)
-            if "fragment" in doc.keys():
-                # Merge the package definition
-                frag = FragmentDef(**(doc["fragment"]))
-                frag.basedir = os.path.dirname(file)
-                pkg.fragment_l.append(frag)
-            else:
-                print("Warning: file %s is not a fragment" % file)
-
-
-
-    def getPackage(self, spec : PackageSpec) -> Package:
-        pkg_spec = self._pkg_spec_s[-1]
-        pkg_def = self._pkg_def_m[pkg_spec]
-
-        # Need a stack to track which package we are currently in
-        # Need a map to get a concrete package from a name with parameterization
-
-        # Note: _pkg_m needs to be context specific, such that imports from
-        # one package don't end up visible in another
-        if spec in self._pkg_m.keys():
-            pkg = self._pkg_m[spec]
-        elif spec in self._pkg_def_m.keys():
-            pkg = self._pkg_def_m[spec].mkPackage(self)
-            self._pkg_m[spec] = pkg
-        else:
-            pkg = None
-            print("imports: %s" % str(pkg_def.imports))
-            for imp in pkg_def.imports:
-                print("imp: %s" % str(imp))
-                if imp.alias is not None and imp.alias == spec.name:
-                    # Found the alias name. Just need to get an instance of this package
-                    tgt_pkg_spec = PackageSpec(imp.name)
-                    if tgt_pkg_spec in self._pkg_m.keys():
-                        pkg = self._pkg_m[tgt_pkg_spec]
-                    elif tgt_pkg_spec in self._pkg_def_m.keys():
-                        base = self._pkg_def_m[tgt_pkg_spec]
-                        pkg = base.mkPackage(self, spec.params)
-                        self._pkg_m[spec] = pkg
-                    elif imp.path is not None:
-                        # See if we can load the package
-                        print("TODO: load referenced package")
-                    else:
-                        raise Exception("Import alias %s not found" % imp.name)
-                    break
-                else:
-                    # Need to compare the spec with the full import spec
-                    imp_spec = PackageSpec(imp.name)
-                    # TODO: set parameters
-                    if imp_spec == spec:
-                        base = self._pkg_def_m[PackageSpec(spec.name)]
-                        pkg = base.mkPackage(self, spec.params)
-                        self._pkg_m[spec] = pkg
-                        break
-
-        if pkg is None:
-            raise Exception("Failed to find package %s from package %s" % (
-                spec.name, pkg_def.name))
-
-        # base_spec = PackageSpec(spec.name)
-        # if not base_spec in self._pkg_def_m.keys():
-        #     # Template is not present. Go find it...
-        #
-        #     # If not found...
-        #     raise Exception("Package %s not found" % spec.name)
-
-        return pkg
-
-    def getTaskCtor(self, spec : TaskSpec, pkg : PackageDef) -> 'TaskCtor':
-        spec_e = spec.name.split(".")
-        task_name = spec_e[-1]
-        pkg_name = ".".join(spec_e[0:-1])
-
-        try:
-            pkg = self.getPackage(PackageSpec(pkg_name))
-        except Exception as e:
-            print("Failed to find package %s while looking for task %s" % (pkg_name, spec.name))
-            raise e
-
-        return pkg.getTaskCtor(task_name)
-
dv_flow_mgr/tasklib/builtin_pkg.py
DELETED
@@ -1,61 +0,0 @@
-import os
-import sys
-import glob
-import fnmatch
-import importlib
-import pydantic.dataclasses as dc
-from ..package import TaskCtor
-from ..task import Task, TaskParams, TaskCtorT
-from ..task_data import TaskData
-from ..task_memento import TaskMemento
-from typing import List, Tuple
-import dataclasses as dc
-from ..package_def import Package
-
-class TaskPyClass(Task):
-
-    async def run(self, input : TaskData) -> TaskData:
-
-        if self.srcdir not in sys.path:
-            sys.path.insert(0, self.srcdir)
-
-        print("sys.path: %s" % str(sys.path), flush=True)
-        idx = self.params.pyclass.rfind('.')
-        modname = self.params.pyclass[:idx]
-        clsname = self.params.pyclass[idx+1:]
-
-        if os.path.isfile(os.path.join(self.basedir, "my_module.py")):
-            print("my_module.py exists", flush=True)
-        else:
-            print("my_module.py does not exist", flush=True)
-
-        try:
-            print("modname=%s" % modname, flush=True)
-            module = importlib.import_module(modname)
-        except ModuleNotFoundError as e:
-            print("Module not found: %s syspath=%s" % (str(e), str(sys.path)), flush=True)
-            raise e
-
-        cls = getattr(module, clsname)
-
-        obj = cls(self.name, self.task_id, self.session, self.basedir, srcdir=self.srcdir)
-
-        return await obj.run(input)
-
-
-class TaskPyClassParams(TaskParams):
-    pyclass : str
-
-class TaskPyClassMemento(TaskMemento):
-    pass
-
-class TaskPyClassCtor(TaskCtorT):
-    def __init__(self):
-        super().__init__(TaskPyClassParams, TaskPyClass)
-
-@dc.dataclass
-class PackageBuiltin(Package):
-
-    def __post_init__(self):
-        print("PackageBuiltin::__post_init__", flush=True)
-        self.tasks["PyClass"] = TaskPyClass()
dv_flow_mgr/tasklib/hdl/sim/mti_pkg.py
DELETED
@@ -1,11 +0,0 @@
-import dataclasses as dc
-from .pkg_hdl_sim import PackageHdlSim
-from .mti_task_sim_image import TaskMtiSimImageCtor
-from .mti_task_sim_run import TaskMtiSimRunCtor
-
-@dc.dataclass
-class MtiPackage(PackageHdlSim):
-    def __post_init__(self):
-        self.tasks["SimImage"] = TaskMtiSimImageCtor()
-        self.tasks["SimRun"] = TaskMtiSimRunCtor()
-
dv_flow_mgr/tasklib/hdl/sim/mti_task_sim_image.py
DELETED
@@ -1,69 +0,0 @@
-import os
-import fnmatch
-import dataclasses as dc
-from ....fileset import FileSet
-from ....package import TaskCtor
-from ....task import Task, TaskParams, TaskCtorT
-from ....task_data import TaskData
-from ....task_memento import TaskMemento
-from .vl_task_sim_image import VlTaskSimImage, VlTaskSimImageParams, VlTaskSimImageMemento
-from typing import List, Tuple
-
-from svdep import FileCollection, TaskCheckUpToDate, TaskBuildFileCollection
-
-@dc.dataclass
-class TaskMtiSimImage(VlTaskSimImage):
-
-    def getRefTime(self):
-        if os.path.isfile(os.path.join(self.rundir, 'work.d')):
-            return os.path.getmtime(os.path.join(self.rundir, 'work.d'))
-        else:
-            raise Exception("work.d not found (%s)")
-
-    async def build(self, files : List[str], incdirs : List[str]):
-        if not os.path.isdir(os.path.join(self.rundir, 'work')):
-            cmd = ['vlib', 'work']
-            proc = await self.session.create_subprocess(*cmd,
-                cwd=self.rundir)
-            await proc.wait()
-            if proc.returncode != 0:
-                raise Exception("Questa vlib failed")
-
-        cmd = ['vlog', '-sv']
-
-        for incdir in incdirs:
-            cmd.append('+incdir+%s' % incdir)
-
-        cmd.extend(files)
-
-        proc = await self.session.create_subprocess(*cmd,
-            cwd=self.rundir)
-        await proc.wait()
-        if proc.returncode != 0:
-            raise Exception("Questa compile failed")
-
-        cmd = ['vopt', '-o', 'simv_opt']
-
-        for top in self.params.top:
-            cmd.append(top)
-
-        proc = await self.session.create_subprocess(*cmd,
-            cwd=self.rundir)
-
-        await proc.wait()
-
-        with open(os.path.join(self.rundir, 'work.d'), "w") as fp:
-            fp.write("\n")
-
-        if proc.returncode != 0:
-            raise Exception("Questa opt failed")
-
-class TaskMtiSimImageParams(VlTaskSimImageParams):
-    pass
-
-class TaskMtiSimImageMemento(VlTaskSimImageMemento):
-    pass
-
-class TaskMtiSimImageCtor(TaskCtorT):
-    def __init__(self):
-        super().__init__(TaskMtiSimImageParams, TaskMtiSimImage)
dv_flow_mgr/tasklib/hdl/sim/mti_task_sim_run.py
DELETED
@@ -1,47 +0,0 @@
-import os
-import fnmatch
-import pydantic.dataclasses as dc
-from ....fileset import FileSet
-from ....package import TaskCtor
-from ....task import Task, TaskParams, TaskCtorT
-from ....task_data import TaskData
-from ....task_memento import TaskMemento
-from typing import List, Tuple
-
-class TaskMtiSimRun(Task):
-
-    async def run(self, input : TaskData) -> TaskData:
-        vl_fileset = input.getFileSets("verilatorBinary")
-
-        build_dir = vl_fileset[0].basedir
-
-        cmd = [
-            'vsim', '-batch', '-do', 'run -all; quit -f',
-            '-work', os.path.join(build_dir, 'work'),
-            'simv_opt'
-        ]
-
-        fp = open(os.path.join(self.rundir, 'sim.log'), "w")
-        proc = await self.session.create_subprocess(*cmd,
-            cwd=self.rundir,
-            stdout=fp)
-
-        await proc.wait()
-
-        fp.close()
-
-        output = TaskData()
-        output.addFileSet(FileSet(src=self.name, type="simRunDir", basedir=self.rundir))
-
-        return output
-
-class TaskMtiSimRunParams(TaskParams):
-    pass
-
-class TaskMtiSimRunMemento(TaskMemento):
-    pass
-
-class TaskMtiSimRunCtor(TaskCtorT):
-    def __init__(self):
-        super().__init__(TaskMtiSimRunParams, TaskMtiSimRun)
-
dv_flow_mgr/tasklib/hdl/sim/task_sim_image.py
DELETED
@@ -1,16 +0,0 @@
-import os
-import fnmatch
-import pydantic.dataclasses as dc
-from ....fileset import FileSet
-from ....package import TaskCtor
-from ....task import Task, TaskParams
-from ....task_data import TaskData
-from ....task_memento import TaskMemento
-from typing import List, Tuple
-
-class TaskSimImage(Task):
-
-    async def run(self, input : TaskData) -> TaskData:
-        return input
-
-
dv_flow_mgr/tasklib/hdl/sim/vcs_pkg.py
DELETED
@@ -1,14 +0,0 @@
-import dataclasses as dc
-from .pkg_hdl_sim import PackageHdlSim
-from .vcs_task_sim_image import TaskVcsSimImageCtor
-from .vcs_task_sim_run import TaskVcsSimRunCtor
-
-@dc.dataclass
-class VcsPackage(PackageHdlSim):
-
-    def __post_init__(self):
-        print("PackageVcs::__post_init__", flush=True)
-        self.tasks["SimImage"] = TaskVcsSimImageCtor()
-        self.tasks["SimRun"] = TaskVcsSimRunCtor()
-        pass
-
dv_flow_mgr/tasklib/hdl/sim/vcs_task_sim_image.py
DELETED
@@ -1,49 +0,0 @@
-import os
-import fnmatch
-import dataclasses as dc
-from ....fileset import FileSet
-from ....package import TaskCtor
-from ....task import Task, TaskParams, TaskCtorT
-from ....task_data import TaskData
-from ....task_memento import TaskMemento
-from .vl_task_sim_image import VlTaskSimImage, VlTaskSimImageParams, VlTaskSimImageMemento
-from typing import List, Tuple
-
-from svdep import FileCollection, TaskCheckUpToDate, TaskBuildFileCollection
-
-@dc.dataclass
-class TaskVcsSimImage(VlTaskSimImage):
-
-    def getRefTime(self):
-        if os.path.isfile(os.path.join(self.rundir, 'simv')):
-            return os.path.getmtime(os.path.join(self.rundir, 'simv'))
-        else:
-            raise Exception
-
-    async def build(self, files : List[str], incdirs : List[str]):
-        cmd = ['vcs', '-sverilog']
-
-        for incdir in incdirs:
-            cmd.append('+incdir+%s' % incdir)
-
-        cmd.extend(files)
-
-        if len(self.params.top):
-            cmd.extend(['-top', "+".join(self.params.top)])
-
-        proc = await self.session.create_subprocess(*cmd,
-            cwd=self.rundir)
-        await proc.wait()
-
-        if proc.returncode != 0:
-            raise Exception("VCS simv failed")
-
-class TaskVcsSimImageParams(VlTaskSimImageParams):
-    pass
-
-class TaskVcsSimImageMemento(VlTaskSimImageMemento):
-    pass
-
-class TaskVcsSimImageCtor(TaskCtorT):
-    def __init__(self):
-        super().__init__(TaskVcsSimImageParams, TaskVcsSimImage)