dv-flow-mgr 0.0.1.12849118090a1__py3-none-any.whl → 0.0.1.12919555073a1__py3-none-any.whl
This diff compares publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
- dv_flow/mgr/__init__.py +9 -0
- {dv_flow_mgr → dv_flow/mgr}/__main__.py +1 -1
- dv_flow/mgr/cmds/cmd_run.py +90 -0
- {dv_flow_mgr → dv_flow/mgr}/package_def.py +96 -4
- dv_flow/mgr/pkg_rgy.py +78 -0
- {dv_flow_mgr/tasklib → dv_flow/mgr}/std/fileset.py +3 -3
- {dv_flow_mgr/tasklib → dv_flow/mgr}/std/flow.dv +6 -2
- {dv_flow_mgr/tasklib → dv_flow/mgr}/std/message.py +1 -1
- {dv_flow_mgr/tasklib → dv_flow/mgr}/std/task_fileset.py +5 -5
- dv_flow/mgr/std/task_null.py +10 -0
- {dv_flow_mgr → dv_flow/mgr}/task.py +11 -4
- {dv_flow_mgr → dv_flow/mgr}/task_data.py +18 -2
- {dv_flow_mgr → dv_flow/mgr}/task_def.py +2 -1
- dv_flow/mgr/task_graph_builder.py +189 -0
- dv_flow/mgr/task_graph_runner.py +71 -0
- dv_flow/mgr/task_graph_runner_local.py +87 -0
- dv_flow/mgr/util.py +19 -0
- {dv_flow_mgr-0.0.1.12849118090a1.dist-info → dv_flow_mgr-0.0.1.12919555073a1.dist-info}/METADATA +2 -1
- dv_flow_mgr-0.0.1.12919555073a1.dist-info/RECORD +31 -0
- dv_flow_mgr-0.0.1.12919555073a1.dist-info/entry_points.txt +2 -0
- dv_flow_mgr-0.0.1.12919555073a1.dist-info/top_level.txt +1 -0
- dv_flow_mgr/__init__.py +0 -6
- dv_flow_mgr/cmds/cmd_run.py +0 -28
- dv_flow_mgr/session.py +0 -324
- dv_flow_mgr/tasklib/builtin_pkg.py +0 -62
- dv_flow_mgr/tasklib/hdl/sim/mti_pkg.py +0 -11
- dv_flow_mgr/tasklib/hdl/sim/mti_task_sim_image.py +0 -69
- dv_flow_mgr/tasklib/hdl/sim/mti_task_sim_run.py +0 -47
- dv_flow_mgr/tasklib/hdl/sim/pkg_hdl_sim.py +0 -8
- dv_flow_mgr/tasklib/hdl/sim/task_sim_image.py +0 -16
- dv_flow_mgr/tasklib/hdl/sim/vcs_pkg.py +0 -14
- dv_flow_mgr/tasklib/hdl/sim/vcs_task_sim_image.py +0 -49
- dv_flow_mgr/tasklib/hdl/sim/vcs_task_sim_run.py +0 -45
- dv_flow_mgr/tasklib/hdl/sim/vl_task_sim_image.py +0 -96
- dv_flow_mgr/tasklib/hdl/sim/vlt_pkg.py +0 -14
- dv_flow_mgr/tasklib/hdl/sim/vlt_task_sim_image.py +0 -50
- dv_flow_mgr/tasklib/hdl/sim/vlt_task_sim_run.py +0 -45
- dv_flow_mgr/tasklib/std/pkg_std.py +0 -15
- dv_flow_mgr/tasklib/std/std.dfs +0 -7
- dv_flow_mgr/tasklib/std/task_null.py +0 -26
- dv_flow_mgr-0.0.1.12849118090a1.dist-info/RECORD +0 -42
- dv_flow_mgr-0.0.1.12849118090a1.dist-info/entry_points.txt +0 -2
- dv_flow_mgr-0.0.1.12849118090a1.dist-info/top_level.txt +0 -1
- {dv_flow_mgr → dv_flow/mgr}/fileset.py +0 -0
- {dv_flow_mgr → dv_flow/mgr}/flow.py +0 -0
- {dv_flow_mgr → dv_flow/mgr}/fragment_def.py +0 -0
- {dv_flow_mgr → dv_flow/mgr}/package.py +0 -0
- {dv_flow_mgr → dv_flow/mgr}/package_import_spec.py +0 -0
- {dv_flow_mgr → dv_flow/mgr}/parameters.py +0 -0
- {dv_flow_mgr → dv_flow/mgr}/share/flow.json +0 -0
- {dv_flow_mgr → dv_flow/mgr}/task_memento.py +0 -0
- {dv_flow_mgr-0.0.1.12849118090a1.dist-info → dv_flow_mgr-0.0.1.12919555073a1.dist-info}/LICENSE +0 -0
- {dv_flow_mgr-0.0.1.12849118090a1.dist-info → dv_flow_mgr-0.0.1.12919555073a1.dist-info}/WHEEL +0 -0
dv_flow/mgr/__init__.py
ADDED
```diff
@@ -0,0 +1,9 @@
+
+from .package_def import *
+from .task_graph_runner import *
+from .task import *
+from .task_data import *
+from .task_graph_runner import TaskGraphRunner
+from .task_graph_runner_local import TaskGraphRunnerLocal
+from .task_graph_builder import TaskGraphBuilder
+
```
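The new package `__init__` re-exports the graph-building API at the `dv_flow.mgr` root. A minimal usage sketch against these exports (assuming `PackageDef` is among the names pulled in by the wildcard import, and using the builder/runner calls that appear in `cmd_run.py` below; the task name `mypkg.MyTask` is a hypothetical example):

```python
import asyncio
from dv_flow.mgr import PackageDef, TaskGraphBuilder, TaskGraphRunnerLocal

# Load a package definition from a flow.dv file, then build and run one task graph
pkg = PackageDef.load("flow.dv")
builder = TaskGraphBuilder(root_pkg=pkg, rundir="rundir")
runner = TaskGraphRunnerLocal("rundir")
task = builder.mkTaskGraph("mypkg.MyTask")
asyncio.run(runner.run([task]))
```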
dv_flow/mgr/__main__.py
```diff
@@ -7,7 +7,7 @@ def get_parser():
     subparsers = parser.add_subparsers(required=True)
 
     run_parser = subparsers.add_parser('run', help='run a flow')
-    run_parser.add_argument("tasks", nargs='
+    run_parser.add_argument("tasks", nargs='*', help="tasks to run")
     run_parser.set_defaults(func=CmdRun())
 
     return parser
```
dv_flow/mgr/cmds/cmd_run.py
ADDED
```diff
@@ -0,0 +1,90 @@
+import asyncio
+import os
+from ..task_graph_runner import TaskGraphRunner
+from ..util import loadProjPkgDef
+from ..task_graph_builder import TaskGraphBuilder
+from ..task_graph_runner_local import TaskGraphRunnerLocal
+from ..pkg_rgy import PkgRgy
+
+
+class CmdRun(object):
+
+    def __call__(self, args):
+
+        # First, find the project we're working with
+        pkg = loadProjPkgDef(os.getcwd())
+
+        if pkg is None:
+            raise Exception("Failed to find a 'flow.dv' file that defines a package in %s or its parent directories" % os.getcwd())
+
+        print("pkg: %s" % pkg.name)
+
+        if len(args.tasks) > 0:
+            pass
+        else:
+            # Print out available tasks
+            tasks = []
+            for task in pkg.tasks:
+                tasks.append(task)
+            for frag in pkg.fragment_l:
+                for task in frag.tasks:
+                    tasks.append(task)
+            tasks.sort(key=lambda x: x.name)
+
+            max_name_len = 0
+            for t in tasks:
+                if len(t.name) > max_name_len:
+                    max_name_len = len(t.name)
+
+            print("No task specified. Available Tasks:")
+            for t in tasks:
+                desc = t.desc
+                if desc is None or t.desc == "":
+                    "<no descripion>"
+                print("%s - %s" % (t.name.ljust(max_name_len), desc))
+
+        pass
+
+        # Create a session around <pkg>
+        # Need to select a backend
+        # Need somewhere to store project config data
+        # Maybe separate into a task-graph builder and a task-graph runner
+
+        # TODO: allow user to specify run root -- maybe relative to some fixed directory?
+        rundir = os.path.join(pkg.basedir, "rundir")
+
+        builder = TaskGraphBuilder(root_pkg=pkg, rundir=rundir)
+        runner = TaskGraphRunnerLocal(rundir)
+
+        tasks = []
+
+        for spec in args.tasks:
+            task = builder.mkTaskGraph(spec)
+            tasks.append(task)
+
+        asyncio.run(runner.run(tasks))
+
+        # rgy = PkgRgy.inst()
+        # rgy.registerPackage(pkg)
+
+
+        # srcdir = os.getcwd()
+
+        # session = Session(srcdir, rundir)
+
+        # package = session.load(srcdir)
+
+        # graphs = []
+        # for task in args.tasks:
+        #     if task.find(".") == -1:
+        #         task = package.name + "." + task
+        #     subgraph = session.mkTaskGraph(task)
+        #     graphs.append(subgraph)
+
+        # awaitables = [subgraph.do_run() for subgraph in graphs]
+        # print("%d awaitables" % len(awaitables))
+
+        # out = asyncio.get_event_loop().run_until_complete(asyncio.gather(*awaitables))
+
+        # print("out: %s" % str(out))
+
```
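One defect worth flagging in the task-listing branch above: the bare string literal `"<no descripion>"` is a no-op expression (and misspelled), so `desc` is never replaced when a task has no description. A likely intended form, offered as a sketch rather than the project's actual fix:

```python
desc = t.desc
if desc is None or desc == "":
    desc = "<no description>"  # assign the placeholder instead of discarding it
print("%s - %s" % (t.name.ljust(max_name_len), desc))
```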
dv_flow/mgr/package_def.py
```diff
@@ -19,7 +19,9 @@
 #* Author:
 #*
 #****************************************************************************
+import os
 import json
+import yaml
 import importlib
 import sys
 import pydantic
@@ -32,7 +34,7 @@ from .package import Package
 from .package_import_spec import PackageImportSpec, PackageSpec
 from .task import TaskCtor, TaskParams
 from .task_def import TaskDef, TaskSpec
-from .
+from .std.task_null import TaskNull
 
 
 class PackageDef(BaseModel):
@@ -144,7 +146,8 @@ class PackageDef(BaseModel):
             else:
                 mod = sys.modules[modname]
         except ModuleNotFoundError as e:
-            raise Exception("Failed to import module %s" %
+            raise Exception("Failed to import module %s (basedir=%s): %s" % (
+                modname, self.basedir, str(e)))
 
         if not hasattr(mod, clsname):
             raise Exception("Class %s not found in module %s" % (clsname, modname))
@@ -170,7 +173,15 @@ class PackageDef(BaseModel):
             "str" : str,
             "int" : int,
             "float" : float,
-            "bool" : bool
+            "bool" : bool,
+            "list" : List
+        }
+        pdflt_m = {
+            "str" : "",
+            "int" : 0,
+            "float" : 0.0,
+            "bool" : False,
+            "list" : []
         }
         for p in task.params.keys():
             param = task.params[p]
@@ -185,7 +196,7 @@ class PackageDef(BaseModel):
                 if "value" in param.keys():
                     field_m[p] = (ptype, param["value"])
                 else:
-                    field_m[p] = (ptype, )
+                    field_m[p] = (ptype, pdflt_m[ptype_s])
             else:
                 if p not in field_m.keys():
                     raise Exception("Field %s not found" % p)
@@ -208,3 +219,84 @@ class PackageDef(BaseModel):
 
         return ctor_t
 
+    @staticmethod
+    def load(path, exp_pkg_name=None):
+        return PackageDef._loadPkgDef(path, exp_pkg_name, [])
+        pass
+
+    @staticmethod
+    def _loadPkgDef(root, exp_pkg_name, file_s):
+        if root in file_s:
+            raise Exception("Recursive file processing @ %s: %s" % (root, ",".join(file_s)))
+        file_s.append(root)
+        ret = None
+        with open(root, "r") as fp:
+            print("open %s" % root)
+            doc = yaml.load(fp, Loader=yaml.FullLoader)
+            if "package" not in doc.keys():
+                raise Exception("Missing 'package' key in %s" % root)
+            pkg = PackageDef(**(doc["package"]))
+            pkg.basedir = os.path.dirname(root)
+
+            # for t in pkg.tasks:
+            #     t.basedir = os.path.dirname(root)
+
+        if exp_pkg_name is not None:
+            if exp_pkg_name != pkg.name:
+                raise Exception("Package name mismatch: %s != %s" % (exp_pkg_name, pkg.name))
+            # else:
+            #     self._pkg_m[exp_pkg_name] = [PackageSpec(pkg.name)
+            #     self._pkg_spec_s.append(PackageSpec(pkg.name))
+
+        # if not len(self._pkg_spec_s):
+        #     self._pkg_spec_s.append(PackageSpec(pkg.name))
+        # else:
+        #     self._pkg_def_m[PackageSpec(pkg.name)] = pkg
+
+        print("pkg: %s" % str(pkg))
+
+        print("fragments: %s" % str(pkg.fragments))
+        for spec in pkg.fragments:
+            PackageDef._loadFragmentSpec(pkg, spec, file_s)
+
+        file_s.pop()
+
+        return pkg
+
+    @staticmethod
+    def _loadFragmentSpec(pkg, spec, file_s):
+        # We're either going to have:
+        # - File path
+        # - Directory path
+
+        if os.path.isfile(os.path.join(pkg.basedir, spec)):
+            PackageDef._loadFragmentFile(pkg, spec, file_s)
+        elif os.path.isdir(os.path.join(pkg.basedir, spec)):
+            PackageDef._loadFragmentDir(pkg, os.path.join(pkg.basedir, spec), file_s)
+        else:
+            raise Exception("Fragment spec %s not found" % spec)
+
+    @staticmethod
+    def _loadFragmentDir(pkg, dir, file_s):
+        for file in os.listdir(dir):
+            if os.path.isdir(os.path.join(dir, file)):
+                PackageDef._loadFragmentDir(pkg, os.path.join(dir, file), file_s)
+            elif os.path.isfile(os.path.join(dir, file)) and file == "flow.dv":
+                PackageDef._loadFragmentFile(pkg, os.path.join(dir, file), file_s)
+
+    @staticmethod
+    def _loadFragmentFile(pkg, file, file_s):
+        if file in file_s:
+            raise Exception("Recursive file processing @ %s: %s" % (file, ", ".join(file_s)))
+        file_s.append(file)
+
+        with open(file, "r") as fp:
+            doc = yaml.load(fp, Loader=yaml.FullLoader)
+            print("doc: %s" % str(doc), flush=True)
+            if "fragment" in doc.keys():
+                # Merge the package definition
+                frag = FragmentDef(**(doc["fragment"]))
+                frag.basedir = os.path.dirname(file)
+                pkg.fragment_l.append(frag)
+            else:
+                print("Warning: file %s is not a fragment" % file)
```
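The new static `load` path makes `PackageDef` a self-contained YAML loader: it parses the `package` key, records the file's directory as `basedir`, recursively pulls in `fragments` (files or directories scanned for `flow.dv`), and guards against recursive includes. A minimal sketch of driving it directly (the file name and expected package name are hypothetical):

```python
from dv_flow.mgr.package_def import PackageDef

# flow.dv must carry a top-level 'package' key; exp_pkg_name is an
# optional sanity check against the package's declared name
pkg = PackageDef.load("flow.dv", exp_pkg_name="mypkg")
print(pkg.name, pkg.basedir)
```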
dv_flow/mgr/pkg_rgy.py
ADDED
```diff
@@ -0,0 +1,78 @@
+import os
+import sys
+from typing import Dict, Tuple
+from .package_def import PackageDef
+
+class PkgRgy(object):
+    _inst = None
+
+    def __init__(self):
+        self._pkgpath = []
+        self._pkg_m : Dict[str, Tuple[str,PackageDef]] = {}
+
+    def hasPackage(self, name, search_path=False):
+        if name in self._pkg_m.keys():
+            return True
+        elif search_path:
+            for p in self._pkgpath:
+                if os.path.exists(os.path.join(p, name)):
+                    return True
+        else:
+            return False
+
+    def getPackage(self, name):
+        if name in self._pkg_m.keys():
+            if self._pkg_m[name][1] is None:
+                pkg_def = PackageDef.load(self._pkg_m[name][0])
+                # Load the package
+                self._pkg_m[name] = (
+                    self._pkg_m[name][0],
+                    pkg_def
+                )
+                pass
+            return self._pkg_m[name][1]
+        else:
+            # Go search the package path
+            return None
+
+    def registerPackage(self, pkg_def):
+        if pkg_def.name in self._pkg_m.keys():
+            raise Exception("Duplicate package %s" % pkg_def.name)
+        self._pkg_m[pkg_def.name] = pkg_def
+
+    def _discover_plugins(self):
+        # Register built-in package
+        self._pkg_m["std"] = (os.path.join(os.path.dirname(__file__), "std/flow.dv"), None)
+
+        if sys.version_info < (3,10):
+            from importlib_metadata import entry_points
+        else:
+            from importlib.metadata import entry_points
+
+        discovered_plugins = entry_points(group='dv_flow.mgr')
+        for p in discovered_plugins:
+            try:
+                mod = p.load()
+
+                if hasattr(mod, "dvfm_packages"):
+                    pkg_m = mod.dvfm_packages()
+
+                    for name,path in pkg_m.items():
+                        if name in self._pkg_m.keys():
+                            raise Exception("Package %s already registered using path %s. Conflicting path: %s" % (
+                                name, self._pkg_m[name][0], path))
+                        self._pkg_m[name] = (path, None)
+            except Exception as e:
+                print("Error loading plugin %s: %s" % (p.name, str(e)))
+                raise e
+
+        # self._pkgs = {}
+        # for pkg in self._load_pkg_list():
+        #     self._pkgs[pkg.name] = pkg
+
+    @classmethod
+    def inst(cls):
+        if cls._inst is None:
+            cls._inst = cls()
+            cls._inst._discover_plugins()
+        return cls._inst
```
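The registry establishes a plugin protocol: a package advertises itself through a `dv_flow.mgr` entry point whose module exposes `dvfm_packages()`, returning a map from package name to `flow.dv` path; packages are then loaded lazily on first `getPackage`. A sketch of a hypothetical third-party plugin module:

```python
# my_plugin/__init__.py -- hypothetical plugin example.
# Registered in pyproject.toml as:
#   [project.entry-points."dv_flow.mgr"]
#   my_plugin = "my_plugin"
import os

def dvfm_packages():
    # Map package name -> path to its flow.dv, relative to this module
    pkg_dir = os.path.dirname(os.path.abspath(__file__))
    return {"my_pkg": os.path.join(pkg_dir, "flow.dv")}
```

Note, separately, that `registerPackage` stores the `PackageDef` directly while the discovery path stores `(path, PackageDef)` tuples that `getPackage` indexes with `[1]`; the two representations appear inconsistent as written.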
dv_flow/mgr/std/fileset.py
```diff
@@ -4,8 +4,8 @@ import fnmatch
 import glob
 import pydantic.dataclasses as dc
 from typing import List, Tuple
-from
-from
+from dv_flow.mgr import Task, TaskData, TaskMemento
+from dv_flow.mgr import FileSet as _FileSet
 
 class TaskFileSetMemento(TaskMemento):
     files : List[Tuple[str,float]] = dc.Field(default_factory=list)
@@ -45,7 +45,7 @@ class FileSet(Task):
         for file in included_files:
             if not any(glob.fnmatch.fnmatch(file, os.path.join(glob_root, pattern)) for pattern in self.params.exclude):
                 memento.files.append((file, os.path.getmtime(os.path.join(glob_root, file))))
-                fs.files.append(file[len(glob_root):])
+                fs.files.append(file[len(glob_root)+1:])
 
         # Check to see if the filelist or fileset have changed
        # Only bother doing this if the upstream task data has not changed
```
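The `+1` in the second hunk fixes an off-by-one: slicing at `len(glob_root)` leaves the leading path separator on every stored file, so downstream joins would treat the path as absolute. A quick illustration with hypothetical values:

```python
import os

glob_root = "/proj/src"
path = os.path.join(glob_root, "rtl/top.sv")   # "/proj/src/rtl/top.sv"

print(path[len(glob_root):])     # "/rtl/top.sv" -- keeps the leading separator
print(path[len(glob_root)+1:])   # "rtl/top.sv"  -- the intended relative path
```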
dv_flow/mgr/std/flow.dv
```diff
@@ -4,13 +4,13 @@ package:
 
   tasks:
   - name: Message
-    pyclass: message.Message
+    pyclass: dv_flow.mgr.std.message.Message
     with:
       msg:
         type: str
         value: ""
   - name: FileSet
-    pyclass: fileset.FileSet
+    pyclass: dv_flow.mgr.std.fileset.FileSet
     with:
       base:
         type: str
@@ -24,3 +24,7 @@ package:
       exclude:
         type: str
         value: ""
+#  - name: Exec
+#    pyclass: dv_flow.mgr.std.exec.Exec
+#    with: {}
+
```
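The `pyclass` entries are now fully-qualified dotted paths, which matches the `package_def.py` change that imports the module by name and looks the class up on it. A sketch of that resolution convention as implied by the error messages in this diff (not the project's exact code):

```python
import importlib

def resolve_pyclass(pyclass: str):
    # "dv_flow.mgr.std.fileset.FileSet" -> module path + class name
    modname, clsname = pyclass.rsplit(".", 1)
    mod = importlib.import_module(modname)
    if not hasattr(mod, clsname):
        raise Exception("Class %s not found in module %s" % (clsname, modname))
    return getattr(mod, clsname)
```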
dv_flow/mgr/std/task_fileset.py
```diff
@@ -2,11 +2,11 @@ import os
 import glob
 import fnmatch
 import pydantic.dataclasses as dc
-from
-from
-from
-from
-from
+from ..fileset import FileSet
+from ..package import TaskCtor
+from ..task import Task, TaskParams
+from ..task_data import TaskData
+from ..task_memento import TaskMemento
 from typing import List, Tuple
 
 class TaskFileSet(Task):
```
dv_flow/mgr/task.py
```diff
@@ -63,10 +63,10 @@ class TaskCtor(object):
 class Task(object):
     """Executable view of a task"""
     name : str
-    session : 'Session'
     params : TaskParams
-    basedir : str
     srcdir : str = None
+    session : 'TaskGraphRunner' = None
+    basedir : str = None
     memento : TaskMemento = None
     depend_refs : List['TaskSpec'] = dc.field(default_factory=list)
     depends : List[int] = dc.field(default_factory=list)
@@ -82,6 +82,10 @@ class Task(object):
     body: Dict[str,Any] = dc.field(default_factory=dict)
     impl_t : Any = None
 
+    def init(self, runner, basedir):
+        self.session = runner
+        self.basedir = basedir
+
     def getMemento(self, T) -> TaskMemento:
         if os.path.isfile(os.path.join(self.rundir, "memento.json")):
             with open(os.path.join(self.rundir, "memento.json"), "r") as fp:
@@ -110,7 +114,7 @@ class Task(object):
             print("deps_o: %s" % str(deps_o))
 
 
-            print("deps_m: %s" % str(deps_m))
+            # print("deps_m: %s" % str(deps_m))
 
             # Merge the output of the dependencies into a single input data
             # if len(self.depends) > 1:
@@ -119,7 +123,10 @@ class Task(object):
             # Now that we have a clean input object, we need
             # to build the dep map
 
-            input = self.depends[0].output.copy()
+            # input = self.depends[0].output.copy()
+            input = TaskData.merge(deps_o)
+            input.src = self.name
+            input.deps[self.name] = list(inp.name for inp in self.depends)
         else:
             input = TaskData()
 
```
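The run path now builds a task's input from every dependency's output via `TaskData.merge(deps_o)` instead of copying only the first dependency's output, then stamps the result with the task's own name and dependency list. `TaskData.merge` itself is not shown anywhere in this diff; purely as a hypothetical sketch of the shape such a merge would need, given the fields used here (`src`, `deps`, `filesets`):

```python
# Hypothetical sketch only -- TaskData.merge does not appear in this diff.
# At minimum it must fold each dependency's filesets and deps map into
# one TaskData for the consuming task.
@staticmethod
def merge(deps_o: list) -> "TaskData":
    ret = TaskData()
    for dep in deps_o:
        ret.filesets.extend(dep.filesets)   # carry forward produced filesets
        ret.deps.update(dep.deps)           # union the task-dependency map
    return ret
```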
dv_flow/mgr/task_data.py
```diff
@@ -69,20 +69,34 @@ class TaskData(BaseModel):
     def getFileSets(self, type=None, order=True) -> List[FileSet]:
         ret = []
 
+        print("getFileSets: filesets=%s" % str(self.filesets))
+
         if order:
             # The deps map specifies task dependencies
 
             candidate_fs = []
             for fs in self.filesets:
+                print("fs: %s" % str(fs))
                 if type is None or fs.type in type:
                     candidate_fs.append(fs)
-
+            print("self.deps: %s" % str(self.deps))
             order = toposort(self.deps)
 
+            print("order: %s" % str(order))
+
             for order_s in order:
-
+                print("order_s: %s" % str(order_s))
+                i = 0
+                while i < len(candidate_fs):
+                    fs = candidate_fs[i]
+                    print("fs.src: %s" % fs.src)
                     if fs.src in order_s:
+                        print("Add fileset")
                         ret.append(fs)
+                        candidate_fs.pop(i)
+                    else:
+                        i += 1
+            ret.extend(candidate_fs)
         else:
             for fs in self.filesets:
                 if type is None or fs.type in type:
@@ -93,7 +107,9 @@ class TaskData(BaseModel):
     def copy(self) -> 'TaskData':
         ret = TaskData()
         ret.src = self.src
+        ret.basedir = self.basedir
         ret.params = self.params.copy()
+        ret.filesets = self.filesets.copy()
         for d in self.deps:
             ret.deps.append(d.clone())
         ret.changed = self.changed
```
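Stripped of the added `print` tracing (which reads like temporary debug scaffolding), the new ordered path selects filesets level by level from the toposorted dependency map, then appends any leftovers whose producer does not appear in the map. A distilled, behaviorally equivalent sketch (assuming the same `toposort` helper the module already calls, which yields sets of task names from earliest dependency level to latest):

```python
from toposort import toposort  # assumed: same helper task_data.py already uses

def ordered_filesets(filesets, deps, type=None):
    # Keep only filesets of the requested type(s)
    candidates = [fs for fs in filesets if type is None or fs.type in type]
    ret = []
    for level in toposort(deps):
        # Emit this dependency level's filesets, preserving relative order
        ret.extend(fs for fs in candidates if fs.src in level)
        candidates = [fs for fs in candidates if fs.src not in level]
    ret.extend(candidates)  # producers absent from the deps map come last
    return ret
```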
|
34
34
|
# type : Union[str,TaskSpec] = dc.Field(default_factory=list)
|
35
35
|
uses : str = dc.Field(default=None)
|
36
36
|
pyclass : str = dc.Field(default=None)
|
37
|
-
|
37
|
+
desc : str = dc.Field(default="")
|
38
|
+
doc : str = dc.Field(default="")
|
38
39
|
depends : List[Union[str,TaskSpec]] = dc.Field(default_factory=list, alias="needs")
|
39
40
|
params: Dict[str,Any] = dc.Field(default_factory=dict, alias="with")
|
40
41
|
|
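`TaskDef` now carries `desc` and `doc` fields; `desc` is what the new `run` command prints when listing available tasks. Given the pydantic aliases shown above (`needs` for `depends`, `with` for `params`), a task entry could now carry a short description. A hypothetical example, shown as YAML parsed in Python:

```python
import yaml

# Hypothetical flow.dv task entry exercising the new 'desc' field;
# 'needs' and 'with' map onto TaskDef.depends / TaskDef.params via aliases.
doc = yaml.safe_load("""
name: build
desc: Compile the design
needs: [files]
with:
  opts:
    type: str
    value: ""
""")
# TaskDef(**doc) would populate desc="Compile the design"
```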