dv-flow-mgr 0.0.1.12822558956a1__py3-none-any.whl → 0.0.1.12911707440a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. {dv_flow_mgr → dv_flow/mgr}/__init__.py +1 -1
  2. {dv_flow_mgr → dv_flow/mgr}/__main__.py +1 -1
  3. dv_flow/mgr/cmds/cmd_run.py +90 -0
  4. {dv_flow_mgr → dv_flow/mgr}/package.py +0 -20
  5. dv_flow/mgr/package_def.py +302 -0
  6. dv_flow/mgr/pkg_rgy.py +78 -0
  7. dv_flow/mgr/std/fileset.py +68 -0
  8. dv_flow/mgr/std/flow.dv +30 -0
  9. dv_flow/mgr/std/message.py +7 -0
  10. {dv_flow_mgr/tasklib → dv_flow/mgr}/std/task_fileset.py +5 -5
  11. dv_flow/mgr/std/task_null.py +10 -0
  12. {dv_flow_mgr → dv_flow/mgr}/task.py +33 -72
  13. {dv_flow_mgr → dv_flow/mgr}/task_data.py +18 -2
  14. {dv_flow_mgr → dv_flow/mgr}/task_def.py +5 -2
  15. dv_flow/mgr/task_graph_builder.py +190 -0
  16. dv_flow/mgr/task_graph_runner.py +71 -0
  17. dv_flow/mgr/task_graph_runner_local.py +79 -0
  18. dv_flow/mgr/util.py +19 -0
  19. {dv_flow_mgr-0.0.1.12822558956a1.dist-info → dv_flow_mgr-0.0.1.12911707440a1.dist-info}/METADATA +1 -1
  20. dv_flow_mgr-0.0.1.12911707440a1.dist-info/RECORD +31 -0
  21. dv_flow_mgr-0.0.1.12911707440a1.dist-info/entry_points.txt +2 -0
  22. dv_flow_mgr-0.0.1.12911707440a1.dist-info/top_level.txt +1 -0
  23. dv_flow_mgr/cmds/cmd_run.py +0 -28
  24. dv_flow_mgr/package_def.py +0 -98
  25. dv_flow_mgr/session.py +0 -290
  26. dv_flow_mgr/tasklib/builtin_pkg.py +0 -61
  27. dv_flow_mgr/tasklib/hdl/sim/mti_pkg.py +0 -11
  28. dv_flow_mgr/tasklib/hdl/sim/mti_task_sim_image.py +0 -69
  29. dv_flow_mgr/tasklib/hdl/sim/mti_task_sim_run.py +0 -47
  30. dv_flow_mgr/tasklib/hdl/sim/pkg_hdl_sim.py +0 -8
  31. dv_flow_mgr/tasklib/hdl/sim/task_sim_image.py +0 -16
  32. dv_flow_mgr/tasklib/hdl/sim/vcs_pkg.py +0 -14
  33. dv_flow_mgr/tasklib/hdl/sim/vcs_task_sim_image.py +0 -49
  34. dv_flow_mgr/tasklib/hdl/sim/vcs_task_sim_run.py +0 -45
  35. dv_flow_mgr/tasklib/hdl/sim/vl_task_sim_image.py +0 -96
  36. dv_flow_mgr/tasklib/hdl/sim/vlt_pkg.py +0 -14
  37. dv_flow_mgr/tasklib/hdl/sim/vlt_task_sim_image.py +0 -50
  38. dv_flow_mgr/tasklib/hdl/sim/vlt_task_sim_run.py +0 -45
  39. dv_flow_mgr/tasklib/std/fileset.py +0 -5
  40. dv_flow_mgr/tasklib/std/flow.dv +0 -12
  41. dv_flow_mgr/tasklib/std/pkg_std.py +0 -15
  42. dv_flow_mgr/tasklib/std/std.dfs +0 -7
  43. dv_flow_mgr/tasklib/std/task_null.py +0 -26
  44. dv_flow_mgr-0.0.1.12822558956a1.dist-info/RECORD +0 -41
  45. dv_flow_mgr-0.0.1.12822558956a1.dist-info/entry_points.txt +0 -2
  46. dv_flow_mgr-0.0.1.12822558956a1.dist-info/top_level.txt +0 -1
  47. {dv_flow_mgr → dv_flow/mgr}/fileset.py +0 -0
  48. {dv_flow_mgr → dv_flow/mgr}/flow.py +0 -0
  49. {dv_flow_mgr → dv_flow/mgr}/fragment_def.py +0 -0
  50. {dv_flow_mgr → dv_flow/mgr}/package_import_spec.py +0 -0
  51. {dv_flow_mgr → dv_flow/mgr}/parameters.py +0 -0
  52. {dv_flow_mgr → dv_flow/mgr}/share/flow.json +0 -0
  53. {dv_flow_mgr → dv_flow/mgr}/task_memento.py +0 -0
  54. {dv_flow_mgr-0.0.1.12822558956a1.dist-info → dv_flow_mgr-0.0.1.12911707440a1.dist-info}/LICENSE +0 -0
  55. {dv_flow_mgr-0.0.1.12822558956a1.dist-info → dv_flow_mgr-0.0.1.12911707440a1.dist-info}/WHEEL +0 -0
@@ -1,6 +1,6 @@
1
1
 
2
2
  from .package_def import *
3
- from .session import *
3
+ from .task_graph_runner import *
4
4
  from .task import *
5
5
  from .task_data import *
6
6
 
@@ -7,7 +7,7 @@ def get_parser():
7
7
  subparsers = parser.add_subparsers(required=True)
8
8
 
9
9
  run_parser = subparsers.add_parser('run', help='run a flow')
10
- run_parser.add_argument("tasks", nargs='+', help="tasks to run")
10
+ run_parser.add_argument("tasks", nargs='*', help="tasks to run")
11
11
  run_parser.set_defaults(func=CmdRun())
12
12
 
13
13
  return parser
@@ -0,0 +1,90 @@
1
+ import asyncio
2
+ import os
3
+ from ..task_graph_runner import TaskGraphRunner
4
+ from ..util import loadProjPkgDef
5
+ from ..task_graph_builder import TaskGraphBuilder
6
+ from ..task_graph_runner_local import TaskGraphRunnerLocal
7
+ from ..pkg_rgy import PkgRgy
8
+
9
+
10
+ class CmdRun(object):
11
+
12
+ def __call__(self, args):
13
+
14
+ # First, find the project we're working with
15
+ pkg = loadProjPkgDef(os.getcwd())
16
+
17
+ if pkg is None:
18
+ raise Exception("Failed to find a 'flow.dv' file that defines a package in %s or its parent directories" % os.getcwd())
19
+
20
+ print("pkg: %s" % pkg.name)
21
+
22
+ if len(args.tasks) > 0:
23
+ pass
24
+ else:
25
+ # Print out available tasks
26
+ tasks = []
27
+ for task in pkg.tasks:
28
+ tasks.append(task)
29
+ for frag in pkg.fragment_l:
30
+ for task in frag.tasks:
31
+ tasks.append(task)
32
+ tasks.sort(key=lambda x: x.name)
33
+
34
+ max_name_len = 0
35
+ for t in tasks:
36
+ if len(t.name) > max_name_len:
37
+ max_name_len = len(t.name)
38
+
39
+ print("No task specified. Available Tasks:")
40
+ for t in tasks:
41
+ desc = t.desc
42
+ if desc is None or t.desc == "":
43
+ "<no descripion>"
44
+ print("%s - %s" % (t.name.ljust(max_name_len), desc))
45
+
46
+ pass
47
+
48
+ # Create a session around <pkg>
49
+ # Need to select a backend
50
+ # Need somewhere to store project config data
51
+ # Maybe separate into a task-graph builder and a task-graph runner
52
+
53
+ # TODO: allow user to specify run root -- maybe relative to some fixed directory?
54
+ rundir = os.path.join(pkg.basedir, "rundir")
55
+
56
+ builder = TaskGraphBuilder(root_pkg=pkg, rundir=rundir)
57
+ runner = TaskGraphRunnerLocal(rundir)
58
+
59
+ tasks = []
60
+
61
+ for spec in args.tasks:
62
+ task = builder.mkTaskGraph(spec)
63
+ tasks.append(task)
64
+
65
+ asyncio.run(runner.run(tasks))
66
+
67
+ # rgy = PkgRgy.inst()
68
+ # rgy.registerPackage(pkg)
69
+
70
+
71
+ # srcdir = os.getcwd()
72
+
73
+ # session = Session(srcdir, rundir)
74
+
75
+ # package = session.load(srcdir)
76
+
77
+ # graphs = []
78
+ # for task in args.tasks:
79
+ # if task.find(".") == -1:
80
+ # task = package.name + "." + task
81
+ # subgraph = session.mkTaskGraph(task)
82
+ # graphs.append(subgraph)
83
+
84
+ # awaitables = [subgraph.do_run() for subgraph in graphs]
85
+ # print("%d awaitables" % len(awaitables))
86
+
87
+ # out = asyncio.get_event_loop().run_until_complete(asyncio.gather(*awaitables))
88
+
89
+ # print("out: %s" % str(out))
90
+
@@ -54,26 +54,6 @@ class Package(object):
54
54
  def getTaskCtor(self, name : str) -> TaskCtor:
55
55
  return self.tasks[name]
56
56
 
57
- def mkTaskParams(self, name : str) -> TaskParams:
58
- if name not in self.tasks:
59
- raise Exception("Task " + name + " not found")
60
- return self.tasks[name].mkTaskParams()
61
-
62
- def setTaskParams(self, name : str, params : TaskParams, pvals : Dict[str,Any]):
63
- if name not in self.tasks:
64
- raise Exception("Task " + name + " not found")
65
- self.tasks[name].setTaskParams(params, pvals)
66
-
67
- def mkTask(self,
68
- name : str,
69
- task_id : int,
70
- session : 'Session',
71
- params : TaskParams,
72
- depends : List['Task']) -> 'Task':
73
- # TODO: combine parameters to create the full taskname
74
- task = self.tasks[name].mkTask(name, task_id, session, params, depends)
75
- return task
76
-
77
57
  def __hash__(self):
78
58
  return hash(self.fullname())
79
59
 
@@ -0,0 +1,302 @@
1
+ #****************************************************************************
2
+ #* package_def.py
3
+ #*
4
+ #* Copyright 2023 Matthew Ballance and Contributors
5
+ #*
6
+ #* Licensed under the Apache License, Version 2.0 (the "License"); you may
7
+ #* not use this file except in compliance with the License.
8
+ #* You may obtain a copy of the License at:
9
+ #*
10
+ #* http://www.apache.org/licenses/LICENSE-2.0
11
+ #*
12
+ #* Unless required by applicable law or agreed to in writing, software
13
+ #* distributed under the License is distributed on an "AS IS" BASIS,
14
+ #* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
+ #* See the License for the specific language governing permissions and
16
+ #* limitations under the License.
17
+ #*
18
+ #* Created on:
19
+ #* Author:
20
+ #*
21
+ #****************************************************************************
22
+ import os
23
+ import json
24
+ import yaml
25
+ import importlib
26
+ import sys
27
+ import pydantic
28
+ import pydantic.dataclasses as dc
29
+ from pydantic import BaseModel
30
+ from typing import Any, Dict, List, Callable, Tuple
31
+ from .flow import Flow
32
+ from .fragment_def import FragmentDef
33
+ from .package import Package
34
+ from .package_import_spec import PackageImportSpec, PackageSpec
35
+ from .task import TaskCtor, TaskParams
36
+ from .task_def import TaskDef, TaskSpec
37
+ from .std.task_null import TaskNull
38
+
39
+
40
+ class PackageDef(BaseModel):
41
+ name : str
42
+ params : Dict[str,Any] = dc.Field(default_factory=dict)
43
+ type : List[PackageSpec] = dc.Field(default_factory=list)
44
+ tasks : List[TaskDef] = dc.Field(default_factory=list)
45
+ imports : List[PackageImportSpec] = dc.Field(default_factory=list)
46
+ fragments: List[str] = dc.Field(default_factory=list)
47
+
48
+ fragment_l : List['FragmentDef'] = dc.Field(default_factory=list, exclude=True)
49
+
50
+ # import_m : Dict['PackageSpec','Package'] = dc.Field(default_factory=dict)
51
+
52
+ basedir : str = None
53
+
54
+ def getTask(self, name : str) -> 'TaskDef':
55
+ for t in self.tasks:
56
+ if t.name == name:
57
+ return t
58
+
59
+ def mkPackage(self, session, params : Dict[str,Any] = None) -> 'Package':
60
+ ret = Package(self.name)
61
+
62
+ session.push_package(ret)
63
+
64
+ tasks_m : Dict[str,str,TaskCtor]= {}
65
+
66
+ for task in self.tasks:
67
+ if task.name in tasks_m.keys():
68
+ raise Exception("Duplicate task %s" % task.name)
69
+ tasks_m[task.name] = (task, self.basedir, ) # We'll add a TaskCtor later
70
+
71
+ for frag in self.fragment_l:
72
+ for task in frag.tasks:
73
+ if task.name in tasks_m.keys():
74
+ raise Exception("Duplicate task %s" % task.name)
75
+ tasks_m[task.name] = (task, frag.basedir, ) # We'll add a TaskCtor later
76
+
77
+ # Now we have a unified map of the tasks declared in this package
78
+ for name in list(tasks_m.keys()):
79
+ task_i = tasks_m[name]
80
+ if len(task_i) < 3:
81
+ # Need to create the task ctor
82
+ ctor_t = self.mkTaskCtor(session, task_i[0], task_i[1], tasks_m)
83
+ tasks_m[name] = (task_i[0], task_i[1], ctor_t)
84
+ ret.tasks[name] = tasks_m[name][2]
85
+
86
+ session.pop_package(ret)
87
+
88
+ return ret
89
+
90
+ def mkTaskCtor(self, session, task, srcdir, tasks_m) -> TaskCtor:
91
+ ctor_t : TaskCtor = None
92
+
93
+ if task.uses is not None:
94
+ # Find package (not package_def) that implements this task
95
+ # Insert an indirect reference to that tasks's constructor
96
+ last_dot = task.uses.rfind('.')
97
+
98
+ if last_dot != -1:
99
+ pkg_name = task.uses[:last_dot]
100
+ task_name = task.uses[last_dot+1:]
101
+ else:
102
+ pkg_name = None
103
+ task_name = task.uses
104
+
105
+ if pkg_name is not None:
106
+ pkg = session.getPackage(PackageSpec(pkg_name))
107
+ if pkg is None:
108
+ raise Exception("Failed to find package %s" % pkg_name)
109
+ ctor_t = pkg.getTaskCtor(task_name)
110
+ ctor_t = ctor_t.copy()
111
+ ctor_t.srcdir = srcdir
112
+ else:
113
+ if task_name not in tasks_m.keys():
114
+ raise Exception("Failed to find task %s" % task_name)
115
+ if len(tasks_m[task_name]) == 3:
116
+ ctor_t = tasks_m[task_name][2].copy()
117
+ ctor_t.srcdir = srcdir
118
+ else:
119
+ task_i = tasks_m[task_name]
120
+ ctor_t = self.mkTaskCtor(
121
+ session,
122
+ task=task_i[0],
123
+ srcdir=srcdir,
124
+ tasks_m=tasks_m)
125
+ tasks_m[task_name] = ctor_t
126
+
127
+ if ctor_t is None:
128
+ # Provide a default implementation
129
+ ctor_t = TaskCtor(
130
+ task_ctor=TaskNull,
131
+ param_ctor=TaskParams,
132
+ srcdir=srcdir)
133
+
134
+ if task.pyclass is not None:
135
+ # Built-in impl
136
+ # Now, lookup the class
137
+ last_dot = task.pyclass.rfind('.')
138
+ clsname = task.pyclass[last_dot+1:]
139
+ modname = task.pyclass[:last_dot]
140
+
141
+ try:
142
+ if modname not in sys.modules:
143
+ if self.basedir not in sys.path:
144
+ sys.path.append(self.basedir)
145
+ mod = importlib.import_module(modname)
146
+ else:
147
+ mod = sys.modules[modname]
148
+ except ModuleNotFoundError as e:
149
+ raise Exception("Failed to import module %s (basedir=%s): %s" % (
150
+ modname, self.basedir, str(e)))
151
+
152
+ if not hasattr(mod, clsname):
153
+ raise Exception("Class %s not found in module %s" % (clsname, modname))
154
+ ctor_t.task_ctor = getattr(mod, clsname)
155
+
156
+ if task.uses is None:
157
+ ctor_t.param_ctor = TaskParams
158
+
159
+ decl_params = False
160
+ for value in task.params.values():
161
+ if "type" in value:
162
+ decl_params = True
163
+ break
164
+
165
+ if decl_params:
166
+ # We need to combine base parameters with new parameters
167
+ field_m = {}
168
+ # First, add parameters from the base class
169
+ for fname,info in ctor_t.param_ctor.model_fields.items():
170
+ print("Field: %s (%s)" % (fname, info.default))
171
+ field_m[fname] = (info.annotation, info.default)
172
+ ptype_m = {
173
+ "str" : str,
174
+ "int" : int,
175
+ "float" : float,
176
+ "bool" : bool,
177
+ "list" : List
178
+ }
179
+ pdflt_m = {
180
+ "str" : "",
181
+ "int" : 0,
182
+ "float" : 0.0,
183
+ "bool" : False,
184
+ "list" : []
185
+ }
186
+ for p in task.params.keys():
187
+ param = task.params[p]
188
+ if type(param) == dict and "type" in param.keys():
189
+ ptype_s = param["type"]
190
+ if ptype_s not in ptype_m.keys():
191
+ raise Exception("Unknown type %s" % ptype_s)
192
+ ptype = ptype_m[ptype_s]
193
+
194
+ if p in field_m.keys():
195
+ raise Exception("Duplicate field %s" % p)
196
+ if "value" in param.keys():
197
+ field_m[p] = (ptype, param["value"])
198
+ else:
199
+ field_m[p] = (ptype, pdflt_m[ptype_s])
200
+ else:
201
+ if p not in field_m.keys():
202
+ raise Exception("Field %s not found" % p)
203
+ if type(param) != dict:
204
+ value = param
205
+ elif "value" in param.keys():
206
+ value = param["value"]
207
+ else:
208
+ raise Exception("No value specified for param %s: %s" % (
209
+ p, str(param)))
210
+ field_m[p] = (field_m[p][0], value)
211
+ print("field_m: %s" % str(field_m))
212
+ ctor_t.param_ctor = pydantic.create_model(
213
+ "Task%sParams" % task.name, **field_m)
214
+ else:
215
+ if len(task.params) > 0:
216
+ ctor_t.params = task.params
217
+ if len(task.depends) > 0:
218
+ ctor_t.depends.extends(task.depends)
219
+
220
+ return ctor_t
221
+
222
+ @staticmethod
223
+ def load(path, exp_pkg_name=None):
224
+ return PackageDef._loadPkgDef(path, exp_pkg_name, [])
225
+ pass
226
+
227
+ @staticmethod
228
+ def _loadPkgDef(root, exp_pkg_name, file_s):
229
+ if root in file_s:
230
+ raise Exception("Recursive file processing @ %s: %s" % (root, ",".join(file_s)))
231
+ file_s.append(root)
232
+ ret = None
233
+ with open(root, "r") as fp:
234
+ print("open %s" % root)
235
+ doc = yaml.load(fp, Loader=yaml.FullLoader)
236
+ if "package" not in doc.keys():
237
+ raise Exception("Missing 'package' key in %s" % root)
238
+ pkg = PackageDef(**(doc["package"]))
239
+ pkg.basedir = os.path.dirname(root)
240
+
241
+ # for t in pkg.tasks:
242
+ # t.basedir = os.path.dirname(root)
243
+
244
+ if exp_pkg_name is not None:
245
+ if exp_pkg_name != pkg.name:
246
+ raise Exception("Package name mismatch: %s != %s" % (exp_pkg_name, pkg.name))
247
+ # else:
248
+ # self._pkg_m[exp_pkg_name] = [PackageSpec(pkg.name)
249
+ # self._pkg_spec_s.append(PackageSpec(pkg.name))
250
+
251
+ # if not len(self._pkg_spec_s):
252
+ # self._pkg_spec_s.append(PackageSpec(pkg.name))
253
+ # else:
254
+ # self._pkg_def_m[PackageSpec(pkg.name)] = pkg
255
+
256
+ print("pkg: %s" % str(pkg))
257
+
258
+ print("fragments: %s" % str(pkg.fragments))
259
+ for spec in pkg.fragments:
260
+ PackageDef._loadFragmentSpec(pkg, spec, file_s)
261
+
262
+ file_s.pop()
263
+
264
+ return pkg
265
+
266
+ @staticmethod
267
+ def _loadFragmentSpec(pkg, spec, file_s):
268
+ # We're either going to have:
269
+ # - File path
270
+ # - Directory path
271
+
272
+ if os.path.isfile(os.path.join(pkg.basedir, spec)):
273
+ PackageDef._loadFragmentFile(pkg, spec, file_s)
274
+ elif os.path.isdir(os.path.join(pkg.basedir, spec)):
275
+ PackageDef._loadFragmentDir(pkg, os.path.join(pkg.basedir, spec), file_s)
276
+ else:
277
+ raise Exception("Fragment spec %s not found" % spec)
278
+
279
+ @staticmethod
280
+ def _loadFragmentDir(pkg, dir, file_s):
281
+ for file in os.listdir(dir):
282
+ if os.path.isdir(os.path.join(dir, file)):
283
+ PackageDef._loadFragmentDir(pkg, os.path.join(dir, file), file_s)
284
+ elif os.path.isfile(os.path.join(dir, file)) and file == "flow.dv":
285
+ PackageDef._loadFragmentFile(pkg, os.path.join(dir, file), file_s)
286
+
287
+ @staticmethod
288
+ def _loadFragmentFile(pkg, file, file_s):
289
+ if file in file_s:
290
+ raise Exception("Recursive file processing @ %s: %s" % (file, ", ".join(file_s)))
291
+ file_s.append(file)
292
+
293
+ with open(file, "r") as fp:
294
+ doc = yaml.load(fp, Loader=yaml.FullLoader)
295
+ print("doc: %s" % str(doc), flush=True)
296
+ if "fragment" in doc.keys():
297
+ # Merge the package definition
298
+ frag = FragmentDef(**(doc["fragment"]))
299
+ frag.basedir = os.path.dirname(file)
300
+ pkg.fragment_l.append(frag)
301
+ else:
302
+ print("Warning: file %s is not a fragment" % file)
dv_flow/mgr/pkg_rgy.py ADDED
@@ -0,0 +1,78 @@
1
+ import os
2
+ import sys
3
+ from typing import Dict, Tuple
4
+ from .package_def import PackageDef
5
+
6
+ class PkgRgy(object):
7
+ _inst = None
8
+
9
+ def __init__(self):
10
+ self._pkgpath = []
11
+ self._pkg_m : Dict[str, Tuple[str,PackageDef]] = {}
12
+
13
+ def hasPackage(self, name, search_path=False):
14
+ if name in self._pkg_m.keys():
15
+ return True
16
+ elif search_path:
17
+ for p in self._pkgpath:
18
+ if os.path.exists(os.path.join(p, name)):
19
+ return True
20
+ else:
21
+ return False
22
+
23
+ def getPackage(self, name):
24
+ if name in self._pkg_m.keys():
25
+ if self._pkg_m[name][1] is None:
26
+ pkg_def = PackageDef.load(self._pkg_m[name][0])
27
+ # Load the package
28
+ self._pkg_m[name] = (
29
+ self._pkg_m[name][0],
30
+ pkg_def
31
+ )
32
+ pass
33
+ return self._pkg_m[name][1]
34
+ else:
35
+ # Go search the package path
36
+ return None
37
+
38
+ def registerPackage(self, pkg_def):
39
+ if pkg_def.name in self._pkg_m.keys():
40
+ raise Exception("Duplicate package %s" % pkg_def.name)
41
+ self._pkg_m[pkg_def.name] = pkg_def
42
+
43
+ def _discover_plugins(self):
44
+ # Register built-in package
45
+ self._pkg_m["std"] = (os.path.join(os.path.dirname(__file__), "std/flow.dv"), None)
46
+
47
+ if sys.version_info < (3,10):
48
+ from importlib_metadata import entry_points
49
+ else:
50
+ from importlib.metadata import entry_points
51
+
52
+ discovered_plugins = entry_points(group='dv_flow.mgr')
53
+ for p in discovered_plugins:
54
+ try:
55
+ mod = p.load()
56
+
57
+ if hasattr(mod, "dvfm_packages"):
58
+ pkg_m = mod.dvfm_packages()
59
+
60
+ for name,path in pkg_m.items():
61
+ if name in self._pkg_m.keys():
62
+ raise Exception("Package %s already registered using path %s. Conflicting path: %s" % (
63
+ name, self._pkg_m[name][0], path))
64
+ self._pkg_m[name] = (path, None)
65
+ except Exception as e:
66
+ print("Error loading plugin %s: %s" % (p.name, str(e)))
67
+ raise e
68
+
69
+ # self._pkgs = {}
70
+ # for pkg in self._load_pkg_list():
71
+ # self._pkgs[pkg.name] = pkg
72
+
73
+ @classmethod
74
+ def inst(cls):
75
+ if cls._inst is None:
76
+ cls._inst = cls()
77
+ cls._inst._discover_plugins()
78
+ return cls._inst
@@ -0,0 +1,68 @@
1
+
2
+ import os
3
+ import fnmatch
4
+ import glob
5
+ import pydantic.dataclasses as dc
6
+ from typing import List, Tuple
7
+ from dv_flow.mgr import Task, TaskData, TaskMemento
8
+ from dv_flow.mgr import FileSet as _FileSet
9
+
10
+ class TaskFileSetMemento(TaskMemento):
11
+ files : List[Tuple[str,float]] = dc.Field(default_factory=list)
12
+
13
+ class FileSet(Task):
14
+
15
+ async def run(self, input : TaskData) -> TaskData:
16
+ print("TaskFileSet run: %s: basedir=%s, base=%s type=%s include=%s" % (
17
+ self.name,
18
+ self.srcdir,
19
+ self.params.base, self.params.type, str(self.params.include)
20
+ ))
21
+
22
+
23
+ ex_memento = self.getMemento(TaskFileSetMemento)
24
+ memento = TaskFileSetMemento()
25
+
26
+ if self.params is not None:
27
+ glob_root = os.path.join(self.srcdir, self.params.base)
28
+
29
+ print("glob_root: %s" % glob_root)
30
+
31
+ fs = _FileSet(
32
+ src=self.name,
33
+ type=self.params.type,
34
+ basedir=glob_root)
35
+ print("glob_root: %s" % glob_root)
36
+
37
+ if not isinstance(self.params.include, list):
38
+ self.params.include = [self.params.include]
39
+
40
+ included_files = []
41
+ for pattern in self.params.include:
42
+ print("pattern: %s" % pattern)
43
+ included_files.extend(glob.glob(os.path.join(glob_root, pattern), recursive=False))
44
+
45
+ for file in included_files:
46
+ if not any(glob.fnmatch.fnmatch(file, os.path.join(glob_root, pattern)) for pattern in self.params.exclude):
47
+ memento.files.append((file, os.path.getmtime(os.path.join(glob_root, file))))
48
+ fs.files.append(file[len(glob_root)+1:])
49
+
50
+ # Check to see if the filelist or fileset have changed
51
+ # Only bother doing this if the upstream task data has not changed
52
+ if ex_memento is not None and not input.changed:
53
+ ex_memento.files.sort(key=lambda x: x[0])
54
+ memento.files.sort(key=lambda x: x[0])
55
+ print("ex_memento.files: %s" % str(ex_memento.files))
56
+ print("memento.files: %s" % str(memento.files))
57
+ input.changed = ex_memento != memento
58
+ else:
59
+ input.changed = True
60
+
61
+ self.setMemento(memento)
62
+
63
+ if fs is not None:
64
+ input.addFileSet(fs)
65
+
66
+ return input
67
+
68
+ pass
@@ -0,0 +1,30 @@
1
+
2
+ package:
3
+ name: std
4
+
5
+ tasks:
6
+ - name: Message
7
+ pyclass: dv_flow.mgr.std.message.Message
8
+ with:
9
+ msg:
10
+ type: str
11
+ value: ""
12
+ - name: FileSet
13
+ pyclass: dv_flow.mgr.std.fileset.FileSet
14
+ with:
15
+ base:
16
+ type: str
17
+ value: ""
18
+ type:
19
+ type: str
20
+ value: ""
21
+ include:
22
+ type: str
23
+ value: ""
24
+ exclude:
25
+ type: str
26
+ value: ""
27
+ # - name: Exec
28
+ # pyclass: dv_flow.mgr.std.exec.Exec
29
+ # with: {}
30
+
@@ -0,0 +1,7 @@
1
+
2
+ from dv_flow.mgr import Task, TaskData
3
+
4
+ class Message(Task):
5
+ async def run(self, input : TaskData) -> TaskData:
6
+ print("%s: %s" % (self.name, self.params.msg))
7
+ return input
@@ -2,11 +2,11 @@ import os
2
2
  import glob
3
3
  import fnmatch
4
4
  import pydantic.dataclasses as dc
5
- from ...fileset import FileSet
6
- from ...package import TaskCtor
7
- from ...task import Task, TaskParams
8
- from ...task_data import TaskData
9
- from ...task_memento import TaskMemento
5
+ from ..fileset import FileSet
6
+ from ..package import TaskCtor
7
+ from ..task import Task, TaskParams
8
+ from ..task_data import TaskData
9
+ from ..task_memento import TaskMemento
10
10
  from typing import List, Tuple
11
11
 
12
12
  class TaskFileSet(Task):
@@ -0,0 +1,10 @@
1
+ from ..task import Task
2
+ from ..task_data import TaskData
3
+
4
+ class TaskNull(Task):
5
+ """The Null task simply propagates its input to its output"""
6
+
7
+ async def run(self, input : TaskData) -> TaskData:
8
+ # No memento ; data pass-through
9
+ return input
10
+