dv-flow-mgr 0.0.2.14182043984a1__py3-none-any.whl → 1.0.0.14370600369a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dv_flow/mgr/__init__.py +2 -1
- dv_flow/mgr/cmds/cmd_graph.py +2 -3
- dv_flow/mgr/cmds/cmd_run.py +7 -9
- dv_flow/mgr/cmds/cmd_show.py +1 -2
- dv_flow/mgr/cond_def.py +16 -0
- dv_flow/mgr/config.py +7 -0
- dv_flow/mgr/config_def.py +33 -0
- dv_flow/mgr/exec_callable.py +88 -0
- dv_flow/mgr/{pkg_rgy.py → ext_rgy.py} +44 -35
- dv_flow/mgr/extend_def.py +21 -0
- dv_flow/mgr/fragment_def.py +4 -3
- dv_flow/mgr/need_def.py +6 -0
- dv_flow/mgr/null_callable.py +10 -0
- dv_flow/mgr/package.py +30 -6
- dv_flow/mgr/package_def.py +40 -444
- dv_flow/mgr/package_loader.py +701 -0
- dv_flow/mgr/param_def.py +2 -1
- dv_flow/mgr/parser.out +567 -0
- dv_flow/mgr/pytask_callable.py +25 -0
- dv_flow/mgr/root_package.py +9 -0
- dv_flow/mgr/shell_callable.py +14 -0
- dv_flow/mgr/srcinfo.py +15 -0
- dv_flow/mgr/std/flow.dv +25 -4
- dv_flow/mgr/task.py +68 -0
- dv_flow/mgr/task_def.py +36 -24
- dv_flow/mgr/task_graph_builder.py +497 -247
- dv_flow/mgr/task_listener_log.py +4 -0
- dv_flow/mgr/task_node_ctor.py +11 -3
- dv_flow/mgr/task_node_ctor_compound.py +21 -33
- dv_flow/mgr/task_node_leaf.py +25 -3
- dv_flow/mgr/task_params_ctor.py +0 -1
- dv_flow/mgr/task_run_ctxt.py +4 -0
- dv_flow/mgr/task_runner.py +2 -0
- dv_flow/mgr/util/cmds/cmd_schema.py +0 -2
- dv_flow/mgr/util/util.py +4 -3
- dv_flow/mgr/yaml_srcinfo_loader.py +55 -0
- {dv_flow_mgr-0.0.2.14182043984a1.dist-info → dv_flow_mgr-1.0.0.14370600369a1.dist-info}/METADATA +1 -1
- dv_flow_mgr-1.0.0.14370600369a1.dist-info/RECORD +74 -0
- dv_flow_mgr-0.0.2.14182043984a1.dist-info/RECORD +0 -59
- {dv_flow_mgr-0.0.2.14182043984a1.dist-info → dv_flow_mgr-1.0.0.14370600369a1.dist-info}/WHEEL +0 -0
- {dv_flow_mgr-0.0.2.14182043984a1.dist-info → dv_flow_mgr-1.0.0.14370600369a1.dist-info}/entry_points.txt +0 -0
- {dv_flow_mgr-0.0.2.14182043984a1.dist-info → dv_flow_mgr-1.0.0.14370600369a1.dist-info}/licenses/LICENSE +0 -0
- {dv_flow_mgr-0.0.2.14182043984a1.dist-info → dv_flow_mgr-1.0.0.14370600369a1.dist-info}/top_level.txt +0 -0
dv_flow/mgr/package_loader.py (new file)
@@ -0,0 +1,701 @@
+import os
+import dataclasses as dc
+import importlib
+import logging
+import pydantic
+import sys
+import yaml
+from pydantic import BaseModel
+from typing import Any, Callable, ClassVar, Dict, List, Tuple
+from .fragment_def import FragmentDef
+from .package_def import PackageDef
+from .package import Package
+from .ext_rgy import ExtRgy
+from .task import Task
+from .task_def import TaskDef, PassthroughE, ConsumesE, RundirE
+from .task_data import TaskMarker, TaskMarkerLoc, SeverityE
+from .yaml_srcinfo_loader import YamlSrcInfoLoader
+
+@dc.dataclass
+class SymbolScope(object):
+    name : str
+    task_m : Dict[str,Task] = dc.field(default_factory=dict)
+
+    def add(self, task, name):
+        self.task_m[name] = task
+
+    def find(self, name) -> Task:
+        if name in self.task_m.keys():
+            return self.task_m[name]
+        else:
+            return None
+
+    def findType(self, name) -> Task:
+        pass
+
+
+@dc.dataclass
+class TaskScope(SymbolScope):
+    pass
+
+@dc.dataclass
+class LoaderScope(SymbolScope):
+    loader : 'PackageLoader' = None
+
+    def add(self, task, name):
+        raise NotImplementedError("LoaderScope.add() not implemented")
+
+    def find(self, name) -> Task:
+        return self.findType(name)
+
+    def findType(self, name) -> Task:
+        last_dot = name.rfind('.')
+        if last_dot != -1:
+            pkg_name = name[:last_dot]
+            task_name = name[last_dot+1:]
+
+            if pkg_name in self.loader._pkg_m.keys():
+                pkg = self.loader._pkg_m[pkg_name]
+            else:
+                path = self.loader.pkg_rgy.findPackagePath(pkg_name)
+                if path is not None:
+                    pkg = self.loader._loadPackage(path)
+                    self.loader._pkg_m[pkg_name] = pkg
+            if pkg is not None and name in pkg.task_m.keys():
+                return pkg.task_m[name]
+        else:
+            return None
+
+@dc.dataclass
+class PackageScope(SymbolScope):
+    pkg : Package = None
+    loader : LoaderScope = None
+    _scope_s : List[SymbolScope] = dc.field(default_factory=list)
+    _log : ClassVar = logging.getLogger("PackageScope")
+
+    def add(self, task, name):
+        if len(self._scope_s):
+            self._scope_s[-1].add(task, name)
+        else:
+            super().add(task, name)
+
+    def push_scope(self, scope):
+        self._scope_s.append(scope)
+
+    def pop_scope(self):
+        self._scope_s.pop()
+
+    def find(self, name) -> Task:
+        self._log.debug("--> %s::find %s" % (self.pkg.name, name))
+        ret = None
+        for i in range(len(self._scope_s)-1, -1, -1):
+            scope = self._scope_s[i]
+            ret = scope.find(name)
+            if ret is not None:
+                break
+
+        if ret is None:
+            ret = super().find(name)
+
+        if ret is None and name in self.pkg.task_m.keys():
+            ret = self.pkg.task_m[name]
+
+        if ret is None:
+            for pkg in self.pkg.pkg_m.values():
+                self._log.debug("Searching pkg %s for %s" % (pkg.name, name))
+                if name in pkg.task_m.keys():
+                    ret = pkg.task_m[name]
+                    break
+
+        if ret is None:
+            self._log.debug("Searching loader for %s" % name)
+            ret = self.loader.findType(name)
+
+        self._log.debug("<-- %s::find %s (%s)" % (self.pkg.name, name, ("found" if ret is not None else "not found")))
+        return ret
+
+    def findType(self, name) -> Task:
+        ret = None
+
+        if name in self.task_m.keys():
+            ret = self.task_m[name]
+
+        if ret is None:
+            for i in range(len(self._scope_s)-1, -1, -1):
+                scope = self._scope_s[i]
+                ret = scope.findType(name)
+                if ret is not None:
+                    break
+
+        if ret is None:
+            ret = super().findType(name)
+
+        if ret is None and name in self.pkg.task_m.keys():
+            ret = self.pkg.task_m[name]
+
+        if ret is None:
+            ret = self.loader.findType(name)
+
+        return ret
+
+    def getScopeFullname(self, leaf=None) -> str:
+        path = self.name
+        if len(self._scope_s):
+            path += "."
+            path += ".".join([s.name for s in self._scope_s])
+
+        if leaf is not None:
+            path += "." + leaf
+        return path
+
+
+@dc.dataclass
+class PackageLoader(object):
+    pkg_rgy : ExtRgy = dc.field(default=None)
+    marker_listeners : List[Callable] = dc.field(default_factory=list)
+    _log : ClassVar = logging.getLogger("PackageLoader")
+    _file_s : List[str] = dc.field(default_factory=list)
+    _pkg_s : List[PackageScope] = dc.field(default_factory=list)
+    _pkg_m : Dict[str, Package] = dc.field(default_factory=dict)
+    _pkg_path_m : Dict[str, Package] = dc.field(default_factory=dict)
+    _loader_scope : LoaderScope = None
+
+    def __post_init__(self):
+        if self.pkg_rgy is None:
+            self.pkg_rgy = ExtRgy.inst()
+
+        self._loader_scope = LoaderScope(name=None, loader=self)
+
+    def load(self, root) -> Package:
+        self._log.debug("--> load %s" % root)
+        ret = self._loadPackage(root, None)
+        self._log.debug("<-- load %s" % root)
+        return ret
+
+    def load_rgy(self, name) -> Package:
+        self._log.debug("--> load_rgy %s" % name)
+        pkg = Package(None)
+
+        name = name if isinstance(name, list) else [name]
+
+        for nn in name:
+            pp = self.pkg_rgy.findPackagePath(nn)
+            if pp is None:
+                raise Exception("Package %s not found" % nn)
+            pp_n = self._loadPackage(pp)
+            pkg.pkg_m[pp_n.name] = pp_n
+        self._log.debug("<-- load_rgy %s" % name)
+        return pkg
+
+    def _error(self, msg, elem):
+        pass
+
+    def _getLoc(self, elem):
+        pass
+
+    def package_scope(self):
+        ret = None
+        for i in range(len(self._pkg_s)-1, -1, -1):
+            scope = self._pkg_s[i]
+            if isinstance(scope, PackageScope):
+                ret = scope
+                break
+        return ret
+
+    def _loadPackage(self, root, exp_pkg_name=None) -> Package:
+        if root in self._file_s:
+            raise Exception("recursive reference")
+
+        if root in self._file_s:
+            # TODO: should be able to unwind stack here
+            raise Exception("Recursive file processing @ %s: %s" % (root, ",".join(self._file_s)))
+        self._file_s.append(root)
+        pkg : Package = None
+        pkg_def : PackageDef = None
+
+        with open(root, "r") as fp:
+            self._log.debug("open %s" % root)
+            doc = yaml.load(fp, Loader=YamlSrcInfoLoader(root))
+
+            if "package" not in doc.keys():
+                raise Exception("Missing 'package' key in %s" % root)
+            try:
+                pkg_def = PackageDef(**(doc["package"]))
+
+                # for t in pkg.tasks:
+                #     t.fullname = pkg.name + "." + t.name
+
+            except pydantic.ValidationError as e:
+                print("Errors: %s" % root)
+                error_paths = []
+                loc = None
+                for ee in e.errors():
+                    # print(" Error: %s" % str(ee))
+                    obj = doc["package"]
+                    loc = None
+                    for el in ee['loc']:
+                        print("el: %s" % str(el))
+                        obj = obj[el]
+                        if type(obj) == dict and 'srcinfo' in obj.keys():
+                            loc = obj['srcinfo']
+                    if loc is not None:
+                        marker_loc = TaskMarkerLoc(path=loc['file'])
+                        if 'lineno' in loc.keys():
+                            marker_loc.line = loc['lineno']
+                        if 'linepos' in loc.keys():
+                            marker_loc.pos = loc['linepos']
+
+                        marker = TaskMarker(
+                            msg=("%s (in %s)" % (ee['msg'], str(ee['loc'][-1]))),
+                            severity=SeverityE.Error,
+                            loc=marker_loc)
+                    else:
+                        marker = TaskMarker(msg=ee['msg'])
+                    self.marker(marker)
+
+        if pkg_def is not None:
+            pkg = self._mkPackage(pkg_def, root)
+
+        self._file_s.pop()
+
+        self._pkg_path_m[root] = pkg
+
+        return pkg
+
+    def _mkPackage(self, pkg_def : PackageDef, root : str) -> Package:
+        self._log.debug("--> _mkPackage %s" % pkg_def.name)
+        pkg = Package(pkg_def, os.path.dirname(root))
+
+        pkg_scope = self.package_scope()
+        if pkg_scope is not None:
+            self._log.debug("Add self (%s) as a subpkg of %s" % (pkg.name, pkg_scope.pkg.name))
+            pkg_scope.pkg.pkg_m[pkg.name] = pkg
+
+        if pkg.name in self._pkg_m.keys():
+            raise Exception("Duplicate package %s" % pkg.name)
+
+        self._pkg_m[pkg.name] = pkg
+        self._pkg_s.append(PackageScope(name=pkg.name, pkg=pkg, loader=self._loader_scope))
+        # Imports are loaded first
+        self._loadPackageImports(pkg, pkg_def.imports, pkg.basedir)
+
+        taskdefs = pkg_def.tasks.copy()
+
+        self._loadFragments(pkg, pkg_def.fragments, pkg.basedir, taskdefs)
+
+        self._loadTasks(pkg, taskdefs, pkg.basedir)
+
+        self._pkg_s.pop()
+
+        self._log.debug("<-- _mkPackage %s (%s)" % (pkg_def.name, pkg.name))
+        return pkg
+
+    def _loadPackageImports(self, pkg, imports, basedir):
+        self._log.debug("--> _loadPackageImports %s" % str(imports))
+        if len(imports) > 0:
+            self._log.info("Loading imported packages (basedir=%s)" % basedir)
+        for imp in imports:
+            self._log.debug("Loading import %s" % imp)
+            self._loadPackageImport(pkg, imp, basedir)
+        self._log.debug("<-- _loadPackageImports %s" % str(imports))
+
+    def _loadPackageImport(self, pkg, imp, basedir):
+        self._log.debug("--> _loadPackageImport %s" % str(imp))
+        # TODO: need to locate and load these external packages (?)
+        if type(imp) == str:
+            imp_path = imp
+        elif imp.path is not None:
+            imp_path = imp.path
+        else:
+            raise Exception("imp.path is none: %s" % str(imp))
+
+        self._log.info("Loading imported package %s" % imp_path)
+
+        if not os.path.isabs(imp_path):
+            self._log.debug("_basedir: %s ; imp_path: %s" % (basedir, imp_path))
+            imp_path = os.path.join(basedir, imp_path)
+
+        # Search down the tree looking for a flow.dv file
+        if os.path.isdir(imp_path):
+            path = imp_path
+
+            while path is not None and os.path.isdir(path) and not os.path.isfile(os.path.join(path, "flow.dv")):
+                # Look one directory down
+                next_dir = None
+                for dir in os.listdir(path):
+                    if os.path.isdir(os.path.join(path, dir)):
+                        if next_dir is None:
+                            next_dir = dir
+                        else:
+                            path = None
+                            break
+                if path is not None:
+                    path = next_dir
+
+            if path is not None and os.path.isfile(os.path.join(path, "flow.dv")):
+                imp_path = os.path.join(path, "flow.dv")
+
+        if not os.path.isfile(imp_path):
+            raise Exception("Import file %s not found" % imp_path)
+
+        if imp_path in self._pkg_path_m.keys():
+            sub_pkg = self._pkg_path_m[imp_path]
+        else:
+            self._log.info("Loading imported file %s" % imp_path)
+            sub_pkg = self._loadPackage(imp_path)
+            self._log.info("Loaded imported package %s" % sub_pkg.name)
+
+        pkg.pkg_m[sub_pkg.name] = sub_pkg
+        self._log.debug("<-- _loadPackageImport %s" % str(imp))
+        pass
+
+    def _loadFragments(self, pkg, fragments, basedir, taskdefs):
+        for spec in fragments:
+            self._loadFragmentSpec(pkg, spec, basedir, taskdefs)
+
+    def _loadFragmentSpec(self, pkg, spec, basedir, taskdefs):
+        # We're either going to have:
+        # - File path
+        # - Directory path
+
+        if os.path.isfile(os.path.join(basedir, spec)):
+            self._loadFragmentFile(
+                pkg,
+                os.path.join(basedir, spec),
+                taskdefs)
+        elif os.path.isdir(os.path.join(basedir, spec)):
+            self._loadFragmentDir(pkg, os.path.join(basedir, spec), taskdefs)
+        else:
+            raise Exception("Fragment spec %s not found" % spec)
+
+    def _loadFragmentDir(self, pkg, dir, taskdefs):
+        for file in os.listdir(dir):
+            if os.path.isdir(os.path.join(dir, file)):
+                self._loadFragmentDir(pkg, os.path.join(dir, file), taskdefs)
+            elif os.path.isfile(os.path.join(dir, file)) and file == "flow.dv":
+                self._loadFragmentFile(pkg, os.path.join(dir, file), taskdefs)
+
+    def _loadFragmentFile(self, pkg, file, taskdefs):
+        if file in self._file_s:
+            raise Exception("Recursive file processing @ %s: %s" % (file, ", ".join(self._file_s)))
+        self._file_s.append(file)
+
+        with open(file, "r") as fp:
+            doc = yaml.load(fp, Loader=YamlSrcInfoLoader(file))
+            self._log.debug("doc: %s" % str(doc))
+            if doc is not None and "fragment" in doc.keys():
+                frag = FragmentDef(**(doc["fragment"]))
+                basedir = os.path.dirname(file)
+                pkg.fragment_def_l.append(frag)
+
+                self._loadPackageImports(pkg, frag.imports, basedir)
+                self._loadFragments(pkg, frag.fragments, basedir, taskdefs)
+                taskdefs.extend(frag.tasks)
+            else:
+                print("Warning: file %s is not a fragment" % file)
+
+    def _loadTasks(self, pkg, taskdefs : List[TaskDef], basedir : str):
+        self._log.debug("--> _loadTasks %s" % pkg.name)
+        # Declare first
+        tasks = []
+        for taskdef in taskdefs:
+            if taskdef.name in pkg.task_m.keys():
+                raise Exception("Duplicate task %s" % taskdef.name)
+
+            # TODO: resolve 'needs'
+            needs = []
+
+            if taskdef.srcinfo is None:
+                raise Exception("null srcinfo")
+            self._log.debug("Create task %s in pkg %s" % (self._getScopeFullname(taskdef.name), pkg.name))
+            desc = taskdef.desc if taskdef.desc is not None else ""
+            doc = taskdef.doc if taskdef.doc is not None else ""
+            task = Task(
+                name=self._getScopeFullname(taskdef.name),
+                desc=desc,
+                doc=doc,
+                srcinfo=taskdef.srcinfo)
+            tasks.append((taskdef, task))
+            pkg.task_m[task.name] = task
+            self._pkg_s[-1].add(task, taskdef.name)
+
+        # Now, build out tasks
+        for taskdef, task in tasks:
+
+            if taskdef.uses is not None:
+                task.uses = self._findTaskType(taskdef.uses)
+
+                if task.uses is None:
+                    raise Exception("Failed to link task %s" % taskdef.uses)
+
+            passthrough, consumes, rundir = self._getPTConsumesRundir(taskdef, task.uses)
+
+            task.passthrough = passthrough
+            task.consumes = consumes
+            task.rundir = rundir
+
+            task.paramT = self._getParamT(
+                taskdef,
+                task.uses.paramT if task.uses is not None else None)
+
+            for need in taskdef.needs:
+                nt = None
+                if isinstance(need, str):
+                    nt = self._findTask(need)
+                elif isinstance(need, TaskDef):
+                    nt = self._findTask(need.name)
+                else:
+                    raise Exception("Unknown need type %s" % str(type(need)))
+
+                if nt is None:
+                    raise Exception("Failed to find task %s" % need)
+                task.needs.append(nt)
+
+            if taskdef.body is not None and len(taskdef.body) > 0:
+                self._mkTaskBody(task, taskdef)
+            elif taskdef.run is not None:
+                task.run = taskdef.run
+                if taskdef.shell is not None:
+                    task.shell = taskdef.shell
+            elif taskdef.pytask is not None: # Deprecated case
+                task.run = taskdef.pytask
+                task.shell = "pytask"
+            elif task.uses is not None and task.uses.run is not None:
+                task.run = task.uses.run
+                task.shell = task.uses.shell
+
+        self._log.debug("<-- _loadTasks %s" % pkg.name)
+
+    def _mkTaskBody(self, task, taskdef):
+        self._pkg_s[-1].push_scope(TaskScope(name=taskdef.name))
+
+        # Need to add subtasks from 'uses' scope?
+        if task.uses is not None:
+            for st in task.uses.subtasks:
+                self._pkg_s[-1].add(st, st.leafname)
+
+        # Build out first
+        subtasks = []
+        for td in taskdef.body:
+            if td.srcinfo is None:
+                raise Exception("null srcinfo")
+
+
+            doc = td.doc if td.doc is not None else ""
+            desc = td.desc if td.desc is not None else ""
+            st = Task(
+                name=self._getScopeFullname(td.name),
+                desc=desc,
+                doc=doc,
+                srcinfo=td.srcinfo)
+            subtasks.append((td, st))
+            task.subtasks.append(st)
+            self._pkg_s[-1].add(st, td.name)
+
+        # Now, resolve references
+        for td, st in subtasks:
+            if td.uses is not None:
+                if st.uses is None:
+                    st.uses = self._findTaskType(td.uses)
+                    if st.uses is None:
+                        raise Exception("Failed to find task %s" % td.uses)
+
+            passthrough, consumes, rundir = self._getPTConsumesRundir(td, st.uses)
+
+            st.passthrough = passthrough
+            st.consumes = consumes
+            st.rundir = rundir
+
+            for need in td.needs:
+                if isinstance(need, str):
+                    st.needs.append(self._findTask(need))
+                elif isinstance(need, TaskDef):
+                    st.needs.append(self._findTask(need.name))
+                else:
+                    raise Exception("Unknown need type %s" % str(type(need)))
+
+            if td.body is not None and len(td.body) > 0:
+                self._mkTaskBody(st, td)
+            elif td.run is not None:
+                st.run = td.run
+                st.shell = getattr(td, "shell", None)
+            elif td.pytask is not None:
+                st.run = td.pytask
+                st.shell = "pytask"
+            elif st.uses is not None and st.uses.run is not None:
+                st.run = st.uses.run
+                st.shell = st.uses.shell
+
+            st.paramT = self._getParamT(
+                td,
+                st.uses.paramT if st.uses is not None else None)
+
+        for td, st in subtasks:
+            # TODO: assess passthrough, consumes, needs, and rundir
+            # with respect to 'uses'
+            pass
+
+        self._pkg_s[-1].pop_scope()
+
+    def _findTaskType(self, name):
+        return self._pkg_s[-1].find(name)
+
+    def _findTask(self, name):
+        return self._pkg_s[-1].find(name)
+
+
+    def _getScopeFullname(self, leaf=None):
+        return self._pkg_s[-1].getScopeFullname(leaf)
+
+    def _resolveTaskRefs(self, pkg, task):
+        # Determine
+        pass
+
+    # def _mkPackage(self, pkg : PackageDef, params : Dict[str,Any] = None) -> 'Package':
+    #     self._log.debug("--> mkPackage %s" % pkg.name)
+    #     ret = Package(pkg.name)
+
+    #     self.push_package(ret, add=True)
+
+    #     tasks_m : Dict[str,str,TaskNodeCtor]= {}
+
+    #     for task in ret.tasks:
+    #         if task.name in tasks_m.keys():
+    #             raise Exception("Duplicate task %s" % task.name)
+    #         tasks_m[task.name] = (task, self._basedir, ) # We'll add a TaskNodeCtor later
+
+    #     for frag in pkg._fragment_l:
+    #         for task in frag.tasks:
+    #             if task.name in tasks_m.keys():
+    #                 raise Exception("Duplicate task %s" % task.name)
+    #             tasks_m[task.name] = (task, frag._basedir, ) # We'll add a TaskNodeCtor later
+
+    #     # Now we have a unified map of the tasks declared in this package
+    #     for name in list(tasks_m.keys()):
+    #         task_i = tasks_m[name]
+    #         fullname = pkg.name + "." + name
+    #         if len(task_i) < 3:
+    #             # Need to create the task ctor
+    #             # TODO:
+    #             ctor_t = self.mkTaskCtor(task_i[0], task_i[1], tasks_m)
+    #             tasks_m[name] = (task_i[0], task_i[1], ctor_t)
+    #         ret.tasks[name] = tasks_m[name][2]
+    #         ret.tasks[fullname] = tasks_m[name][2]
+
+    #     self.pop_package(ret)
+
+    #     self._log.debug("<-- mkPackage %s" % pkg.name)
+    #     return ret
+
+
+
+    def _getPTConsumesRundir(self, taskdef : TaskDef, base_t : Task):
+        self._log.debug("_getPTConsumesRundir %s" % taskdef.name)
+        passthrough = taskdef.passthrough
+        consumes = taskdef.consumes.copy() if isinstance(taskdef.consumes, list) else taskdef.consumes
+        rundir = taskdef.rundir
+        # needs = [] if task.needs is None else task.needs.copy()
+
+        if base_t is not None:
+            if passthrough is None:
+                passthrough = base_t.passthrough
+            if consumes is None:
+                consumes = base_t.consumes
+            if rundir is None:
+                rundir = base_t.rundir
+
+        if passthrough is None:
+            passthrough = PassthroughE.No
+        if consumes is None:
+            consumes = ConsumesE.All
+
+
+        return (passthrough, consumes, rundir)
+
+    def _getParamT(self, taskdef, base_t : BaseModel):
+        self._log.debug("--> _getParamT %s" % taskdef.name)
+        # Get the base parameter type (if available)
+        # We will build a new type with updated fields
+
+        ptype_m = {
+            "str" : str,
+            "int" : int,
+            "float" : float,
+            "bool" : bool,
+            "list" : List
+        }
+        pdflt_m = {
+            "str" : "",
+            "int" : 0,
+            "float" : 0.0,
+            "bool" : False,
+            "list" : []
+        }
+
+        fields = []
+        field_m : Dict[str,int] = {}
+
+        # pkg = self.package()
+
+        # First, pull out existing fields (if there's a base type)
+        if base_t is not None:
+            base_o = base_t()
+            self._log.debug("Base type: %s" % str(base_t))
+            for name,f in base_t.model_fields.items():
+                ff : dc.Field = f
+                fields.append(f)
+                if not hasattr(base_o, name):
+                    raise Exception("Base type %s does not have field %s" % (str(base_t), name))
+                field_m[name] = (f.annotation, getattr(base_o, name))
+        else:
+            self._log.debug("No base type")
+
+        for p in taskdef.params.keys():
+            param = taskdef.params[p]
+            self._log.debug("param: %s %s (%s)" % (p, str(param), str(type(param))))
+            if hasattr(param, "type") and param.type is not None:
+                ptype_s = param.type
+                if ptype_s not in ptype_m.keys():
+                    raise Exception("Unknown type %s" % ptype_s)
+                ptype = ptype_m[ptype_s]
+
+                if p in field_m.keys():
+                    raise Exception("Duplicate field %s" % p)
+                if param.value is not None:
+                    field_m[p] = (ptype, param.value)
+                else:
+                    field_m[p] = (ptype, pdflt_m[ptype_s])
+                self._log.debug("Set param=%s to %s" % (p, str(field_m[p][1])))
+            else:
+                if p not in field_m.keys():
+                    raise Exception("Field %s not found" % p)
+                if type(param) != dict:
+                    value = param
+                elif "value" in param.keys():
+                    value = param["value"]
+                else:
+                    raise Exception("No value specified for param %s: %s" % (
+                        p, str(param)))
+                field_m[p] = (field_m[p][0], value)
+                self._log.debug("Set param=%s to %s" % (p, str(field_m[p][1])))
+
+        params_t = pydantic.create_model("Task%sParams" % taskdef.name, **field_m)
+
+        self._log.debug("== Params")
+        for name,info in params_t.model_fields.items():
+            self._log.debug(" %s: %s" % (name, str(info)))
+
+        self._log.debug("<-- _getParamT %s" % taskdef.name)
+        return params_t
+
+    def error(self, msg, loc=None):
+        if loc is not None:
+            marker = TaskMarker(msg=msg, severity=TaskMarker.SeverityE.Error,
+                loc=loc)
+        else:
+            marker = TaskMarker(msg=msg, severity=TaskMarker.SeverityE.Error)
+        self.marker(marker)
+
+    def marker(self, marker):
+        for l in self.marker_listeners:
+            l(marker)