dv-flow-mgr 0.0.1.13979842530a1__py3-none-any.whl → 0.0.1.14097297609a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. dv_flow/mgr/__init__.py +2 -2
  2. dv_flow/mgr/__main__.py +26 -1
  3. dv_flow/mgr/cmds/cmd_graph.py +82 -0
  4. dv_flow/mgr/cmds/cmd_run.py +2 -2
  5. dv_flow/mgr/cmds/cmd_show.py +107 -0
  6. dv_flow/mgr/fileset.py +1 -0
  7. dv_flow/mgr/fragment_def.py +1 -1
  8. dv_flow/mgr/package.py +3 -3
  9. dv_flow/mgr/package_def.py +121 -33
  10. dv_flow/mgr/param_def.py +8 -3
  11. dv_flow/mgr/std/message.py +1 -1
  12. dv_flow/mgr/task_data.py +24 -20
  13. dv_flow/mgr/task_def.py +0 -1
  14. dv_flow/mgr/task_graph_builder.py +121 -12
  15. dv_flow/mgr/task_graph_dot_writer.py +78 -0
  16. dv_flow/mgr/task_node.py +3 -326
  17. dv_flow/mgr/task_node_compound.py +13 -1
  18. dv_flow/mgr/task_node_ctor.py +118 -0
  19. dv_flow/mgr/task_node_ctor_compound.py +117 -0
  20. dv_flow/mgr/task_node_ctor_compound_proxy.py +65 -0
  21. dv_flow/mgr/task_node_ctor_def_base.py +47 -0
  22. dv_flow/mgr/task_node_ctor_proxy.py +56 -0
  23. dv_flow/mgr/task_node_ctor_task.py +64 -0
  24. dv_flow/mgr/task_node_ctor_wrapper.py +96 -0
  25. dv_flow/mgr/task_node_leaf.py +170 -0
  26. dv_flow/mgr/task_runner.py +11 -6
  27. dv_flow/mgr/util/__init__.py +3 -0
  28. dv_flow/mgr/util/__main__.py +36 -0
  29. dv_flow/mgr/util/cmds/__init__.py +0 -0
  30. dv_flow/mgr/util/cmds/cmd_schema.py +63 -0
  31. dv_flow/mgr/{util.py → util/util.py} +1 -1
  32. {dv_flow_mgr-0.0.1.13979842530a1.dist-info → dv_flow_mgr-0.0.1.14097297609a1.dist-info}/METADATA +1 -1
  33. dv_flow_mgr-0.0.1.14097297609a1.dist-info/RECORD +57 -0
  34. {dv_flow_mgr-0.0.1.13979842530a1.dist-info → dv_flow_mgr-0.0.1.14097297609a1.dist-info}/WHEEL +1 -1
  35. dv_flow/mgr/task.py +0 -181
  36. dv_flow/mgr/task_ctor.py +0 -64
  37. dv_flow_mgr-0.0.1.13979842530a1.dist-info/RECORD +0 -44
  38. {dv_flow_mgr-0.0.1.13979842530a1.dist-info → dv_flow_mgr-0.0.1.14097297609a1.dist-info}/entry_points.txt +0 -0
  39. {dv_flow_mgr-0.0.1.13979842530a1.dist-info → dv_flow_mgr-0.0.1.14097297609a1.dist-info}/licenses/LICENSE +0 -0
  40. {dv_flow_mgr-0.0.1.13979842530a1.dist-info → dv_flow_mgr-0.0.1.14097297609a1.dist-info}/top_level.txt +0 -0
dv_flow/mgr/task_data.py CHANGED
@@ -48,20 +48,16 @@ class TaskMarker(BaseModel):
     severity : SeverityE
     loc : TaskMarkerLoc = dc.Field(default=None)
 
-class TaskParameterSet(BaseModel):
-    type : str = None
-    task : str = None # Name of the task that produced this param set
-    seq : int = -1 # Order in which the param-set must appear
-
 class TaskDataInput(BaseModel):
     """
-    Input data to a task
-    - name - name of the task
-    - changed - indicates whether any of this task's dependencies have changed
-    - rundir - directory in which the task is to be run
-    - params - parameters to the task
-    - inputs - list of parameter sets 'consumed' by this task
-    - memento - memento data previously returned by this task. None if no memento is available
+    Input data to a task:
+
+    * name - name of the task
+    * changed - indicates whether any of this task's dependencies have changed
+    * rundir - directory in which the task is to be run
+    * params - parameters to the task
+    * inputs - list of `TaskDataItem` that are consumed' by this task
+    * memento - memento data previously returned by this task. None if no memento is available
     """
     name : str
     changed : bool
@@ -73,12 +69,13 @@ class TaskDataInput(BaseModel):
 
 class TaskDataResult(BaseModel):
     """
-    Result data from a task
-    - changed - indicates whether the task modified its result data
-    - output - list of output parameter sets
-    - memento - memento data to be passed to the next invocation of the task
-    - markers - list of markers produced by the task
-    - status - status code (0=success, non-zero=failure)
+    Result data from a task:
+
+    * changed - indicates whether the task modified its result data
+    * output - list of output parameter sets
+    * memento - memento data to be passed to the next invocation of the task
+    * markers - list of markers produced by the task
+    * status - status code (0=success, non-zero=failure)
     """
     changed : bool = dc.Field(default=True)
     output : List[Any] = dc.Field(default_factory=list)
@@ -113,9 +110,16 @@ class TaskDataParam(BaseModel):
     ops : List[TaskDataParamOp] = dc.Field(default_factory=list)
 
 class TaskDataItem(BaseModel):
+    """
+    Base class for task data items
+
+    * type - Name of the data item type
+    * src - Name of the task that produced this item
+    * seq - Sequence number of the item within the task
+    """
     type : str
-    src : str
-    id : str
+    src : str = None
+    seq : int = -1
 
 class TaskData(BaseModel):
     src : str = None
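The hunks above replace the old TaskParameterSet with a documented TaskDataItem base carrying type/src/seq. A short, self-contained sketch of how a task might emit such items follows; the SimFileSet subclass, its field values, and the task name are invented for illustration, and only the TaskDataItem field names come from the diff:

from typing import List
from pydantic import BaseModel, Field

class TaskDataItem(BaseModel):
    # Stand-in mirroring the base-class fields shown in the diff above
    type : str
    src : str = None    # name of the producing task (stamped by the runner)
    seq : int = -1      # ordering among items produced by the same task

class SimFileSet(TaskDataItem):
    # Hypothetical item type used only for this example
    type : str = "simFileSet"
    files : List[str] = Field(default_factory=list)

# A task body would return items like these in TaskDataResult.output
out = [
    SimFileSet(seq=0, files=["rtl/top.sv"]),
    SimFileSet(seq=1, files=["tb/tb_top.sv"]),
]
for item in out:
    item.src = "example.compile"
print([(i.src, i.seq, i.type) for i in out])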
dv_flow/mgr/task_def.py CHANGED
@@ -24,7 +24,6 @@ import enum
 from pydantic import BaseModel
 from typing import Any, Dict, List, Union, Tuple
 from .param_def import ParamDef
-from .task import Task
 from .task_output import TaskOutput
 
 @dc.dataclass
dv_flow/mgr/task_graph_builder.py CHANGED
@@ -25,10 +25,21 @@ import logging
 from .package import Package
 from .package_def import PackageDef, PackageSpec
 from .pkg_rgy import PkgRgy
-from .task import Task
-from .task_node import TaskNodeCtor
+from .task_node import TaskNode
+from .task_node_ctor import TaskNodeCtor
 from typing import Dict, List, Union
 
+@dc.dataclass
+class TaskNamespaceScope(object):
+    task_m : Dict[str,TaskNode] = dc.field(default_factory=dict)
+
+@dc.dataclass
+class CompoundTaskCtxt(object):
+    parent : 'TaskGraphBuilder'
+    task : 'TaskNode'
+    task_m : Dict[str,TaskNode] = dc.field(default_factory=dict)
+    uses_s : List[Dict[str, TaskNode]] = dc.field(default_factory=list)
+
 @dc.dataclass
 class TaskGraphBuilder(object):
     """The Task-Graph Builder knows how to discover packages and construct task graphs"""
@@ -38,8 +49,12 @@ class TaskGraphBuilder(object):
     _pkg_s : List[Package] = dc.field(default_factory=list)
     _pkg_m : Dict[PackageSpec,Package] = dc.field(default_factory=dict)
     _pkg_spec_s : List[PackageDef] = dc.field(default_factory=list)
-    _task_m : Dict['TaskSpec',Task] = dc.field(default_factory=dict)
+    _task_m : Dict['TaskSpec',TaskNode] = dc.field(default_factory=dict)
     _override_m : Dict[str,str] = dc.field(default_factory=dict)
+    _ns_scope_s : List[TaskNamespaceScope] = dc.field(default_factory=list)
+    _compound_task_ctxt_s : List[CompoundTaskCtxt] = dc.field(default_factory=list)
+    _uses_count : int = 0
+
     _logger : logging.Logger = None
 
     def __post_init__(self):
@@ -94,13 +109,85 @@ class TaskGraphBuilder(object):
     def package(self):
         return self._pkg_s[-1]
 
-    def mkTaskGraph(self, task : str) -> Task:
+    def enter_uses(self):
+        self._uses_count += 1
+
+    def in_uses(self):
+        return (self._uses_count > 0)
+
+    def leave_uses(self):
+        self._uses_count -= 1
+
+    def enter_compound(self, task : TaskNode):
+        self._compound_task_ctxt_s.append(CompoundTaskCtxt(parent=self, task=task))
+
+    def get_name_prefix(self):
+        if len(self._compound_task_ctxt_s) > 0:
+            # Use the compound scope name
+            name = ".".join(c.task.name for c in self._compound_task_ctxt_s)
+        else:
+            name = self._pkg_s[-1].name
+
+        return name
+
+    def enter_compound_uses(self):
+        self._compound_task_ctxt_s[-1].uses_s.append({})
+
+    def leave_compound_uses(self):
+        if len(self._compound_task_ctxt_s[-1].uses_s) > 1:
+            # Propagate the items up the stack, appending 'super' to
+            # the names
+            for k,v in self._compound_task_ctxt_s[-1].uses_s[-1].items():
+                self._compound_task_ctxt_s[-1].uses[-2]["super.%s" % k] = v
+        else:
+            # Propagate the items to the compound namespace, appending
+            # 'super' to the names
+            for k,v in self._compound_task_ctxt_s[-1].uses_s[-1].items():
+                self._compound_task_ctxt_s[-1].task_m["super.%s" % k] = v
+        self._compound_task_ctxt_s[-1].uses_s.pop()
+
+    def is_compound_uses(self):
+        return len(self._compound_task_ctxt_s) > 0 and len(self._compound_task_ctxt_s[-1].uses_s) != 0
+
+    def addTask(self, name, task : TaskNode):
+        self._logger.debug("--> addTask: %s" % name)
+        if len(self._compound_task_ctxt_s) == 0:
+            self._task_m[name] = task
+        else:
+            if len(self._compound_task_ctxt_s[-1].uses_s) > 0:
+                self._compound_task_ctxt_s[-1].uses_s[-1][name] = task
+            else:
+                self._compound_task_ctxt_s[-1].task_m[name] = task
+        self._logger.debug("<-- addTask: %s" % name)
+
+    def findTask(self, name):
+        task = None
+
+        if len(self._compound_task_ctxt_s) > 0:
+            if len(self._compound_task_ctxt_s[-1].uses_s) > 0:
+                if name in self._compound_task_ctxt_s[-1].uses_s[-1].keys():
+                    task = self._compound_task_ctxt_s[-1].uses_s[-1][name]
+            if task is None and name in self._compound_task_ctxt_s[-1].task_m.keys():
+                task = self._compound_task_ctxt_s[-1].task_m[name]
+        if task is None and name in self._task_m.keys():
+            task = self._task_m[name]
+
+        # if task is None:
+        #     # TODO: Look for a def that hasn't yet been constructed
+        #     task = self._mkTaskGraph(name, self.rundir)
+
+        return task
+
+    def leave_compound(self, task : TaskNode):
+        self._compound_task_ctxt_s.pop()
+
+    def mkTaskGraph(self, task : str) -> TaskNode:
         self._pkg_s.clear()
         self._task_m.clear()
 
         return self._mkTaskGraph(task, self.rundir)
 
-    def _mkTaskGraph(self, task : str, parent_rundir : str) -> Task:
+    def _mkTaskGraph(self, task : str, parent_rundir : str) -> TaskNode:
 
         elems = task.split(".")
 
@@ -145,6 +232,7 @@ class TaskGraphBuilder(object):
             raise Exception("ctor %s returned None for params" % str(ctor_t))
 
         task = ctor_t.mkTaskNode(
+            builder=self,
             params=params,
             name=task,
             needs=needs)
@@ -236,15 +324,34 @@ class TaskGraphBuilder(object):
             self._logger.debug("Overriding package %s with %s" % (pkg, self._override_m[pkg]))
             task_t = self._override_m[pkg] + "." + tname
 
+        dot_idx = task_t.rfind(".")
+        pkg = task_t[0:dot_idx]
+        self._pkg_s.append(self.getPackage(PackageSpec(pkg)))
 
         ctor = self.getTaskCtor(task_t)
-        self._logger.debug("ctor: %s" % ctor.name)
-        params = ctor.mkTaskParams(kwargs)
-        ret = ctor.mkTaskNode(
-            params=params,
-            name=name,
-            srcdir=srcdir,
-            needs=needs)
+        if ctor is not None:
+            if needs is None:
+                needs = []
+            for need_def in ctor.getNeeds():
+                # Resolve the full name of the need
+                need_fullname = self._resolveNeedRef(need_def)
+                self._logger.debug("Searching for qualifed-name task %s" % need_fullname)
+                if not need_fullname in self._task_m.keys():
+                    need_t = self._mkTaskGraph(need_fullname, self.rundir)
+                    self._task_m[need_fullname] = need_t
+                needs.append(self._task_m[need_fullname])
+
+            self._logger.debug("ctor: %s" % ctor.name)
+            params = ctor.mkTaskParams(kwargs)
+            ret = ctor.mkTaskNode(
+                self,
+                params=params,
+                name=name,
+                srcdir=srcdir,
+                needs=needs)
+        else:
+            raise Exception("Failed to find ctor for task %s" % task_t)
+        self._pkg_s.pop()
         self._logger.debug("<-- mkTaskNode: %s" % task_t)
         return ret
 
@@ -275,3 +382,5 @@ class TaskGraphBuilder(object):
 
         self._logger.debug("--> getTaskCtor %s" % spec.name)
         return ctor
+
+
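The compound-task bookkeeping added above (TaskNamespaceScope, CompoundTaskCtxt, the uses_s stack, and the 'super.' renaming in leave_compound_uses) is hard to read straight from the hunk. The following toy class is not the package's builder; it is a simplified stand-in that reproduces just the scoping rule so the intent is visible:

from typing import Dict, List

class ScopeDemo:
    def __init__(self):
        self.compound_m: Dict[str, str] = {}      # current compound scope
        self.uses_s: List[Dict[str, str]] = []    # stack of 'uses' scopes

    def add_task(self, name, task):
        # Tasks defined inside a 'uses' scope stay in that scope's map
        if self.uses_s:
            self.uses_s[-1][name] = task
        else:
            self.compound_m[name] = task

    def enter_uses(self):
        self.uses_s.append({})

    def leave_uses(self):
        # On leaving a 'uses' scope, its entries become visible one level
        # up under a "super." prefix, mirroring leave_compound_uses above
        top = self.uses_s.pop()
        dst = self.uses_s[-1] if self.uses_s else self.compound_m
        for k, v in top.items():
            dst["super.%s" % k] = v

demo = ScopeDemo()
demo.enter_uses()
demo.add_task("build", "<base build task>")
demo.leave_uses()
demo.add_task("build", "<overriding build task>")
print(sorted(demo.compound_m))   # ['build', 'super.build']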
dv_flow/mgr/task_graph_dot_writer.py ADDED
@@ -0,0 +1,78 @@
+import dataclasses as dc
+import logging
+import sys
+from typing import ClassVar, Dict, TextIO
+from .task_node import TaskNode
+from .task_node_compound import TaskNodeCompound
+
+@dc.dataclass
+class TaskGraphDotWriter(object):
+    fp : TextIO = dc.field(default=None)
+    _ind : str = ""
+    _node_id_m : Dict[TaskNode, str] = dc.field(default_factory=dict)
+    _node_id : int = 1
+    _cluster_id : int = 1
+    _log : ClassVar = logging.getLogger("TaskGraphDotWriter")
+
+    def write(self, node, filename):
+        self._log.debug("--> TaskGraphDotWriter::write")
+
+        if filename == "-":
+            self.fp = sys.stdout
+        else:
+            self.fp = open(filename, "w")
+        self.println("digraph G {")
+        self.process_node(node)
+        self.println("}")
+
+        self.fp.close()
+        self._log.debug("<-- TaskGraphDotWriter::write")
+
+    def process_node(self, node):
+        self._log.debug("--> process_node %s (%d)" % (node.name, len(node.needs),))
+        node_id = self._node_id
+        self._node_id += 1
+        node_name = "n%d" % self._node_id
+        self._node_id_m[node] = node_name
+
+        if isinstance(node, TaskNodeCompound):
+            self.println("subgraph cluster_%d {" % self._cluster_id)
+            self._cluster_id += 1
+            self.inc_ind()
+            self.println("label=\"%s\";" % node.name)
+            self.println("color=blue;")
+            self.println("style=dashed;")
+            self.process_node(node.input)
+
+            self.println("%s[label=\"%s.out\"];" % (
+                node_name,
+                node.name))
+        else:
+            self.println("%s[label=\"%s\"];" % (
+                node_name,
+                node.name))
+
+        for dep in node.needs:
+            if dep[0] not in self._node_id_m.keys():
+                self.process_node(dep[0])
+            self.println("%s -> %s;" % (
+                self._node_id_m[dep[0]],
+                self._node_id_m[node]))
+
+        if isinstance(node, TaskNodeCompound):
+            self.dec_ind()
+            self.println("}")
+
+        self._log.debug("<-- process_node %s (%d)" % (node.name, len(node.needs),))
+
+    def println(self, l):
+        self.fp.write("%s%s\n" % (self._ind, l))
+
+    def inc_ind(self):
+        self._ind += "    "
+
+    def dec_ind(self):
+        if len(self._ind) > 4:
+            self._ind = self._ind[4:]
+        else:
+            self._ind = ""
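A possible use of the new writer (presumably behind the new cmds/cmd_graph.py command in the file list above) is dumping a constructed task graph to Graphviz DOT. In this sketch the package name, task name, and the already-configured builder are assumptions; only TaskGraphDotWriter.write() and TaskGraphBuilder.mkTaskGraph() come from this diff:

from dv_flow.mgr.task_graph_dot_writer import TaskGraphDotWriter

def dump_graph(builder, task_name="my_pkg.sim", out="graph.dot"):
    # 'builder' is assumed to be an already-configured TaskGraphBuilder
    root = builder.mkTaskGraph(task_name)   # build the graph for the named task
    TaskGraphDotWriter().write(root, out)   # pass "-" to write to stdout instead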
dv_flow/mgr/task_node.py CHANGED
@@ -27,9 +27,7 @@ import pydantic.dataclasses as pdc
 import logging
 import toposort
 from typing import Any, Callable, ClassVar, Dict, List, Tuple
-from .task_data import TaskDataInput, TaskDataOutput, TaskDataResult
-from .task_params_ctor import TaskParamsCtor
-from .param_ref_eval import ParamRefEval
+from .task_data import TaskDataOutput, TaskDataResult
 from .param import Param
 
 class RundirE(enum.Enum):
@@ -43,9 +41,7 @@ class TaskNode(object):
     name : str
     srcdir : str
     # This can be the resolved parameters
-    params : TaskParamsCtor
-
-    task : Callable[['TaskRunner','TaskDataInput'],'TaskDataResult']
+    params : Any
 
     # Runtime fields -- these get populated during execution
     changed : bool = False
@@ -73,147 +69,7 @@ class TaskNode(object):
             runner,
             rundir,
             memento : Any = None) -> 'TaskDataResult':
-        self._log.debug("--> do_run: %s" % self.name)
-        changed = False
-        for dep,_ in self.needs:
-            changed |= dep.changed
-
-        self.rundir = rundir
-
-        # TODO: Form dep-map from inputs
-
-        dep_m = {}
-        for need,block in self.needs:
-            self._log.debug("dep %s dep_m: %s" % (need.name, str(dep_m)))
-            if not block:
-                for subdep in need.output.dep_m.keys():
-                    if subdep not in dep_m.keys():
-                        dep_m[subdep] = []
-                    for dep in need.output.dep_m[subdep]:
-                        if dep not in dep_m[subdep]:
-                            dep_m[subdep].append(dep)
-        self._log.debug("input dep_m: %s %s" % (self.name, str(dep_m)))
-
-        sorted = toposort.toposort(dep_m)
-
-        in_params_m = {}
-        added_srcs = set()
-        for need,block in self.needs:
-            if not block:
-                for p in need.output.output:
-                    # Avoid adding parameters from a single task more than once
-                    if p.src not in added_srcs:
-                        added_srcs.add(p.src)
-                        if p.src not in in_params_m.keys():
-                            in_params_m[p.src] = []
-                        in_params_m[p.src].append(p)
-
-        # in_params holds parameter sets ordered by dependency
-        in_params = []
-        for sorted_s in sorted:
-            self._log.debug("sorted_s: %s" % str(sorted_s))
-            for dep in sorted_s:
-                if dep in in_params_m.keys():
-                    self._log.debug("(%s) Extend with: %s" % (dep, str(in_params_m[dep])))
-                    in_params.extend(in_params_m[dep])
-
-        self._log.debug("in_params[1]: %s" % ",".join(p.src for p in in_params))
-
-        # Create an evaluator for substituting param values
-        eval = ParamRefEval()
-
-        self._log.debug("in_params[2]: %s" % ",".join(p.src for p in in_params))
-        eval.setVar("in", in_params)
-        eval.setVar("rundir", rundir)
-
-        # Set variables from the inputs
-        for need in self.needs:
-            for name,value in {"rundir" : need[0].rundir}.items():
-                eval.setVar("%s.%s" % (need[0].name, name), value)
-
-        # Default inputs is the list of parameter sets that match 'consumes'
-        inputs = []
-        if self.consumes is not None and len(self.consumes):
-            for in_p in in_params:
-                if self._matches(in_p, self.consumes):
-                    inputs.append(in_p)
-
-        for name,field in self.params.model_fields.items():
-            value = getattr(self.params, name)
-            if type(value) == str:
-                if value.find("${{") != -1:
-                    new_val = eval.eval(value)
-                    self._log.debug("Param %s: Evaluate expression \"%s\" => \"%s\"" % (name, value, new_val))
-                    setattr(self.params, name, new_val)
-            elif isinstance(value, list):
-                for i,elem in enumerate(value):
-                    if elem.find("${{") != -1:
-                        new_val = eval.eval(elem)
-                        value[i] = new_val
-
-        input = TaskDataInput(
-            name=self.name,
-            changed=changed,
-            srcdir=self.srcdir,
-            rundir=rundir,
-            params=self.params,
-            inputs=inputs,
-            memento=memento)
-
-        self._log.debug("--> Call task method %s" % str(self.task))
-        self.result : TaskDataResult = await self.task(self, input)
-        self._log.debug("<-- Call task method %s" % str(self.task))
-
-        output=self.result.output.copy()
-        for out in output:
-            out.src = self.name
-
-        self._log.debug("output[1]: %s" % str(output))
-
-        # Pass-through all dependencies
-        # Add an entry for ourselves
-        dep_m[self.name] = list(need.name for need,_ in self.needs)
-
-        if self.passthrough:
-            self._log.debug("passthrough: %s" % self.name)
-
-            if self.consumes is None and len(self.consumes):
-                self._log.debug("Propagating all input parameters to output")
-                for need,block in self.needs:
-                    if not block:
-                        output.extend(need.output.output)
-            else:
-                # Filter out parameter sets that were consumed
-                self._log.debug("Propagating non-consumed input parameters to output")
-                self._log.debug("consumes: %s" % str(self.consumes))
-                for need,block in self.needs:
-                    if not block:
-                        for out in need.output.output:
-                            if not self._matches(out, self.consumes):
-                                self._log.debug("Propagating type %s from %s" % (
-                                    getattr(out, "type", "<unknown>"),
-                                    getattr(out, "src", "<unknown>")))
-                                output.append(out)
-        else:
-            self._log.debug("non-passthrough: %s (only local outputs propagated)" % self.name)
-            # empty dependency map
-            # dep_m = {
-            #     self.name : []
-            # }
-
-        self._log.debug("output dep_m: %s %s" % (self.name, str(dep_m)))
-        self._log.debug("output[2]: %s" % str(output))
-
-        # Store the result
-        self.output = TaskDataOutput(
-            changed=self.result.changed,
-            dep_m=dep_m,
-            output=output)
-
-        # TODO:
-        self._log.debug("<-- do_run: %s" % self.name)
-
-        return self.result
+        pass
 
     def __hash__(self):
         return id(self)
@@ -240,186 +96,7 @@ class TaskNode(object):
                 break
         self._log.debug("<-- _matches: %s %s" % (self.name, consumed))
         return consumed
-
-
-
-@dc.dataclass
-class TaskNodeCtor(object):
-    """
-    Factory for a specific task type
-    - Produces a task parameters object, applying value-setting instructions
-    - Produces a TaskNode
-    """
-    name : str
-    srcdir : str
-    paramT : Any
-    passthrough : bool
-    consumes : List[Any]
-
-    def __call__(self,
-                 name=None,
-                 srcdir=None,
-                 params=None,
-                 needs=None,
-                 passthrough=None,
-                 consumes=None,
-                 **kwargs):
-        """Convenience method for direct creation of tasks"""
-        if params is None:
-            params = self.mkTaskParams(kwargs)
-
-        node = self.mkTaskNode(
-            srcdir=srcdir,
-            params=params,
-            name=name,
-            needs=needs)
-        if passthrough is not None:
-            node.passthrough = passthrough
-        else:
-            node.passthrough = self.passthrough
-        if consumes is not None:
-            if node.consumes is None:
-                node.consumes = consumes
-            else:
-                node.consumes.extend(consumes)
-        else:
-            if node.consumes is None:
-                node.consumes = self.consumes
-            else:
-                node.consumes.extend(consumes)
-
-        return node
-
-    def getNeeds(self) -> List[str]:
-        return []
-
-    def mkTaskNode(self,
-                   params,
-                   srcdir=None,
-                   name=None,
-                   needs=None) -> TaskNode:
-        raise NotImplementedError("mkTaskNode in type %s" % str(type(self)))
-
-    def mkTaskParams(self, params : Dict = None) -> Any:
-        obj = self.paramT()
-
-        # Apply user-specified params
-        if params is not None:
-            for key,value in params.items():
-                if not hasattr(obj, key):
-                    raise Exception("Parameters class %s does not contain field %s" % (
-                        str(type(obj)),
-                        key))
-                else:
-                    if isinstance(value, Param):
-                        if value.append is not None:
-                            ex_value = getattr(obj, key, [])
-                            ex_value.extend(value.append)
-                            setattr(obj, key, ex_value)
-                        elif value.prepend is not None:
-                            ex_value = getattr(obj, key, [])
-                            value = value.copy()
-                            value.extend(ex_value)
-                            setattr(obj, key, value)
-                            pass
-                        else:
-                            raise Exception("Unhandled value spec: %s" % str(value))
-                    else:
-                        setattr(obj, key, value)
-        return obj
-
-@dc.dataclass
-class TaskNodeCtorDefBase(TaskNodeCtor):
-    """Task defines its own needs, that will need to be filled in"""
-    needs : List['str']
-
-    def __post_init__(self):
-        if self.needs is None:
-            self.needs = []
-
-    def getNeeds(self) -> List[str]:
-        return self.needs
-
-@dc.dataclass
-class TaskNodeCtorProxy(TaskNodeCtorDefBase):
-    """Task has a 'uses' clause, so we delegate creation of the node"""
-    uses : TaskNodeCtor
-
-    def mkTaskNode(self, params, srcdir=None, name=None, needs=None) -> TaskNode:
-        if srcdir is None:
-            srcdir = self.srcdir
-        node = self.uses.mkTaskNode(params=params, srcdir=srcdir, name=name, needs=needs)
-        node.passthrough = self.passthrough
-        node.consumes = self.consumes
-        return node
 
-@dc.dataclass
-class TaskNodeCtorTask(TaskNodeCtorDefBase):
-    task : Callable[['TaskRunner','TaskDataInput'],'TaskDataResult']
 
-    def mkTaskNode(self, params, srcdir=None, name=None, needs=None) -> TaskNode:
-        if srcdir is None:
-            srcdir = self.srcdir
-
-        node = TaskNode(name, srcdir, params, self.task, needs=needs)
-        node.passthrough = self.passthrough
-        node.consumes = self.consumes
-        node.task = self.task
-
-        return node
-
-@dc.dataclass
-class TaskNodeCtorWrapper(TaskNodeCtor):
-    T : Any
-
-
-
-    def mkTaskNode(self, params, srcdir=None, name=None, needs=None) -> TaskNode:
-        node = TaskNode(name, srcdir, params, self.T, needs=needs)
-        node.passthrough = self.passthrough
-        node.consumes = self.consumes
-        return node
-
-    def mkTaskParams(self, params : Dict = None) -> Any:
-        obj = self.paramT()
-
-        # Apply user-specified params
-        for key,value in params.items():
-            if not hasattr(obj, key):
-                raise Exception("Parameters class %s does not contain field %s" % (
-                    str(type(obj)),
-                    key))
-            else:
-                if isinstance(value, Param):
-                    if value.append is not None:
-                        ex_value = getattr(obj, key, [])
-                        ex_value.extend(value.append)
-                        setattr(obj, key, ex_value)
-                    elif value.prepend is not None:
-                        ex_value = getattr(obj, key, [])
-                        value = value.copy()
-                        value.extend(ex_value)
-                        setattr(obj, key, value)
-                        pass
-                    else:
-                        raise Exception("Unhandled value spec: %s" % str(value))
-                else:
-                    setattr(obj, key, value)
-        return obj
-
-def task(paramT,passthrough=False,consumes=None):
-    """Decorator to wrap a task method as a TaskNodeCtor"""
-    def wrapper(T):
-        task_mname = T.__module__
-        task_module = sys.modules[task_mname]
-        ctor = TaskNodeCtorWrapper(
-            name=T.__name__,
-            srcdir=os.path.dirname(os.path.abspath(task_module.__file__)),
-            paramT=paramT,
-            passthrough=passthrough,
-            consumes=consumes,
-            T=T)
-        return ctor
-    return wrapper
 
 
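For downstream code that imported the removed constructor classes directly from task_node, the file list and the task_graph_builder.py hunk suggest they now live in dedicated modules. A hedged migration sketch follows; the new home of TaskNodeCtor is shown explicitly in the builder hunk, while the other per-class modules are inferred from the new file names above:

# Before (0.0.1.13979842530a1):
#   from dv_flow.mgr.task_node import TaskNodeCtor, TaskNodeCtorTask, task
# After (0.0.1.14097297609a1), assuming each class keeps its name in its new module:
from dv_flow.mgr.task_node import TaskNode                          # node class remains here
from dv_flow.mgr.task_node_ctor import TaskNodeCtor                 # confirmed by the builder hunk
from dv_flow.mgr.task_node_ctor_task import TaskNodeCtorTask        # inferred from the file list
from dv_flow.mgr.task_node_ctor_wrapper import TaskNodeCtorWrapper  # inferred from the file list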