dv-flow-mgr 0.0.1.13577785562a1__py3-none-any.whl → 0.0.1.13657597614a1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the registry.
dv_flow/mgr/__init__.py CHANGED
@@ -7,4 +7,5 @@ from .task_data import *
  from .task_graph_runner import TaskGraphRunner
  from .task_graph_runner_local import TaskGraphRunnerLocal
  from .task_graph_builder import TaskGraphBuilder
+ from .task_node import task
 
dv_flow/mgr/package_def.py CHANGED
@@ -33,10 +33,10 @@ from typing import Any, Dict, List, Callable, Tuple, ClassVar
  from .fragment_def import FragmentDef
  from .package import Package
  from .package_import_spec import PackageImportSpec, PackageSpec
- #from .task import TaskCtorCls, TaskCtorParam, TaskCtorParamCls
+ from .task_node import TaskNodeCtor, TaskNodeCtorProxy, TaskNodeCtorTask
  from .task_ctor import TaskCtor
  from .task_def import TaskDef, TaskSpec
- from .std.task_null import TaskNull
+ from .std.task_null import TaskNull, TaskNullParams
  from .type_def import TypeDef
 
 
@@ -122,115 +122,27 @@ class PackageDef(BaseModel):
  ctor_t = tasks_m[task_name][2]
  return ctor_t
 
- def handleParams(self, task, ctor_t):
- self._log.debug("--> handleParams %s params=%s" % (task.name, str(task.params)))
-
- if task.params is not None and len(task.params) > 0:
- decl_params = False
-
- # First, add in a parameter-setting stage
- ctor_t = TaskCtorParam(
- name=ctor_t.name,
- uses=ctor_t,
- srcdir=ctor_t.srcdir)
- # ctor_t.params.update(task.params)
-
- for value in task.params.values():
- self._log.debug("value: %s" % str(value))
- if type(value) == dict and "type" in value.keys():
- decl_params = True
- break
-
- field_m = {}
- # First, add parameters from the base class
- base_o = ctor_t.mkParams()
- for fname,info in base_o.model_fields.items():
- self._log.debug("Field: %s (%s)" % (fname, info.default))
- field_m[fname] = (info.annotation, info.default)
-
- if decl_params:
- self._log.debug("Type declares new parameters")
- # We need to combine base parameters with new parameters
- ptype_m = {
- "str" : str,
- "int" : int,
- "float" : float,
- "bool" : bool,
- "list" : List
- }
- pdflt_m = {
- "str" : "",
- "int" : 0,
- "float" : 0.0,
- "bool" : False,
- "list" : []
- }
- for p in task.params.keys():
- param = task.params[p]
- self._log.debug("param: %s" % str(param))
- if type(param) == dict and "type" in param.keys():
- ptype_s = param["type"]
- if ptype_s not in ptype_m.keys():
- raise Exception("Unknown type %s" % ptype_s)
- ptype = ptype_m[ptype_s]
-
- if p in field_m.keys():
- raise Exception("Duplicate field %s" % p)
- if "value" in param.keys():
- field_m[p] = (ptype, param["value"])
- else:
- field_m[p] = (ptype, pdflt_m[ptype_s])
- else:
- if p not in field_m.keys():
- raise Exception("Field %s not found" % p)
- if type(param) != dict:
- value = param
- elif "value" in param.keys():
- value = param["value"]
- else:
- raise Exception("No value specified for param %s: %s" % (
- p, str(param)))
- field_m[p] = (field_m[p][0], value)
- self._log.debug("field_m: %s" % str(field_m))
- param_t = pydantic.create_model(
- "Task%sParams" % task.name, **field_m)
- ctor_t = TaskCtorParamCls(
- name=ctor_t.name,
- uses=ctor_t,
- params_ctor=param_t)
- else: # no new parameters declared
- self._log.debug("Type only overrides existing parameters")
- for p in task.params.keys():
- param = task.params[p]
- if p not in field_m.keys():
- raise Exception("Field %s not found" % p)
- if type(param) != dict:
- value = param
- elif "value" in param.keys():
- value = param["value"]
- else:
- raise Exception("No value specified for param %s: %s" % (
- p, str(param)))
- field_m[p] = (field_m[p][0], value)
- self._log.debug("Set param=%s to %s" % (p, str(value)))
- ctor_t.params[p] = value
-
- self._log.debug("<-- handleParams %s" % task.name)
-
- return ctor_t
-
  def mkTaskCtor(self, session, task, srcdir, tasks_m) -> TaskCtor:
  self._log.debug("--> %s::mkTaskCtor %s (srcdir: %s)" % (self.name, task.name, srcdir))
+ base_ctor_t : TaskCtor = None
  ctor_t : TaskCtor = None
+ base_params : BaseModel = None
+ callable = None
+ passthrough = False
+ needs = [] if task.needs is None else task.needs.copy()
+
+ if task.uses is not None:
+ base_ctor_t = self.getTaskCtor(session, task.uses, tasks_m)
+ base_params = base_ctor_t.mkTaskParams()
 
  # Determine the implementation constructor first
- if task.pyclass is not None:
+ if task.pytask is not None:
  # Built-in impl
  # Now, lookup the class
- self._log.debug("Use PyClass implementation")
- last_dot = task.pyclass.rfind('.')
- clsname = task.pyclass[last_dot+1:]
- modname = task.pyclass[:last_dot]
+ self._log.debug("Use PyTask implementation")
+ last_dot = task.pytask.rfind('.')
+ clsname = task.pytask[last_dot+1:]
+ modname = task.pytask[:last_dot]
 
  try:
  if modname not in sys.modules:
@@ -244,39 +156,106 @@ class PackageDef(BaseModel):
  modname, self.basedir, str(e)))
 
  if not hasattr(mod, clsname):
- raise Exception("Class %s not found in module %s" % (clsname, modname))
- task_ctor = getattr(mod, clsname)
-
- # Determine if we need to use a new
-
- if task.uses is not None:
- uses = self.getTaskCtor(session, task.uses, tasks_m)
- else:
- uses = None
-
- ctor_t = TaskCtorCls(
+ raise Exception("Method %s not found in module %s" % (clsname, modname))
+ callable = getattr(mod, clsname)
+
+ # Determine if we need to use a new
+ paramT = self._getParamT(task, base_params)
+
+ if callable is not None:
+ ctor_t = TaskNodeCtorTask(
  name=task.name,
- uses=uses,
- task_ctor=task_ctor,
- srcdir=srcdir)
- elif task.uses is not None:
+ srcdir=srcdir,
+ paramT=paramT, # TODO: need to determine the parameter type
+ passthrough=passthrough,
+ needs=needs, # TODO: need to determine the needs
+ task=callable)
+ elif base_ctor_t is not None:
  # Use the existing (base) to create the implementation
- ctor_t = TaskCtor(
+ ctor_t = TaskNodeCtorProxy(
  name=task.name,
- uses=self.getTaskCtor(session, task.uses, tasks_m),
- srcdir=srcdir)
+ srcdir=srcdir,
+ paramT=paramT, # TODO: need to determine the parameter type
+ passthrough=passthrough,
+ needs=needs,
+ uses=base_ctor_t)
  else:
  self._log.debug("Use 'Null' as the class implementation")
- ctor_t = TaskCtorCls(
+ ctor_t = TaskNodeCtorTask(
  name=task.name,
- task_ctor=TaskNull,
- srcdir=srcdir)
-
- ctor_t = self.handleParams(task, ctor_t)
- ctor_t.depends.extend(task.depends)
+ srcdir=srcdir,
+ paramT=TaskNullParams,
+ passthrough=passthrough,
+ needs=needs,
+ task=TaskNull)
 
  self._log.debug("<-- %s::mkTaskCtor %s" % (self.name, task.name))
  return ctor_t
+
+ def _getParamT(self, task, base_t : BaseModel):
+ # Get the base parameter type (if available)
+ # We will build a new type with updated fields
+
+ ptype_m = {
+ "str" : str,
+ "int" : int,
+ "float" : float,
+ "bool" : bool,
+ "list" : List
+ }
+ pdflt_m = {
+ "str" : "",
+ "int" : 0,
+ "float" : 0.0,
+ "bool" : False,
+ "list" : []
+ }
+
+ fields = []
+ field_m : Dict[str,int] = {}
+
+ # First, pull out existing fields (if there's a base type)
+ if base_t is not None:
+ self._log.debug("Base type: %s" % str(base_t))
+ for name,f in base_t.model_fields.items():
+ ff : dc.Field = f
+ fields.append(f)
+ field_m[name] = (f.annotation, getattr(base_t, name))
+ else:
+ self._log.debug("No base type")
+
+ for p in task.params.keys():
+ param = task.params[p]
+ self._log.debug("param: %s %s (%s)" % (p, str(param), str(type(param))))
+ if hasattr(param, "type") and param.type is not None:
+ ptype_s = param.type
+ if ptype_s not in ptype_m.keys():
+ raise Exception("Unknown type %s" % ptype_s)
+ ptype = ptype_m[ptype_s]
+
+ if p in field_m.keys():
+ raise Exception("Duplicate field %s" % p)
+ if param.value is not None:
+ field_m[p] = (ptype, param.value)
+ else:
+ field_m[p] = (ptype, pdflt_m[ptype_s])
+ self._log.debug("Set param=%s to %s" % (p, str(field_m[p][1])))
+ else:
+ if p not in field_m.keys():
+ raise Exception("Field %s not found" % p)
+ if type(param) != dict:
+ value = param
+ elif "value" in param.keys():
+ value = param["value"]
+ else:
+ raise Exception("No value specified for param %s: %s" % (
+ p, str(param)))
+ field_m[p] = (field_m[p][0], value)
+ self._log.debug("Set param=%s to %s" % (p, str(field_m[p][1])))
+
+ params_t = pydantic.create_model("Task%sParams" % task.name, **field_m)
+
+ return params_t
 
  @staticmethod
  def load(path, exp_pkg_name=None):
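The new `_getParamT` helper replaces `handleParams`: it merges the base task's parameter fields with the fields declared in a task's `with:` block and builds a fresh parameter model with `pydantic.create_model`. A minimal sketch of that technique (the field names and defaults below are hypothetical, not taken from the package):

```python
# Sketch: building a parameter model the way _getParamT does, using
# pydantic.create_model. The fields below are made up for illustration.
from typing import List
import pydantic

# (type, default) pairs: base-type fields first, then task-declared fields
field_m = {
    "base":    (str, "src"),        # inherited from the base parameter type
    "include": (str, "*.sv"),       # override of an existing field's default
    "defines": (List[str], []),     # newly declared field
}

TaskMyParams = pydantic.create_model("TaskMyParams", **field_m)

p = TaskMyParams()
print(p.base, p.include, p.defines)   # -> src *.sv []
```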
dv_flow/mgr/param_def.py CHANGED
@@ -14,7 +14,6 @@ class ComplexType(BaseModel):
  map : Union[MapType, None] = None
 
  class ParamDef(BaseModel):
- name : str
  doc : str = None
  type : Union[str, 'ComplexType'] = None
  value : Union[Any, None] = None
dv_flow/mgr/param_ref_eval.py CHANGED
@@ -7,8 +7,8 @@ from .eval_jq import eval_jq
  @dc.dataclass
  class ParamRefEval(object):
 
- parser : ExprParser = ExprParser()
- expr_eval : ExprEval = ExprEval()
+ parser : ExprParser = dc.field(default_factory=ExprParser)
+ expr_eval : ExprEval = dc.field(default_factory=ExprEval)
 
  def __post_init__(self):
  self.expr_eval.methods["jq"] = eval_jq
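The change above swaps plain defaults for `default_factory`: a dataclass default is evaluated once, when the class is defined, so every `ParamRefEval` instance would share the same `ExprParser`/`ExprEval` objects; `default_factory` builds a fresh object per instance. A small sketch of the difference, using a stand-in class:

```python
# Sketch of default vs. default_factory on a dataclass field.
# `Counter` is a stand-in for ExprParser/ExprEval.
import dataclasses as dc

class Counter:
    def __init__(self):
        self.hits = 0

@dc.dataclass
class Shared:
    c : Counter = Counter()            # one Counter, created at class-definition time

@dc.dataclass
class PerInstance:
    c : Counter = dc.field(default_factory=Counter)   # fresh Counter per instance

a, b = Shared(), Shared()
assert a.c is b.c                      # the same object is shared

x, y = PerInstance(), PerInstance()
assert x.c is not y.c                  # each instance gets its own
```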
dv_flow/mgr/std/exec.py ADDED
@@ -0,0 +1,19 @@
+ import asyncio
+ import logging
+ from dv_flow.mgr import TaskDataResult
+
+ _log = logging.getLogger("Exec")
+
+ async def Exec(runner, input) -> TaskDataResult:
+ _log.debug("TaskExec run: %s: cmd=%s" % (input.name, input.params.command))
+
+
+ proc = await asyncio.create_subprocess_shell(
+ input.params.command,
+ stdout=asyncio.subprocess.PIPE,
+ stderr=asyncio.subprocess.PIPE)
+
+ await proc.wait()
+
+ return TaskDataResult()
+
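The new `std.Exec` task shells out with `asyncio.create_subprocess_shell` and waits for completion; stdout/stderr are piped but not consumed. A standalone sketch of the same asyncio pattern, extended to read the streams and check the return code (those extensions are illustrative additions, not behavior of the package):

```python
# Standalone sketch of the asyncio subprocess pattern Exec relies on.
import asyncio

async def run_shell(command: str) -> int:
    proc = await asyncio.create_subprocess_shell(
        command,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE)

    stdout, stderr = await proc.communicate()   # drains the pipes and waits
    if proc.returncode != 0:
        print("command failed (%d): %s" % (proc.returncode, stderr.decode()))
    return proc.returncode

if __name__ == "__main__":
    asyncio.run(run_shell("echo hello"))
```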
dv_flow/mgr/std/fileset.py CHANGED
@@ -4,75 +4,72 @@ import fnmatch
  import glob
  import logging
  import pydantic.dataclasses as dc
+ from pydantic import BaseModel
  from typing import ClassVar, List, Tuple
- from dv_flow.mgr import Task, TaskData, TaskMemento
+ from dv_flow.mgr import TaskDataResult
  from dv_flow.mgr import FileSet as _FileSet
 
- class TaskFileSetMemento(TaskMemento):
+ class TaskFileSetMemento(BaseModel):
  files : List[Tuple[str,float]] = dc.Field(default_factory=list)
 
- class FileSet(Task):
+ _log = logging.getLogger("FileSet")
 
- _log : ClassVar = logging.getLogger("FileSet")
+ async def FileSet(runner, input) -> TaskDataResult:
+ _log.debug("TaskFileSet run: %s: basedir=%s, base=%s type=%s include=%s" % (
+ input.name,
+ input.srcdir,
+ input.params.base, input.params.type, str(input.params.include)
+ ))
 
- async def run(self, input : TaskData) -> TaskData:
- self._log.debug("TaskFileSet run: %s: basedir=%s, base=%s type=%s include=%s" % (
- self.name,
- self.srcdir,
- self.params.base, self.params.type, str(self.params.include)
- ))
 
+ changed = False
+ ex_memento = input.memento
+ memento = TaskFileSetMemento()
 
- ex_memento = self.getMemento(TaskFileSetMemento)
- memento = TaskFileSetMemento()
+ _log.debug("ex_memento: %s" % str(ex_memento))
+ _log.debug("params: %s" % str(input.params))
 
- self._log.debug("ex_memento: %s" % str(ex_memento))
- self._log.debug("params: %s" % str(self.params))
+ if input.params is not None:
+ glob_root = os.path.join(input.srcdir, input.params.base)
+ glob_root = glob_root.strip()
 
- if self.params is not None:
- glob_root = os.path.join(self.srcdir, self.params.base)
- glob_root = glob_root.strip()
+ if glob_root[-1] == '/' or glob_root == '\\':
+ glob_root = glob_root[:-1]
 
- if glob_root[-1] == '/' or glob_root == '\\':
- glob_root = glob_root[:-1]
+ _log.debug("glob_root: %s" % glob_root)
 
- self._log.debug("glob_root: %s" % glob_root)
-
- fs = _FileSet(
- src=self.name,
- type=self.params.type,
+ fs = _FileSet(
+ src=input.name,
+ type=input.params.type,
  basedir=glob_root)
 
- if not isinstance(self.params.include, list):
- self.params.include = [self.params.include]
-
- included_files = []
- for pattern in self.params.include:
- included_files.extend(glob.glob(os.path.join(glob_root, pattern), recursive=False))
-
- self._log.debug("included_files: %s" % str(included_files))
-
- for file in included_files:
- if not any(glob.fnmatch.fnmatch(file, os.path.join(glob_root, pattern)) for pattern in self.params.exclude):
- memento.files.append((file, os.path.getmtime(os.path.join(glob_root, file))))
- fs.files.append(file[len(glob_root)+1:])
-
- # Check to see if the filelist or fileset have changed
- # Only bother doing this if the upstream task data has not changed
- if ex_memento is not None and not input.changed:
- ex_memento.files.sort(key=lambda x: x[0])
- memento.files.sort(key=lambda x: x[0])
- self._log.debug("ex_memento.files: %s" % str(ex_memento.files))
- self._log.debug("memento.files: %s" % str(memento.files))
- input.changed = ex_memento != memento
- else:
- input.changed = True
-
- self.setMemento(memento)
-
- if fs is not None:
- input.addFileSet(fs)
-
- return input
-
- pass
+ if not isinstance(input.params.include, list):
+ input.params.include = [input.params.include]
+
+ included_files = []
+ for pattern in input.params.include:
+ included_files.extend(glob.glob(os.path.join(glob_root, pattern), recursive=False))
+
+ _log.debug("included_files: %s" % str(included_files))
+
+ for file in included_files:
+ if not any(glob.fnmatch.fnmatch(file, os.path.join(glob_root, pattern)) for pattern in input.params.exclude):
+ memento.files.append((file, os.path.getmtime(os.path.join(glob_root, file))))
+ fs.files.append(file[len(glob_root)+1:])
+
+ # Check to see if the filelist or fileset have changed
+ # Only bother doing this if the upstream task data has not changed
+ if ex_memento is not None and not input.changed:
+ ex_memento.files.sort(key=lambda x: x[0])
+ memento.files.sort(key=lambda x: x[0])
+ _log.debug("ex_memento.files: %s" % str(ex_memento.files))
+ _log.debug("memento.files: %s" % str(memento.files))
+ changed = ex_memento != memento
+ else:
+ changed = True
+
+ return TaskDataResult(
+ memento=memento,
+ changed=changed,
+ output=[fs]
+ )
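The reworked `FileSet` task globs the include patterns under `srcdir/base`, drops anything matching the exclude patterns via fnmatch, and compares the resulting (path, mtime) list against the previous memento to decide whether the fileset changed. A self-contained sketch of just the include/exclude matching (the directory and patterns are placeholders):

```python
# Sketch of the include/exclude matching used by the FileSet task.
import fnmatch
import glob
import os

glob_root = "src/rtl"                 # srcdir + params.base in the real task
include = ["*.sv", "*.svh"]
exclude = ["*_tb.sv"]

included = []
for pattern in include:
    included.extend(glob.glob(os.path.join(glob_root, pattern), recursive=False))

files = [
    (f, os.path.getmtime(f))
    for f in included
    if not any(fnmatch.fnmatch(f, os.path.join(glob_root, p)) for p in exclude)
]

# A change is detected when the sorted (path, mtime) list differs from the
# list stored in the previous run's memento.
files.sort(key=lambda x: x[0])
```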
dv_flow/mgr/std/flow.dv CHANGED
@@ -1,16 +1,19 @@
 
+ # yaml-language-server: $schema=https://dv-flow.github.io/dv-flow.schema.json
+
  package:
  name: std
 
  tasks:
  - name: Message
- pyclass: dv_flow.mgr.std.message.Message
+ pytask: dv_flow.mgr.std.message.Message
  with:
  msg:
  type: str
  value: ""
  - name: FileSet
- pyclass: dv_flow.mgr.std.fileset.FileSet
+ pytask: dv_flow.mgr.std.fileset.FileSet
+ passthrough: true
  with:
  base:
  type: str
@@ -24,9 +27,12 @@ package:
  exclude:
  type: str
  value: ""
- # - name: Exec
- # pyclass: dv_flow.mgr.std.exec.Exec
- # with: {}
+ - name: Exec
+ pytask: dv_flow.mgr.std.exec.Exec
+ with:
+ command:
+ type: str
+ value: ""
  types:
  # - name: TaskDataItem
  # doc: |
dv_flow/mgr/std/message.py CHANGED
@@ -1,7 +1,6 @@
 
- from dv_flow.mgr import Task, TaskData
+ from dv_flow.mgr import Task, TaskDataResult
 
- class Message(Task):
- async def run(self, input : TaskData) -> TaskData:
- print("%s: %s" % (self.name, self.params.msg))
- return input
+ async def Message(runner, input) -> TaskDataResult:
+ print("%s: %s" % (input.name, input.params.msg))
+ return TaskDataResult()
dv_flow/mgr/std/task_null.py CHANGED
@@ -1,11 +1,9 @@
- from ..task import Task
- from ..task_data import TaskData
+ from pydantic import BaseModel
+ from ..task_data import TaskDataResult
 
- class TaskNull(Task):
- """The Null task simply propagates its input to its output"""
+ class TaskNullParams(BaseModel):
+ pass
 
- async def run(self, input : TaskData) -> TaskData:
- # No memento ; data pass-through
- self._log.debug("%s: TaskNull.run" % self.name)
- return input
+ async def TaskNull(runner, input) -> TaskDataResult:
+ return TaskDataResult()
 
dv_flow/mgr/task_data.py CHANGED
@@ -42,6 +42,7 @@ class TaskParameterSet(BaseModel):
  seq : int = -1 # Order in which the param-set must appear
 
  class TaskDataInput(BaseModel):
+ name : str
  changed : bool
  srcdir : str
  rundir : str
dv_flow/mgr/task_def.py CHANGED
@@ -35,11 +35,12 @@ class TaskDef(BaseModel):
  name : str
  # type : Union[str,TaskSpec] = dc.Field(default_factory=list)
  uses : str = dc.Field(default=None)
- pyclass : str = dc.Field(default=None)
+ pytask : str = dc.Field(default=None)
  desc : str = dc.Field(default="")
  doc : str = dc.Field(default="")
- depends : List[Union[str,TaskSpec]] = dc.Field(default_factory=list, alias="needs")
- params: List[ParamDef] = dc.Field(default_factory=list, alias="with")
+ needs : List[Union[str,TaskSpec]] = dc.Field(default_factory=list, alias="needs")
+ params: Dict[str,Union[str,ParamDef]] = dc.Field(default_factory=dict, alias="with")
+ passthrough: bool = dc.Field(default=False)
  # out: List[TaskOutput] = dc.Field(default_factory=list)
 
  def copy(self) -> 'TaskDef':
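`TaskDef` now exposes the YAML keys `with:` and `needs:` through pydantic field aliases, and `params` becomes a name-keyed dict rather than a list. A small sketch of the alias mechanism (the model and data below are illustrative, not the real `TaskDef`):

```python
# Sketch of pydantic field aliases as used by TaskDef ("with" -> params,
# "needs" -> needs).
from typing import Dict, List, Union
from pydantic import BaseModel, Field

class MiniTaskDef(BaseModel):
    name : str
    needs : List[str] = Field(default_factory=list, alias="needs")
    params : Dict[str, Union[str, dict]] = Field(default_factory=dict, alias="with")

td = MiniTaskDef.model_validate({
    "name": "build",
    "needs": ["std.FileSet"],
    "with": {"command": "make"},
})
print(td.params["command"])   # -> make
```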
dv_flow/mgr/task_graph_builder.py CHANGED
@@ -26,6 +26,7 @@ from .package import Package
  from .package_def import PackageDef, PackageSpec
  from .pkg_rgy import PkgRgy
  from .task import Task
+ from .task_node import TaskNodeCtor
  from typing import Dict, List
 
  @dc.dataclass
@@ -92,24 +93,29 @@ class TaskGraphBuilder(object):
 
  self._pkg_s.append(pkg)
 
- ctor_t : TaskCtor = pkg.getTaskCtor(task_name)
+ ctor_t : TaskNodeCtor = pkg.getTaskCtor(task_name)
 
  self._logger.debug("ctor_t: %s" % ctor_t.name)
 
- depends = []
+ needs = []
 
- for dep in ctor_t.depends:
- if not dep in self._task_m.keys():
- task = self._mkTaskGraph(dep, rundir)
- self._task_m[dep] = task
- pass
- depends.append(self._task_m[dep])
+ for need_def in ctor_t.getNeeds():
+ if not need_def in self._task_m.keys():
+ task = self._mkTaskGraph(need_def, rundir)
+ self._task_m[need_def] = task
+ needs.append(self._task_m[need_def])
 
  # The returned task should have all param references resolved
- task = ctor_t.mkTask(
+ params = ctor_t.mkTaskParams()
+
+ if params is None:
+ raise Exception("ctor %s returned None for params" % str(ctor_t))
+
+ task = ctor_t.mkTaskNode(
+ params=params,
  name=task_name,
- depends=depends,
- rundir=rundir)
+ needs=needs)
+ task.rundir = rundir
 
  self._task_m[task.name] = task
 
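`_mkTaskGraph` now resolves each constructor's `getNeeds()` recursively, memoizing built nodes in `_task_m` so a task shared by several consumers is only built once. A stripped-down sketch of that memoized recursion (the constructor table and node shape are stand-ins):

```python
# Sketch of the memoized, recursive need-resolution pattern used by
# TaskGraphBuilder._mkTaskGraph. 'ctors' and the node dicts are stand-ins.
ctors = {
    "lib":  {"needs": []},
    "rtl":  {"needs": ["lib"]},
    "sim":  {"needs": ["rtl", "lib"]},
}

task_m = {}   # name -> built node (memo)

def mk_task_graph(name):
    if name in task_m:
        return task_m[name]
    needs = [mk_task_graph(n) for n in ctors[name]["needs"]]
    node = {"name": name, "needs": needs}      # ctor_t.mkTaskNode(...) in the real code
    task_m[name] = node
    return node

root = mk_task_graph("sim")
assert task_m["lib"] is root["needs"][1]       # "lib" was built exactly once
```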
dv_flow/mgr/task_listener_log.py ADDED
@@ -0,0 +1,15 @@
+ import dataclasses as dc
+
+ class TaskListenerLog(object):
+
+ def event(self, task : 'Task', reason : 'Reason'):
+ if reason == 'enter':
+ print("> Task %s" % task.name, flush=True)
+ elif reason == 'leave':
+ for m in task.result.markers:
+ print(" %s" % m)
+ print("< Task %s" % task.name, flush=True)
+ else:
+ print("- Task %s" % task.name, flush=True)
+ pass
+
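`TaskListenerLog` pairs with the new `add_listener`/`_notify` hooks on `TaskRunner` (see the task_runner.py changes further down): the runner calls each registered listener with the task and a reason string. A hedged wiring sketch; the runner construction details (for example the `rundir` argument) are assumptions:

```python
# Sketch of wiring TaskListenerLog into a runner via add_listener.
# How TaskSetRunner is constructed is an assumption here.
from dv_flow.mgr.task_listener_log import TaskListenerLog
from dv_flow.mgr.task_runner import TaskSetRunner

runner = TaskSetRunner(rundir="rundir")        # assumed constructor arguments
runner.add_listener(TaskListenerLog().event)

# During run(), the runner calls listener(task, "enter") before a task starts
# and listener(task, "leave") once its result (and markers) are available.
```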
dv_flow/mgr/task_node.py CHANGED
@@ -1,4 +1,5 @@
-
+ import os
+ import sys
  import dataclasses as dc
  import pydantic.dataclasses as pdc
  import logging
@@ -21,12 +22,18 @@ class TaskNode(object):
 
  # Runtime fields -- these get populated during execution
  changed : bool = False
+ passthrough : bool = False
  needs : List['TaskNode'] = dc.field(default_factory=list)
  rundir : str = dc.field(default=None)
  output : TaskDataOutput = dc.field(default=None)
+ result : TaskDataResult = dc.field(default=None)
 
  _log : ClassVar = logging.getLogger("TaskNode")
 
+ def __post_init__(self):
+ if self.needs is None:
+ self.needs = []
+
  async def do_run(self,
  runner,
  rundir,
@@ -41,7 +48,7 @@ class TaskNode(object):
  for need in self.needs:
  in_params.extend(need.output.output)
 
- # TODO: create an evaluator for substituting param values
+ # Create an evaluator for substituting param values
  eval = ParamRefEval()
 
  eval.setVar("in", in_params)
@@ -61,6 +68,7 @@
  raise Exception("Unhandled param type: %s" % str(value))
  input = TaskDataInput(
+ name=self.name,
  changed=changed,
  srcdir=self.srcdir,
  rundir=rundir,
@@ -68,7 +76,7 @@ class TaskNode(object):
  memento=memento)
 
  # TODO: notify of task start
- ret : TaskDataResult = await self.task(self, input)
+ self.result : TaskDataResult = await self.task(self, input)
  # TODO: notify of task complete
 
  # TODO: form a dep map from the outgoing param sets
@@ -76,13 +84,13 @@ class TaskNode(object):
 
  # Store the result
  self.output = TaskDataOutput(
- changed=ret.changed,
+ changed=self.result.changed,
  dep_m=dep_m,
- output=ret.output.copy())
+ output=self.result.output.copy())
 
  # TODO:
 
- return ret
+ return self.result
 
  def __hash__(self):
  return id(self)
@@ -96,38 +104,119 @@ class TaskNodeCtor(object):
  - Produces a TaskNode
  """
  name : str
+ srcdir : str
+ paramT : Any
+ passthrough : bool
 
- def mkTaskNode(self, srcdir, params, name=None) -> TaskNode:
+ def getNeeds(self) -> List[str]:
+ return []
+
+ def mkTaskNode(self,
+ params,
+ srcdir=None,
+ name=None,
+ needs=None) -> TaskNode:
  raise NotImplementedError("mkTaskNode in type %s" % str(type(self)))
 
- def mkTaskParams(self, params : Dict) -> Any:
- raise NotImplementedError("mkTaskParams in type %s" % str(type(self)))
+ def mkTaskParams(self, params : Dict = None) -> Any:
+ obj = self.paramT()
+
+ # Apply user-specified params
+ if params is not None:
+ for key,value in params.items():
+ if not hasattr(obj, key):
+ raise Exception("Parameters class %s does not contain field %s" % (
+ str(type(obj)),
+ key))
+ else:
+ if isinstance(value, Param):
+ if value.append is not None:
+ ex_value = getattr(obj, key, [])
+ ex_value.extend(value.append)
+ setattr(obj, key, ex_value)
+ elif value.prepend is not None:
+ ex_value = getattr(obj, key, [])
+ value = value.copy()
+ value.extend(ex_value)
+ setattr(obj, key, value)
+ pass
+ else:
+ raise Exception("Unhandled value spec: %s" % str(value))
+ else:
+ setattr(obj, key, value)
+ return obj
+
+ @dc.dataclass
+ class TaskNodeCtorDefBase(TaskNodeCtor):
+ """Task defines its own needs, that will need to be filled in"""
+ needs : List['str']
+
+ def __post_init__(self):
+ if self.needs is None:
+ self.needs = []
+
+ def getNeeds(self) -> List[str]:
+ return self.needs
+
+ @dc.dataclass
+ class TaskNodeCtorProxy(TaskNodeCtorDefBase):
+ """Task has a 'uses' clause, so we delegate creation of the node"""
+ uses : TaskNodeCtor
+
+ def mkTaskNode(self, params, srcdir=None, name=None, needs=None) -> TaskNode:
+ if srcdir is None:
+ srcdir = self.srcdir
+ node = self.uses.mkTaskNode(params=params, srcdir=srcdir, name=name, needs=needs)
+ node.passthrough = self.passthrough
+ return node
+
+ @dc.dataclass
+ class TaskNodeCtorTask(TaskNodeCtorDefBase):
+ task : Callable[['TaskRunner','TaskDataInput'],'TaskDataResult']
+
+ def mkTaskNode(self, params, srcdir=None, name=None, needs=None) -> TaskNode:
+ if srcdir is None:
+ srcdir = self.srcdir
+
+ node = TaskNode(name, srcdir, params, self.task, needs=needs)
+ node.passthrough = self.passthrough
+ node.task = self.task
+
+ return node
 
  @dc.dataclass
  class TaskNodeCtorWrapper(TaskNodeCtor):
  T : Any
- paramT : Any
 
  def __call__(self,
- srcdir,
- name=None,
- params=None,
+ name=None,
+ srcdir=None,
+ params=None,
  needs=None,
+ passthrough=None,
  **kwargs):
  """Convenience method for direct creation of tasks"""
  if params is None:
  params = self.mkTaskParams(kwargs)
 
- node = self.mkTaskNode(srcdir, params, name)
- if needs is not None:
- node.needs.extend(needs)
+ node = self.mkTaskNode(
+ srcdir=srcdir,
+ params=params,
+ name=name,
+ needs=needs)
+ if passthrough is not None:
+ node.passthrough = passthrough
+ else:
+ node.passthrough = self.passthrough
+
  return node
 
- def mkTaskNode(self, srcdir, params, name=None) -> TaskNode:
- node = TaskNode(name, srcdir, params, self.T)
+ def mkTaskNode(self, params, srcdir=None, name=None, needs=None) -> TaskNode:
+ node = TaskNode(name, srcdir, params, self.T, needs=needs)
+ node.passthrough = self.passthrough
  return node
 
- def mkTaskParams(self, params : Dict) -> Any:
+ def mkTaskParams(self, params : Dict = None) -> Any:
  obj = self.paramT()
 
  # Apply user-specified params
@@ -154,8 +243,18 @@ class TaskNodeCtorWrapper(TaskNodeCtor):
  setattr(obj, key, value)
  return obj
 
- def task(paramT):
+ def task(paramT,passthrough=False):
+ """Decorator to wrap a task method as a TaskNodeCtor"""
  def wrapper(T):
- ctor = TaskNodeCtorWrapper(T.__name__, T, paramT)
+ task_mname = T.__module__
+ task_module = sys.modules[task_mname]
+ ctor = TaskNodeCtorWrapper(
+ name=T.__name__,
+ srcdir=os.path.dirname(os.path.abspath(task_module.__file__)),
+ paramT=paramT,
+ passthrough=passthrough,
+ T=T)
  return ctor
  return wrapper
+
+
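The new `@task` decorator (re-exported from `dv_flow.mgr` in this release) wraps an async task function in a `TaskNodeCtorWrapper`, recording the defining module's directory as `srcdir` and binding the parameter model; calling the wrapped object builds a `TaskNode` directly. A usage sketch based on the signatures shown above; the parameter model and task body are made up:

```python
# Usage sketch for the @task decorator. MyParams and Hello are illustrative.
from pydantic import BaseModel
from dv_flow.mgr import TaskDataResult, task

class MyParams(BaseModel):
    msg : str = "hello"

@task(MyParams)
async def Hello(runner, input) -> TaskDataResult:
    print("%s: %s" % (input.name, input.params.msg))
    return TaskDataResult()

# The decorated object is a TaskNodeCtorWrapper; calling it creates a TaskNode,
# with keyword arguments routed through mkTaskParams to fill the params model.
node = Hello(name="hello1", msg="hi there")
```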
dv_flow/mgr/task_runner.py CHANGED
@@ -1,7 +1,11 @@
  import asyncio
+ import json
+ import os
+ import re
  import dataclasses as dc
+ import logging
  from toposort import toposort
- from typing import Any, Callable, List, Tuple, Union
+ from typing import Any, Callable, ClassVar, List, Tuple, Union
  from .task_data import TaskDataInput, TaskDataOutput, TaskDataResult
  from .task_node import TaskNode
 
@@ -12,6 +16,15 @@ class TaskRunner(object):
  # List of [Listener:Callable[Task],Recurisve:bool]
  listeners : List[Tuple[Callable['Task','Reason'], bool]] = dc.field(default_factory=list)
 
+ _log : ClassVar = logging.getLogger("TaskRunner")
+
+ def add_listener(self, l, recursive=False):
+ self.listeners.append((l, recursive))
+
+ def _notify(self, task : 'Task', reason : 'Reason'):
+ for listener in self.listeners:
+ listener[0](task, reason)
+
  async def do_run(self,
  task : 'Task',
  memento : Any = None) -> 'TaskDataResult':
@@ -26,17 +39,52 @@
  class TaskSetRunner(TaskRunner):
  nproc : int = 8
 
+ _anon_tid : int = 1
+
+ _log : ClassVar = logging.getLogger("TaskSetRunner")
+
  async def run(self, task : Union[TaskNode,List[TaskNode]]):
+ # Ensure that the rundir exists or can be created
+
+ if not os.path.isdir(self.rundir):
+ os.makedirs(self.rundir)
+
+ if not os.path.isdir(os.path.join(self.rundir, "cache")):
+ os.makedirs(os.path.join(self.rundir, "cache"))
+
+ src_memento = None
+ dst_memento = {}
+ if os.path.isfile(os.path.join(self.rundir, "cache", "mementos.json")):
+ try:
+ with open(os.path.join(self.rundir, "cache", "mementos.json"), "r") as f:
+ src_memento = json.load(f)
+ except Exception as e:
+ src_memento = {}
+ else:
+ src_memento = {}
+
+
  # First, build a depedency map
  tasks = task if isinstance(task, list) else [task]
  dep_m = {}
+ self._anon_tid = 1
  for t in tasks:
  self._buildDepMap(dep_m, t)
 
- print("dep_m: %s" % str(dep_m))
+ if self._log.isEnabledFor(logging.DEBUG):
+ self._log.debug("Deps:")
+ for t,value in dep_m.items():
+ self._log.debug(" Task: %s", str(t.name))
+ for v in value:
+ self._log.debug(" - %s", str(v.name))
 
  order = list(toposort(dep_m))
 
+ if self._log.isEnabledFor(logging.DEBUG):
+ self._log.debug("Order:")
+ for active_s in order:
+ self._log.debug("- {%s}", ",".join(t.name for t in active_s))
+
  active_task_l = []
  done_task_s = set()
  for active_s in order:
@@ -49,25 +97,56 @@ class TaskSetRunner(TaskRunner):
  for i in range(len(active_task_l)):
  if active_task_l[i][1] == d:
  tt = active_task_l[i][0]
+ if tt.result.memento is not None:
+ dst_memento[tt.name] = tt.result.memento.model_dump()
+ else:
+ dst_memento[tt.name] = None
+ self._notify(tt, "leave")
  done_task_s.add(tt)
  active_task_l.pop(i)
  break
  if t not in done_task_s:
+ memento = src_memento.get(t.name, None)
+ dirname = t.name
+ invalid_chars_pattern = r'[\/:*?"<>|#%&{}\$\\!\'`;=@+]'
+ # Replace invalid characters with the replacement string.
+ dirname = re.sub(invalid_chars_pattern, '_', dirname)
+
+ rundir = os.path.join(self.rundir, dirname)
+
+ if not os.path.isdir(rundir):
+ os.makedirs(rundir, exist_ok=True)
+
+ self._notify(t, "enter")
  coro = asyncio.Task(t.do_run(
  self,
- self.rundir, # TODO
- None)) # TODO: memento
+ rundir,
+ memento))
  active_task_l.append((t, coro))
 
  # Now, wait for tasks to complete
  if len(active_task_l):
+ # TODO: Shouldn't gather here -- reach to each completion
  coros = list(at[1] for at in active_task_l)
  res = await asyncio.gather(*coros)
+ for tt in active_task_l:
+ if tt[0].result.memento is not None:
+ dst_memento[tt[0].name] = tt[0].result.memento.model_dump()
+ else:
+ dst_memento[tt[0].name] = None
+ self._notify(tt[0], "leave")
+ active_task_l.clear()
 
+ with open(os.path.join(self.rundir, "cache", "mementos.json"), "w") as f:
+ json.dump(dst_memento, f)
 
  pass
 
  def _buildDepMap(self, dep_m, task : TaskNode):
+ if task.name is None:
+ task.name = "anon_%d" % self._anon_tid
+ self._anon_tid += 1
+
  if task not in dep_m.keys():
  dep_m[task] = set(task.needs)
  for need in task.needs:
@@ -86,8 +165,8 @@ class SingleTaskRunner(TaskRunner):
  # TODO: create an evaluator for substituting param values
  eval = None
 
- for field in dc.fields(task.params):
- print("Field: %s" % field.name)
+ # for field in dc.fields(task.params):
+ # print("Field: %s" % field.name)
 
  input = TaskDataInput(
  changed=changed,
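`TaskSetRunner.run` builds a dependency map (task -> set of needed tasks) and uses `toposort` to obtain batches of tasks with no unmet dependencies; each batch is launched concurrently and the resulting mementos are written to `rundir/cache/mementos.json`. A small sketch of the toposort batching step (the graph of task names is hypothetical):

```python
# Sketch of the toposort batching TaskSetRunner relies on.
from toposort import toposort

dep_m = {
    "sim":  {"rtl", "lib"},
    "rtl":  {"lib"},
    "lib":  set(),
}

order = list(toposort(dep_m))
# -> [{'lib'}, {'rtl'}, {'sim'}]; each set is a batch whose members have no
#    unmet dependencies and can be awaited concurrently (asyncio.gather).
for batch in order:
    print(sorted(batch))
```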
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: dv-flow-mgr
- Version: 0.0.1.13577785562a1
+ Version: 0.0.1.13657597614a1
  Summary: DV Flow Manager is a build system for silicon design
  Author-email: Matthew Ballance <matt.ballance@gmail.com>
  License: Apache License
@@ -1,4 +1,4 @@
1
- dv_flow/mgr/__init__.py,sha256=IZA7I1u7RH34DrJXSkETxWFpK5Jn_A2zXfnCAzJ8XxA,289
1
+ dv_flow/mgr/__init__.py,sha256=_IH_cJws79HRCdn0-Q11WHFuMQYA6oPCZIufeXT-BOc,317
2
2
  dv_flow/mgr/__main__.py,sha256=uik6gDAvtZNG0yyDKYc4FTl6R1QHAR543jNG2lCAa6E,1771
3
3
  dv_flow/mgr/eval_jq.py,sha256=Ue-qkyNW6uOu2Wy8u6nrTbPPY7ujaHd0-4iki0eV3Ec,294
4
4
  dv_flow/mgr/expr_eval.py,sha256=Mp0TvrV6gZWcj1uYwLfPNI-ARwwdAfwntE5byLBfBuY,2276
@@ -7,39 +7,41 @@ dv_flow/mgr/fileset.py,sha256=FNvC5sU2ArxJ0OO3v8dXTv8zX-bZ5t0a0ljne0fQQ1o,1150
7
7
  dv_flow/mgr/fragment_def.py,sha256=cyzp1XeWtNOaagScmeS-BPsoXj9j2LTBbKq5ZUioz8I,1641
8
8
  dv_flow/mgr/out,sha256=d8GGBi3J43fhdLBlnsUbzBfRe0TD0QTP3nOTz54l2bI,200
9
9
  dv_flow/mgr/package.py,sha256=878twhPD-E1pFlDNUtuyeFEgJ_Y89b560og4St-Iwrs,1679
10
- dv_flow/mgr/package_def.py,sha256=dwvs-7kGEp7wFrBK2Fybysb-vSKwtWOtxTrg1VIaL14,14975
10
+ dv_flow/mgr/package_def.py,sha256=EdKwyxqvWT8xQmALGa6BbNv0PMbDLZTr3A27L9coIzc,13761
11
11
  dv_flow/mgr/package_import_spec.py,sha256=ah3r15v5Jdub2poc3sgi6Uar1L3oGoYsCPPNiOHV-a4,1760
12
12
  dv_flow/mgr/param.py,sha256=3BY-ucig6JRw73FhjyJQL-vpd57qhAzixgZ8I5FoUpw,553
13
- dv_flow/mgr/param_def.py,sha256=e2WvRCMArbgcsKC4TKQqZTscZmCdo3WvVR6w3LN0VK8,727
14
- dv_flow/mgr/param_ref_eval.py,sha256=dpV6IPdTtZHLy8maTsN61Ce1Cc5M5dLQW5sauNujEMU,1090
13
+ dv_flow/mgr/param_def.py,sha256=gLua-EQiY8V2CFX-2svLRIlrs8PEeGh4-EPtn4a2Mng,712
14
+ dv_flow/mgr/param_ref_eval.py,sha256=U8QhDf1n_9bLnExdc1oQamq5-pOUXrFalOX4oyR9UoM,1138
15
15
  dv_flow/mgr/parsetab.py,sha256=enSOnMQ-woIsMEzHyeYiefvhAl8enxfX9Ct_o8-jkqs,3780
16
16
  dv_flow/mgr/pkg_rgy.py,sha256=2R_EaeBDJn5qUq9DzSnLc37wUP36MWSv-p0LgUjJRAg,4471
17
17
  dv_flow/mgr/task.py,sha256=kLQSvnVwj9ROIDtxq8lLu-4mJizTxOqvUeogmgN6QAA,5976
18
18
  dv_flow/mgr/task_ctor.py,sha256=hlfl-UVvyjzLFN6D0Oel9eBs0xUQPqCX7gQ0uEHoL7o,1382
19
- dv_flow/mgr/task_data.py,sha256=9c5NrVZWECHFXrCPOpwZBQXRhr1O3ni5aFeP2I1P5Rw,11398
20
- dv_flow/mgr/task_def.py,sha256=WAW1fPXUfUiQcfmgCx0iCMddMBKATDa5RsRztJWRbUk,1819
19
+ dv_flow/mgr/task_data.py,sha256=gzs7BfwTLPKUEpaiGMwvA3MNfebTKHv_wDFppvWHo8A,11413
20
+ dv_flow/mgr/task_def.py,sha256=PORXrUBoynoj_oYAVISR5NW53OZevZ6hL4T7TutkkHo,1879
21
21
  dv_flow/mgr/task_exec_data.py,sha256=aT__kmVmNxqnS_GbTRS1vrYgKiv8cH-HUSmRb6YYKsI,640
22
- dv_flow/mgr/task_graph_builder.py,sha256=-lRSjWU2UJf3euVIFtoVIU7Qdox7MI1sKERWg1k7U_g,7058
22
+ dv_flow/mgr/task_graph_builder.py,sha256=sswBPUZg71dFT8kaB0towbWIadhNdbTy5gLgvC2uiVA,7276
23
23
  dv_flow/mgr/task_graph_runner.py,sha256=jUGI49QvxUCfQoKQDDk2psbeapIcCg72qNOW1JipHzM,2182
24
24
  dv_flow/mgr/task_graph_runner_local.py,sha256=OrydPwtQ8E7hYWvSXx0h7lI3nfUNFyklULhsyMwz9dA,4687
25
25
  dv_flow/mgr/task_impl_data.py,sha256=bFPijoKrh9x7fZN2DsvRJp0UHo-gGM0VjtDQISyfhFk,321
26
+ dv_flow/mgr/task_listener_log.py,sha256=B9-LEgSF2QwcUREBoUjEsS0rRYg1_ilFHBUKlNeT5YA,444
26
27
  dv_flow/mgr/task_memento.py,sha256=C7VTQpBhDEoYuDmE6YTM-6TLMLnqHp6Y0Vat1aTgtCs,1096
27
- dv_flow/mgr/task_node.py,sha256=uCsyNT9IQu0cok27qTEi4iTB6FqobXbx7hbVAmjELKc,5130
28
+ dv_flow/mgr/task_node.py,sha256=iQKiDTC8Oz8tQwLzJF4NpsREg8JPXI-rju5tG7P6dfw,8448
28
29
  dv_flow/mgr/task_output.py,sha256=l-W-FvVo6YDah1RQS-I9N0KUtB3vp-kl7lxIdmNz0l4,178
29
30
  dv_flow/mgr/task_params_ctor.py,sha256=aXgB8o9xFPjaEjGW_xYkEC0N0apzGzGUPDj7g2ZLvus,1112
30
- dv_flow/mgr/task_runner.py,sha256=JmerE6CyXiJJK7-n6P65OzwW9UTYFQqV8Cck7eHe0B8,3882
31
+ dv_flow/mgr/task_runner.py,sha256=Xv5bPwAKV793R9W5Ksu1u_WBoRUzlpfHpGtGLXdsIlY,6940
31
32
  dv_flow/mgr/type.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
32
33
  dv_flow/mgr/type_def.py,sha256=KdhuNlfw-NKU-4VZFCnMPyj775yEB7cpr5tz73a9yuQ,259
33
34
  dv_flow/mgr/util.py,sha256=06eVyURF4ga-s8C9Sd3ZSDebwO4QS0XXaB8xADVbWRc,1437
34
35
  dv_flow/mgr/cmds/cmd_run.py,sha256=eths8kT7mBmpZqwOuMtpKAaux4rg-f7hPBxxTHbpKT4,2903
35
36
  dv_flow/mgr/share/flow.json,sha256=lNmZex9NXkYbyb2aZseQfUOkV9CMyfH0iLODEI7EPBw,5096
36
- dv_flow/mgr/std/fileset.py,sha256=uP7bGntRq-Tn5_GEFnt0_J_OAmfvep3GlCwCuE8by4o,2710
37
- dv_flow/mgr/std/flow.dv,sha256=j9wLrF3Ghh1ZLbJxmk7WiNiRYUYEer-8CCUA5hsgtfk,1409
38
- dv_flow/mgr/std/message.py,sha256=BPTHnEMD4tBufQ9LvsS9Sa_0xjaJATbBpwqosWslvVA,193
39
- dv_flow/mgr/std/task_null.py,sha256=KObmjG_4D08GJ1k6neqKIQrFY72Sj0jLnwXxEkq5HA0,321
40
- dv_flow_mgr-0.0.1.13577785562a1.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
41
- dv_flow_mgr-0.0.1.13577785562a1.dist-info/METADATA,sha256=UeJMpl2RnCOQX98UgtpVXI3qjSPhe4Zs_Gcg4-Cv4hE,13276
42
- dv_flow_mgr-0.0.1.13577785562a1.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
43
- dv_flow_mgr-0.0.1.13577785562a1.dist-info/entry_points.txt,sha256=1roy8wAFM48LabOvr6jiOw0MUs-qE8X3Vf8YykPazxk,50
44
- dv_flow_mgr-0.0.1.13577785562a1.dist-info/top_level.txt,sha256=amfVTkggzYPtWwLqNmRukfz1Buu0pGS2SrYBBLhXm04,8
45
- dv_flow_mgr-0.0.1.13577785562a1.dist-info/RECORD,,
37
+ dv_flow/mgr/std/exec.py,sha256=ETx9xSxhdCD_iw6pcmhrafDCJ-41AneyEAPwQf3q-3w,452
38
+ dv_flow/mgr/std/fileset.py,sha256=r2s2H45FuBhTLsjvjqn26Zb6EsR-psvV00ObMIyEGNA,2486
39
+ dv_flow/mgr/std/flow.dv,sha256=jlFOh3xVECOzHws7x6YvJ9eCIGHM5gsPeEnheiGOukY,1553
40
+ dv_flow/mgr/std/message.py,sha256=CWrBKImbXKe2d7hJ223U3Ifuxo54zLpFPJviE8BUJvk,188
41
+ dv_flow/mgr/std/task_null.py,sha256=UKwUnqwFPBY8BO44ZAPcgehQB59kHZFa1qyZc1TwUqE,196
42
+ dv_flow_mgr-0.0.1.13657597614a1.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
43
+ dv_flow_mgr-0.0.1.13657597614a1.dist-info/METADATA,sha256=HVQUNoijJOwkTY6DfEDvnWzRDQ3JVzQFeR-9brAOQus,13276
44
+ dv_flow_mgr-0.0.1.13657597614a1.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
45
+ dv_flow_mgr-0.0.1.13657597614a1.dist-info/entry_points.txt,sha256=1roy8wAFM48LabOvr6jiOw0MUs-qE8X3Vf8YykPazxk,50
46
+ dv_flow_mgr-0.0.1.13657597614a1.dist-info/top_level.txt,sha256=amfVTkggzYPtWwLqNmRukfz1Buu0pGS2SrYBBLhXm04,8
47
+ dv_flow_mgr-0.0.1.13657597614a1.dist-info/RECORD,,