dv-flow-mgr 0.0.1.12968982426a1__py3-none-any.whl → 0.0.1.12971126211a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dv_flow/mgr/__main__.py +13 -0
- dv_flow/mgr/cmds/cmd_run.py +4 -1
- dv_flow/mgr/package.py +4 -1
- dv_flow/mgr/package_def.py +148 -106
- dv_flow/mgr/std/fileset.py +7 -4
- dv_flow/mgr/std/task_null.py +1 -0
- dv_flow/mgr/task.py +148 -19
- dv_flow/mgr/task_data.py +11 -8
- dv_flow/mgr/task_graph_builder.py +36 -60
- dv_flow/mgr/task_graph_runner_local.py +5 -4
- {dv_flow_mgr-0.0.1.12968982426a1.dist-info → dv_flow_mgr-0.0.1.12971126211a1.dist-info}/METADATA +1 -1
- dv_flow_mgr-0.0.1.12971126211a1.dist-info/RECORD +28 -0
- dv_flow_mgr-0.0.1.12968982426a1.dist-info/RECORD +0 -28
- {dv_flow_mgr-0.0.1.12968982426a1.dist-info → dv_flow_mgr-0.0.1.12971126211a1.dist-info}/LICENSE +0 -0
- {dv_flow_mgr-0.0.1.12968982426a1.dist-info → dv_flow_mgr-0.0.1.12971126211a1.dist-info}/WHEEL +0 -0
- {dv_flow_mgr-0.0.1.12968982426a1.dist-info → dv_flow_mgr-0.0.1.12971126211a1.dist-info}/entry_points.txt +0 -0
- {dv_flow_mgr-0.0.1.12968982426a1.dist-info → dv_flow_mgr-0.0.1.12971126211a1.dist-info}/top_level.txt +0 -0
dv_flow/mgr/__main__.py
CHANGED
@@ -20,10 +20,17 @@
 #*
 #****************************************************************************
 import argparse
+import logging
 from .cmds.cmd_run import CmdRun

 def get_parser():
     parser = argparse.ArgumentParser(description='dv_flow_mgr')
+    parser.add_argument("-d", "--debug",
+        help="Enable debug",
+        action="store_true")
+    parser.add_argument("-v", "--verbose",
+        help="Enable verbose output",
+        action="store_true")
     subparsers = parser.add_subparsers(required=True)

     run_parser = subparsers.add_parser('run', help='run a flow')
@@ -35,6 +42,12 @@ def get_parser():
 def main():
     parser = get_parser()
     args = parser.parse_args()
+
+    if args.debug:
+        logging.basicConfig(level=logging.DEBUG)
+    elif args.verbose:
+        logging.basicConfig(level=logging.INFO)
+
     args.func(args)

 if __name__ == "__main__":
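The new -d/--debug and -v/--verbose flags simply map CLI verbosity onto Python's standard logging levels before the selected subcommand runs. A minimal standalone sketch of the same pattern (the run handler below is a placeholder, not the packaged CmdRun):

import argparse
import logging

def main():
    parser = argparse.ArgumentParser(description='dv_flow_mgr')
    parser.add_argument("-d", "--debug", action="store_true",
        help="Enable debug")
    parser.add_argument("-v", "--verbose", action="store_true",
        help="Enable verbose output")
    subparsers = parser.add_subparsers(required=True)
    run = subparsers.add_parser('run', help='run a flow')
    run.set_defaults(func=lambda args: print("running"))

    args = parser.parse_args()
    # -d takes precedence over -v; with neither flag, logging stays at the WARNING default
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    elif args.verbose:
        logging.basicConfig(level=logging.INFO)
    args.func(args)

if __name__ == "__main__":
    main()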
dv_flow/mgr/cmds/cmd_run.py
CHANGED
@@ -1,5 +1,7 @@
 import asyncio
 import os
+import logging
+from typing import ClassVar
 from ..task_graph_runner import TaskGraphRunner
 from ..util import loadProjPkgDef
 from ..task_graph_builder import TaskGraphBuilder
@@ -8,6 +10,7 @@ from ..pkg_rgy import PkgRgy


 class CmdRun(object):
+    _log : ClassVar = logging.getLogger("CmdRun")

     def __call__(self, args):

@@ -17,7 +20,7 @@ class CmdRun(object):
         if pkg is None:
             raise Exception("Failed to find a 'flow.dv' file that defines a package in %s or its parent directories" % os.getcwd())

-
+        self._log.debug("Root flow file defines package: %s" % pkg.name)

         if len(args.tasks) > 0:
             pass
dv_flow/mgr/package.py
CHANGED
@@ -20,7 +20,8 @@
 #*
 #****************************************************************************
 import dataclasses as dc
-
+import logging
+from typing import Any, ClassVar, Dict
 from .task import TaskCtor

 @dc.dataclass
@@ -30,8 +31,10 @@ class Package(object):
     # Package holds constructors for tasks
     # - Dict holds the default parameters for the task
     tasks : Dict[str,TaskCtor] = dc.field(default_factory=dict)
+    _log : ClassVar = logging.getLogger("Package")

     def getTaskCtor(self, name : str) -> TaskCtor:
+        self._log.debug("-- %s::getTaskCtor: %s" % (self.name, name))
         if name not in self.tasks.keys():
             raise Exception("Task %s not present in package %s" % (name, self.name))
         return self.tasks[name]
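This release threads a named logger through each class as a ClassVar; on the @dc.dataclass types the annotation also keeps the logger out of the generated fields. A small illustration of the pattern with a hypothetical class (not part of the package):

import dataclasses as dc
import logging
from typing import ClassVar, Dict

@dc.dataclass
class Registry(object):
    # ClassVar keeps _log out of __init__ and the generated dataclass fields
    _log : ClassVar = logging.getLogger("Registry")
    entries : Dict[str, str] = dc.field(default_factory=dict)

    def get(self, name: str) -> str:
        self._log.debug("-- Registry::get: %s", name)
        if name not in self.entries:
            raise Exception("Entry %s not present" % name)
        return self.entries[name]

logging.basicConfig(level=logging.DEBUG)
print(Registry(entries={"a": "1"}).get("a"))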
dv_flow/mgr/package_def.py
CHANGED
@@ -32,7 +32,7 @@ from typing import Any, Dict, List, Callable, Tuple, ClassVar
 from .fragment_def import FragmentDef
 from .package import Package
 from .package_import_spec import PackageImportSpec, PackageSpec
-from .task import TaskCtor,
+from .task import TaskCtor, TaskCtorProxy, TaskCtorCls, TaskCtorParam, TaskCtorParamCls
 from .task_def import TaskDef, TaskSpec
 from .std.task_null import TaskNull

@@ -90,53 +90,131 @@ class PackageDef(BaseModel):
         self._log.debug("<-- mkPackage %s" % self.name)
         return ret

-    def
-
+    def getTaskCtor(self, session, task_name, tasks_m):
+        self._log.debug("--> getTaskCtor")
+        # Find package (not package_def) that implements this task
+        # Insert an indirect reference to that tasks's constructor
+        last_dot = task_name.rfind('.')
+
+        if last_dot != -1:
+            pkg_name = task_name[:last_dot]
+            task_name = task_name[last_dot+1:]
+        else:
+            pkg_name = None
+
+        if pkg_name is not None:
+            self._log.debug("Package-qualified 'uses'")
+            pkg = session.getPackage(PackageSpec(pkg_name))
+            if pkg is None:
+                raise Exception("Failed to find package %s" % pkg_name)
+            ctor_t = pkg.getTaskCtor(task_name)
+        else:
+            self._log.debug("Unqualified 'uses'")
+            if task_name not in tasks_m.keys():
+                raise Exception("Failed to find task %s" % task_name)
+            if len(tasks_m[task_name]) != 3:
+                raise Exception("Task %s not fully defined" % task_name)

-
-
-        # Insert an indirect reference to that tasks's constructor
-        last_dot = task.uses.rfind('.')
+            ctor_t = tasks_m[task_name][2]
+        return ctor_t

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    def handleParams(self, task, ctor_t):
+
+        if task.params is not None and len(task.params) > 0:
+            decl_params = False
+
+            # First, add in a parameter-setting stage
+            ctor_t = TaskCtorParam(
+                name=ctor_t.name,
+                uses=ctor_t,
+                srcdir=ctor_t.srcdir)
+            # ctor_t.params.update(task.params)
+
+            for value in task.params.values():
+                if "type" in value:
+                    decl_params = True
+                    break
+
+            if decl_params:
+                # We need to combine base parameters with new parameters
+                field_m = {}
+                # First, add parameters from the base class
+                base_o = ctor_t.mkParams()
+                for fname,info in base_o.model_fields.items():
+                    self._log.debug("Field: %s (%s)" % (fname, info.default))
+                    field_m[fname] = (info.annotation, info.default)
+                ptype_m = {
+                    "str" : str,
+                    "int" : int,
+                    "float" : float,
+                    "bool" : bool,
+                    "list" : List
+                }
+                pdflt_m = {
+                    "str" : "",
+                    "int" : 0,
+                    "float" : 0.0,
+                    "bool" : False,
+                    "list" : []
+                }
+                for p in task.params.keys():
+                    param = task.params[p]
+                    if type(param) == dict and "type" in param.keys():
+                        ptype_s = param["type"]
+                        if ptype_s not in ptype_m.keys():
+                            raise Exception("Unknown type %s" % ptype_s)
+                        ptype = ptype_m[ptype_s]
+
+                        if p in field_m.keys():
+                            raise Exception("Duplicate field %s" % p)
+                        if "value" in param.keys():
+                            field_m[p] = (ptype, param["value"])
+                        else:
+                            field_m[p] = (ptype, pdflt_m[ptype_s])
+                    else:
+                        if p not in field_m.keys():
+                            raise Exception("Field %s not found" % p)
+                        if type(param) != dict:
+                            value = param
+                        elif "value" in param.keys():
+                            value = param["value"]
+                        else:
+                            raise Exception("No value specified for param %s: %s" % (
+                                p, str(param)))
+                        field_m[p] = (field_m[p][0], value)
+                self._log.debug("field_m: %s" % str(field_m))
+                param_t = pydantic.create_model(
+                    "Task%sParams" % task.name, **field_m)
+                ctor_t = TaskCtorParamCls(
+                    name=ctor_t.name,
+                    uses=ctor_t,
+                    params_ctor=param_t)
+            else: # no new parameters declared
+                for p in task.params.keys():
+                    param = task.params[p]
+                    if p not in field_m.keys():
+                        raise Exception("Field %s not found" % p)
+                    if type(param) != dict:
+                        value = param
+                    elif "value" in param.keys():
+                        value = param["value"]
+                    else:
+                        raise Exception("No value specified for param %s: %s" % (
+                            p, str(param)))
+                    field_m[p] = (field_m[p][0], value)
+                    ctor_t.params[p] = value

+        return ctor_t
+
+    def mkTaskCtor(self, session, task, srcdir, tasks_m) -> TaskCtor:
+        self._log.debug("--> %s::mkTaskCtor %s" % (self.name, task.name))
+        ctor_t : TaskCtor = None
+
+        # Determine the implementation constructor first
         if task.pyclass is not None:
             # Built-in impl
             # Now, lookup the class
+            self._log.debug("Use PyClass implementation")
             last_dot = task.pyclass.rfind('.')
             clsname = task.pyclass[last_dot+1:]
             modname = task.pyclass[:last_dot]
@@ -154,72 +232,36 @@

         if not hasattr(mod, clsname):
             raise Exception("Class %s not found in module %s" % (clsname, modname))
-
-
-        if
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                "int" : int,
-                "float" : float,
-                "bool" : bool,
-                "list" : List
-            }
-            pdflt_m = {
-                "str" : "",
-                "int" : 0,
-                "float" : 0.0,
-                "bool" : False,
-                "list" : []
-            }
-            for p in task.params.keys():
-                param = task.params[p]
-                if type(param) == dict and "type" in param.keys():
-                    ptype_s = param["type"]
-                    if ptype_s not in ptype_m.keys():
-                        raise Exception("Unknown type %s" % ptype_s)
-                    ptype = ptype_m[ptype_s]
-
-                    if p in field_m.keys():
-                        raise Exception("Duplicate field %s" % p)
-                    if "value" in param.keys():
-                        field_m[p] = (ptype, param["value"])
-                    else:
-                        field_m[p] = (ptype, pdflt_m[ptype_s])
-                else:
-                    if p not in field_m.keys():
-                        raise Exception("Field %s not found" % p)
-                    if type(param) != dict:
-                        value = param
-                    elif "value" in param.keys():
-                        value = param["value"]
-                    else:
-                        raise Exception("No value specified for param %s: %s" % (
-                            p, str(param)))
-                    field_m[p] = (field_m[p][0], value)
-            self._log.debug("field_m: %s" % str(field_m))
-            ctor_t.param_ctor = pydantic.create_model(
-                "Task%sParams" % task.name, **field_m)
+            task_ctor = getattr(mod, clsname)
+
+            # Determine if we need to use a new
+
+            if task.uses is not None:
+                uses = self.getTaskCtor(session, task.uses, tasks_m)
+            else:
+                uses = None
+
+            ctor_t = TaskCtorCls(
+                name=task.name,
+                uses=uses,
+                task_ctor=task_ctor,
+                srcdir=srcdir)
+        elif task.uses is not None:
+            # Use the existing (base) to create the implementation
+            ctor_t = TaskCtor(
+                name=task.name,
+                uses=self.getTaskCtor(session, task.uses, tasks_m),
+                srcdir=srcdir)
         else:
-
-
-
-
+            self._log.debug("Use 'Null' as the class implementation")
+            ctor_t = TaskCtorCls(
+                name=task.name,
+                task_ctor=TaskNull,
+                srcdir=srcdir)
+
+        ctor_t = self.handleParams(task, ctor_t)

+        self._log.debug("<-- %s::mkTaskCtor %s" % (self.name, task.name))
         return ctor_t

     @staticmethod
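When a task declares parameters that carry a type, the new handleParams() path turns the declarations into (type, default) tuples and builds a pydantic model class at runtime, which TaskCtorParamCls then uses as its params_ctor. A reduced sketch of that mechanism, with a made-up parameter block:

from typing import List
import pydantic

# Hypothetical parameter declarations, as they might appear in a task definition
params = {
    "top"   : {"type": "str", "value": "my_top"},
    "trace" : {"type": "bool"},
}

ptype_m = {"str": str, "int": int, "float": float, "bool": bool, "list": List}
pdflt_m = {"str": "", "int": 0, "float": 0.0, "bool": False, "list": []}

field_m = {}
for name, decl in params.items():
    # Declared value wins; otherwise fall back to the per-type default
    field_m[name] = (ptype_m[decl["type"]], decl.get("value", pdflt_m[decl["type"]]))

# Same call the diff uses: a model class created at runtime from the field map
TaskMyTaskParams = pydantic.create_model("TaskMyTaskParams", **field_m)

p = TaskMyTaskParams()
print(p.top, p.trace)   # my_top False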
dv_flow/mgr/std/fileset.py
CHANGED
@@ -2,8 +2,9 @@
 import os
 import fnmatch
 import glob
+import logging
 import pydantic.dataclasses as dc
-from typing import List, Tuple
+from typing import ClassVar, List, Tuple
 from dv_flow.mgr import Task, TaskData, TaskMemento
 from dv_flow.mgr import FileSet as _FileSet

@@ -12,8 +13,10 @@ class TaskFileSetMemento(TaskMemento):

 class FileSet(Task):

+    _log : ClassVar = logging.getLogger("FileSet")
+
     async def run(self, input : TaskData) -> TaskData:
-
+        self._log.debug("TaskFileSet run: %s: basedir=%s, base=%s type=%s include=%s" % (
             self.name,
             self.srcdir,
             self.params.base, self.params.type, str(self.params.include)
@@ -52,8 +55,8 @@ class FileSet(Task):
         if ex_memento is not None and not input.changed:
             ex_memento.files.sort(key=lambda x: x[0])
             memento.files.sort(key=lambda x: x[0])
-
-
+            self._log.debug("ex_memento.files: %s" % str(ex_memento.files))
+            self._log.debug("memento.files: %s" % str(memento.files))
             input.changed = ex_memento != memento
         else:
             input.changed = True
dv_flow/mgr/std/task_null.py
CHANGED
dv_flow/mgr/task.py
CHANGED
@@ -21,10 +21,10 @@
 #****************************************************************************
 import os
 import json
-import asyncio
 import dataclasses as dc
+import logging
 from pydantic import BaseModel
-from typing import Any, Callable, Dict, List, Tuple
+from typing import Any, Callable, ClassVar, Dict, List, Tuple
 from .task_data import TaskData
 from .task_memento import TaskMemento

@@ -35,30 +35,148 @@ class TaskSpec(object):
 class TaskParams(BaseModel):
     pass

+
 @dc.dataclass
 class TaskCtor(object):
-
-
-    params : Dict[str,Any] = None
+    name : str
+    uses : 'TaskCtor' = None
     srcdir : str = None
     depends : List[TaskSpec] = dc.field(default_factory=list)

-
-        return TaskCtor(
-            task_ctor=self.task_ctor,
-            param_ctor=self.param_ctor,
-            params=self.params,
-            srcdir=self.srcdir,
-            depends=self.depends.copy())
+    _log : ClassVar = logging.getLogger("TaskCtor")

+    def mkTask(self, name : str, depends, rundir, srcdir=None, params=None):
+        if srcdir is None:
+            srcdir = self.srcdir
+        if params is None:
+            params = self.mkParams()
+
+        if self.uses is not None:
+            return self.uses.mkTask(name, depends, rundir, srcdir, params)
+        else:
+            raise NotImplementedError("TaskCtor.mkTask() not implemented for %s" % str(type(self)))
+
     def mkParams(self):
-
-
+        self._log.debug("--> %s::mkParams" % self.name)
+        if self.uses is not None:
+            params = self.uses.mkParams()
+        else:
+            params = TaskParams()
+        self._log.debug("<-- %s::mkParams: %s" % (self.name, str(params)))
+
+        return params
+
+    def applyParams(self, params):
+        if self.uses is not None:
+            self.uses.applyParams(params)
+
+
+@dc.dataclass
+class TaskCtorParam(TaskCtor):
+    params : Dict[str,Any] = dc.field(default_factory=dict)
+
+    _log : ClassVar = logging.getLogger("TaskCtorParam")
+
+    def mkTask(self, name : str, depends, rundir, srcdir=None, params=None):
+        self._log.debug("--> %s::mkTask" % self.name)
+        if params is None:
+            params = self.mkParams()
+        if srcdir is None:
+            srcdir = self.srcdir
+
+        ret = self.uses.mkTask(name, depends, rundir, srcdir, params)
+
+        self.applyParams(ret.params)
+        self._log.debug("<-- %s::mkTask" % self.name)
+
+        return ret
+
+    def applyParams(self, params):
+        self._log.debug("--> %s::applyParams: %s %s" % (self.name, str(type(self.params)), str(type(params))))
         if self.params is not None:
             for k,v in self.params.items():
-
+                self._log.debug(" change %s %s=>%s" % (
+                    k,
+                    str(getattr(params, k)),
+                    str(v)))
+                setattr(params, k, v)
+        else:
+            self._log.debug(" no params")
+        self._log.debug("<-- %s::applyParams: %s" % (self.name, str(self.params)))
+
+@dc.dataclass
+class TaskCtorParamCls(TaskCtor):
+    params_ctor : Callable = None
+
+    _log : ClassVar = logging.getLogger("TaskCtorParamType")
+
+    def mkParams(self):
+        self._log.debug("--> %s::mkParams" % str(self.name))
+        params = self.params_ctor()
+        self._log.debug("<-- %s::mkParams: %s" % (str(self.name), str(type(params))))
+        return params
+
+@dc.dataclass
+class TaskCtorCls(TaskCtor):
+    task_ctor : Callable = None
+
+    _log : ClassVar = logging.getLogger("TaskCtorCls")
+
+    def mkTask(self, name : str, depends, rundir, srcdir=None, params=None):
+        self._log.debug("--> %s::mkTask (%s)" % (self.name, str(self.task_ctor)))
+
+        if srcdir is None:
+            srcdir = self.srcdir
+
+        if params is None:
+            params = self.mkParams()
+
+        ret = self.task_ctor(
+            name=name,
+            depends=depends,
+            rundir=rundir,
+            srcdir=srcdir,
+            params=params)
+        ret.srcdir = self.srcdir
+
+        # Update parameters on the way back
+        self.applyParams(ret.params)
+
+        self._log.debug("<-- %s::mkTask" % self.name)
+        return ret
+
+@dc.dataclass
+class TaskCtorProxy(TaskCtor):
+    task_ctor : TaskCtor = None
+    param_ctor : Callable = None
+
+    _log : ClassVar = logging.getLogger("TaskCtorProxy")
+
+    def mkTask(self, *args, **kwargs):
+        self._log.debug("--> %s::mkTask" % self.name)
+        ret = self.task_ctor.mkTask(*args, **kwargs)
+        self._log.debug("<-- %s::mkTask" % self.name)
         return ret

+    def mkParams(self, params=None):
+        self._log.debug("--> %s::mkParams: %s" % (self.name, str(self.params)))
+
+        if params is None and self.param_ctor is not None:
+            params = self.param_ctor()
+
+        params = self.task_ctor.mkParams(params)
+
+        if self.params is not None:
+            for k,v in self.params.items():
+                self._log.debug(" change %s %s=>%s" % (
+                    k,
+                    str(getattr(params, k)),
+                    str(v)))
+                setattr(params, k, v)
+        self._log.debug("<-- %s::mkParams: %s" % (self.name, str(self.params)))
+        return params
+
+
 @dc.dataclass
 class Task(object):
     """Executable view of a task"""
@@ -78,6 +196,8 @@ class Task(object):
     body: Dict[str,Any] = dc.field(default_factory=dict)
     impl_t : Any = None

+    _log : ClassVar = logging.getLogger("Task")
+
     def init(self, runner, basedir):
         self.session = runner
         self.basedir = basedir
@@ -89,7 +209,7 @@ class Task(object):
             data = json.load(fp)
             self.memento = T(**data)
         except Exception as e:
-
+            self._log.critical("Failed to load memento %s: %s" % (
                 os.path.join(self.rundir, "memento.json"), str(e)))
             os.unlink(os.path.join(self.rundir, "memento.json"))
         return self.memento
@@ -100,8 +220,14 @@ class Task(object):
     async def isUpToDate(self, memento) -> bool:
         return False

-    async def do_run(self) -> TaskData:
-
+    async def do_run(self, session) -> TaskData:
+        self._log.info("--> %s (%s) do_run - %d depends" % (
+            self.name,
+            str(type(self)),
+            len(self.depends)))
+
+        self.session = session
+
         if len(self.depends) > 0:
             deps_o = []
             for d in self.depends:
@@ -115,7 +241,6 @@ class Task(object):
             input.deps[self.name] = list(inp.name for inp in self.depends)
         else:
             input = TaskData()
-


         # Mark the source of this data as being this task
@@ -133,6 +258,10 @@ class Task(object):
             self.save_memento()

         # Combine data from the deps to produce a result
+        self._log.info("<-- %s (%s) do_run - %d depends" % (
+            self.name,
+            str(type(self)),
+            len(self.depends)))
         return self.output

     async def run(self, input : TaskData) -> TaskData:
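The reworked task.py layers constructors: each TaskCtor may delegate mkTask()/mkParams() to the ctor it uses, and a parameter-overriding layer applies its values after the underlying ctor builds the task. A reduced, self-contained model of that chain (the task and params classes are simplified stand-ins; the leaf ctor here builds the stand-in directly where the real TaskCtorCls would call its task_ctor):

import dataclasses as dc
from typing import Any, Dict

@dc.dataclass
class Params:
    width : int = 8

@dc.dataclass
class SimpleTask:
    name : str
    params : Params

@dc.dataclass
class Ctor:
    name : str
    uses : 'Ctor' = None

    def mkParams(self):
        return self.uses.mkParams() if self.uses is not None else Params()

    def mkTask(self, name, params=None):
        if params is None:
            params = self.mkParams()
        if self.uses is not None:
            return self.uses.mkTask(name, params)
        # Leaf ctor: build the task object itself
        return SimpleTask(name=name, params=params)

@dc.dataclass
class CtorParam(Ctor):
    # Overrides applied after the underlying ctor builds the task
    params : Dict[str, Any] = dc.field(default_factory=dict)

    def mkTask(self, name, params=None):
        ret = super().mkTask(name, params)
        for k, v in self.params.items():
            setattr(ret.params, k, v)
        return ret

base = Ctor(name="base")
wide = CtorParam(name="wide", uses=base, params={"width": 32})
print(wide.mkTask("t1").params.width)   # 32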
dv_flow/mgr/task_data.py
CHANGED
@@ -20,9 +20,10 @@
 #*
 #****************************************************************************
 import enum
+import logging
 import pydantic.dataclasses as dc
 from pydantic import BaseModel
-from typing import Any, Dict, Set, List, Tuple
+from typing import Any, ClassVar, Dict, Set, List, Tuple
 from .fileset import FileSet
 from toposort import toposort

@@ -54,6 +55,8 @@ class TaskData(BaseModel):
     filesets : List[FileSet] = dc.Field(default_factory=list)
     changed : bool = False

+    _log : ClassVar = logging.getLogger("TaskData")
+
     def hasParam(self, name: str) -> bool:
         return name in self.params

@@ -69,29 +72,29 @@
     def getFileSets(self, type=None, order=True) -> List[FileSet]:
         ret = []

-
+        self._log.debug("getFileSets: filesets=%s" % str(self.filesets))

         if order:
             # The deps map specifies task dependencies

             candidate_fs = []
             for fs in self.filesets:
-
+                self._log.debug("fs: %s" % str(fs))
                 if type is None or fs.type in type:
                     candidate_fs.append(fs)
-
+            self._log.debug("self.deps: %s" % str(self.deps))
             order = toposort(self.deps)

-
+            self._log.debug("order: %s" % str(order))

             for order_s in order:
-
+                self._log.debug("order_s: %s" % str(order_s))
                 i = 0
                 while i < len(candidate_fs):
                     fs = candidate_fs[i]
-
+                    self._log.debug("fs.src: %s" % fs.src)
                     if fs.src in order_s:
-
+                        self._log.debug("Add fileset")
                         ret.append(fs)
                         candidate_fs.pop(i)
                     else:
dv_flow/mgr/task_graph_builder.py
CHANGED
@@ -94,6 +94,8 @@ class TaskGraphBuilder(object):

         ctor_t : TaskCtor = pkg.getTaskCtor(task_name)

+        self._logger.debug("ctor_t: %s" % ctor_t.name)
+
         depends = []

         for dep in ctor_t.depends:
@@ -104,13 +106,10 @@
             depends.append(self._task_m[dep])

         # The returned task should have all param references resolved
-
-        task = ctor_t.task_ctor(
+        task = ctor_t.mkTask(
             name=task_name,
-            params=ctor_t.mkParams(),
             depends=depends,
-            rundir=rundir
-            srcdir=ctor_t.srcdir)
+            rundir=rundir)

         self._task_m[task.name] = task

@@ -137,71 +136,45 @@
         self._logger.debug("pkg_s: %d %s" % (
             len(self._pkg_s), (self._pkg_s[-1].name if len(self._pkg_s) else "<unknown>")))

+        # First, check the active pkg_def to see if any aliases
+        # Should be considered
+        pkg_name = spec.name
+        if pkg_def is not None:
+            # Look for an import alias
+            self._logger.debug("Search package %s for import alias %s" % (
+                pkg_def.name, pkg_spec.name))
+            for imp in pkg_def.imports:
+                self._logger.debug("imp: %s" % str(imp))
+                if imp.alias is not None and imp.alias == spec.name:
+                    # Found the alias name. Just need to get an instance of this package
+                    self._logger.debug("Found alias %s -> %s" % (imp.alias, imp.name))
+                    pkg_name = imp.name
+                    break
+
         # Note: _pkg_m needs to be context specific, such that imports from
         # one package don't end up visible in another
-
-
+        spec.name = pkg_name
+
         if spec in self._pkg_m.keys():
+            self._logger.debug("Found cached package instance")
             pkg = self._pkg_m[spec]
+        elif self.pkg_rgy.hasPackage(spec.name):
+            self._logger.debug("Registry has a definition")
+            p_def = self.pkg_rgy.getPackage(spec.name)
+
+            self._pkg_spec_s.append(p_def)
+            pkg = p_def.mkPackage(self)
+            self._pkg_spec_s.pop()
+            self._pkg_m[spec] = pkg
         else:
-
-
-            if pkg_def is not None:
-                # Look for an import alias
-                self._logger.debug("imports: %s" % str(pkg_def.imports))
-                for imp in pkg_def.imports:
-                    self._logger.debug("imp: %s" % str(imp))
-                    if imp.alias is not None and imp.alias == spec.name:
-                        # Found the alias name. Just need to get an instance of this package
-                        tgt_pkg_spec = PackageSpec(imp.name)
-                        if tgt_pkg_spec in self._pkg_m.keys():
-                            pkg = self._pkg_m[tgt_pkg_spec]
-                        elif self.pkg_rgy.hasPackage(tgt_pkg_spec.name):
-                            base = self.pkg_rgy.getPackage(tgt_pkg_spec.name)
-                            self._pkg_spec_s.append(base)
-                            pkg = base.mkPackage(self, spec.params)
-                            self._pkg_spec_s.pop()
-                        elif imp.path is not None:
-                            # See if we can load the package
-                            self._logger.critical("TODO: load referenced package")
-                        else:
-                            raise Exception("Failed to resolve target (%s) of import alias %s" % (
-                                imp.name,
-                                imp.alias))
-                        break
-                    else:
-                        # Need to compare the spec with the full import spec
-                        imp_spec = PackageSpec(imp.name)
-                        # TODO: set parameters
-                        if imp_spec == spec:
-                            base = self.pkg_rgy.getPackage(spec.name)
-                            if base is None:
-                                raise Exception("Failed to find imported package %s" % spec.name)
-                            self._pkg_spec_s.append(base)
-                            pkg = base.mkPackage(self, spec.params)
-                            self._pkg_m[spec] = pkg
-                            self._pkg_spec_s.pop()
-                            break
-
-            if pkg is None:
-                self._logger.debug("Checking registry")
-                p_def = self.pkg_rgy.getPackage(spec.name)
-
-                if p_def is not None:
-                    self._pkg_spec_s.append(p_def)
-                    pkg = p_def.mkPackage(self)
-                    self._pkg_spec_s.pop()
-                    self._pkg_m[spec] = pkg
-
-            if pkg is None:
-                raise Exception("Failed to find package %s from package %s" % (
-                    spec.name, (pkg_def.name if pkg_def is not None else "<null>")))
+            raise Exception("Failed to find definition of package %s" % spec.name)

         self._logger.debug("<-- getPackage: %s" % str(pkg))

         return pkg

     def getTaskCtor(self, spec : TaskSpec, pkg : PackageDef = None) -> 'TaskCtor':
+        self._logger.debug("--> getTaskCtor %s" % spec.name)
         spec_e = spec.name.split(".")
         task_name = spec_e[-1]

@@ -219,4 +192,7 @@
             self._logger.critical("Failed to find package %s while looking for task %s" % (pkg_name, spec.name))
             raise e

-
+        ctor = pkg.getTaskCtor(task_name)
+
+        self._logger.debug("--> getTaskCtor %s" % spec.name)
+        return ctor
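getPackage() is also simplified: any import alias declared by the active package definition is resolved up front, after which a single cache/registry lookup either yields the package or raises. A reduced sketch of that flow, with plain dicts standing in for the cache and registry:

def resolve_package(spec_name, active_imports, cache, registry):
    # active_imports: (name, alias) pairs from the enclosing package definition
    for name, alias in active_imports:
        if alias is not None and alias == spec_name:
            spec_name = name            # alias found: switch to the real package name
            break
    if spec_name in cache:              # previously built package instance
        return cache[spec_name]
    if spec_name in registry:           # registry holds package factories
        pkg = registry[spec_name]()
        cache[spec_name] = pkg
        return pkg
    raise Exception("Failed to find definition of package %s" % spec_name)

cache = {}
registry = {"std": lambda: "std package instance"}
print(resolve_package("mystd", [("std", "mystd")], cache, registry))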
dv_flow/mgr/task_graph_runner_local.py
CHANGED
@@ -23,6 +23,7 @@ import asyncio
 import os
 import yaml
 import dataclasses as dc
+import logging
 from toposort import toposort
 from typing import Any, Callable, ClassVar, Coroutine, Dict, List, Tuple, Union
 from .fragment_def import FragmentDef
@@ -47,6 +48,7 @@ class TaskGraphRunnerLocal(TaskGraphRunner):
     # Search path for .dfs files
     create_subprocess : Callable = asyncio.create_subprocess_exec
     _root_dir : str = None
+    _log : ClassVar = logging.getLogger("TaskGraphRunnerLocal")

     def __post_init__(self):
         if self.nproc == -1:
@@ -71,11 +73,11 @@ class TaskGraphRunnerLocal(TaskGraphRunner):
             for t in task:
                 self._mkDeps(dep_m, task_m, t)

-
+        self._log.debug("dep_m: %s" % str(dep_m))

         order = list(toposort(dep_m))

-
+        self._log.debug("order: %s" % str(order))

         active_task_l : List[Tuple[Task,Coroutine]]= []
         # Now, iterate over the concurrent sets
@@ -96,8 +98,7 @@ class TaskGraphRunnerLocal(TaskGraphRunner):
                     break
                 if t not in self.done_task_m.keys():
                     task_t = task_m[t]
-
-                    coro = asyncio.Task(task_t.do_run())
+                    coro = asyncio.Task(task_t.do_run(self))
                     active_task_l.append((task_t, coro))

             # Now, wait for tasks to complete
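The local runner's scheduling shape is unchanged: toposort() yields sets of tasks whose dependencies are already satisfied, and each set is launched concurrently; the behavioural change in this hunk is that do_run() now receives the runner itself. A reduced sketch of the loop, simplified to gather one dependency level at a time, with placeholder task bodies:

import asyncio
from toposort import toposort

async def run_all(dep_m, run_task):
    # dep_m maps a task name to the set of task names it depends on
    for ready in toposort(dep_m):
        coros = [asyncio.ensure_future(run_task(name)) for name in ready]
        await asyncio.gather(*coros)

async def fake_task(name):
    print("run", name)

dep_m = {"sim": {"compile"}, "compile": {"sources"}, "sources": set()}
asyncio.run(run_all(dep_m, fake_task))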
dv_flow_mgr-0.0.1.12971126211a1.dist-info/RECORD
ADDED
@@ -0,0 +1,28 @@
+dv_flow/mgr/__init__.py,sha256=IZA7I1u7RH34DrJXSkETxWFpK5Jn_A2zXfnCAzJ8XxA,289
+dv_flow/mgr/__main__.py,sha256=uik6gDAvtZNG0yyDKYc4FTl6R1QHAR543jNG2lCAa6E,1771
+dv_flow/mgr/fileset.py,sha256=FNvC5sU2ArxJ0OO3v8dXTv8zX-bZ5t0a0ljne0fQQ1o,1150
+dv_flow/mgr/fragment_def.py,sha256=p5i6ONtBWlDHTBFsduu3Z36_76Bn8PCIylp_xoZ7jfQ,1552
+dv_flow/mgr/package.py,sha256=AOLEEotVQF9VNMMl13uNQ7Na2TuHPXxEvybBOX7XIps,1615
+dv_flow/mgr/package_def.py,sha256=lokG420kcEPDNu11GYQSbCalX7gxeDoTJy8iMc1GmXM,13451
+dv_flow/mgr/package_import_spec.py,sha256=ah3r15v5Jdub2poc3sgi6Uar1L3oGoYsCPPNiOHV-a4,1760
+dv_flow/mgr/pkg_rgy.py,sha256=2R_EaeBDJn5qUq9DzSnLc37wUP36MWSv-p0LgUjJRAg,4471
+dv_flow/mgr/task.py,sha256=7Nc7H2Wj9xWAHjspJ5TKw6DZ7g8DMisAqWuhjcy3MZU,9102
+dv_flow/mgr/task_data.py,sha256=XBPWwvuaQ3BZ94wknvv1bqLh98iTzAg5fskv7_kk6DQ,10524
+dv_flow/mgr/task_def.py,sha256=96hSwqJo0MazJ1VcLhovYRmNCplsNLt47AumtyjSddU,1690
+dv_flow/mgr/task_graph_builder.py,sha256=4dfy_T26MYI7ls2xiofr4V0ItZHDehU2iw6UBPt-SZQ,7074
+dv_flow/mgr/task_graph_runner.py,sha256=NwNYcOJ952lPMLwIIlYE9CoDdedqvcw2fWHYUsKFXuU,2164
+dv_flow/mgr/task_graph_runner_local.py,sha256=66JYJcu-W60LjfuT9UkbcKslNqDcD5q4UR7ZAWrSaGM,4707
+dv_flow/mgr/task_memento.py,sha256=C7VTQpBhDEoYuDmE6YTM-6TLMLnqHp6Y0Vat1aTgtCs,1096
+dv_flow/mgr/util.py,sha256=06eVyURF4ga-s8C9Sd3ZSDebwO4QS0XXaB8xADVbWRc,1437
+dv_flow/mgr/cmds/cmd_run.py,sha256=eths8kT7mBmpZqwOuMtpKAaux4rg-f7hPBxxTHbpKT4,2903
+dv_flow/mgr/share/flow.json,sha256=lNmZex9NXkYbyb2aZseQfUOkV9CMyfH0iLODEI7EPBw,5096
+dv_flow/mgr/std/fileset.py,sha256=Hn3_C1CczSRSaNYI3aDYbaaNdnKDlIqU16_GRIBP4PI,2461
+dv_flow/mgr/std/flow.dv,sha256=pSpzrPPEu_L8DHccGfArxsKYgUfyQidShZc0ShgGtsY,500
+dv_flow/mgr/std/message.py,sha256=BPTHnEMD4tBufQ9LvsS9Sa_0xjaJATbBpwqosWslvVA,193
+dv_flow/mgr/std/task_null.py,sha256=KObmjG_4D08GJ1k6neqKIQrFY72Sj0jLnwXxEkq5HA0,321
+dv_flow_mgr-0.0.1.12971126211a1.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+dv_flow_mgr-0.0.1.12971126211a1.dist-info/METADATA,sha256=vFSzc2a0PhhsjazyaZhTv7BES6zdQ7V4bFD-6XkrUmc,13276
+dv_flow_mgr-0.0.1.12971126211a1.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+dv_flow_mgr-0.0.1.12971126211a1.dist-info/entry_points.txt,sha256=1roy8wAFM48LabOvr6jiOw0MUs-qE8X3Vf8YykPazxk,50
+dv_flow_mgr-0.0.1.12971126211a1.dist-info/top_level.txt,sha256=amfVTkggzYPtWwLqNmRukfz1Buu0pGS2SrYBBLhXm04,8
+dv_flow_mgr-0.0.1.12971126211a1.dist-info/RECORD,,
dv_flow_mgr-0.0.1.12968982426a1.dist-info/RECORD
DELETED
@@ -1,28 +0,0 @@
-dv_flow/mgr/__init__.py,sha256=IZA7I1u7RH34DrJXSkETxWFpK5Jn_A2zXfnCAzJ8XxA,289
-dv_flow/mgr/__main__.py,sha256=big-RSuqbx7P48_8rzaZKX5YW3B2US1i0mRH_TfoeIs,1340
-dv_flow/mgr/fileset.py,sha256=FNvC5sU2ArxJ0OO3v8dXTv8zX-bZ5t0a0ljne0fQQ1o,1150
-dv_flow/mgr/fragment_def.py,sha256=p5i6ONtBWlDHTBFsduu3Z36_76Bn8PCIylp_xoZ7jfQ,1552
-dv_flow/mgr/package.py,sha256=LAJNVOMlpWkez7eK7yfChoDKeIPakApUCpDh_No059g,1469
-dv_flow/mgr/package_def.py,sha256=ay4bYuzBEzrqqJsNr1ezZF9BxEAgWeS9xxIETKe-Bzs,11595
-dv_flow/mgr/package_import_spec.py,sha256=ah3r15v5Jdub2poc3sgi6Uar1L3oGoYsCPPNiOHV-a4,1760
-dv_flow/mgr/pkg_rgy.py,sha256=2R_EaeBDJn5qUq9DzSnLc37wUP36MWSv-p0LgUjJRAg,4471
-dv_flow/mgr/task.py,sha256=ewJ7bCFWqwVuzHZZsX2LDZfzXWVFfFlH8yFyn-xxIVg,5043
-dv_flow/mgr/task_data.py,sha256=-6Dqa3oUI7RJc1Js2SRSnhxNTcASkamXFYMN6UiknZQ,10376
-dv_flow/mgr/task_def.py,sha256=96hSwqJo0MazJ1VcLhovYRmNCplsNLt47AumtyjSddU,1690
-dv_flow/mgr/task_graph_builder.py,sha256=ie93_dMwcNLvlAp_K0thDdTGZuWhQ4l1m2rVSfoVWDU,8599
-dv_flow/mgr/task_graph_runner.py,sha256=NwNYcOJ952lPMLwIIlYE9CoDdedqvcw2fWHYUsKFXuU,2164
-dv_flow/mgr/task_graph_runner_local.py,sha256=UTBV1AKanLns99CSYtEQQ3EEkgo1hM8XsHpc8E5jkIg,4646
-dv_flow/mgr/task_memento.py,sha256=C7VTQpBhDEoYuDmE6YTM-6TLMLnqHp6Y0Vat1aTgtCs,1096
-dv_flow/mgr/util.py,sha256=06eVyURF4ga-s8C9Sd3ZSDebwO4QS0XXaB8xADVbWRc,1437
-dv_flow/mgr/cmds/cmd_run.py,sha256=C1bd2yZxVU03hZizGLYoH1JfZgeq_G57Kajc279iZpo,2773
-dv_flow/mgr/share/flow.json,sha256=lNmZex9NXkYbyb2aZseQfUOkV9CMyfH0iLODEI7EPBw,5096
-dv_flow/mgr/std/fileset.py,sha256=0eBp0VIkxYbIYAt0YJg0m-6nUbPuzGkaVBtxpius7Cg,2354
-dv_flow/mgr/std/flow.dv,sha256=pSpzrPPEu_L8DHccGfArxsKYgUfyQidShZc0ShgGtsY,500
-dv_flow/mgr/std/message.py,sha256=BPTHnEMD4tBufQ9LvsS9Sa_0xjaJATbBpwqosWslvVA,193
-dv_flow/mgr/std/task_null.py,sha256=UEJ3fIoIMYWVsagiQC7GHD23UES7WoH4wtq94b4tcs4,265
-dv_flow_mgr-0.0.1.12968982426a1.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-dv_flow_mgr-0.0.1.12968982426a1.dist-info/METADATA,sha256=LKGnpuX45HstDNEJ8pxN_eVTxzw0OqiW1WoZyVSGEE4,13276
-dv_flow_mgr-0.0.1.12968982426a1.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
-dv_flow_mgr-0.0.1.12968982426a1.dist-info/entry_points.txt,sha256=1roy8wAFM48LabOvr6jiOw0MUs-qE8X3Vf8YykPazxk,50
-dv_flow_mgr-0.0.1.12968982426a1.dist-info/top_level.txt,sha256=amfVTkggzYPtWwLqNmRukfz1Buu0pGS2SrYBBLhXm04,8
-dv_flow_mgr-0.0.1.12968982426a1.dist-info/RECORD,,
{dv_flow_mgr-0.0.1.12968982426a1.dist-info → dv_flow_mgr-0.0.1.12971126211a1.dist-info}/LICENSE
RENAMED
File without changes
{dv_flow_mgr-0.0.1.12968982426a1.dist-info → dv_flow_mgr-0.0.1.12971126211a1.dist-info}/WHEEL
RENAMED
File without changes
{dv_flow_mgr-0.0.1.12968982426a1.dist-info → dv_flow_mgr-0.0.1.12971126211a1.dist-info}/entry_points.txt
RENAMED
File without changes
{dv_flow_mgr-0.0.1.12968982426a1.dist-info → dv_flow_mgr-0.0.1.12971126211a1.dist-info}/top_level.txt
RENAMED
File without changes