dv-flow-mgr 0.0.2.14182043984a1__py3-none-any.whl → 1.0.0.14370600369a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dv_flow/mgr/__init__.py +2 -1
- dv_flow/mgr/cmds/cmd_graph.py +2 -3
- dv_flow/mgr/cmds/cmd_run.py +7 -9
- dv_flow/mgr/cmds/cmd_show.py +1 -2
- dv_flow/mgr/cond_def.py +16 -0
- dv_flow/mgr/config.py +7 -0
- dv_flow/mgr/config_def.py +33 -0
- dv_flow/mgr/exec_callable.py +88 -0
- dv_flow/mgr/{pkg_rgy.py → ext_rgy.py} +44 -35
- dv_flow/mgr/extend_def.py +21 -0
- dv_flow/mgr/fragment_def.py +4 -3
- dv_flow/mgr/need_def.py +6 -0
- dv_flow/mgr/null_callable.py +10 -0
- dv_flow/mgr/package.py +30 -6
- dv_flow/mgr/package_def.py +40 -444
- dv_flow/mgr/package_loader.py +701 -0
- dv_flow/mgr/param_def.py +2 -1
- dv_flow/mgr/parser.out +567 -0
- dv_flow/mgr/pytask_callable.py +25 -0
- dv_flow/mgr/root_package.py +9 -0
- dv_flow/mgr/shell_callable.py +14 -0
- dv_flow/mgr/srcinfo.py +15 -0
- dv_flow/mgr/std/flow.dv +25 -4
- dv_flow/mgr/task.py +68 -0
- dv_flow/mgr/task_def.py +36 -24
- dv_flow/mgr/task_graph_builder.py +497 -247
- dv_flow/mgr/task_listener_log.py +4 -0
- dv_flow/mgr/task_node_ctor.py +11 -3
- dv_flow/mgr/task_node_ctor_compound.py +21 -33
- dv_flow/mgr/task_node_leaf.py +25 -3
- dv_flow/mgr/task_params_ctor.py +0 -1
- dv_flow/mgr/task_run_ctxt.py +4 -0
- dv_flow/mgr/task_runner.py +2 -0
- dv_flow/mgr/util/cmds/cmd_schema.py +0 -2
- dv_flow/mgr/util/util.py +4 -3
- dv_flow/mgr/yaml_srcinfo_loader.py +55 -0
- {dv_flow_mgr-0.0.2.14182043984a1.dist-info → dv_flow_mgr-1.0.0.14370600369a1.dist-info}/METADATA +1 -1
- dv_flow_mgr-1.0.0.14370600369a1.dist-info/RECORD +74 -0
- dv_flow_mgr-0.0.2.14182043984a1.dist-info/RECORD +0 -59
- {dv_flow_mgr-0.0.2.14182043984a1.dist-info → dv_flow_mgr-1.0.0.14370600369a1.dist-info}/WHEEL +0 -0
- {dv_flow_mgr-0.0.2.14182043984a1.dist-info → dv_flow_mgr-1.0.0.14370600369a1.dist-info}/entry_points.txt +0 -0
- {dv_flow_mgr-0.0.2.14182043984a1.dist-info → dv_flow_mgr-1.0.0.14370600369a1.dist-info}/licenses/LICENSE +0 -0
- {dv_flow_mgr-0.0.2.14182043984a1.dist-info → dv_flow_mgr-1.0.0.14370600369a1.dist-info}/top_level.txt +0 -0
dv_flow/mgr/__init__.py
CHANGED
@@ -20,7 +20,8 @@
 #*
 #****************************************************************************
 from .package_def import *
-from .
+from .package_loader import PackageLoader
+from .ext_rgy import ExtRgy
 from .task_data import *
 from .task_def import *
 from .task_graph_builder import TaskGraphBuilder
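The package root now re-exports the loader and registry alongside the existing names, so downstream code can import them directly:

# These names are re-exported by dv_flow/mgr/__init__.py after this change.
from dv_flow.mgr import PackageLoader, ExtRgy, TaskGraphBuilder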
dv_flow/mgr/cmds/cmd_graph.py
CHANGED
@@ -27,7 +27,6 @@ from ..util import loadProjPkgDef
 from ..task_graph_builder import TaskGraphBuilder
 from ..task_runner import TaskSetRunner
 from ..task_listener_log import TaskListenerLog
-from ..pkg_rgy import PkgRgy
 from ..task_graph_dot_writer import TaskGraphDotWriter


@@ -66,11 +65,11 @@ class CmdGraph(object):
                     "<no descripion>"
                 print("%s - %s" % (t.name.ljust(max_name_len), desc))
         else:
-            rundir = os.path.join(pkg.
+            rundir = os.path.join(pkg.basedir, "rundir")

            builder = TaskGraphBuilder(root_pkg=pkg, rundir=rundir)

-            t = builder.
+            t = builder.mkTaskNode(pkg.name + "." + args.task)

            TaskGraphDotWriter().write(
                t,
dv_flow/mgr/cmds/cmd_run.py
CHANGED
@@ -27,7 +27,6 @@ from ..util import loadProjPkgDef
 from ..task_graph_builder import TaskGraphBuilder
 from ..task_runner import TaskSetRunner
 from ..task_listener_log import TaskListenerLog
-from ..pkg_rgy import PkgRgy


 class CmdRun(object):
@@ -36,7 +35,8 @@ class CmdRun(object):
     def __call__(self, args):

         # First, find the project we're working with
-
+        listener = TaskListenerLog()
+        pkg = loadProjPkgDef(os.getcwd(), listener=listener)

         if pkg is None:
             raise Exception("Failed to find a 'flow.dv' file that defines a package in %s or its parent directories" % os.getcwd())
@@ -48,11 +48,8 @@ class CmdRun(object):
         else:
             # Print out available tasks
             tasks = []
-            for task in pkg.
+            for task in pkg.task_m.values():
                 tasks.append(task)
-            for frag in pkg._fragment_l:
-                for task in frag.tasks:
-                    tasks.append(task)
             tasks.sort(key=lambda x: x.name)

             max_name_len = 0
@@ -75,7 +72,8 @@ class CmdRun(object):
         # Maybe separate into a task-graph builder and a task-graph runner

         # TODO: allow user to specify run root -- maybe relative to some fixed directory?
-        rundir = os.path.join(pkg.
+        rundir = os.path.join(pkg.basedir, "rundir")
+

         if args.clean:
             print("Note: Cleaning rundir %s" % rundir)
@@ -89,14 +87,14 @@ class CmdRun(object):
         if args.j != -1:
             runner.nproc = int(args.j)

-        runner.add_listener(
+        runner.add_listener(listener.event)

         tasks = []

         for spec in args.tasks:
             if spec.find('.') == -1:
                 spec = pkg.name + "." + spec
-            task = builder.
+            task = builder.mkTaskNode(spec)
             tasks.append(task)

         asyncio.run(runner.run(tasks))
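Taken together, the cmd_run.py changes wire one TaskListenerLog through both package loading and task execution. A minimal sketch of the same flow outside the CLI, using only the calls visible above; the TaskSetRunner(rundir=...) constructor and the "build" task name are assumptions, not part of this diff:

# Illustrative sketch mirroring cmd_run.py.
# Assumptions: TaskSetRunner accepts rundir= (cmd_show.py passes rundir=None),
# and "build" is a hypothetical task defined by the project's flow.dv.
import asyncio
import os

from dv_flow.mgr import TaskGraphBuilder
from dv_flow.mgr.task_listener_log import TaskListenerLog
from dv_flow.mgr.task_runner import TaskSetRunner
from dv_flow.mgr.util import loadProjPkgDef

listener = TaskListenerLog()
pkg = loadProjPkgDef(os.getcwd(), listener=listener)  # searches cwd and parents for flow.dv
if pkg is None:
    raise SystemExit("no flow.dv found")

rundir = os.path.join(pkg.basedir, "rundir")
builder = TaskGraphBuilder(root_pkg=pkg, rundir=rundir)
runner = TaskSetRunner(rundir=rundir)
runner.add_listener(listener.event)

task = builder.mkTaskNode(pkg.name + ".build")
asyncio.run(runner.run([task]))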
dv_flow/mgr/cmds/cmd_show.py
CHANGED
@@ -28,7 +28,6 @@ from ..util import loadProjPkgDef
 from ..task_graph_builder import TaskGraphBuilder
 from ..task_runner import TaskSetRunner
 from ..task_listener_log import TaskListenerLog
-from ..pkg_rgy import PkgRgy
 from ..task_graph_dot_writer import TaskGraphDotWriter


@@ -73,7 +72,7 @@ class CmdShow(object):


         if args.all:
-            t = builder.
+            t = builder.mkTaskNode(pkg.name + "." + args.task)
             dep_m = TaskSetRunner(rundir=None).buildDepMap(t)

             order = list(toposort.toposort(dep_m))
dv_flow/mgr/cond_def.py
ADDED
@@ -0,0 +1,16 @@
+
+import pydantic.dataclasses as pdc
+from pydantic import BaseModel
+
+#
+# - task:
+#     needs:
+#     - if: abc
+#       then: abc
+#       else: def
+#
+class CondDef(BaseModel):
+    if_then : str = pdc.Field(
+        description="Condition to evaluate")
+    then : str = pdc.Field(
+        description="Task to execute if the condition is true")
dv_flow/mgr/config.py
ADDED
dv_flow/mgr/config_def.py
ADDED
@@ -0,0 +1,33 @@
+
+import pydantic.dataclasses as pdc
+from pydantic import BaseModel
+from typing import List, Union, Any
+from .extend_def import ExtendDef
+from .param_def import ParamDef
+
+class OverrideDef(BaseModel):
+    """Override definition"""
+    package : Union[str, None] = pdc.Field(
+        description="Package to override")
+    task : Union[str, None] = pdc.Field(
+        description="Task to override")
+    value : str = pdc.Field(
+        description="Override to use",
+        alias="with")
+
+class ConfigDef(BaseModel):
+    name : str = pdc.Field(
+        description="Name of the configuration")
+    params : List[ParamDef] = pdc.Field(
+        default_factory=list,
+        description="List of configuration parameters",
+        alias="with")
+    uses : str = pdc.Field(
+        default=None,
+        description="Name of the configuration to use as a base")
+    overrides : List[OverrideDef] = pdc.Field(
+        default_factory=list,
+        description="List of package and task overrides")
+    extensions : List[ExtendDef] = pdc.Field(
+        default_factory=list,
+        description="List of extensions to apply")
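ConfigDef and OverrideDef bind several fields to the key "with" via pydantic aliases. A small illustrative sketch of validating a configuration dict against the new model; the "debug" name and the override value are invented:

# Illustrative only; field values are invented.
from dv_flow.mgr.config_def import ConfigDef

cfg = ConfigDef(**{
    "name": "debug",
    "with": [],  # lands in ConfigDef.params through the alias
    "overrides": [
        {"package": "my_pkg", "task": None, "with": "my_pkg.debug_impl"},
    ],
})
print(cfg.params, cfg.overrides[0].value)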
dv_flow/mgr/exec_callable.py
ADDED
@@ -0,0 +1,88 @@
+import dataclasses as dc
+import importlib
+import importlib.util
+import logging
+import os
+import sys
+from typing import ClassVar, List
+from .task_data import TaskDataResult
+
+@dc.dataclass
+class ExecCallable(object):
+    body : str
+    _log : ClassVar = logging.getLogger("ExecCallable")
+
+    async def __call__(self, ctxt, input):
+        self._log.debug("--> ExecCallable")
+        self._log.debug("Body:\n%s" % self.body)
+
+        # If it is a single line, then we have a spec to load
+        # Otherwise, we have an inline task
+
+        if self.body.find("\n") == -1:
+            # Two forms:
+            # <path>::method
+            # <method-path>
+
+            ci = self.body.find("::")
+            if ci != -1:
+                # have a file to load
+                file = self.body[:ci]
+                method = self.body[ci+2:]
+                spec = importlib.util.spec_from_file_location(input.name, file)
+                module = importlib.util.module_from_spec(spec)
+                sys.modules[input.name] = module
+                spec.loader.exec_module(module)
+
+                callable = getattr(module, method)
+                pass
+            else:
+                # self._log.debug("Use PyTask implementation")
+                last_dot = self.body.rfind('.')
+                clsname = self.body[last_dot+1:]
+                modname = self.body[:last_dot]
+
+                try:
+                    if modname not in sys.modules:
+                        if input.srcdir not in sys.path:
+                            sys.path.append(input.srcdir)
+                        mod = importlib.import_module(modname)
+                    else:
+                        mod = sys.modules[modname]
+                except ModuleNotFoundError as e:
+                    raise Exception("Failed to import module %s: %s" % (modname, str(e)))
+
+                if not hasattr(mod, clsname):
+                    raise Exception("Method %s not found in module %s" % (clsname, modname))
+                callable = getattr(mod, clsname)
+        else:
+            text_lines = self.body.splitlines()
+
+            least_whitespace = 2^32
+            have_content = False
+            for line in text_lines:
+                line_no_leading_ws = line.lstrip()
+                if line_no_leading_ws != "":
+                    have_content = True
+                    leading_ws = len(line) - len(line_no_leading_ws)
+                    if leading_ws < least_whitespace:
+                        least_whitespace = leading_ws
+            # Remove leading whitespace
+            if have_content:
+                for i,line in enumerate(text_lines):
+                    if len(line) >= least_whitespace:
+                        text_lines[i] = line[least_whitespace:]
+
+            method = "async def pytask(ctxt, input):\n" + "\n".join([" %s" % l for l in text_lines])
+
+            exec(method)
+
+            callable = locals()['pytask']
+
+        result = await callable(ctxt, input)
+
+        if result is None:
+            result = TaskDataResult()
+
+        self._log.debug("<-- ExecCallable")
+        return result
dv_flow/mgr/{pkg_rgy.py → ext_rgy.py}
RENAMED
@@ -1,5 +1,5 @@
 #****************************************************************************
-#*
+#* ext_rgy.py
 #*
 #* Copyright 2023-2025 Matthew Ballance and Contributors
 #*
@@ -19,18 +19,23 @@
 #* Author:
 #*
 #****************************************************************************
+import dataclasses as dc
 import os
 import logging
 import sys
-from typing import Dict, Tuple
-from .
+from typing import Callable, ClassVar, Dict, Tuple
+from .exec_callable import ExecCallable
+from .pytask_callable import PytaskCallable
+from .shell_callable import ShellCallable

-
-
+@dc.dataclass
+class ExtRgy(object):
+    _inst : ClassVar = None

     def __init__(self):
         self._pkgpath = []
-        self._pkg_m : Dict[str,
+        self._pkg_m : Dict[str, str] = {}
+        self._shell_m : Dict[str, Callable] = {}
         self._log = logging.getLogger(type(self).__name__)
         self._override_m : Dict[str,str] = {}

@@ -47,21 +52,19 @@ class PkgRgy(object):
             return True
         else:
             return False
-
-    def
-        self.
+
+    def findShell(self, name) -> Callable:
+        if name in self._shell_m.keys():
+            return self._shell_m[name]
+
+    def findPackagePath(self, name) -> str:
+        ret = None
+        self._log.debug("--> findPackagePath(%s)" % name)
         if name in self._pkg_m.keys():
-
-            pkg_def = PackageDef.load(self._pkg_m[name][0])
-            # Load the package
-            self._pkg_m[name] = (
-                self._pkg_m[name][0],
-                pkg_def
-            )
-            ret = self._pkg_m[name][1]
+            ret = self._pkg_m[name]
         else:
             ret = self._findOnPath(name)
-        self._log.debug("<--
+        self._log.debug("<-- findPackagePath(%s)" % name)
         return ret

     def _findOnPath(self, name):
@@ -72,33 +75,31 @@ class PkgRgy(object):
         else:
             name_pref = None

-
+        ret = None

         for path in self._pkgpath:
             if os.path.isfile(os.path.join(path, name_dir, "flow.dv")):
-
+                ret = os.path.join(path, name_dir, "flow.dv")
             elif name_pref is not None and os.path.isfile(os.path.join(path, name_pref, name_s[-1] + ".dv")):
-
+                ret = os.path.join(path, name_pref, name_s[-1] + ".dv")
             elif os.path.isfile(os.path.join(path, name + ".dv")):
-
+                ret = os.path.join(path, name + ".dv")

-            if
-                self._pkg_m[name] =
+            if ret is not None:
+                self._pkg_m[name] = ret
                 break

-        return
-
-    def registerPackage(self, pkg_def):
-        self._log.debug("--> registerPackage %s" % pkg_def.name)
-        if pkg_def.name in self._pkg_m.keys():
-            raise Exception("Duplicate package %s" % pkg_def.name)
-        self._pkg_m[pkg_def.name] = (pkg_def.basedir, pkg_def)
-        self._log.debug("<-- registerPackage %s" % pkg_def.name)
+        return ret

     def _discover_plugins(self):
         self._log.debug("--> discover_plugins")
         # Register built-in package
-        self._pkg_m["std"] =
+        self._pkg_m["std"] = os.path.join(os.path.dirname(__file__), "std/flow.dv")
+
+        # Register built-in shells
+        self._shell_m["shell"] = ShellCallable
+        self._shell_m["pytask"] = ExecCallable
+

         if "DV_FLOW_PATH" in os.environ.keys() and os.environ["DV_FLOW_PATH"] != "":
             paths = os.environ["DV_FLOW_PATH"].split(':')
@@ -124,7 +125,15 @@ class PkgRgy(object):
                         self._log.debug("Package %s already registered using path %s. Conflicting path: %s" % (
                             name, self._pkg_m[name][0], path))
                     else:
-                        self._pkg_m[name] =
+                        self._pkg_m[name] = path
+                if hasattr(mod, "dvfm_shells"):
+                    shell_m = mod.dvfm_shells()
+                    for name, shell in shell_m.items():
+                        self._log.debug("Registering shell %s" % name)
+                        if name in self._shell_m.keys():
+                            self._log.debug("Shell %s already registered" % name)
+                        else:
+                            self._shell_m[name] = shell
             except Exception as e:
                 self._log.critical("Error loading plugin %s: %s" % (p.name, str(e)))
                 raise e
@@ -135,7 +144,7 @@ class PkgRgy(object):
         self._log.debug("<-- discover_plugins")

     def copy(self):
-        ret =
+        ret = ExtRgy()
         ret._pkgpath = self._pkgpath.copy()
         ret._pkg_m = self._pkg_m.copy()
         return ret
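_discover_plugins now records the std package path, seeds the built-in shell and pytask callables, and also accepts shells from plugins that expose a dvfm_shells() hook. A sketch of what such a plugin module could look like; the "bash" shell name and run_bash callable are invented for illustration, only the dvfm_shells() hook itself appears in the diff:

# Sketch of a plugin module picked up by the hasattr(mod, "dvfm_shells") check above.
# The "bash" name and run_bash implementation are hypothetical.
async def run_bash(ctxt, input):
    # Would receive the same (ctxt, input) pair as the built-in callables
    # and return a TaskDataResult (or None).
    ...

def dvfm_shells():
    # ExtRgy merges this mapping into its _shell_m, skipping already-registered names.
    return {"bash": run_bash}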
dv_flow/mgr/extend_def.py
ADDED
@@ -0,0 +1,21 @@
+
+import pydantic.dataclasses as pdc
+from pydantic import BaseModel
+from typing import List, Union
+from .param_def import ParamDef
+
+class ExtendDef(BaseModel):
+    """Extension definition"""
+    task : str = pdc.Field(
+        description="Name of the task to extend")
+    params : List[ParamDef] = pdc.Field(
+        default_factory=list,
+        description="Parameter extensions to apply to the task",
+        alias="with")
+    uses : str = pdc.Field(
+        default=None,
+        description="Name of the extension to use as a base")
+    needs: List[str] = pdc.Field(
+        default_factory=list,
+        description="List of tasks to depend on")
+
dv_flow/mgr/fragment_def.py
CHANGED
@@ -23,8 +23,8 @@ import pydantic.dataclasses as dc
 import json
 from pydantic import BaseModel
 from typing import Any, Dict, List, Union
-from .package import Package
 from .package_import_spec import PackageImportSpec
+from .srcinfo import SrcInfo
 from .task_def import TaskDef
 from .type_def import TypeDef

@@ -33,6 +33,7 @@ class FragmentDef(BaseModel):
     imports : List[Union[str,PackageImportSpec]] = dc.Field(default_factory=list, alias="imports")
     fragments: List[str] = dc.Field(default_factory=list)
     types : List[TypeDef] = dc.Field(default_factory=list)
+    srcinfo : SrcInfo = dc.Field(default=None)

     _basedir : str = None

@@ -41,6 +42,6 @@ class FragmentDef(BaseModel):
             if t.name == name:
                 return t

-    def apply(self, session, pkg : Package):
-        pass
+    # def apply(self, session, pkg : Package):
+    #     pass

dv_flow/mgr/need_def.py
ADDED
dv_flow/mgr/package.py
CHANGED
@@ -21,25 +21,49 @@
 #****************************************************************************
 import dataclasses as dc
 import logging
-from typing import Any, ClassVar, Dict
-from .
+from typing import Any, ClassVar, Dict, List
+from .fragment_def import FragmentDef
+from .package_def import PackageDef
+from .task import Task

 @dc.dataclass
 class Package(object):
-
+    pkg_def : PackageDef
+    basedir : str = None
     params : Dict[str,Any] = dc.field(default_factory=dict)
     # Package holds constructors for tasks
     # - Dict holds the default parameters for the task
-
+    task_m : Dict[str,Task] = dc.field(default_factory=dict)
     types : Dict[str,Any] = dc.field(default_factory=dict)
+    fragment_def_l : List[FragmentDef] = dc.field(default_factory=list)
+    pkg_m : Dict[str, 'Package'] = dc.field(default_factory=dict)
     _log : ClassVar = logging.getLogger("Package")

-
+    @property
+    def name(self):
+        return self.pkg_def.name
+
+    def getTaskCtor(self, name : str) -> Task:
         self._log.debug("-- %s::getTaskCtor: %s" % (self.name, name))
         if name not in self.tasks.keys():
             raise Exception("Task %s not present in package %s" % (name, self.name))
         return self.tasks[name]
+
+    def dump(self):
+        tasks = {}
+        for k, v in self.task_m.items():
+            tasks[k] = v.dump()
+
+        pkg = {
+            "name": self.name,
+            "basedir": self.basedir,
+            "params": self.params,
+            "tasks": tasks,
+            "fragments": [f.dump() for f in self.fragment_def_l]
+        }
+
+        return pkg

     def __hash__(self):
-        return
+        return id(self)
