dv-flow-mgr 1.0.0.14528489065a1__py3-none-any.whl → 1.5.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (37)
  1. dv_flow/mgr/__init__.py +4 -0
  2. dv_flow/mgr/cmds/cmd_run.py +6 -1
  3. dv_flow/mgr/cmds/cmd_show.py +3 -3
  4. dv_flow/mgr/config_def.py +1 -1
  5. dv_flow/mgr/expr_eval.py +24 -1
  6. dv_flow/mgr/expr_parser.py +26 -1
  7. dv_flow/mgr/package.py +13 -6
  8. dv_flow/mgr/package_def.py +3 -4
  9. dv_flow/mgr/package_loader.py +252 -120
  10. dv_flow/mgr/param_def.py +7 -2
  11. dv_flow/mgr/param_ref_eval.py +3 -0
  12. dv_flow/mgr/parser.out +258 -138
  13. dv_flow/mgr/parsetab.py +20 -17
  14. dv_flow/mgr/std/flow.dv +31 -53
  15. dv_flow/mgr/std/incdirs.py +18 -0
  16. dv_flow/mgr/task.py +1 -0
  17. dv_flow/mgr/task_def.py +5 -1
  18. dv_flow/mgr/task_graph_builder.py +265 -24
  19. dv_flow/mgr/task_graph_dot_writer.py +32 -3
  20. dv_flow/mgr/task_listener_log.py +46 -31
  21. dv_flow/mgr/task_node.py +3 -0
  22. dv_flow/mgr/task_node_compound.py +1 -0
  23. dv_flow/mgr/task_node_ctor_wrapper.py +3 -1
  24. dv_flow/mgr/task_node_ctxt.py +7 -0
  25. dv_flow/mgr/task_node_leaf.py +77 -54
  26. dv_flow/mgr/task_run_ctxt.py +18 -0
  27. dv_flow/mgr/task_runner.py +8 -5
  28. dv_flow/mgr/type.py +33 -0
  29. dv_flow/mgr/type_def.py +4 -2
  30. dv_flow/mgr/util/util.py +9 -0
  31. dv_flow/mgr/yaml_srcinfo_loader.py +1 -0
  32. {dv_flow_mgr-1.0.0.14528489065a1.dist-info → dv_flow_mgr-1.5.0.dist-info}/METADATA +1 -1
  33. {dv_flow_mgr-1.0.0.14528489065a1.dist-info → dv_flow_mgr-1.5.0.dist-info}/RECORD +37 -35
  34. {dv_flow_mgr-1.0.0.14528489065a1.dist-info → dv_flow_mgr-1.5.0.dist-info}/WHEEL +1 -1
  35. {dv_flow_mgr-1.0.0.14528489065a1.dist-info → dv_flow_mgr-1.5.0.dist-info}/entry_points.txt +0 -0
  36. {dv_flow_mgr-1.0.0.14528489065a1.dist-info → dv_flow_mgr-1.5.0.dist-info}/licenses/LICENSE +0 -0
  37. {dv_flow_mgr-1.0.0.14528489065a1.dist-info → dv_flow_mgr-1.5.0.dist-info}/top_level.txt +0 -0
dv_flow/mgr/task_graph_dot_writer.py CHANGED
@@ -48,9 +48,31 @@ class TaskGraphDotWriter(object):
  self._node_id += 1
  node_name = "n%d" % node_id
  self._node_id_m[node] = node_name
- self.println("%s[label=\"%s\"];" % (node_name, node.name))
+ self.println("%s[label=\"%s\",tooltip=\"%s\"];" % (
+ node_name,
+ node.name,
+ self._genLeafTooltip(node)))
  self._log.debug("<-- build_node %s (%d)" % (node.name, len(node.needs),))
 
+ def _genLeafTooltip(self, node):
+ params = type(node.params).model_fields
+ ret = ""
+ if len(params):
+ ret += "Parameters:\\n"
+ for k in type(node.params).model_fields.keys():
+ ret += "- %s: " % k
+ v = getattr(node.params, k)
+ if isinstance(v, str):
+ ret += "%s" % v
+ elif isinstance(v, list):
+ ret += "[%s]" % ", ".join([str(x) for x in v])
+ elif isinstance(v, dict):
+ ret += "{%s}" % ", ".join(["%s: %s" % (str(k), str(v)) for k,v in v.items()])
+ else:
+ ret += "%s" % str(v)
+ ret += "\\n"
+ return ret
+
  def process_needs(self, node):
  self._log.debug("--> process_needs %s (%d)" % (node.name, len(node.needs),))
 
@@ -99,13 +121,17 @@ class TaskGraphDotWriter(object):
  self.println("subgraph cluster_%d {" % id)
  self.inc_ind()
  self.println("label=\"%s\";" % node.name)
+ self.println("tooltip=\"%s\";" % self._genLeafTooltip(node))
  self.println("color=blue;")
  self.println("style=dashed;")
 
  task_node_id = self._node_id
  self._node_id += 1
  task_node_name = "n%d" % task_node_id
- self.println("%s[label=\"%s\"];" % (task_node_name, node.name))
+ self.println("%s[label=\"%s\", tooltip=\"%s\"];" % (
+ task_node_name,
+ node.name,
+ self._genLeafTooltip(node)))
  self._node_id_m[node] = task_node_name
 
  for n in node.tasks:
@@ -119,7 +145,10 @@ class TaskGraphDotWriter(object):
  node_name = "n%d" % node_id
  self._node_id_m[n] = node_name
  leaf_name = n.name[n.name.rfind(".") + 1:]
- self.println("%s[label=\"%s\"];" % (node_name, leaf_name))
+ self.println("%s[label=\"%s\",tooltip=\"%s\"];" % (
+ node_name,
+ leaf_name,
+ self._genLeafTooltip(n)))
  self.dec_ind()
  self.println("}")
 
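Note: the tooltip text written above is plain DOT attribute content using "\n" escapes. A minimal standalone sketch of the kind of node line this produces (not part of the package; the ExampleParams model and task name are made up, and list formatting is simplified):

# Hedged sketch: mimics the tooltip formatting above for a pydantic-v2 params model.
import pydantic

class ExampleParams(pydantic.BaseModel):      # hypothetical parameter set
    top: str = "my_top"
    incdirs: list = ["rtl", "tb"]

params = ExampleParams()
tooltip = "Parameters:\\n" + "".join(
    "- %s: %s\\n" % (k, getattr(params, k)) for k in type(params).model_fields.keys())
print('n1[label="my_task",tooltip="%s"];' % tooltip)
# n1[label="my_task",tooltip="Parameters:\n- top: my_top\n- incdirs: ['rtl', 'tb']\n"];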
dv_flow/mgr/task_listener_log.py CHANGED
@@ -22,6 +22,7 @@
  import dataclasses as dc
  from datetime import datetime
  from rich.console import Console
+ from typing import ClassVar, Dict
  from .task_data import SeverityE
 
  @dc.dataclass
@@ -29,12 +30,26 @@ class TaskListenerLog(object):
  console : Console = dc.field(default=None)
  level : int = 0
  quiet : bool = False
+ has_severity : Dict[SeverityE, int] = dc.field(default_factory=dict)
+
+ sev_pref_m : ClassVar = {
+ "info": "[blue]I[/blue]",
+ SeverityE.Info: "[blue]I[/blue]",
+ "warn": "[yellow]W[/yellow]",
+ SeverityE.Warning: "[yellow]W[/yellow]",
+ "error": "[red]E[/red]",
+ SeverityE.Error: "[red]E[/red]",
+ }
 
  def __post_init__(self):
  self.console = Console(highlight=False)
+ for sev in SeverityE:
+ self.has_severity[sev] = 0
 
  def marker(self, marker):
  """Receives markers during loading"""
+ self.show_marker(marker)
+ self.has_severity[marker.severity] += 1
  pass
 
  def event(self, task : 'Task', reason : 'Reason'):
@@ -55,39 +70,9 @@ class TaskListenerLog(object):
  else:
  delta_s = " %0.2fmS" % (1000*delta.total_seconds())
 
- sev_pref_m = {
- "info": "[blue]I[/blue]",
- SeverityE.Info: "[blue]I[/blue]",
- "warn": "[yellow]W[/yellow]",
- SeverityE.Warning: "[yellow]W[/yellow]",
- "error": "[red]E[/red]",
- SeverityE.Error: "[red]E[/red]",
- }
  for m in task.result.markers:
- severity_s = str(m.severity)
-
- if m.severity in sev_pref_m.keys():
- sev_pref = sev_pref_m[m.severity]
- elif severity_s in sev_pref_m.keys():
- sev_pref = sev_pref_m[severity_s]
- else:
- sev_pref = ""
-
- msg = " %s %s: %s" % (
- sev_pref,
- task.name,
- m.msg)
+ self.show_marker(m, task.name, task.rundir)
 
- if m.loc is not None:
- self.console.print("%s" % msg)
- if m.loc.line != -1 and m.loc.pos != -1:
- self.console.print(" %s:%d:%d" % (m.loc.path, m.loc.line, m.loc.pos))
- elif m.loc.line != -1:
- self.console.print(" %s:%d" % (m.loc.path, m.loc.line))
- else:
- self.console.print(" %s" % m.loc.path)
- else:
- self.console.print("%s (%s)" % (msg, task.rundir))
  if task.result.status == 0:
  self.console.print("[green]<< [%d][/green] Task %s%s%s" % (
  self.level,
@@ -101,3 +86,33 @@ class TaskListenerLog(object):
  self.console.print("[red]-[/red] Task %s" % task.name)
  pass
 
+ def show_marker(self, m, name=None, rundir=None):
+ severity_s = str(m.severity)
+
+ if m.severity in self.sev_pref_m.keys():
+ sev_pref = self.sev_pref_m[m.severity]
+ elif severity_s in self.sev_pref_m.keys():
+ sev_pref = self.sev_pref_m[severity_s]
+ else:
+ sev_pref = ""
+
+ msg = " %s%s: %s" % (
+ sev_pref,
+ (" " + name) if name is not None and name != "" else "",
+ m.msg)
+
+ if m.loc is not None:
+ self.console.print("%s" % msg)
+ if m.loc.line != -1 and m.loc.pos != -1:
+ self.console.print(" %s:%d:%d" % (m.loc.path, m.loc.line, m.loc.pos))
+ elif m.loc.line != -1:
+ self.console.print(" %s:%d" % (m.loc.path, m.loc.line))
+ else:
+ self.console.print(" %s" % m.loc.path)
+ else:
+ self.console.print("%s%s" % (
+ msg,
+ ("(%s)" % rundir) if rundir is not None else ""))
+
+ pass
+
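Note: the severity prefixes in sev_pref_m are rich console markup. A hedged illustration of how a marker with a location renders (the task name, message, and file location are made up):

# Sketch of the marker formatting produced by show_marker().
from rich.console import Console

console = Console(highlight=False)
sev_pref = "[red]E[/red]"                  # value sev_pref_m maps SeverityE.Error to
console.print("  %s my_pkg.compile: syntax error near 'endmodule'" % sev_pref)
console.print("    rtl/top.sv:42:7")       # printed when the marker carries a loc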
dv_flow/mgr/task_node.py CHANGED
@@ -29,6 +29,7 @@ import logging
  import toposort
  from typing import Any, Callable, ClassVar, Dict, List, Tuple
  from .task_data import TaskDataInput, TaskDataOutput, TaskDataResult
+ from .task_node_ctxt import TaskNodeCtxt
  from .task_run_ctxt import TaskRunCtxt
  from .param import Param
 
@@ -44,6 +45,7 @@ class TaskNode(object):
  srcdir : str
  # This can be the resolved parameters
  params : Any
+ ctxt : TaskNodeCtxt
 
  # Runtime fields -- these get populated during execution
  changed : bool = False
@@ -56,6 +58,7 @@ class TaskNode(object):
  start : float = dc.field(default=None)
  end : float = dc.field(default=None)
  save_exec_data : bool = dc.field(default=True)
+ iff : bool = dc.field(default=True)
  parent : 'TaskNode' = dc.field(default=None)
 
  _log : ClassVar = logging.getLogger("TaskNode")
dv_flow/mgr/task_node_compound.py CHANGED
@@ -45,6 +45,7 @@ class TaskNodeCompound(TaskNode):
  name=self.name + ".in",
  srcdir=self.srcdir,
  params=NullParams(),
+ ctxt=self.ctxt,
  consumes=ConsumesE.No,
  passthrough=PassthroughE.All)
  self.input.task = null_run
dv_flow/mgr/task_node_ctor_wrapper.py CHANGED
@@ -28,6 +28,7 @@ import logging
  import toposort
  from typing import Any, Callable, ClassVar, Dict, List, Tuple
  from .task_data import TaskDataInput, TaskDataOutput, TaskDataResult
+ from .task_def import PassthroughE, ConsumesE
  from .param import Param
  from .task_node import TaskNode
  from .task_node_leaf import TaskNodeLeaf
@@ -47,6 +48,7 @@ class TaskNodeCtorWrapper(TaskNodeCtor):
  srcdir=srcdir,
  params=params,
  task=self.T,
+ ctxt=None,
  needs=needs)
  node.passthrough = self.passthrough
  node.consumes = self.consumes
@@ -87,7 +89,7 @@ class TaskNodeCtorWrapper(TaskNodeCtor):
  setattr(obj, key, value)
  return obj
 
- def task(paramT,passthrough=False,consumes=None):
+ def task(paramT,passthrough=PassthroughE.Unused,consumes=ConsumesE.All):
  """Decorator to wrap a task method as a TaskNodeCtor"""
  def wrapper(T):
  task_mname = T.__module__
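Note: the task() decorator now defaults to the PassthroughE/ConsumesE enums instead of False/None. A hedged user-side sketch; the LintParams model, the decorated coroutine and its (ctxt, input) signature are assumptions, not confirmed package API:

# Hypothetical use of the decorator with explicit enum values.
from pydantic import BaseModel
from dv_flow.mgr.task_node_ctor_wrapper import task
from dv_flow.mgr.task_def import PassthroughE, ConsumesE

class LintParams(BaseModel):          # hypothetical parameter model
    strict : bool = False

@task(LintParams, passthrough=PassthroughE.All, consumes=ConsumesE.No)
async def Lint(ctxt, input):          # assumed task-method signature
    ...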
dv_flow/mgr/task_node_ctxt.py CHANGED
@@ -0,0 +1,7 @@
+ import dataclasses as dc
+
+ @dc.dataclass
+ class TaskNodeCtxt(object):
+ """Holds data shared with all task-graph nodes"""
+ root_pkgdir : str
+ root_rundir : str
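Note: TaskNodeCtxt is a plain dataclass shared by all nodes of a graph. A hypothetical construction matching the two fields above (the paths and their interpretation in the comments are assumptions):

# Hedged usage sketch: one shared context per task graph.
from dv_flow.mgr.task_node_ctxt import TaskNodeCtxt

ctxt = TaskNodeCtxt(
    root_pkgdir="/work/my_proj",            # assumed: directory containing the root flow.dv
    root_rundir="/work/my_proj/rundir")     # assumed: root of the run-directory tree
print(ctxt.root_pkgdir, ctxt.root_rundir)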
dv_flow/mgr/task_node_leaf.py CHANGED
@@ -10,7 +10,6 @@ from .task_data import TaskDataInput, TaskDataOutput, TaskDataResult, TaskMarker
  from .task_def import ConsumesE, PassthroughE
  from .task_node import TaskNode
  from .task_run_ctxt import TaskRunCtxt
- from .param_ref_eval import ParamRefEval
  from .param import Param
 
  @dc.dataclass
@@ -21,6 +20,19 @@ class TaskNodeLeaf(TaskNode):
  runner,
  rundir,
  memento : Any = None) -> 'TaskDataResult':
+ try:
+ ret = await self._do_run(runner, rundir, memento)
+ except Exception as e:
+ print("Exception: %s" % str(e))
+ ret = TaskDataResult()
+ raise e
+
+ return ret
+
+ async def _do_run(self,
+ runner,
+ rundir,
+ memento : Any = None) -> 'TaskDataResult':
  self._log.debug("--> do_run: %s" % self.name)
  changed = False
  for dep,_ in self.needs:
@@ -34,7 +46,7 @@ class TaskNodeLeaf(TaskNode):
  # TODO: Form dep-map from inputs
 
  dep_m = {}
- for need,block in self.needs:
+ for i,(need,block) in enumerate(self.needs):
  self._log.debug("dep %s dep_m: %s" % (need.name, str(dep_m)))
  if not block:
  for subdep in need.output.dep_m.keys():
@@ -45,45 +57,51 @@ class TaskNodeLeaf(TaskNode):
  dep_m[subdep].append(dep)
  self._log.debug("input dep_m: %s %s" % (self.name, str(dep_m)))
 
- sorted = toposort.toposort(dep_m)
-
- in_params_m = {}
- added_srcs = set()
- for need,block in self.needs:
- self._log.debug("Process need=%s block=%s" % (need.name, block))
- if not block:
- for p in need.output.output:
-
- # Avoid adding parameters from a single task more than once
- key = (p.src, p.seq)
- if key not in added_srcs:
- added_srcs.add(key)
- if p.src not in in_params_m.keys():
- in_params_m[p.src] = []
- in_params_m[p.src].append(p)
+ # This gets the dependencies in topological order
+ # sorted = toposort.toposort(dep_m)
 
- # in_params holds parameter sets ordered by dependency
+ # Now, process the 'needs' in the order that they're listed
  in_params = []
- for sorted_s in sorted:
- self._log.debug("sorted_s: %s" % str(sorted_s))
- for dep in sorted_s:
- if dep in in_params_m.keys():
- self._log.debug("(%s) Extend with: %s" % (dep, str(in_params_m[dep])))
- in_params.extend(in_params_m[dep])
+ in_params_s = set()
+ in_task_s = set()
+
+ for need, _ in self.needs:
+ if need not in in_task_s:
+ in_task_s.add(need)
+ for item in need.output.output:
+ key = (item.src, item.seq)
+ if key not in in_params_s:
+ in_params_s.add(key)
+ in_params.append(item)
+
+ #
+ # in_params_m = {}
+ # added_srcs = set()
+ # for need,block in self.needs:
+ # self._log.debug("Process need=%s block=%s" % (need.name, block))
+ # if not block:
+ # for p in need.output.output:
+
+ # # Avoid adding parameters from a single task more than once
+ # key = (p.src, p.seq)
+ # if key not in added_srcs:
+ # added_srcs.add(key)
+ # if p.src not in in_params_m.keys():
+ # in_params_m[p.src] = []
+ # in_params_m[p.src].append(p)
+
+ # # in_params holds parameter sets ordered by dependency
+ # in_params = []
+ # for sorted_s in sorted:
+ # self._log.debug("sorted_s: %s" % str(sorted_s))
+ # for dep in sorted_s:
+ # if dep in in_params_m.keys():
+ # self._log.debug("(%s) Extend with: %s" % (dep, str(in_params_m[dep])))
+ # in_params.extend(in_params_m[dep])
 
  self._log.debug("in_params[1]: %s" % ",".join(p.src for p in in_params))
 
- # Create an evaluator for substituting param values
- eval = ParamRefEval()
 
- self._log.debug("in_params[2]: %s" % ",".join(p.src for p in in_params))
- eval.setVar("in", in_params)
- eval.setVar("rundir", rundir)
-
- # Set variables from the inputs
- for need in self.needs:
- for name,value in {"rundir" : need[0].rundir}.items():
- eval.setVar("%s.%s" % (need[0].name, name), value)
 
 
  # Default inputs is the list of parameter sets that match 'consumes'
  inputs = []
@@ -98,19 +116,6 @@ class TaskNodeLeaf(TaskNode):
  else:
  self._log.debug("consumes(unknown): %s" % str(self.consumes))
 
- for name,field in type(self.params).model_fields.items():
- value = getattr(self.params, name)
- if type(value) == str:
- if value.find("${{") != -1:
- new_val = eval.eval(value)
- self._log.debug("Param %s: Evaluate expression \"%s\" => \"%s\"" % (name, value, new_val))
- setattr(self.params, name, new_val)
- elif isinstance(value, list):
- for i,elem in enumerate(value):
- if elem.find("${{") != -1:
- new_val = eval.eval(elem)
- value[i] = new_val
-
  input = TaskDataInput(
  name=self.name,
  changed=changed,
@@ -120,7 +125,10 @@ class TaskNodeLeaf(TaskNode):
  inputs=inputs,
  memento=memento)
 
- ctxt = TaskRunCtxt(runner=runner, rundir=input.rundir)
+ ctxt = TaskRunCtxt(
+ runner=runner,
+ ctxt=self.ctxt,
+ rundir=input.rundir)
 
  self._log.debug("--> Call task method %s" % str(self.task))
  try:
@@ -144,10 +152,7 @@
 
  self.result.markers.extend(ctxt._markers)
 
- output=self.result.output.copy()
- for i,out in enumerate(output):
- out.src = self.name
- out.seq = i
+ output = []
 
  self._log.debug("output[1]: %s" % str(output))
 
@@ -193,6 +198,13 @@ class TaskNodeLeaf(TaskNode):
  # self.name : []
  # }
 
+ # Add our own output
+ local_out = self.result.output.copy()
+ for i,out in enumerate(local_out):
+ out.src = self.name
+ out.seq = i
+ output.append(out)
+
  self._log.debug("output dep_m: %s %s" % (self.name, str(dep_m)))
  self._log.debug("output[2]: %s" % str(output))
 
@@ -214,6 +226,17 @@ class TaskNodeLeaf(TaskNode):
  if self.output is None:
  raise Exception("Task %s did not produce a result" % self.name)
  return self.result
+
+ def _processNeed(self, need, in_params, in_task_s):
+ # Go depth-first
+ for nn, _ in need.needs:
+ self._processNeed(nn, in_params, in_task_s)
+
+ if need not in in_task_s:
+ in_params.extend(need.output.output)
+
+
+
 
  def __hash__(self):
- return id(self)
+ return id(self)
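Note: the reworked input collection replaces the topological ordering with the declaration order of 'needs', de-duplicating parameter sets by their (src, seq) key. A standalone sketch of that rule (the Item record and order_inputs helper are illustrative, not package API):

# Hedged sketch of the ordering rule used by _do_run above.
from dataclasses import dataclass

@dataclass
class Item:                      # stands in for an output parameter set
    src: str
    seq: int

def order_inputs(needs):
    """needs: list of (name, [Item, ...]) pairs in the order they are declared."""
    in_params, seen = [], set()
    for _name, items in needs:
        for item in items:
            key = (item.src, item.seq)
            if key not in seen:        # each (src, seq) contributes once
                seen.add(key)
                in_params.append(item)
    return in_params

a_out = [Item("a", 0), Item("a", 1)]
b_out = [Item("b", 0), Item("a", 0)]   # 'b' forwards one of a's parameter sets
print([(i.src, i.seq) for i in order_inputs([("a", a_out), ("b", b_out)])])
# [('a', 0), ('a', 1), ('b', 0)]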
dv_flow/mgr/task_run_ctxt.py CHANGED
@@ -5,6 +5,7 @@ import pydantic.dataclasses as pdc
  import os
  from typing import List
  from .task_data import TaskMarker, SeverityE, TaskMarkerLoc
+ from .task_node_ctxt import TaskNodeCtxt
 
  class ExecInfo(BaseModel):
  cmd : List[str] = pdc.Field(default_factory=list)
@@ -13,10 +14,20 @@ class ExecInfo(BaseModel):
  @dc.dataclass
  class TaskRunCtxt(object):
  runner : 'TaskRunner'
+ ctxt : TaskNodeCtxt
  rundir : str
+
  _markers : List[TaskMarker] = dc.field(default_factory=list)
  _exec_info : List[ExecInfo] = dc.field(default_factory=list)
 
+ @property
+ def root_pkgdir(self):
+ return self.ctxt.root_pkgdir
+
+ @property
+ def root_rundir(self):
+ return self.ctxt.root_rundir
+
  async def exec(self,
  cmd : List[str],
  logfile=None,
@@ -55,6 +66,13 @@ class TaskRunCtxt(object):
  if status != 0:
  self.error("Command failed: %s" % " ".join(cmd))
 
+ if logfilter is not None:
+ with open(os.path.join(self.rundir, logfile), "r") as fp:
+ for line in fp.readlines():
+ if logfilter(line):
+ self.info(line.strip())
+ logfilter("")
+
  return status
 
  def create(self, path, content):
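Note: the new logfilter hook in exec() is called once per line of the captured logfile and should return True for lines to surface as info markers; a final logfilter("") call is made after the file has been read, presumably so a stateful filter can flush. A hedged usage sketch (the command, logfile name, and filter are made up):

# Hypothetical logfilter callable passed to ctxt.exec().
def warnings_only(line: str) -> bool:
    return "WARNING" in line

# Inside a task implementation, where ctxt is the TaskRunCtxt handed to the task:
#   status = await ctxt.exec(["make", "lint"], logfile="lint.log",
#                            logfilter=warnings_only)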
dv_flow/mgr/task_runner.py CHANGED
@@ -126,11 +126,14 @@ class TaskSetRunner(TaskRunner):
 
  # TaskNode rundir is a list of path elements relative
  # to the root rundir
- rundir = self.rundir
-
- for rundir_e in t.rundir:
- rundir_e = re.sub(invalid_chars_pattern, '_', rundir_e)
- rundir = os.path.join(rundir, rundir_e)
+ rundir = ""
+
+ for i, rundir_e in enumerate(t.rundir):
+ if i:
+ rundir_e = re.sub(invalid_chars_pattern, '_', rundir_e)
+ rundir = os.path.join(rundir, rundir_e)
+ else:
+ rundir = rundir_e
 
  # if t.rundir_t == RundirE.Unique:
  # # Replace invalid characters with the replacement string.
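Note: with this change the first element of t.rundir is used verbatim (presumably it already names the root run directory) and only the later elements are sanitized. A small standalone sketch (the character pattern and example paths are assumptions, standing in for the module's own definitions):

# Hedged sketch of the revised rundir join.
import os, re

invalid_chars_pattern = r'[<>:"|?*]'   # assumption: placeholder for the module's pattern

def join_rundir(elems):
    rundir = ""
    for i, e in enumerate(elems):
        if i:
            rundir = os.path.join(rundir, re.sub(invalid_chars_pattern, '_', e))
        else:
            rundir = e                 # first element used as-is
    return rundir

print(join_rundir(["/work/rundir", "my_pkg", "sim:smoke"]))
# /work/rundir/my_pkg/sim_smoke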
dv_flow/mgr/type.py CHANGED
@@ -0,0 +1,33 @@
+ import dataclasses as dc
+ from typing import Any, Dict, List
+ from .srcinfo import SrcInfo
+
+ @dc.dataclass
+ class TypeField(object):
+ name : str
+ type : Any
+ doc : str = None
+ value : str = None
+ append : List[Any] = None
+ srcinfo : SrcInfo = None
+
+ @dc.dataclass
+ class Type(object):
+ name : str
+ doc : str = None
+ params : Dict[str, TypeField] = dc.field(default_factory=dict)
+ paramT : Any = None
+ uses : 'Type' = None
+ srcinfo : SrcInfo = None
+
+ def dump(self):
+ ret = {}
+ ret["name"] = self.name
+ ret["doc"] = self.doc
+ ret["params"] = {}
+ ret['srcinfo'] = self.srcinfo.dump()
+
+ return ret
+
+ def __hash__(self):
+ return id(self)
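Note: a hypothetical construction of the new records (the names, type and value strings are made up; dump() expects srcinfo to be set, so it is not called here):

# Hedged usage sketch for the new Type/TypeField dataclasses.
from dv_flow.mgr.type import Type, TypeField

width = TypeField(name="width", type="int", value="32")
bus = Type(name="my_pkg.BusConfig", doc="Example user-defined type",
           params={"width": width})
print(bus.name, list(bus.params.keys()))   # my_pkg.BusConfig ['width']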
dv_flow/mgr/type_def.py CHANGED
@@ -19,12 +19,14 @@
  #* Author:
  #*
  #****************************************************************************
- from typing import List, Union
+ from typing import Dict, List, Union
  from pydantic import BaseModel, Field
  from .param_def import ParamDef
+ from .srcinfo import SrcInfo
 
  class TypeDef(BaseModel):
  name : str
  uses : str = None
  doc : str = None
- fields : List[ParamDef] = Field(alias="with", default_factory=list)
+ params : Dict[str, Union[str, ParamDef]] = Field(alias="with", default_factory=list)
+ srcinfo : SrcInfo = None
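Note: since "with" is the YAML-facing alias, a TypeDef can be populated from parsed flow.dv data roughly as below. A hedged sketch; the type name and the keys inside "with" are invented, and the plain-string values rely on the Union[str, ParamDef] form above:

# Hypothetical construction of a TypeDef from parsed YAML data.
from dv_flow.mgr.type_def import TypeDef

td = TypeDef(**{
    "name": "my_pkg.BuildArgs",
    "doc": "Example type definition",
    "with": {"opt_level": "2", "top": "my_top"},   # plain strings are accepted
})
print(td.params)    # {'opt_level': '2', 'top': 'my_top'}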
dv_flow/mgr/util/util.py CHANGED
@@ -22,20 +22,29 @@
  import os
  import yaml
  from ..package_loader import PackageLoader
+ from ..task_data import TaskMarker, TaskMarkerLoc, SeverityE
 
  def loadProjPkgDef(path, listener=None):
  """Locates the project's flow spec and returns the PackageDef"""
 
  dir = path
  ret = None
+ found = False
  while dir != "/" and dir != "" and os.path.isdir(dir):
  if os.path.exists(os.path.join(dir, "flow.dv")):
  with open(os.path.join(dir, "flow.dv")) as f:
  data = yaml.load(f, Loader=yaml.FullLoader)
  if "package" in data.keys():
+ found = True
  listeners = [listener] if listener is not None else []
  ret = PackageLoader(marker_listeners=listeners).load(os.path.join(dir, "flow.dv"))
  break
  dir = os.path.dirname(dir)
+
+ if not found:
+ if listener:
+ listener.marker(TaskMarker(
+ msg="Failed to find a 'flow.dv' file that defines a package in %s or its parent directories" % path))
+
  return ret
 
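Note: any object with a marker() method can serve as the listener, so the new not-found case can be observed as below. A hedged sketch; the directory path is made up and the import path is assumed to mirror the file location:

# Hypothetical listener consuming the new 'flow.dv not found' marker.
from dv_flow.mgr.util.util import loadProjPkgDef

class PrintListener:
    def marker(self, m):
        print("marker:", m.msg)

pkg = loadProjPkgDef("/tmp/not_a_project", listener=PrintListener())
if pkg is None:
    print("no project package found")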
dv_flow/mgr/yaml_srcinfo_loader.py CHANGED
@@ -4,6 +4,7 @@ from yaml.loader import SafeLoader
  class YamlSrcInfoLoader(SafeLoader):
  scopes = {
  "tasks",
+ "types",
  "body",
  "package",
  "fragment"
{dv_flow_mgr-1.0.0.14528489065a1.dist-info → dv_flow_mgr-1.5.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dv-flow-mgr
- Version: 1.0.0.14528489065a1
+ Version: 1.5.0
  Summary: DV Flow Manager is a build system for silicon design
  Author-email: Matthew Ballance <matt.ballance@gmail.com>
  License: Apache License