dv-flow-mgr 0.0.2.14182043984a1-py3-none-any.whl → 1.0.0.14370600369a1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. dv_flow/mgr/__init__.py +2 -1
  2. dv_flow/mgr/cmds/cmd_graph.py +2 -3
  3. dv_flow/mgr/cmds/cmd_run.py +7 -9
  4. dv_flow/mgr/cmds/cmd_show.py +1 -2
  5. dv_flow/mgr/cond_def.py +16 -0
  6. dv_flow/mgr/config.py +7 -0
  7. dv_flow/mgr/config_def.py +33 -0
  8. dv_flow/mgr/exec_callable.py +88 -0
  9. dv_flow/mgr/{pkg_rgy.py → ext_rgy.py} +44 -35
  10. dv_flow/mgr/extend_def.py +21 -0
  11. dv_flow/mgr/fragment_def.py +4 -3
  12. dv_flow/mgr/need_def.py +6 -0
  13. dv_flow/mgr/null_callable.py +10 -0
  14. dv_flow/mgr/package.py +30 -6
  15. dv_flow/mgr/package_def.py +40 -444
  16. dv_flow/mgr/package_loader.py +701 -0
  17. dv_flow/mgr/param_def.py +2 -1
  18. dv_flow/mgr/parser.out +567 -0
  19. dv_flow/mgr/pytask_callable.py +25 -0
  20. dv_flow/mgr/root_package.py +9 -0
  21. dv_flow/mgr/shell_callable.py +14 -0
  22. dv_flow/mgr/srcinfo.py +15 -0
  23. dv_flow/mgr/std/flow.dv +25 -4
  24. dv_flow/mgr/task.py +68 -0
  25. dv_flow/mgr/task_def.py +36 -24
  26. dv_flow/mgr/task_graph_builder.py +497 -247
  27. dv_flow/mgr/task_listener_log.py +4 -0
  28. dv_flow/mgr/task_node_ctor.py +11 -3
  29. dv_flow/mgr/task_node_ctor_compound.py +21 -33
  30. dv_flow/mgr/task_node_leaf.py +25 -3
  31. dv_flow/mgr/task_params_ctor.py +0 -1
  32. dv_flow/mgr/task_run_ctxt.py +4 -0
  33. dv_flow/mgr/task_runner.py +2 -0
  34. dv_flow/mgr/util/cmds/cmd_schema.py +0 -2
  35. dv_flow/mgr/util/util.py +4 -3
  36. dv_flow/mgr/yaml_srcinfo_loader.py +55 -0
  37. {dv_flow_mgr-0.0.2.14182043984a1.dist-info → dv_flow_mgr-1.0.0.14370600369a1.dist-info}/METADATA +1 -1
  38. dv_flow_mgr-1.0.0.14370600369a1.dist-info/RECORD +74 -0
  39. dv_flow_mgr-0.0.2.14182043984a1.dist-info/RECORD +0 -59
  40. {dv_flow_mgr-0.0.2.14182043984a1.dist-info → dv_flow_mgr-1.0.0.14370600369a1.dist-info}/WHEEL +0 -0
  41. {dv_flow_mgr-0.0.2.14182043984a1.dist-info → dv_flow_mgr-1.0.0.14370600369a1.dist-info}/entry_points.txt +0 -0
  42. {dv_flow_mgr-0.0.2.14182043984a1.dist-info → dv_flow_mgr-1.0.0.14370600369a1.dist-info}/licenses/LICENSE +0 -0
  43. {dv_flow_mgr-0.0.2.14182043984a1.dist-info → dv_flow_mgr-1.0.0.14370600369a1.dist-info}/top_level.txt +0 -0
@@ -19,7 +19,9 @@
  #* Author:
  #*
  #****************************************************************************
+ import dataclasses
  import io
+ import json
  import os
  import yaml
  import importlib
@@ -29,17 +31,12 @@ import pydantic
  import pydantic.dataclasses as dc
  from pydantic import BaseModel
  from typing import Any, Dict, List, Callable, Tuple, ClassVar, Union
+ from .config_def import ConfigDef
  from .fragment_def import FragmentDef
- from .package import Package
  from .package_import_spec import PackageImportSpec, PackageSpec
  from .param_def import ParamDef
- from .task_def import TaskDef, PassthroughE, ConsumesE, RundirE
- from .task_node_ctor import TaskNodeCtor
- from .task_node_ctor_proxy import TaskNodeCtorProxy
- from .task_node_ctor_task import TaskNodeCtorTask
- from .task_node_ctor_compound import TaskNodeCtorCompound
- from .task_node_ctor_compound_proxy import TaskNodeCtorCompoundProxy
- from .std.task_null import TaskNull
+ from .srcinfo import SrcInfo
+ from .task_def import TaskDef
  from .type_def import TypeDef


@@ -64,7 +61,17 @@ class PackageDef(BaseModel):
  params : List[ParamDef] = dc.Field(
  default_factory=list, alias="with",
  description="List of package parameters to set")
- srcinfo : Any = dc.Field(default=None)
+ configs : List[ConfigDef] = dc.Field(
+ default_factory=list,
+ description="List of package configurations")
+ srcinfo : SrcInfo = dc.Field(default=None)
+
+ # @pydantic.model_validator(mode='before')
+ # def filter_srcinfo(self, values):
+ # print("pkg_def values: %s" % values)
+ # # if values.get("srcinfo") is not None:
+ # # values["srcinfo"] = values["srcinfo"].replace("\\", "/")
+ # return self

  _fragment_l : List['FragmentDef'] = []
  _subpkg_m : Dict[str,'PackageDef'] = {}
@@ -101,41 +108,6 @@ class PackageDef(BaseModel):
  if t.name == name:
  return t

- def mkPackage(self, session, params : Dict[str,Any] = None) -> 'Package':
- self._log.debug("--> mkPackage %s" % self.name)
- ret = Package(self.name)
-
- session.push_package(ret, add=True)
-
- tasks_m : Dict[str,str,TaskNodeCtor]= {}
-
- for task in self.tasks:
- if task.name in tasks_m.keys():
- raise Exception("Duplicate task %s" % task.name)
- tasks_m[task.name] = (task, self._basedir, ) # We'll add a TaskNodeCtor later
-
- for frag in self._fragment_l:
- for task in frag.tasks:
- if task.name in tasks_m.keys():
- raise Exception("Duplicate task %s" % task.name)
- tasks_m[task.name] = (task, frag._basedir, ) # We'll add a TaskNodeCtor later
-
- # Now we have a unified map of the tasks declared in this package
- for name in list(tasks_m.keys()):
- task_i = tasks_m[name]
- fullname = self.name + "." + name
- if len(task_i) < 3:
- # Need to create the task ctor
- ctor_t = self.mkTaskCtor(session, task_i[0], task_i[1], tasks_m)
- tasks_m[name] = (task_i[0], task_i[1], ctor_t)
- ret.tasks[name] = tasks_m[name][2]
- ret.tasks[fullname] = tasks_m[name][2]
-
- session.pop_package(ret)
-
- self._log.debug("<-- mkPackage %s" % self.name)
- return ret
-
  def getTaskCtor(self, session, task_name, tasks_m):
  self._log.debug("--> getTaskCtor %s" % task_name)
  # Find package (not package_def) that implements this task
@@ -164,413 +136,37 @@ class PackageDef(BaseModel):
  ctor_t = tasks_m[task_name][2]
  return ctor_t

- def mkTaskCtor(self, session, task, srcdir, tasks_m) -> TaskNodeCtor:
- self._log.debug("--> %s::mkTaskCtor %s (srcdir: %s)" % (self.name, task.name, srcdir))
-
- if len(task.tasks) > 0:
- # Compound task
- ctor = self._mkCompoundTaskCtor(session, task, srcdir, tasks_m)
- else:
- # Leaf task
- ctor = self._mkLeafTaskCtor(session, task, srcdir, tasks_m)
-
- return ctor
-
-
- def _mkLeafTaskCtor(self, session, task, srcdir, tasks_m) -> TaskNodeCtor:
- self._log.debug("--> _mkLeafTaskCtor")
- base_ctor_t : TaskNodeCtor = None
- ctor_t : TaskNodeCtor = None
- base_params : BaseModel = None
- callable = None
- fullname = self.name + "." + task.name
- rundir = task.rundir
-
- if task.uses is not None:
- self._log.debug("Uses: %s" % task.uses)
- base_ctor_t = self.getTaskCtor(session, task.uses, tasks_m)
- base_params = base_ctor_t.mkTaskParams()
-
-
- if base_ctor_t is None:
- self._log.error("Failed to load task ctor %s" % task.uses)
- else:
- self._log.debug("No 'uses' specified")
-
- passthrough, consumes, needs = self._getPTConsumesNeeds(task, base_ctor_t)
-
- # Determine the implementation constructor first
- if task.pytask is not None:
- # Built-in impl
- # Now, lookup the class
- self._log.debug("Use PyTask implementation")
- last_dot = task.pytask.rfind('.')
- clsname = task.pytask[last_dot+1:]
- modname = task.pytask[:last_dot]
-
- try:
- if modname not in sys.modules:
- if self._basedir not in sys.path:
- sys.path.append(self._basedir)
- mod = importlib.import_module(modname)
- else:
- mod = sys.modules[modname]
- except ModuleNotFoundError as e:
- raise Exception("Failed to import module %s (_basedir=%s): %s" % (
- modname, self._basedir, str(e)))
-
- if not hasattr(mod, clsname):
- raise Exception("Method %s not found in module %s" % (clsname, modname))
- callable = getattr(mod, clsname)
-
- # Determine if we need to use a new
- paramT = self._getParamT(session, task, base_params)
-
- if callable is not None:
- ctor_t = TaskNodeCtorTask(
- name=fullname,
- srcdir=srcdir,
- paramT=paramT, # TODO: need to determine the parameter type
- passthrough=passthrough,
- consumes=consumes,
- needs=needs, # TODO: need to determine the needs
- task=callable,
- rundir=rundir)
- elif base_ctor_t is not None:
- # Use the existing (base) to create the implementation
- ctor_t = TaskNodeCtorProxy(
- name=fullname,
- srcdir=srcdir,
- paramT=paramT, # TODO: need to determine the parameter type
- passthrough=passthrough,
- consumes=consumes,
- needs=needs,
- rundir=rundir,
- uses=base_ctor_t)
- else:
- self._log.debug("Use 'Null' as the class implementation")
- ctor_t = TaskNodeCtorTask(
- name=fullname,
- srcdir=srcdir,
- paramT=paramT,
- passthrough=passthrough,
- consumes=consumes,
- needs=needs,
- rundir=rundir,
- task=TaskNull)
-
- self._log.debug("<-- %s::mkTaskCtor %s" % (self.name, task.name))
- return ctor_t
-
- def _mkCompoundTaskCtor(self, session, task, srcdir, tasks_m) -> TaskNodeCtor:
- self._log.debug("--> _mkCompoundTaskCtor")
- base_ctor_t : TaskNodeCtor = None
- ctor_t : TaskNodeCtor = None
- base_params : BaseModel = None
- callable = None
-
- fullname = self.name + "." + task.name
-
-
- if task.uses is not None:
- self._log.debug("Uses: %s" % task.uses)
- base_ctor_t = self.getTaskCtor(session, task.uses, tasks_m)
- base_params = base_ctor_t.mkTaskParams()
-
- if base_ctor_t is None:
- self._log.error("Failed to load task ctor %s" % task.uses)
-
- passthrough, consumes, needs = self._getPTConsumesNeeds(task, base_ctor_t)
-
- # Determine if we need to use a new
- paramT = self._getParamT(session, task, base_params)
-
- if base_ctor_t is not None:
- ctor_t = TaskNodeCtorCompoundProxy(
- name=fullname,
- srcdir=srcdir,
- paramT=paramT,
- passthrough=passthrough,
- consumes=consumes,
- needs=needs,
- task_def=task,
- uses=base_ctor_t)
- else:
- self._log.debug("No 'uses' specified")
- ctor_t = TaskNodeCtorCompound(
- name=fullname,
- srcdir=srcdir,
- paramT=paramT,
- passthrough=passthrough,
- consumes=consumes,
- needs=needs,
- task_def=task)
-
- for t in task.tasks:
- ctor_t.tasks.append(self.mkTaskCtor(session, t, srcdir, tasks_m))
-
-
- self._log.debug("<-- %s::mkTaskCtor %s (%d)" % (self.name, task.name, len(ctor_t.tasks)))
- return ctor_t
-
- def _getPTConsumesNeeds(self, task, base_ctor_t):
- passthrough = task.passthrough
- consumes = task.consumes.copy() if isinstance(task.consumes, list) else task.consumes
- needs = [] if task.needs is None else task.needs.copy()
-
- if base_ctor_t is not None:
- if passthrough is None:
- passthrough = base_ctor_t.passthrough
- if consumes is None:
- consumes = base_ctor_t.consumes
-
- if passthrough is None:
- passthrough = PassthroughE.No
- if consumes is None:
- consumes = ConsumesE.All
-
- return (passthrough, consumes, needs)
-
- def _getParamT(self, session, task, base_t : BaseModel):
- self._log.debug("--> _getParamT %s" % task.fullname)
- # Get the base parameter type (if available)
- # We will build a new type with updated fields
-
- ptype_m = {
- "str" : str,
- "int" : int,
- "float" : float,
- "bool" : bool,
- "list" : List
- }
- pdflt_m = {
- "str" : "",
- "int" : 0,
- "float" : 0.0,
- "bool" : False,
- "list" : []
- }
-
- fields = []
- field_m : Dict[str,int] = {}
-
- pkg = session.package()
-
- # First, pull out existing fields (if there's a base type)
- if base_t is not None:
- self._log.debug("Base type: %s" % str(base_t))
- for name,f in base_t.model_fields.items():
- ff : dc.Field = f
- fields.append(f)
- field_m[name] = (f.annotation, getattr(base_t, name))
- else:
- self._log.debug("No base type")
-
- for p in task.params.keys():
- param = task.params[p]
- self._log.debug("param: %s %s (%s)" % (p, str(param), str(type(param))))
- if hasattr(param, "type") and param.type is not None:
- ptype_s = param.type
- if ptype_s not in ptype_m.keys():
- raise Exception("Unknown type %s" % ptype_s)
- ptype = ptype_m[ptype_s]
-
- if p in field_m.keys():
- raise Exception("Duplicate field %s" % p)
- if param.value is not None:
- field_m[p] = (ptype, param.value)
- else:
- field_m[p] = (ptype, pdflt_m[ptype_s])
- self._log.debug("Set param=%s to %s" % (p, str(field_m[p][1])))
- else:
- if p not in field_m.keys():
- raise Exception("Field %s not found" % p)
- if type(param) != dict:
- value = param
- elif "value" in param.keys():
- value = param["value"]
- else:
- raise Exception("No value specified for param %s: %s" % (
- p, str(param)))
- field_m[p] = (field_m[p][0], value)
- self._log.debug("Set param=%s to %s" % (p, str(field_m[p][1])))
-
- params_t = pydantic.create_model("Task%sParams" % task.name, **field_m)
-
- self._log.debug("== Params")
- for name,info in params_t.model_fields.items():
- self._log.debug(" %s: %s" % (name, str(info)))
-
- self._log.debug("<-- _getParamT %s" % task.name)
- return params_t
-
- @classmethod
- def load(cls, path, exp_pkg_name=None):
- return PackageDef._loadPkgDef(path, exp_pkg_name, [])
- pass
-
- @classmethod
- def _loadPkgDef(cls, root, exp_pkg_name, file_s):
- from yaml.loader import SafeLoader

- class SafeLineLoader(SafeLoader):
- def construct_mapping(self, node, deep=False):
- # print("construct")
- mapping = super(SafeLineLoader, self).construct_mapping(node, deep=deep)
- # Add 1 so line numbering starts at 1
- # mapping['_srcinfo'] = node.start_mark.line + 1
- return mapping
+ # @classmethod
+ # def load(cls, path, exp_pkg_name=None):
+ # return PackageDef._loadPkgDef(path, exp_pkg_name, [])
+ # pass

- if root in file_s:
- raise Exception("Recursive file processing @ %s: %s" % (root, ",".join(file_s)))
- file_s.append(root)
- ret = None
- with open(root, "r") as fp:
- PackageDef._log.debug("open %s" % root)
- doc = yaml.load(fp, Loader=SafeLineLoader)
- if "package" not in doc.keys():
- raise Exception("Missing 'package' key in %s" % root)
- try:
- pkg = PackageDef(**(doc["package"]))

- for t in pkg.tasks:
- t.fullname = pkg.name + "." + t.name
+ # @staticmethod
+ # def loads(data, exp_pkg_name=None):
+ # return PackageDef._loadPkgDefS(data, exp_pkg_name)
+ # pass

- except Exception as e:
- PackageDef._log.error("Failed to load package from %s" % root)
- raise e
- pkg._basedir = os.path.dirname(root)
+ # @staticmethod
+ # def _loadPkgDefS(data, exp_pkg_name):
+ # ret = None
+ # doc = yaml.load(io.StringIO(data), Loader=yaml.FullLoader)
+ # if "package" not in doc.keys():
+ # raise Exception("Missing 'package' key in %s" % root)
+ # pkg = PackageDef(**(doc["package"]))
+ # pkg._basedir = None

- # for t in pkg.tasks:
- # t._basedir = os.path.dirname(root)
+ # # for t in pkg.tasks:
+ # # t._basedir = os.path.dirname(root)

- if exp_pkg_name is not None:
- if exp_pkg_name != pkg.name:
- raise Exception("Package name mismatch: %s != %s" % (exp_pkg_name, pkg.name))
- # else:
- # self._pkg_m[exp_pkg_name] = [PackageSpec(pkg.name)
- # self._pkg_spec_s.append(PackageSpec(pkg.name))
+ # if exp_pkg_name is not None:
+ # if exp_pkg_name != pkg.name:
+ # raise Exception("Package name mismatch: %s != %s" % (exp_pkg_name, pkg.name))

- # if not len(self._pkg_spec_s):
- # self._pkg_spec_s.append(PackageSpec(pkg.name))
- # else:
- # self._pkg_def_m[PackageSpec(pkg.name)] = pkg
+ # if len(pkg.fragments) > 0:
+ # raise Exception("Cannot load a package-def with fragments from a string")

- for spec in pkg.fragments:
- PackageDef._loadFragmentSpec(pkg, spec, file_s)
-
- if len(pkg.imports) > 0:
- cls._log.info("Loading imported packages (_basedir=%s)" % pkg._basedir)
- for imp in pkg.imports:
- if type(imp) == str:
- imp_path = imp
- elif imp.path is not None:
- imp_path = imp.path
- else:
- raise Exception("imp.path is none: %s" % str(imp))
-
- cls._log.info("Loading imported package %s" % imp_path)
-
- if not os.path.isabs(imp_path):
- cls._log.debug("_basedir: %s ; imp_path: %s" % (pkg._basedir, imp_path))
- imp_path = os.path.join(pkg._basedir, imp_path)
-
- # Search down the tree looking for a flow.dv file
- if os.path.isdir(imp_path):
- path = imp_path
-
- while path is not None and os.path.isdir(path) and not os.path.isfile(os.path.join(path, "flow.dv")):
- # Look one directory down
- next_dir = None
- for dir in os.listdir(path):
- if os.path.isdir(os.path.join(path, dir)):
- if next_dir is None:
- next_dir = dir
- else:
- path = None
- break
- if path is not None:
- path = next_dir
-
- if path is not None and os.path.isfile(os.path.join(path, "flow.dv")):
- imp_path = os.path.join(path, "flow.dv")
-
- if not os.path.isfile(imp_path):
- raise Exception("Import file %s not found" % imp_path)
-
- cls._log.info("Loading file %s" % imp_path)
-
- sub_pkg = PackageDef.load(imp_path)
- cls._log.info("Loaded imported package %s" % sub_pkg.name)
- pkg._subpkg_m[sub_pkg.name] = sub_pkg
-
- file_s.pop()
-
- return pkg
-
- @staticmethod
- def loads(data, exp_pkg_name=None):
- return PackageDef._loadPkgDefS(data, exp_pkg_name)
- pass
-
- @staticmethod
- def _loadPkgDefS(data, exp_pkg_name):
- ret = None
- doc = yaml.load(io.StringIO(data), Loader=yaml.FullLoader)
- if "package" not in doc.keys():
- raise Exception("Missing 'package' key in %s" % root)
- pkg = PackageDef(**(doc["package"]))
- pkg._basedir = None
-
- # for t in pkg.tasks:
- # t._basedir = os.path.dirname(root)
-
- if exp_pkg_name is not None:
- if exp_pkg_name != pkg.name:
- raise Exception("Package name mismatch: %s != %s" % (exp_pkg_name, pkg.name))
-
- if len(pkg.fragments) > 0:
- raise Exception("Cannot load a package-def with fragments from a string")
-
- return pkg
+ # return pkg

- @staticmethod
- def _loadFragmentSpec(pkg, spec, file_s):
- # We're either going to have:
- # - File path
- # - Directory path
-
- if os.path.isfile(os.path.join(pkg._basedir, spec)):
- PackageDef._loadFragmentFile(
- pkg,
- os.path.join(pkg._basedir, spec),
- file_s)
- elif os.path.isdir(os.path.join(pkg._basedir, spec)):
- PackageDef._loadFragmentDir(pkg, os.path.join(pkg._basedir, spec), file_s)
- else:
- raise Exception("Fragment spec %s not found" % spec)
-
- @staticmethod
- def _loadFragmentDir(pkg, dir, file_s):
- for file in os.listdir(dir):
- if os.path.isdir(os.path.join(dir, file)):
- PackageDef._loadFragmentDir(pkg, os.path.join(dir, file), file_s)
- elif os.path.isfile(os.path.join(dir, file)) and file == "flow.dv":
- PackageDef._loadFragmentFile(pkg, os.path.join(dir, file), file_s)
-
- @staticmethod
- def _loadFragmentFile(pkg, file, file_s):
- if file in file_s:
- raise Exception("Recursive file processing @ %s: %s" % (file, ", ".join(file_s)))
- file_s.append(file)

- with open(file, "r") as fp:
- doc = yaml.load(fp, Loader=yaml.FullLoader)
- PackageDef._log.debug("doc: %s" % str(doc))
- if doc is not None and "fragment" in doc.keys():
- # Merge the package definition
- frag = FragmentDef(**(doc["fragment"]))
- frag._basedir = os.path.dirname(file)
- pkg._fragment_l.append(frag)
- else:
- print("Warning: file %s is not a fragment" % file)