dv-flow-mgr 0.0.1.12750690879a1__tar.gz → 0.0.1.12822558956a1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/PKG-INFO +1 -1
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/docs/Roadmap.md +5 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/docs/index.rst +4 -3
- dv_flow_mgr-0.0.1.12822558956a1/docs/intro.rst +185 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/pyproject.toml +1 -1
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/fileset.py +2 -1
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/task.py +11 -13
- dv_flow_mgr-0.0.1.12822558956a1/src/dv_flow_mgr/task_data.py +269 -0
- dv_flow_mgr-0.0.1.12822558956a1/src/dv_flow_mgr/tasklib/builtin_pkg.py +61 -0
- dv_flow_mgr-0.0.1.12822558956a1/src/dv_flow_mgr/tasklib/std/flow.dv +12 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr.egg-info/PKG-INFO +1 -1
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr.egg-info/SOURCES.txt +6 -0
- dv_flow_mgr-0.0.1.12822558956a1/tests/unit/__init__.py +1 -0
- dv_flow_mgr-0.0.1.12822558956a1/tests/unit/test_data_merge.py +121 -0
- dv_flow_mgr-0.0.1.12822558956a1/tests/unit/test_pyclass.py +29 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/tests/unit/test_smoke.py +1 -1
- dv_flow_mgr-0.0.1.12750690879a1/src/dv_flow_mgr/task_data.py +0 -94
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/.github/workflows/ci.yml +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/.gitignore +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/.vscode/settings.json +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/LICENSE +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/README.md +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/docs/Makefile +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/docs/Notes.md +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/docs/Stages.md +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/docs/TypesAndDefs.md +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/docs/conf.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/docs/quickstart.rst +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/docs/reference.rst +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/ivpm.yaml +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/setup.cfg +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/__init__.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/__main__.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/cmds/cmd_run.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/flow.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/fragment_def.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/package.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/package_def.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/package_import_spec.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/parameters.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/session.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/share/flow.json +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/task_def.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/task_memento.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/tasklib/hdl/sim/mti_pkg.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/tasklib/hdl/sim/mti_task_sim_image.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/tasklib/hdl/sim/mti_task_sim_run.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/tasklib/hdl/sim/pkg_hdl_sim.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/tasklib/hdl/sim/task_sim_image.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/tasklib/hdl/sim/vcs_pkg.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/tasklib/hdl/sim/vcs_task_sim_image.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/tasklib/hdl/sim/vcs_task_sim_run.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/tasklib/hdl/sim/vl_task_sim_image.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/tasklib/hdl/sim/vlt_pkg.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/tasklib/hdl/sim/vlt_task_sim_image.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/tasklib/hdl/sim/vlt_task_sim_run.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/tasklib/std/fileset.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/tasklib/std/pkg_std.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/tasklib/std/std.dfs +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/tasklib/std/task_fileset.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/tasklib/std/task_null.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr.egg-info/dependency_links.txt +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr.egg-info/entry_points.txt +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr.egg-info/requires.txt +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr.egg-info/top_level.txt +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/tests/examples/example1/example1.flow +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/tests/unit/data/fileset/test1/files1/file1_1.sv +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/tests/unit/data/fileset/test1/files1/file1_2.sv +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/tests/unit/data/fileset/test1/files2/file2_1.sv +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/tests/unit/data/fileset/test1/files2/file2_2.sv +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/tests/unit/data/fileset/test1/test1.dfs +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/tests/unit/data/fileset/test1 copy/files1/file1_1.sv +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/tests/unit/data/fileset/test1 copy/files1/file1_2.sv +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/tests/unit/data/fileset/test1 copy/files2/file2_1.sv +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/tests/unit/data/fileset/test1 copy/files2/file2_2.sv +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/tests/unit/data/fileset/test1 copy/test1.dfs +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/tests/unit/data/proj1/proj1.dfs +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/tests/unit/data/proj2/proj2.dfs +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/tests/unit/data/proj3/proj3.dfs +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/tests/unit/test_fileset.py +0 -0
- {dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/tests/unit/test_smoke copy.sav +0 -0
{dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/docs/Roadmap.md
RENAMED
@@ -29,6 +29,11 @@ are evaluated.
 - JQ-based data extraction
 - YAML task templates / expansions
 - Support for annotating job requirements
+- Mark tasks as producing and accepting certain data
+  - FileSet task `produces` fileset of `type`
+  - SimImage task `accepts` systemVerilogSource, verilogSource, verilogPreCompLib, etc
+  => Mostly useful for checking and suggestion
+  => As more are marked, can treat as more binding
 
 # Library
 
{dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/docs/index.rst
RENAMED
@@ -6,9 +6,9 @@
 DV Flow Manager
 ===============
 
-DV Flow Manager
-design and verification (DV)
-
+DV Flow Manager provides a "make for silicon engineering": a specification
+for capturing design and verification (DV) tasks and dataflow in a way that
+enables concurrent execution and efficient avoidance of redundant work.
 
 .. mermaid::
 
@@ -28,4 +28,5 @@ sort of "make for silicon engineering".
    :caption: Contents:
 
    quickstart
+   intro
   reference
dv_flow_mgr-0.0.1.12822558956a1/docs/intro.rst
ADDED
@@ -0,0 +1,185 @@
+############
+Introduction
+############
+
+
+
+
+Many software languages have co-evolved with a build system. For example, C/C++
+has Make and CMake. Java has ANT, Maven, and Gradle. All of these build systems
+provide features that cater to specific ways that a given language is processed,
+and provide built-in notions to make setting up simple cases as easy as possible.
+
+One simple example is Make and a single-file C program. Take the code below:
+
+.. code-block:: C
+
+   #include <stdio.h>
+
+   int main() {
+      printf("Hello, world!\n");
+      return 0;
+   }
+
+
+Make provides enough built-in features that are C/C++-specific that we can create
+an executable from this source file (assume it's named hello.c) simply by running:
+
+.. code-block:: bash
+
+   make hello
+
+Make knows about C files, it knows about the existance of a C compiler, and it knows
+that an executable can be created from a C file of the same name.
+
+Meanwhile, in Silicon Engineering Land...
+=========================================
+
+Much like software languages, the languages, tools, and flows used in silicon engineering
+have their own unique characteristics. For example, in a silicon-design environment, many
+flows are run over the same source files -- possibly with different configurations.
+
+* We compile our design with a UVM testbench to run dynamic (simulation-based) verification
+* We compile our design with different testbenches to run formal verification
+* We likely use slightly different subset when targeting synthesis
+
+In addition, we also need to be flexible when it comes to tooling. Over time, we'll likely
+use different tools from different providers, and want our projects to adapt as easily as
+possible to a change of tool. It's also likely that we will either want to add new tools
+to our environment over time, or adapt our environment to take advantage of new
+productivity-enhancing tool features.
+
+DV Flow Manager is designed to be the 'make' for silicon engineering. There are three
+aspects to the tool:
+
+* **Flow Specification** - Processing steps for a given project are captured in a hierarchy
+  of YAML files. The flow-specification schema is designed to be tool-independent, such
+  that multiple tools can be implemented that comprehend a flow specification.
+* **Task Library** - Processing steps are implemented as `tasks`. A library of common tasks
+  is defined to cover common cases, such as creating a simulation image. External libraries
+  of tasks are supported, such that tools can bundle a task library along with the tool installation.
+* **Tools** - The Python implementation of DV Flow Manager is one example of a tool. Other tools
+  may be added in the future to provide visualization, simplify development, etc.
+
+
+
+Key Concepts
+============
+
+DV Flow Manager has three key concepts:
+* **Package** - A packages is parameterizd namespace that contain tasks.
+* **Task** - A task is a processing step in a flow. Tasks represent a data-processing step, which
+  might be as simple as building a list of files, or might be a complex as creating a hardened macro
+  from multiple source collections.
+* **Dataflow Dependencies** - Tasks are related by dataflow dependencies. In order for a task to
+  execute, the data from all of its dependencies must be available. Each task also produces a
+  dataflow object that can be consumed by other tasks.
+
+Let's look at an example to better-understand these concepts.
+
+.. code-block:: YAML
+
+   package:
+     name: my_ip
+
+     imports:
+     - name: hdl.sim.vlt
+       as: hdl.sim
+
+     tasks:
+     - name: rtl
+       uses: std.Fileset
+       with:
+         base: "rtl"
+         include: "*.sv"
+
+     - name: tb
+       uses: std.Fileset
+       needs: [rtl]
+       with:
+         base: "tb"
+         include: "*.sv"
+
+     - name: sim
+       uses: hdl.sim.SimImage
+       needs: [rtl, tb]
+
+     -name: test1
+       uses: hdl.sim.RunSim
+       needs: [sim]
+
+The code above specifies two collections of source code --
+one for the design and one for the testbench. This source
+code is compiled into as simulation image using the
+pre-defined task named `hdl.sim.SimImage`. After,
+we execute the simulation image.
+
+
+.. mermaid::
+
+   flowchart TD
+      A[rtl] --> B[tb]
+      B[tb] --> E[sim]
+      E --> F[test1]
+
+The task graph for this flow is shown above. Each step depends on the
+prior step, so there is no opportunity for concurrent execution.
+
+Now, let's say that we want to run a series of tests. We can add
+a new task per tests, where we customize the activity that is run
+by passing arguments to the simulation.
+
+.. code-block:: YAML
+
+   # ...
+   -name: test1
+     uses: hdl.sim.RunSim
+     needs: [sim]
+   -name: test2
+     uses: hdl.sim.RunSim
+     needs: [sim]
+   -name: test3
+     uses: hdl.sim.RunSim
+     needs: [sim]
+
+.. mermaid::
+
+   flowchart TD
+      A[rtl] --> B[tb]
+      B[tb] --> E[sim]
+      E --> F[test1]
+      E --> G[test2]
+      E --> H[test3]
+
+Our task graph now looks like the above. Our build tasks are sequential,
+while our test tasks only depend on the simulation image being
+up-to-date, and and can execute concurrently.
+
+## Dataflow
+
+What ties all the tasks above together is dependency-based dataflow.
+
+.. code-block:: YAML
+
+   - name: tb
+     uses: std.Fileset
+     needs: [rtl]
+     with:
+       base: "tb"
+       include: "*.sv"
+
+   - name: sim
+     uses: hdl.sim.SimImage
+     needs: [rtl, tb]
+
+When the `sim` task places dependencies on the `rtl` and `tb`
+tasks, it receives the output from those tasks as input. In
+this case, that means that the simulation-image compilation
+task has a list of all of the source files that it needs to
+compile. The `sim` task also produces an output, which contains
+a reference to the directory where the simulation image resides.
+The `test` tasks use this input to locate the simulation image.
+
+
+
+
{dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/fileset.py
RENAMED
@@ -24,8 +24,9 @@ from pydantic import BaseModel
 from typing import Any, Dict, List, Tuple
 
 class FileSet(BaseModel):
-    src : str
     type : str
     basedir : str
+    name : str = ""
+    src : str = None
     files : List[str] = dc.Field(default_factory=list)
     params : Dict[str,str] = dc.Field(default_factory=dict)
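The FileSet change above makes `src` optional and adds a `name` field, which the new TaskData.merge() (added later in this diff) uses to de-duplicate filesets. A minimal usage sketch, assuming only the fields shown in this hunk and the root-level `FileSet` export used by the tests further down; the `name` value is illustrative:

from dv_flow_mgr import FileSet

# With this change a fileset can be built before the producing task is known
fs = FileSet(type="systemVerilogSource", basedir="rtl", files=["top.sv"])
fs.src = "rtl"        # typically stamped by the producing task
fs.name = "rtl.main"  # hypothetical name; merge treats (name, src) as the identity key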
{dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr/task.py
RENAMED
@@ -155,14 +155,6 @@ class Task(object):
         # Merge filesets. A fileset with the same
         print("deps_o: %s" % str(deps_o))
 
-        # First, merge the dep maps of all the inputs
-        deps_m = self.depends[0].output.deps.copy()
-        for deps in map(lambda d: d.deps, self.depends[1:]):
-            for k,v in deps.items():
-                if k in deps_m:
-                    deps_m[k].add(v)
-                else:
-                    deps_m[k] = set(v)
 
         print("deps_m: %s" % str(deps_m))
 
@@ -182,8 +174,7 @@
         # Mark the source of this data as being this task
         input.src = self.name
 
-
-        os.makedirs(self.rundir)
+        self.init_rundir()
 
         result = await self.run(input)
 
@@ -202,9 +193,7 @@
         result = self.getOutput()
 
         # Write-back the memento, if specified
-
-        with open(os.path.join(self.rundir, "memento.json"), "w") as fp:
-            fp.write(self.memento.model_dump_json(indent=2))
+        self.save_memento()
 
         self.running = False
 
@@ -214,6 +203,15 @@
     async def run(self, input : TaskData) -> TaskData:
         raise NotImplementedError("TaskImpl.run() not implemented")
 
+    def init_rundir(self):
+        if not os.path.isdir(self.rundir):
+            os.makedirs(self.rundir)
+
+    def save_memento(self):
+        if self.memento is not None:
+            with open(os.path.join(self.rundir, "memento.json"), "w") as fp:
+                fp.write(self.memento.model_dump_json(indent=2))
+
     def setOutput(self, output : TaskData):
         self.output_set = True
         output.src = self.name
dv_flow_mgr-0.0.1.12822558956a1/src/dv_flow_mgr/task_data.py
ADDED
@@ -0,0 +1,269 @@
+#****************************************************************************
+#* task_data.py
+#*
+#* Copyright 2023 Matthew Ballance and Contributors
+#*
+#* Licensed under the Apache License, Version 2.0 (the "License"); you may
+#* not use this file except in compliance with the License.
+#* You may obtain a copy of the License at:
+#*
+#* http://www.apache.org/licenses/LICENSE-2.0
+#*
+#* Unless required by applicable law or agreed to in writing, software
+#* distributed under the License is distributed on an "AS IS" BASIS,
+#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#* See the License for the specific language governing permissions and
+#* limitations under the License.
+#*
+#* Created on:
+#* Author:
+#*
+#****************************************************************************
+import enum
+import pydantic.dataclasses as dc
+from pydantic import BaseModel
+from typing import Any, Dict, Set, List, Tuple
+from .fileset import FileSet
+from toposort import toposort
+
+class TaskDataParamOpE(enum.Enum):
+    Set = enum.auto()
+    Append = enum.auto()
+    Prepend = enum.auto()
+    PathAppend = enum.auto()
+    PathPrepend = enum.auto()
+
+class TaskDataParamKindE(enum.Enum):
+    String = enum.auto()
+    FilePath = enum.auto()
+    SearchPath = enum.auto()
+    List = enum.auto()
+
+class TaskDataParamOp(BaseModel):
+    op : TaskDataParamOpE
+    value : Any
+
+class TaskDataParam(BaseModel):
+    kind : TaskDataParamKindE
+    ops : List[TaskDataParamOp] = dc.Field(default_factory=list)
+
+class TaskData(BaseModel):
+    src : str = None
+    params : Dict[str,TaskDataParam] = dc.Field(default_factory=dict)
+    deps : Dict[str,Set[str]] = dc.Field(default_factory=dict)
+    filesets : List[FileSet] = dc.Field(default_factory=list)
+    changed : bool = False
+
+    def hasParam(self, name: str) -> bool:
+        return name in self.params
+
+    def getParam(self, name: str) -> Any:
+        return self.params[name]
+
+    def setParam(self, name: str, value: Any):
+        self.params[name] = value
+
+    def addFileSet(self, fs : FileSet):
+        self.filesets.append(fs)
+
+    def getFileSets(self, type=None, order=True) -> List[FileSet]:
+        ret = []
+
+        if order:
+            # The deps map specifies task dependencies
+
+            candidate_fs = []
+            for fs in self.filesets:
+                if type is None or fs.type in type:
+                    candidate_fs.append(fs)
+
+            order = toposort(self.deps)
+
+            for order_s in order:
+                for fs in candidate_fs:
+                    if fs.src in order_s:
+                        ret.append(fs)
+        else:
+            for fs in self.filesets:
+                if type is None or fs.type in type:
+                    ret.append(fs)
+
+        return ret
+
+    def copy(self) -> 'TaskData':
+        ret = TaskData()
+        ret.src = self.src
+        ret.params = self.params.copy()
+        for d in self.deps:
+            ret.deps.append(d.clone())
+        ret.changed = self.changed
+        return ret
+
+    def setParamVal(self, name: str, kind : TaskDataParamKindE, value: Any):
+        if name not in self.params:
+            self.params[name] = TaskDataParam(kind=kind)
+        self.params[name].ops.append(TaskDataParamOp(op=TaskDataParamOpE.Set, value=value))
+
+    def getParamVal(self, name: str) -> Any:
+        if name not in self.params.keys():
+            raise Exception("No such parameter: %s" % name)
+        param = self.params[name]
+        value = param.ops[0].value
+
+        if len(param.ops) > 1:
+            for op in param.ops[1:]:
+                if op.op == TaskDataParamOpE.Append:
+                    if isinstance(value, list):
+                        value.extend(op.value)
+                    else:
+                        value += op.value
+                elif op.op == TaskDataParamOpE.Prepend:
+                    if isinstance(value, list):
+                        for nv in op.value:
+                            value.insert(0, nv)
+                    else:
+                        value = op.value + value
+                elif op.op == TaskDataParamOpE.PathAppend:
+                    if isinstance(value, list):
+                        value = ":".join(value)
+                    value = value + ":" + op.value
+                elif op.op == TaskDataParamOpE.PathPrepend:
+                    if isinstance(value, list):
+                        value = ":".join(value)
+                    value = op.value + ":" + value
+
+        return value
+
+    @staticmethod
+    def merge(incoming : List['TaskData'], local : 'TaskData' = None) -> 'TaskData':
+        """Merges incoming data with local settings and produces an output"""
+
+        # Deal with the dependency trees first
+        output = TaskData()
+
+        # First, merge the dep maps of all the inputs
+        output.deps = incoming[0].deps.copy()
+        for deps in map(lambda i: i.deps, incoming[1:]):
+            for k,v in deps.items():
+                if k not in output.deps:
+                    output.deps[k] = []
+                for vi in v:
+                    if vi not in output.deps[k]:
+                        output.deps[k].append(v)
+
+        # Process filesets
+        for inp in incoming:
+            for fs in inp.filesets:
+                exists = False
+                for fs_o in output.filesets:
+                    if fs_o.name == fs.name and fs_o.src == fs.src:
+                        exists = True
+                        break
+                if not exists:
+                    output.addFileSet(fs.model_copy())
+
+        # Now, deal with parameters
+        # Find collisions first
+        colliding_keys = set()
+        passthrough_keys = set()
+
+        for i in incoming:
+            for k in i.params.keys():
+                if k in passthrough_keys:
+                    colliding_keys.add(k)
+                else:
+                    passthrough_keys.add(k)
+
+        # Now, removes those that are locally set
+        local_set_params = set()
+        if local is not None:
+            for k,v in local.params.items():
+                if len(v.ops) == 1 and v.ops[0].op == TaskDataParamOpE.Set:
+                    local_set_params.add(k)
+                    # If are setting locally, it's not passthrough
+                    passthrough_keys.remove(k)
+                    if k in colliding_keys:
+                        colliding_keys.remove(k)
+
+        # Construct the passthrough set by removing
+        # colliding entries and those that we will set locally
+        for k in colliding_keys:
+            if k in passthrough_keys:
+                passthrough_keys.remove(k)
+
+        # For the remaining keys, check for conflicts by
+        # confirming that the last 'set' in each incoming parameter
+        # are equal
+        for k in colliding_keys:
+            value = None
+            for i,inp in enumerate(incoming):
+                value_i = None
+                param = inp.params[k]
+                if len(param.ops) == 1:
+                    value_i = param.ops[0].value
+                else:
+                    # Iterate in reverse over the operations
+                    for op in param.ops[::-1]:
+                        if op.op == TaskDataParamOpE.Set:
+                            value_i = op.value
+                            break
+                if not i:
+                    value = value_i
+                else:
+                    if value != value_i:
+                        raise Exception("Parameter %s has conflicting values (%s %s)" % (
+                            k,
+                            str(value),
+                            value(value_i)))
+
+
+        # Now, we need to construct the result
+        # - copy over passthrough parameters
+        # - add locally-set parameters
+        # - for others
+        #   - Apply full list for first input
+        #   - Apply all beyond the last 'set' operation for others
+        for k in passthrough_keys:
+            # Find an input that has the parameter
+            for inp in incoming:
+                if k in inp.params:
+                    break
+            # Find the value of the param
+            param = inp.params[k]
+
+            if len(param.ops) == 1:
+                output.params[k] = TaskDataParam(kind=param.kind)
+                output.params[k].ops.append(param.ops[0])
+            else:
+                for op in param.ops[::-1]:
+                    if op.op == TaskDataParamOpE.Set:
+                        output.params[k] = TaskDataParam(kind=param.kind)
+                        output.params[k].ops.append(op)
+                        break
+        for k in local_set_params:
+            output.params[k] = local.params[k].model_copy()
+
+        for k in colliding_keys:
+            value = None
+            for i,inp in enumerate(incoming):
+                # Find the last location that performs a 'set'
+                last_set_i = -1
+                param = inp.params[k]
+                if len(param.ops) == 1:
+                    last_set_i = 0
+                else:
+                    # Iterate in reverse over the operations
+                    for j,op in enumerate(param.ops[::-1]):
+                        if op.op == TaskDataParamOpE.Set:
+                            last_set_i = j
+                            break
+
+                if not i:
+                    # Copy the full list, including the last 'set'
+                    output.params[k].ops = param.param[last_set_i:].copy()
+                else:
+                    # append any additional directives
+                    if last_set_i+1 < len(param.ops):
+                        output.params[k].extend(param.ops[last_set_i+1:])
+
+        return output
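The parameter model above folds an ordered list of ops (Set, Append, Prepend, PathAppend, PathPrepend) into a final value in getParamVal(). A minimal sketch of that folding, assuming the module path shown in this hunk and the root-level TaskData/TaskDataParamKindE exports used by the tests further down; the parameter name is illustrative:

from dv_flow_mgr import TaskData, TaskDataParamKindE
from dv_flow_mgr.task_data import TaskDataParamOp, TaskDataParamOpE

data = TaskData()
# setParamVal records a 'Set' op that establishes the base value
data.setParamVal("SIM_LIBPATH", TaskDataParamKindE.SearchPath, "/tools/sim/lib")
# appending a PathAppend op extends the search path rather than replacing it
data.params["SIM_LIBPATH"].ops.append(
    TaskDataParamOp(op=TaskDataParamOpE.PathAppend, value="/proj/extra/lib"))

# getParamVal folds the ops: Set supplies the value, PathAppend joins with ':'
assert data.getParamVal("SIM_LIBPATH") == "/tools/sim/lib:/proj/extra/lib"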
dv_flow_mgr-0.0.1.12822558956a1/src/dv_flow_mgr/tasklib/builtin_pkg.py
ADDED
@@ -0,0 +1,61 @@
+import os
+import sys
+import glob
+import fnmatch
+import importlib
+import pydantic.dataclasses as dc
+from ..package import TaskCtor
+from ..task import Task, TaskParams, TaskCtorT
+from ..task_data import TaskData
+from ..task_memento import TaskMemento
+from typing import List, Tuple
+import dataclasses as dc
+from ..package_def import Package
+
+class TaskPyClass(Task):
+
+    async def run(self, input : TaskData) -> TaskData:
+
+        if self.srcdir not in sys.path:
+            sys.path.insert(0, self.srcdir)
+
+        print("sys.path: %s" % str(sys.path), flush=True)
+        idx = self.params.pyclass.rfind('.')
+        modname = self.params.pyclass[:idx]
+        clsname = self.params.pyclass[idx+1:]
+
+        if os.path.isfile(os.path.join(self.basedir, "my_module.py")):
+            print("my_module.py exists", flush=True)
+        else:
+            print("my_module.py does not exist", flush=True)
+
+        try:
+            print("modname=%s" % modname, flush=True)
+            module = importlib.import_module(modname)
+        except ModuleNotFoundError as e:
+            print("Module not found: %s syspath=%s" % (str(e), str(sys.path)), flush=True)
+            raise e
+
+        cls = getattr(module, clsname)
+
+        obj = cls(self.name, self.task_id, self.session, self.basedir, srcdir=self.srcdir)
+
+        return await obj.run(input)
+
+
+class TaskPyClassParams(TaskParams):
+    pyclass : str
+
+class TaskPyClassMemento(TaskMemento):
+    pass
+
+class TaskPyClassCtor(TaskCtorT):
+    def __init__(self):
+        super().__init__(TaskPyClassParams, TaskPyClass)
+
+@dc.dataclass
+class PackageBuiltin(Package):
+
+    def __post_init__(self):
+        print("PackageBuiltin::__post_init__", flush=True)
+        self.tasks["PyClass"] = TaskPyClass()
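TaskPyClass above resolves its `pyclass` parameter as a dotted `module.Class` path, imports the module from the task's source directory, and delegates run() to an instance of that class. A sketch of the kind of user module it could point at, assuming only the Task/TaskData exports shown elsewhere in this diff; the module and class names are hypothetical:

# my_tasks.py -- placed in a task's source directory and referenced via a
# pyclass value such as "my_tasks.Lint" (illustrative names, not in the package)
from dv_flow_mgr import Task, TaskData

class Lint(Task):

    async def run(self, input : TaskData) -> TaskData:
        # inspect the SystemVerilog filesets received from upstream tasks
        for fs in input.getFileSets(type=["systemVerilogSource"]):
            print("lint fileset from %s: %s" % (fs.src, fs.files))
        return input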
{dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/src/dv_flow_mgr.egg-info/SOURCES.txt
RENAMED
@@ -12,6 +12,7 @@ docs/Stages.md
 docs/TypesAndDefs.md
 docs/conf.py
 docs/index.rst
+docs/intro.rst
 docs/quickstart.rst
 docs/reference.rst
 src/dv_flow_mgr/__init__.py
@@ -36,6 +37,7 @@ src/dv_flow_mgr.egg-info/requires.txt
 src/dv_flow_mgr.egg-info/top_level.txt
 src/dv_flow_mgr/cmds/cmd_run.py
 src/dv_flow_mgr/share/flow.json
+src/dv_flow_mgr/tasklib/builtin_pkg.py
 src/dv_flow_mgr/tasklib/hdl/sim/mti_pkg.py
 src/dv_flow_mgr/tasklib/hdl/sim/mti_task_sim_image.py
 src/dv_flow_mgr/tasklib/hdl/sim/mti_task_sim_run.py
@@ -49,12 +51,16 @@ src/dv_flow_mgr/tasklib/hdl/sim/vlt_pkg.py
 src/dv_flow_mgr/tasklib/hdl/sim/vlt_task_sim_image.py
 src/dv_flow_mgr/tasklib/hdl/sim/vlt_task_sim_run.py
 src/dv_flow_mgr/tasklib/std/fileset.py
+src/dv_flow_mgr/tasklib/std/flow.dv
 src/dv_flow_mgr/tasklib/std/pkg_std.py
 src/dv_flow_mgr/tasklib/std/std.dfs
 src/dv_flow_mgr/tasklib/std/task_fileset.py
 src/dv_flow_mgr/tasklib/std/task_null.py
 tests/examples/example1/example1.flow
+tests/unit/__init__.py
+tests/unit/test_data_merge.py
 tests/unit/test_fileset.py
+tests/unit/test_pyclass.py
 tests/unit/test_smoke copy.sav
 tests/unit/test_smoke.py
 tests/unit/data/fileset/test1/test1.dfs
dv_flow_mgr-0.0.1.12822558956a1/tests/unit/__init__.py
ADDED
@@ -0,0 +1 @@
+
dv_flow_mgr-0.0.1.12822558956a1/tests/unit/test_data_merge.py
ADDED
@@ -0,0 +1,121 @@
+import asyncio
+import io
+import os
+import dataclasses as dc
+import pytest
+from typing import List
+import yaml
+from dv_flow_mgr import TaskData, FileSet, Session, TaskData, TaskDataParamKindE
+from pydantic import BaseModel
+from shutil import copytree
+from .tasklib import TaskNull
+
+def test_empty_in():
+
+    in1 = TaskData()
+    in2 = TaskData()
+
+    out = TaskData.merge([in1, in2])
+
+    assert len(out.params) == 0
+
+def test_empty_combine_nonoverlap_in():
+
+    in1 = TaskData()
+    in1.setParamVal("v1", "1")
+    in2 = TaskData()
+    in2.setParamVal("v2", "2")
+
+    out = TaskData.merge([in1, in2])
+
+    assert len(out.params) != 0
+    assert "v1" in out.params.keys()
+    assert out.getParamVal("v1") == "1"
+    assert "v2" in out.params.keys()
+    assert out.getParamVal("v2") == "2"
+
+def test_empty_combine_nonoverlap_in():
+
+    in1 = TaskData()
+    in1.setParamVal("v1", TaskDataParamKindE.String, "1")
+    in2 = TaskData()
+    in2.setParamVal("v2", TaskDataParamKindE.String, "2")
+
+    out = TaskData.merge([in1, in2])
+
+    assert len(out.params) != 0
+    assert "v1" in out.params.keys()
+    assert out.getParamVal("v1") == "1"
+    assert "v2" in out.params.keys()
+    assert out.getParamVal("v2") == "2"
+
+def test_conflict_1():
+
+    in1 = TaskData()
+    in1.setParamVal("v1", TaskDataParamKindE.String, "1")
+    in2 = TaskData()
+    in2.setParamVal("v1", TaskDataParamKindE.String, "2")
+
+    with pytest.raises(Exception):
+        out = TaskData.merge([in1, in2])
+
+def test_fileset_merge_1():
+    in1 = TaskData(src="in1")
+    in1.addFileSet(FileSet(
+        src="in1",
+        type="systemVerilogSource",
+        basedir="."))
+
+    in2 = TaskData(src="in2")
+    in2.addFileSet(FileSet(
+        src="in2",
+        type="systemVerilogSource",
+        basedir="."))
+
+    out = TaskData.merge([in1, in2])
+
+    assert len(out.filesets) == 2
+
+def test_fileset_merge_common_dep_1():
+    in1 = TaskData(src="in1")
+    in1.addFileSet(FileSet(
+        src="in1",
+        type="systemVerilogSource",
+        basedir="."))
+    in1.addFileSet(FileSet(
+        src="in0",
+        type="systemVerilogSource",
+        basedir="."))
+    in1.deps = {
+        "in1": ["in0"]
+    }
+
+    in2 = TaskData(src="in2")
+    in2.addFileSet(FileSet(
+        src="in1",
+        type="systemVerilogSource",
+        basedir="."))
+    in2.addFileSet(FileSet(
+        src="in2",
+        type="systemVerilogSource",
+        basedir="."))
+    in2.addFileSet(FileSet(
+        src="in0",
+        type="systemVerilogSource",
+        basedir="."))
+    in2.deps = {
+        "in1": ["in0"],
+        "in2": ["in1"]
+    }
+
+    out = TaskData.merge([in2, in1])
+
+    assert len(out.filesets) == 3
+    fs = out.getFileSets(type=["systemVerilogSource"], order=True)
+    assert len(fs) == 3
+
+    assert fs[0].src == "in0"
+    assert fs[1].src == "in1"
+    assert fs[2].src == "in2"
+
+
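test_fileset_merge_common_dep_1 above relies on getFileSets(order=True), which topologically sorts the merged deps map before emitting filesets. A small worked sketch of that ordering, using the toposort package imported by the new task_data.py (the deps values are written as sets, matching the Dict[str, Set[str]] field type):

from toposort import toposort

# deps map equivalent to the one built in the test above
deps = {"in1": {"in0"}, "in2": {"in1"}}

# toposort yields dependency-ordered groups: in0, then in1, then in2,
# which is why the ordered fileset list comes back as in0, in1, in2
print(list(toposort(deps)))   # [{'in0'}, {'in1'}, {'in2'}]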
dv_flow_mgr-0.0.1.12822558956a1/tests/unit/test_pyclass.py
ADDED
@@ -0,0 +1,29 @@
+
+import os
+import asyncio
+import pytest
+from dv_flow_mgr import TaskData
+from dv_flow_mgr.tasklib.builtin_pkg import TaskPyClass, TaskPyClassParams
+
+def test_smoke(tmpdir):
+    module = """
+from dv_flow_mgr import Task, TaskData
+
+class foo(Task):
+
+    async def run(self, input : TaskData) -> TaskData:
+        print("foo::run", flush=True)
+        return input
+"""
+    print("test_smoke")
+
+    with open(os.path.join(tmpdir, "my_module.py"), "w") as f:
+        f.write(module)
+
+    params = TaskPyClassParams(pyclass="my_module.foo")
+    basedir = os.path.join(tmpdir)
+    task = TaskPyClass("t1", -1, None, params, basedir, srcdir=basedir)
+
+    in_data = TaskData()
+    asyncio.run(task.run(in_data))
+    pass
{dv_flow_mgr-0.0.1.12750690879a1 → dv_flow_mgr-0.0.1.12822558956a1}/tests/unit/test_smoke.py
RENAMED
@@ -5,7 +5,7 @@ import dataclasses as dc
 import pytest
 from typing import List
 import yaml
-from dv_flow_mgr import FileSet, PackageDef, Session, TaskData
+from dv_flow_mgr import FileSet, PackageDef, Session, TaskData
 from pydantic import BaseModel
 from shutil import copytree
 
dv_flow_mgr-0.0.1.12750690879a1/src/dv_flow_mgr/task_data.py
REMOVED
@@ -1,94 +0,0 @@
-#****************************************************************************
-#* task_data.py
-#*
-#* Copyright 2023 Matthew Ballance and Contributors
-#*
-#* Licensed under the Apache License, Version 2.0 (the "License"); you may
-#* not use this file except in compliance with the License.
-#* You may obtain a copy of the License at:
-#*
-#* http://www.apache.org/licenses/LICENSE-2.0
-#*
-#* Unless required by applicable law or agreed to in writing, software
-#* distributed under the License is distributed on an "AS IS" BASIS,
-#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#* See the License for the specific language governing permissions and
-#* limitations under the License.
-#*
-#* Created on:
-#* Author:
-#*
-#****************************************************************************
-import enum
-import pydantic.dataclasses as dc
-from pydantic import BaseModel
-from typing import Any, Dict, Set, List, Tuple
-from .fileset import FileSet
-
-class TaskDataParamOpE(enum.Enum):
-    Set = enum.auto()
-    Append = enum.auto()
-    Prepend = enum.auto()
-    PathAppend = enum.auto()
-    PathPrepend = enum.auto()
-
-class TaskDataParamOp(BaseModel):
-    op : TaskDataParamOpE
-    value : Any
-
-class TaskDataParam(BaseModel):
-    value : Any
-    ops : List[TaskDataParamOp] = dc.Field(default_factory=list)
-
-class TaskData(BaseModel):
-    src : str = None
-    params : Dict[str,Any] = dc.Field(default_factory=dict)
-    deps : Dict[str,Set[str]] = dc.Field(default_factory=dict)
-    changed : bool = False
-
-    def hasParam(self, name: str) -> bool:
-        return name in self.params
-
-    def getParam(self, name: str) -> Any:
-        return self.params[name]
-
-    def setParam(self, name: str, value: Any):
-        self.params[name] = value
-
-    def addFileSet(self, fs : FileSet):
-        fs.src = self.src
-        if "filesets" not in self.params:
-            self.params["filesets"] = []
-        self.params["filesets"].append(fs)
-
-    def getFileSets(self, type=None) -> List[FileSet]:
-        ret = []
-
-        if "filesets" in self.params:
-            for fs in self.params["filesets"]:
-                if type is None or fs.type in type:
-                    ret.append(fs)
-
-        return ret
-
-    def copy(self) -> 'TaskData':
-        ret = TaskData()
-        ret.src = self.src
-        ret.params = self.params.copy()
-        for d in self.deps:
-            ret.deps.append(d.clone())
-        ret.changed = self.changed
-        return ret
-
-    def merge(self, other):
-        for k,v in other.params.items():
-            if k not in self.params:
-                if hasattr(v, "copy"):
-                    self.params[k] = v.copy()
-                else:
-                    self.params[k] = v
-            elif hasattr(self.params[k], "merge"):
-                self.params[k].merge(v)
-            elif self.params[k] != v:
-                raise Exception("Parameter %s has conflicting values" % k)
-