data-flow-diagram 1.10.0__tar.gz → 1.11.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/PKG-INFO +16 -5
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/setup.py +5 -5
- data_flow_diagram-1.11.0/src/data_flow_diagram/__init__.py +211 -0
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram/dependency_checker.py +21 -12
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram/dfd.py +99 -40
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram/dfd_dot_templates.py +10 -4
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram/dot.py +5 -4
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram/error.py +2 -1
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram/markdown.py +26 -17
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram/model.py +20 -6
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram/parser.py +108 -69
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram/scanner.py +28 -14
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram.egg-info/PKG-INFO +16 -5
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram.egg-info/entry_points.txt +0 -1
- data-flow-diagram-1.10.0/src/data_flow_diagram/__init__.py +0 -177
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/README.md +0 -0
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/pyproject.toml +0 -0
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/setup.cfg +0 -0
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram/config.py +0 -0
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram.egg-info/SOURCES.txt +0 -0
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram.egg-info/dependency_links.txt +0 -0
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram.egg-info/requires.txt +0 -0
- {data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram.egg-info/top_level.txt +0 -0
{data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/PKG-INFO
RENAMED
```diff
@@ -1,6 +1,6 @@
-Metadata-Version: 2.
+Metadata-Version: 2.2
 Name: data-flow-diagram
-Version: 1.10.0
+Version: 1.11.0
 Summary: Commandline tool to generate data flow diagrams from text
 Home-page: https://github.com/pbauermeister/dfd
 Author: Pascal Bauermeister
@@ -9,7 +9,6 @@ License: GNU General Public License v3 (GPLv3)
 Project-URL: Bug Reports, https://github.com/pbauermeister/dfd/issues
 Project-URL: Source, https://github.com/pbauermeister/dfd
 Keywords: diagram-generator,development,tool
-Platform: UNKNOWN
 Classifier: Development Status :: 3 - Alpha
 Classifier: Intended Audience :: Information Technology
 Classifier: Topic :: Software Development
@@ -19,7 +18,21 @@ Classifier: Programming Language :: Python :: 3
 Requires-Python: >=3.10, <4
 Description-Content-Type: text/markdown
 Provides-Extra: dev
+Requires-Dist: check-manifest; extra == "dev"
 Provides-Extra: test
+Requires-Dist: coverage; extra == "test"
+Dynamic: author
+Dynamic: author-email
+Dynamic: classifier
+Dynamic: description
+Dynamic: description-content-type
+Dynamic: home-page
+Dynamic: keywords
+Dynamic: license
+Dynamic: project-url
+Dynamic: provides-extra
+Dynamic: requires-python
+Dynamic: summary
 
 # DFD
 
@@ -132,5 +145,3 @@ examples.
 [src]: https://github.com/pbauermeister/dfd
 [pypi]: https://pypi.org/project/data-flow-diagram
 [doc]: https://github.com/pbauermeister/dfd/tree/master/doc/README.md
-
-
```
{data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/setup.py
RENAMED
```diff
@@ -12,7 +12,7 @@ long_description = (here / "README.md").read_text(encoding="utf-8")
 
 setup(
     name="data-flow-diagram",
-    version="1.10.0",
+    version="1.11.0",
     description="Commandline tool to generate data flow diagrams from text",
     long_description=long_description,
     long_description_content_type="text/markdown",
@@ -39,9 +39,9 @@ setup(
         "test": ["coverage"],
     },
     package_data={
-        # "data_flow_diagram": ["tbdpackage__data.dat"],
+        # "data_flow_diagram": ["tbdpackage__data.dat"],
     },
-    # data_files=[('data_flow_diagram', ["VERSION"])],
+    # data_files=[('data_flow_diagram', ["VERSION"])],
     # The following would provide a command called `data-flow-diagram` which
     # executes the function `main` from this package when invoked:
     entry_points={
@@ -51,8 +51,8 @@ setup(
     },
     project_urls={
         "Bug Reports": "https://github.com/pbauermeister/dfd/issues",
-        # "Funding": "https://donate.pypi.org",
-        # "Say Thanks!": "http://saythanks.io/to/example",
+        # "Funding": "https://donate.pypi.org",
+        # "Say Thanks!": "http://saythanks.io/to/example",
         "Source": "https://github.com/pbauermeister/dfd",
     },
 )
```
data_flow_diagram-1.11.0/src/data_flow_diagram/__init__.py
ADDED
```diff
@@ -0,0 +1,211 @@
+"""Command-line DFD diagram generator. Converts a textual description
+into a graphic file.
+
+-----
+
+See https://github.com/pbauermeister/dfd for information, syntax and
+examples.
+
+-----
+
+This module parses the commandline args, prepares the I/Os and calls the stuff.
+"""
+
+import argparse
+import io
+import os
+import re
+import sys
+import tempfile
+from typing import TextIO
+
+import pkg_resources
+
+from . import dfd, dot, markdown, model
+from .error import print_error
+
+try:
+    VERSION = pkg_resources.require("data-flow-diagram")[0].version
+except pkg_resources.DistributionNotFound:
+    VERSION = 'undefined'
+
+
+def parse_args() -> argparse.Namespace:
+    description, epilog = [each.strip() for each in __doc__.split('-----')[:2]]
+
+    parser = argparse.ArgumentParser(description=description, epilog=epilog)
+
+    parser.add_argument(
+        'INPUT_FILE',
+        action='store',
+        default=None,
+        nargs='?',
+        help='UML sequence input file; ' 'if omitted, stdin is used',
+    )
+
+    parser.add_argument(
+        '--output-file',
+        '-o',
+        required=False,
+        help='output file name; pass \'-\' to use stdout; '
+        'if omitted, use INPUT_FILE base name with \'.svg\' '
+        'extension, or stdout',
+    )
+
+    parser.add_argument(
+        '--markdown',
+        '-m',
+        action='store_true',
+        help='consider snippets between opening marker: '
+        '```data-flow-diagram OUTFILE, and closing marker: ``` '
+        'allowing to generate all diagrams contained in an '
+        'INPUT_FILE that is a markdown file',
+    )
+
+    parser.add_argument(
+        '--format',
+        '-f',
+        required=False,
+        default='svg',
+        help='output format: gif, jpg, tiff, bmp, pnm, eps, '
+        'pdf, svg (any supported by Graphviz); default is svg',
+    )
+
+    parser.add_argument(
+        '--percent-zoom',
+        '-p',
+        required=False,
+        default=100,
+        type=int,
+        help='magnification percentage; default is 100',
+    )
+
+    parser.add_argument(
+        '--background-color',
+        '-b',
+        required=False,
+        default='white',
+        help='background color name (including \'none\' for'
+        ' transparent) in web color notation; see'
+        ' https://developer.mozilla.org/en-US/docs/Web/CSS/color_value'
+        ' for a list of valid names; default is white',
+    )
+
+    parser.add_argument(
+        '--no-graph-title',
+        action='store_true',
+        default=False,
+        help='suppress graph title',
+    )
+
+    parser.add_argument(
+        '--no-check-dependencies',
+        action='store_true',
+        default=False,
+        help='suppress dependencies checking',
+    )
+
+    parser.add_argument(
+        '--debug',
+        action='store_true',
+        default=False,
+        help='emit debug messages',
+    )
+
+    parser.add_argument(
+        '--version',
+        '-V',
+        action='store_true',
+        help='print the version and exit',
+    )
+
+    return parser.parse_args()
+
+
+def handle_markdown_source(
+    options: model.Options, provenance: str, input_fp: TextIO
+) -> None:
+    text = input_fp.read()
+    snippets = markdown.extract_snippets(text)
+    markdown.check_snippets_unicity(provenance, snippets)
+    snippets_params = markdown.make_snippets_params(provenance, snippets)
+    for params in snippets_params:
+        dfd.build(
+            params.root,
+            params.input_fp.read(),
+            params.file_name,
+            options,
+            snippet_by_name=params.snippet_by_name,
+        )
+        print(f'{sys.argv[0]}: generated {params.file_name}', file=sys.stderr)
+
+
+def handle_dfd_source(
+    options: model.Options, provenance: str, input_fp: TextIO, output_path: str
+) -> None:
+    root = model.SourceLine("", provenance, None, None)
+    if output_path == '-':
+        # output to stdout
+        with tempfile.TemporaryDirectory() as d:
+            path = os.path.join(d, 'file.svg')
+            dfd.build(root, input_fp.read(), path, options)
+            with open(path) as f:
+                print(f.read())
+    else:
+        # output to file
+        dfd.build(root, input_fp.read(), output_path, options)
+
+
+def run(args: argparse.Namespace) -> None:
+    # adjust input
+    if args.INPUT_FILE is None:
+        input_fp = sys.stdin
+        provenance = '<stdin>'
+    else:
+        input_fp = open(args.INPUT_FILE)
+        provenance = f'<file:{args.INPUT_FILE}>'
+
+    options = model.Options(
+        args.format,
+        args.percent_zoom,
+        args.background_color,
+        args.no_graph_title,
+        args.no_check_dependencies,
+        args.debug,
+    )
+
+    # markdown source
+    if args.markdown:
+        handle_markdown_source(options, provenance, input_fp)
+        return
+
+    # adjust output
+    if args.output_file is None:
+        if args.INPUT_FILE is not None:
+            basename = os.path.splitext(args.INPUT_FILE)[0]
+            output_path = basename + '.' + args.format
+        else:
+            output_path = '-'
+    else:
+        output_path = args.output_file
+
+    # DFD source
+    handle_dfd_source(options, provenance, input_fp, output_path)
+
+
+def main() -> None:
+    """Entry point for the application script"""
+
+    dot.check_installed()
+
+    args = parse_args()
+    if args.version:
+        print('data-flow-diagram', VERSION)
+        sys.exit(0)
+
+    try:
+        run(args)
+    except model.DfdException as e:
+        text = f'ERROR: {e}'
+        print_error(text)
+        sys.exit(1)
```
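The new `__init__.py` wires argument parsing to `dfd.build()` via `handle_dfd_source()`. For orientation, here is a minimal sketch of the same call sequence used programmatically; the file names are made up, and the positional `model.Options` fields follow the order used in `run()` above:

```python
# Minimal sketch only: mirrors handle_dfd_source() from the diff above.
# "input.dfd" and "output.svg" are hypothetical names; the Options field
# order follows run() in the new __init__.py.
from data_flow_diagram import dfd, model

options = model.Options(
    'svg',    # format
    100,      # percent_zoom
    'white',  # background_color
    False,    # no_graph_title
    False,    # no_check_dependencies
    False,    # debug
)
root = model.SourceLine("", '<file:input.dfd>', None, None)
with open('input.dfd') as f:
    dfd.build(root, f.read(), 'output.svg', options)
```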
{data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram/dependency_checker.py
RENAMED
```diff
@@ -3,8 +3,11 @@ import sys
 from . import model, scanner, parser
 
 
-def check(
-
+def check(
+    dependencies: model.GraphDependencies,
+    snippet_by_name: model.SnippetByName | None,
+    options: model.Options,
+) -> None:
 
     snippet_by_name = snippet_by_name or {}
     errors: list[str] = []
@@ -37,34 +40,40 @@ def check(dependencies: model.GraphDependencies, snippet_by_name: model.SnippetB
         # if only graph is targetted, we're done
         if dep.to_item is None:
             if dep.to_type != model.NONE:
-                errors.append(
-
-
+                errors.append(
+                    f'{prefix}A whole graph may only be referred to '
+                    f'by an item of type "{model.NONE}", and not '
+                    f'"{dep.to_type}"'
+                )
             continue
 
         # scan and parse
         lines = scanner.scan(dep.source, text, snippet_by_name, options.debug)
-        statements, _ = parser.parse(lines, options.debug)
+        statements, _, _ = parser.parse(lines, options.debug)
 
         # find name
        item = find_item(dep.to_item, statements)
        if item:
            if dep.to_type != item.type:
-                errors.append(
-
-
+                errors.append(
+                    f'{prefix}Referred item "{dep.to_item}" is of '
+                    f'type "{item.type}", but is referred to as '
+                    f'type "{dep.to_type}"'
+                )
 
            continue  # Found!
 
-        errors.append(
-
+        errors.append(
+            f'{prefix}Referring to unknown item name "{dep.to_item}"'
+            f' of {what} "{name}"'
+        )
 
     if errors:
         errors.insert(0, 'Dependency error(s) found:')
         raise model.DfdException('\n\n'.join(errors))
 
 
-def find_item(name: str, statements: model.Statements) -> model.Item:
+def find_item(name: str, statements: model.Statements) -> model.Item | None:
     for statement in statements:
         match statement:
             case model.Item() as item:
```
{data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram/dfd.py
RENAMED
```diff
@@ -1,21 +1,26 @@
 """Run the generation process"""
 
 import os.path
+import pprint
 import re
 import textwrap
 from typing import Any, Optional
 
-from . import config
+from . import config, dependency_checker
 from . import dfd_dot_templates as TMPL
-from . import dot, model, parser, scanner
+from . import dot, model, parser, scanner
 
 
-def build(
-
-
+def build(
+    provenance: model.SourceLine,
+    dfd_src: str,
+    output_path: str,
+    options: model.Options,
+    snippet_by_name: model.SnippetByName | None = None,
+) -> None:
     """Take a DFD source and build the final image or document"""
     lines = scanner.scan(provenance, dfd_src, snippet_by_name, options.debug)
-    statements, dependencies = parser.parse(lines, options.debug)
+    statements, dependencies, attribs = parser.parse(lines, options.debug)
     if dependencies and not options.no_check_dependencies:
         dependency_checker.check(dependencies, snippet_by_name, options)
     items_by_name = parser.check(statements)
@@ -23,11 +28,11 @@ def build(provenance: model.SourceLine, dfd_src: str, output_path: str,
     statements, graph_options = handle_options(statements)
 
     if options.no_graph_title:
-        title =
+        title = ""
     else:
         title = os.path.splitext(output_path)[0]
 
-    gen = Generator(graph_options)
+    gen = Generator(graph_options, attribs)
     text = generate_dot(gen, title, statements, items_by_name)
     if options.debug:
         print(text)
@@ -44,11 +49,15 @@ def wrap(text: str, cols: int) -> str:
 class Generator:
     RX_NUMBERED_NAME = re.compile(r'(\d+[.])(.*)')
 
-    def __init__(
-
+    def __init__(
+        self, graph_options: model.GraphOptions, attribs: model.Attribs
+    ) -> None:
         self.lines: list[str] = []
         self.star_nr = 0
         self.frame_nr = 0
         self.graph_options = graph_options
+        self.attribs = attribs
+        self.attribs_rx = self._compile_attribs_names(attribs)
 
     def append(self, line: str, statement: model.Statement) -> None:
         self.lines.append('')
@@ -57,14 +66,16 @@ class Generator:
         self.lines.append(line)
 
     def generate_item(self, item: model.Item) -> None:
-
-        hits = self.RX_NUMBERED_NAME.findall(text)
+        copy = model.Item(**item.__dict__)
+        hits = self.RX_NUMBERED_NAME.findall(copy.text)
         if hits:
-            text = '\\n'.join(hits[0])
+            copy.text = '\\n'.join(hits[0])
 
-        text = wrap(text, self.graph_options.item_text_width)
-        attrs =
-
+        copy.text = wrap(copy.text, self.graph_options.item_text_width)
+        attrs = copy.attrs or ''
+        attrs = self._expand_attribs(attrs)
+
+        match copy.type:
             case model.PROCESS:
                 if self.graph_options.is_context:
                     shape = 'circle'
@@ -72,30 +83,37 @@
                 else:
                     shape = 'ellipse'
                 fc = '"#eeeeee"'
-                line = (
-
+                line = (
+                    f'"{copy.name}" [shape={shape} label="{copy.text}" '
+                    f'fillcolor={fc} style=filled {attrs}]'
+                )
             case model.CONTROL:
                 fc = '"#eeeeee"'
-                line = (
-
+                line = (
+                    f'"{copy.name}" [shape=ellipse label="{copy.text}" '
+                    f'fillcolor={fc} style="filled,dashed" {attrs}]'
+                )
             case model.ENTITY:
-                line = (
-
+                line = (
+                    f'"{copy.name}" [shape=rectangle label="{copy.text}" '
+                    f'{attrs}]'
+                )
             case model.STORE:
-                d = self._item_to_html_dict(
+                d = self._item_to_html_dict(copy)
                 line = TMPL.STORE.format(**d)
             case model.NONE:
-                line = f'"{
+                line = f'"{copy.name}" [shape=none label="{copy.text}" {attrs}]'
             case model.CHANNEL:
-                d = self._item_to_html_dict(
+                d = self._item_to_html_dict(copy)
                 if self.graph_options.is_vertical:
                     line = TMPL.CHANNEL_HORIZONTAL.format(**d)
                 else:
                     line = TMPL.CHANNEL.format(**d)
             case _:
-                prefix = model.mk_err_prefix_from(
-                raise model.DfdException(
-
+                prefix = model.mk_err_prefix_from(copy.source)
+                raise model.DfdException(
+                    f'{prefix}Unsupported item type ' f'"{copy.type}"'
+                )
         self.append(line, item)
 
     def _item_to_html_dict(self, item: model.Item) -> dict[str, Any]:
@@ -103,6 +121,32 @@ class Generator:
         d['text'] = d['text'].replace('\\n', '<br/>')
         return d
 
+    def _compile_attribs_names(
+        self, attribs: model.Attribs
+    ) -> re.Pattern[str] | None:
+        if not attribs:
+            return None
+        names = [re.escape(k) for k in attribs.keys()]
+        pattern = '|'.join(names)
+        return re.compile(pattern)
+
+    def _expand_attribs(self, attrs: str) -> str:
+        def replacer(m: re.Match[str]) -> str:
+            alias = m[0]
+            if alias not in self.attribs:
+                raise model.DfdException(
+                    f'Alias '
+                    f'"{alias}" '
+                    f'not found in '
+                    f'{pprint.pformat(self.attribs)}'
+                )
+
+            return self.attribs[alias].text
+
+        return (
+            self.attribs_rx.sub(replacer, attrs) if self.attribs_rx else attrs
+        )
+
     def generate_star(self, text: str) -> str:
         text = wrap(text, self.graph_options.item_text_width)
         star_name = f'__star_{self.star_nr}__'
@@ -111,8 +155,12 @@
         self.star_nr += 1
         return star_name
 
-    def generate_connection(
-
+    def generate_connection(
+        self,
+        conn: model.Connection,
+        src_item: model.Item | None,
+        dst_item: model.Item | None,
+    ) -> None:
         text = conn.text or ''
         text = wrap(text, self.graph_options.connection_text_width)
 
@@ -135,8 +183,9 @@
             dst_port = ':x:c'
 
         attrs = f'label="{text}"'
+
         if conn.attrs:
-            attrs += ' ' + conn.attrs
+            attrs += ' ' + self._expand_attribs(conn.attrs)
 
         match conn.type:
             case model.FLOW:
@@ -158,8 +207,9 @@
                 attrs += ' style=dashed'
             case _:
                 prefix = model.mk_err_prefix_from(conn.source)
-                raise model.DfdException(
-
+                raise model.DfdException(
+                    f'{prefix}Unsupported connection type ' f'"{conn.type}"'
+                )
         if conn.relaxed:
             attrs += ' constraint=false'
 
@@ -204,9 +254,14 @@
         return text
 
 
-def generate_dot(
-
+def generate_dot(
+    gen: Generator,
+    title: str,
+    statements: model.Statements,
+    items_by_name: dict[str, model.Item],
+) -> str:
     """Iterate over statements and generate a dot source file"""
+
     def get_item(name: str) -> Optional[model.Item]:
         return None if name == '*' else items_by_name[name]
 
@@ -234,8 +289,10 @@ def filter_statements(statements: model.Statements) -> model.Statements:
     connected_items = set()
     for statement in statements:
         match statement:
-            case model.Connection() as conn:
-
+            case model.Connection() as conn:
+                pass
+            case _:
+                continue
         for point in conn.src, conn.dst:
             connected_items.add(point)
 
@@ -251,8 +308,9 @@ def filter_statements(statements: model.Statements) -> model.Statements:
     return new_statements
 
 
-def handle_options(
-
+def handle_options(
+    statements: model.Statements,
+) -> tuple[model.Statements, model.GraphOptions]:
     new_statements = []
     options = model.GraphOptions()
     for statement in statements:
@@ -278,8 +336,9 @@ def handle_options(statements: model.Statements) -> tuple[
                 raise model.DfdException(f'{prefix}{e}"')
 
             case _:
-                raise model.DfdException(
-
+                raise model.DfdException(
+                    f'{prefix}Unsupported style ' f'"{style.style}"'
+                )
 
             continue
         new_statements.append(statement)
```
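The main functional addition in `dfd.py` is attribute-alias expansion: `_compile_attribs_names()` joins the escaped alias names into one alternation regex, and `_expand_attribs()` substitutes each match with the alias's text. A standalone sketch of the same technique, using a made-up alias table instead of the package's `model.Attribs`:

```python
import re

# Hypothetical alias table; in dfd.py the values come from parser.parse()
# as model.Attribs entries whose .text holds the replacement string.
aliases = {
    'IMPORTANT': 'color=red penwidth=2',
    'FADED': 'color=grey fontcolor=grey',
}

# Join the escaped alias names into a single alternation, as
# Generator._compile_attribs_names() does.
rx = re.compile('|'.join(re.escape(k) for k in aliases))

def expand(attrs: str) -> str:
    # Replace each alias occurrence with its expansion, as
    # Generator._expand_attribs() does via a replacer callback.
    return rx.sub(lambda m: aliases[m[0]], attrs)

print(expand('label="x" IMPORTANT'))  # -> label="x" color=red penwidth=2
```

As in the diff, the pattern is a plain alternation without word-boundary anchoring, so alias names are matched wherever they appear in the attribute string.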
{data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram/dfd_dot_templates.py
RENAMED
```diff
@@ -38,17 +38,23 @@ DOT_FONT_GRAPH = 'fontname="helvetica" fontsize=9 fontcolor="#000060"'
 
 DOT_GRAPH_TITLE = """graph[label="\n- {title} -" """ + DOT_FONT_GRAPH + """]"""
 
-DOT =
+DOT = (
+    """
 digraph D {{
   {graph_params}
-  edge[color=gray """
-
+  edge[color=gray """
+    + DOT_FONT_EDGE
+    + """]
+  node["""
+    + DOT_FONT_NODE
+    + """]
 {block}
 }}
 """.strip()
+)
 
 
 GRAPH_PARAMS_CONTEXT_DIAGRAM = 'edge [len=2.25]'
 
 
-ITEM_EXTERNAL_ATTRS = 'fillcolor=white color=grey fontcolor=grey'
+ITEM_EXTERNAL_ATTRS = 'fillcolor=white color=grey fontcolor=grey'
```
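The `DOT` template is now built as a parenthesized concatenation, but its named placeholders (`{graph_params}`, `{block}`) and doubled braces (`{{`, `}}`) indicate it is still filled via `str.format()`, with the doubling producing literal Graphviz braces. A reduced sketch of that mechanism; the fill-in values are illustrative only:

```python
# Doubled braces become literal "{" / "}" after str.format(); the single-brace
# names stay as placeholders, as in the DOT template shown above.
DOT = """
digraph D {{
  {graph_params}
{block}
}}
""".strip()

print(DOT.format(graph_params='edge [len=2.25]', block='"a" -> "b"'))
```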
{data-flow-diagram-1.10.0 → data_flow_diagram-1.11.0}/src/data_flow_diagram/dot.py
RENAMED
```diff
@@ -7,14 +7,15 @@ from . import model
 from .error import print_error
 
 
-def generate_image(
-
+def generate_image(
+    graph_options: model.GraphOptions, text: str, output_path: str, format: str
+) -> None:
     if graph_options.is_context:
         engine = 'neato'  # circo is not as good
     else:
         engine = 'dot'
 
-    cmd = [engine, f'-T{format}',
+    cmd = [engine, f'-T{format}', f'-o{output_path}']
     try:
         subprocess.run(cmd, input=text, encoding='utf-8', check=True)
     except subprocess.CalledProcessError as e:
@@ -22,7 +23,7 @@ def generate_image(graph_options: model.GraphOptions,
             print(f'{n+1:2}: {line}', file=sys.stderr)
         print_error(f'ERROR: {e}')
         sys.exit(1)
-    #print('Generated:', output_path, file=sys.stderr)
+    # print('Generated:', output_path, file=sys.stderr)
 
 
 def check_installed() -> None:
```
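`generate_image()` now passes the output file to Graphviz explicitly via `-o` while still piping the dot source on stdin. A minimal standalone sketch of that subprocess call, with a made-up dot source and output name:

```python
import subprocess

# Sketch of the call made by generate_image(): feed the dot source on stdin
# and let Graphviz write the file named by -o. "out.svg" and the dot text
# are illustrative examples only.
dot_src = 'digraph D { "a" -> "b" }'
subprocess.run(
    ['dot', '-Tsvg', '-oout.svg'],
    input=dot_src,
    encoding='utf-8',
    check=True,
)
```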
|