data-flow-diagram 1.12.1.post3__tar.gz → 1.13.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24)
  1. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/CHANGES.md +4 -0
  2. {data_flow_diagram-1.12.1.post3/src/data_flow_diagram.egg-info → data_flow_diagram-1.13.1}/PKG-INFO +1 -1
  3. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/src/data_flow_diagram/__init__.py +55 -55
  4. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/src/data_flow_diagram/dependency_checker.py +8 -8
  5. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/src/data_flow_diagram/dfd.py +61 -54
  6. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/src/data_flow_diagram/dfd_dot_templates.py +2 -2
  7. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/src/data_flow_diagram/dot.py +7 -7
  8. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/src/data_flow_diagram/error.py +1 -1
  9. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/src/data_flow_diagram/markdown.py +10 -10
  10. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/src/data_flow_diagram/model.py +27 -26
  11. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/src/data_flow_diagram/parser.py +65 -85
  12. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/src/data_flow_diagram/scanner.py +13 -18
  13. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1/src/data_flow_diagram.egg-info}/PKG-INFO +1 -1
  14. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/MANIFEST.in +0 -0
  15. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/README.md +0 -0
  16. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/pyproject.toml +0 -0
  17. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/setup.cfg +0 -0
  18. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/setup.py +0 -0
  19. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/src/data_flow_diagram/config.py +0 -0
  20. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/src/data_flow_diagram.egg-info/SOURCES.txt +0 -0
  21. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/src/data_flow_diagram.egg-info/dependency_links.txt +0 -0
  22. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/src/data_flow_diagram.egg-info/entry_points.txt +0 -0
  23. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/src/data_flow_diagram.egg-info/requires.txt +0 -0
  24. {data_flow_diagram-1.12.1.post3 → data_flow_diagram-1.13.1}/src/data_flow_diagram.egg-info/top_level.txt +0 -0

CHANGES.md
@@ -1,3 +1,7 @@
+ ## Version 1.13.1:
+
+ - Supports style rotated (and unrotated).
+
  ## Version 1.12.1.post3:

  - CHANGES.md is read by setup.py to deduce the version.
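
The diff below is mostly a quote-style and line-wrapping reformat; the one functional change is the rotated/unrotated style pair announced above. In the dfd.py and model.py hunks further down, these style names toggle a new GraphOptions.is_rotated flag, which Generator.generate_dot_text() emits as the Graphviz graph attribute rotate=90. The standalone Python sketch below mirrors that logic for illustration only (GraphOptions is simplified and the graph_params helper is hypothetical, not part of the package):

    from dataclasses import dataclass

    @dataclass
    class GraphOptions:
        is_vertical: bool = False
        is_context: bool = False
        is_rotated: bool = False  # new field in 1.13.1

    def graph_params(options: GraphOptions) -> list[str]:
        # Mirrors the branch added to Generator.generate_dot_text() in dfd.py
        params = ["rankdir=TB" if options.is_vertical else "rankdir=LR"]
        if options.is_rotated:
            params.append("rotate=90")  # added when the rotated style is active
        return params

    print(graph_params(GraphOptions(is_rotated=True)))  # ['rankdir=LR', 'rotate=90']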

PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: data-flow-diagram
- Version: 1.12.1.post3
+ Version: 1.13.1
  Summary: Commandline tool to generate data flow diagrams from text
  Home-page: https://github.com/pbauermeister/dfd
  Author: Pascal Bauermeister

src/data_flow_diagram/__init__.py
@@ -27,96 +27,96 @@ from .error import print_error
  try:
  VERSION = pkg_resources.require("data-flow-diagram")[0].version
  except pkg_resources.DistributionNotFound:
- VERSION = 'undefined'
+ VERSION = "undefined"


  def parse_args() -> argparse.Namespace:
- description, epilog = [each.strip() for each in __doc__.split('-----')[:2]]
+ description, epilog = [each.strip() for each in __doc__.split("-----")[:2]]

  parser = argparse.ArgumentParser(description=description, epilog=epilog)

  parser.add_argument(
- 'INPUT_FILE',
- action='store',
+ "INPUT_FILE",
+ action="store",
  default=None,
- nargs='?',
- help='UML sequence input file; ' 'if omitted, stdin is used',
+ nargs="?",
+ help="UML sequence input file; " "if omitted, stdin is used",
  )

  parser.add_argument(
- '--output-file',
- '-o',
+ "--output-file",
+ "-o",
  required=False,
- help='output file name; pass \'-\' to use stdout; '
- 'if omitted, use INPUT_FILE base name with \'.svg\' '
- 'extension, or stdout',
+ help="output file name; pass '-' to use stdout; "
+ "if omitted, use INPUT_FILE base name with '.svg' "
+ "extension, or stdout",
  )

  parser.add_argument(
- '--markdown',
- '-m',
- action='store_true',
- help='consider snippets between opening marker: '
- '```data-flow-diagram OUTFILE, and closing marker: ``` '
- 'allowing to generate all diagrams contained in an '
- 'INPUT_FILE that is a markdown file',
+ "--markdown",
+ "-m",
+ action="store_true",
+ help="consider snippets between opening marker: "
+ "```data-flow-diagram OUTFILE, and closing marker: ``` "
+ "allowing to generate all diagrams contained in an "
+ "INPUT_FILE that is a markdown file",
  )

  parser.add_argument(
- '--format',
- '-f',
+ "--format",
+ "-f",
  required=False,
- default='svg',
- help='output format: gif, jpg, tiff, bmp, pnm, eps, '
- 'pdf, svg (any supported by Graphviz); default is svg',
+ default="svg",
+ help="output format: gif, jpg, tiff, bmp, pnm, eps, "
+ "pdf, svg (any supported by Graphviz); default is svg",
  )

  parser.add_argument(
- '--percent-zoom',
- '-p',
+ "--percent-zoom",
+ "-p",
  required=False,
  default=100,
  type=int,
- help='magnification percentage; default is 100',
+ help="magnification percentage; default is 100",
  )

  parser.add_argument(
- '--background-color',
- '-b',
+ "--background-color",
+ "-b",
  required=False,
- default='white',
- help='background color name (including \'none\' for'
- ' transparent) in web color notation; see'
- ' https://developer.mozilla.org/en-US/docs/Web/CSS/color_value'
- ' for a list of valid names; default is white',
+ default="white",
+ help="background color name (including 'none' for"
+ " transparent) in web color notation; see"
+ " https://developer.mozilla.org/en-US/docs/Web/CSS/color_value"
+ " for a list of valid names; default is white",
  )

  parser.add_argument(
- '--no-graph-title',
- action='store_true',
+ "--no-graph-title",
+ action="store_true",
  default=False,
- help='suppress graph title',
+ help="suppress graph title",
  )

  parser.add_argument(
- '--no-check-dependencies',
- action='store_true',
+ "--no-check-dependencies",
+ action="store_true",
  default=False,
- help='suppress dependencies checking',
+ help="suppress dependencies checking",
  )

  parser.add_argument(
- '--debug',
- action='store_true',
+ "--debug",
+ action="store_true",
  default=False,
- help='emit debug messages',
+ help="emit debug messages",
  )

  parser.add_argument(
- '--version',
- '-V',
- action='store_true',
- help='print the version and exit',
+ "--version",
+ "-V",
+ action="store_true",
+ help="print the version and exit",
  )

  return parser.parse_args()
@@ -137,17 +137,17 @@ def handle_markdown_source(
  options,
  snippet_by_name=params.snippet_by_name,
  )
- print(f'{sys.argv[0]}: generated {params.file_name}', file=sys.stderr)
+ print(f"{sys.argv[0]}: generated {params.file_name}", file=sys.stderr)


  def handle_dfd_source(
  options: model.Options, provenance: str, input_fp: TextIO, output_path: str
  ) -> None:
  root = model.SourceLine("", provenance, None, None)
- if output_path == '-':
+ if output_path == "-":
  # output to stdout
  with tempfile.TemporaryDirectory() as d:
- path = os.path.join(d, 'file.svg')
+ path = os.path.join(d, "file.svg")
  dfd.build(root, input_fp.read(), path, options)
  with open(path) as f:
  print(f.read())
@@ -160,10 +160,10 @@ def run(args: argparse.Namespace) -> None:
  # adjust input
  if args.INPUT_FILE is None:
  input_fp = sys.stdin
- provenance = '<stdin>'
+ provenance = "<stdin>"
  else:
  input_fp = open(args.INPUT_FILE)
- provenance = f'<file:{args.INPUT_FILE}>'
+ provenance = f"<file:{args.INPUT_FILE}>"

  options = model.Options(
  args.format,
@@ -183,9 +183,9 @@ def run(args: argparse.Namespace) -> None:
  if args.output_file is None:
  if args.INPUT_FILE is not None:
  basename = os.path.splitext(args.INPUT_FILE)[0]
- output_path = basename + '.' + args.format
+ output_path = basename + "." + args.format
  else:
- output_path = '-'
+ output_path = "-"
  else:
  output_path = args.output_file

@@ -200,12 +200,12 @@ def main() -> None:

  args = parse_args()
  if args.version:
- print('data-flow-diagram', VERSION)
+ print("data-flow-diagram", VERSION)
  sys.exit(0)

  try:
  run(args)
  except model.DfdException as e:
- text = f'ERROR: {e}'
+ text = f"ERROR: {e}"
  print_error(text)
  sys.exit(1)

src/data_flow_diagram/dependency_checker.py
@@ -15,33 +15,33 @@ def check(
  prefix = model.mk_err_prefix_from(dep.source)

  # load source text
- if dep.to_graph.startswith('#'):
+ if dep.to_graph.startswith("#"):
  # from snippet
  name = dep.to_graph[1:]
  if name not in snippet_by_name:
  errors.append(f'{prefix}Referring to unknown snippet "{name}"')
  continue
  text = snippet_by_name[name].text
- what = 'snippet'
+ what = "snippet"
  else:
  # from file
  name = dep.to_graph
  try:
- with open(name, encoding='utf-8') as f:
+ with open(name, encoding="utf-8") as f:
  text = f.read()
  except FileNotFoundError as e:
  if name in snippet_by_name:
  errors.append(f'{prefix}{e}. Did you mean "#{name}" ?')
  else:
- errors.append(f'{prefix}{e}')
+ errors.append(f"{prefix}{e}")
  continue
- what = 'file'
+ what = "file"

  # if only graph is targetted, we're done
  if dep.to_item is None:
  if dep.to_type != model.NONE:
  errors.append(
- f'{prefix}A whole graph may only be referred to '
+ f"{prefix}A whole graph may only be referred to "
  f'by an item of type "{model.NONE}", and not '
  f'"{dep.to_type}"'
  )
@@ -69,8 +69,8 @@ check(
  )

  if errors:
- errors.insert(0, 'Dependency error(s) found:')
- raise model.DfdException('\n\n'.join(errors))
+ errors.insert(0, "Dependency error(s) found:")
+ raise model.DfdException("\n\n".join(errors))


  def find_item(name: str, statements: model.Statements) -> model.Item | None:

src/data_flow_diagram/dfd.py
@@ -41,13 +41,13 @@ def build(

  def wrap(text: str, cols: int) -> str:
  res: list[str] = []
- for each in text.strip().split('\\n'):
- res += textwrap.wrap(each, width=cols, break_long_words=False) or ['']
- return '\\n'.join(res)
+ for each in text.strip().split("\\n"):
+ res += textwrap.wrap(each, width=cols, break_long_words=False) or [""]
+ return "\\n".join(res)


  class Generator:
- RX_NUMBERED_NAME = re.compile(r'(\d+[.])(.*)')
+ RX_NUMBERED_NAME = re.compile(r"(\d+[.])(.*)")

  def __init__(
  self, graph_options: model.GraphOptions, attribs: model.Attribs
@@ -60,32 +60,32 @@ class Generator:
  self.attribs_rx = self._compile_attribs_names(attribs)

  def append(self, line: str, statement: model.Statement) -> None:
- self.lines.append('')
+ self.lines.append("")
  text = model.pack(statement.source.text)
- self.lines.append(f'/* {statement.source.line_nr}: {text} */')
+ self.lines.append(f"/* {statement.source.line_nr}: {text} */")
  self.lines.append(line)

  def generate_item(self, item: model.Item) -> None:
  copy = model.Item(**item.__dict__)
  hits = self.RX_NUMBERED_NAME.findall(copy.text)
  if hits:
- copy.text = '\\n'.join(hits[0])
+ copy.text = "\\n".join(hits[0])

  copy.text = wrap(copy.text, self.graph_options.item_text_width)
- attrs = copy.attrs or ''
+ attrs = copy.attrs or ""
  attrs = self._expand_attribs(attrs)

  match copy.type:
  case model.PROCESS:
  if self.graph_options.is_context:
- shape = 'circle'
- fc = 'white'
+ shape = "circle"
+ fc = "white"
  else:
- shape = 'ellipse'
+ shape = "ellipse"
  fc = '"#eeeeee"'
  line = (
  f'"{copy.name}" [shape={shape} label="{copy.text}" '
- f'fillcolor={fc} style=filled {attrs}]'
+ f"fillcolor={fc} style=filled {attrs}]"
  )
  case model.CONTROL:
  fc = '"#eeeeee"'
@@ -96,7 +96,7 @@ class Generator:
  case model.ENTITY:
  line = (
  f'"{copy.name}" [shape=rectangle label="{copy.text}" '
- f'{attrs}]'
+ f"{attrs}]"
  )
  case model.STORE:
  d = self._attrib_to_dict(copy, attrs)
@@ -112,22 +112,22 @@ class Generator:
  case _:
  prefix = model.mk_err_prefix_from(copy.source)
  raise model.DfdException(
- f'{prefix}Unsupported item type ' f'"{copy.type}"'
+ f"{prefix}Unsupported item type " f'"{copy.type}"'
  )
  self.append(line, item)

  def _attrib_to_dict(self, item: model.Item, attrs: str) -> dict[str, str]:
  d = self._item_to_html_dict(item)
- d.update({'fontcolor': 'black', 'color': 'black'})
+ d.update({"fontcolor": "black", "color": "black"})
  attrs_d = {
- k: v for k, v in [each.split('=', 1) for each in attrs.split()]
+ k: v for k, v in [each.split("=", 1) for each in attrs.split()]
  }
  d.update(attrs_d)
  return d

  def _item_to_html_dict(self, item: model.Item) -> dict[str, Any]:
  d = item.__dict__
- d['text'] = d['text'].replace('\\n', '<br/>')
+ d["text"] = d["text"].replace("\\n", "<br/>")
  return d

  def _compile_attribs_names(
@@ -135,8 +135,8 @@ class Generator:
  ) -> re.Pattern[str] | None:
  if not attribs:
  return None
- names = ['\\b' + re.escape(k) + '\\b' for k in attribs.keys()]
- pattern = '|'.join(names)
+ names = ["\\b" + re.escape(k) + "\\b" for k in attribs.keys()]
+ pattern = "|".join(names)
  return re.compile(pattern)

  def _expand_attribs(self, attrs: str) -> str:
@@ -144,10 +144,10 @@ class Generator:
  alias = m[0]
  if alias not in self.attribs:
  raise model.DfdException(
- f'Alias '
+ f"Alias "
  f'"{alias}" '
- f'not found in '
- f'{pprint.pformat(self.attribs)}'
+ f"not found in "
+ f"{pprint.pformat(self.attribs)}"
  )

  return self.attribs[alias].text
@@ -158,7 +158,7 @@ class Generator:

  def generate_star(self, text: str) -> str:
  text = wrap(text, self.graph_options.item_text_width)
- star_name = f'__star_{self.star_nr}__'
+ star_name = f"__star_{self.star_nr}__"
  line = f'"{star_name}" [shape=none label="{text}" {TMPL.DOT_FONT_EDGE}]'
  self.lines.append(line)
  self.star_nr += 1
@@ -170,57 +170,57 @@ class Generator:
  src_item: model.Item | None,
  dst_item: model.Item | None,
  ) -> None:
- text = conn.text or ''
+ text = conn.text or ""
  text = wrap(text, self.graph_options.connection_text_width)

  src_port = dst_port = ""

  if not src_item:
  src_name = self.generate_star(text)
- text = ''
+ text = ""
  else:
  src_name = src_item.name
  if src_item.type == model.CHANNEL:
- src_port = ':x:c'
+ src_port = ":x:c"

  if not dst_item:
  dst_name = self.generate_star(text)
- text = ''
+ text = ""
  else:
  dst_name = dst_item.name
  if dst_item.type == model.CHANNEL:
- dst_port = ':x:c'
+ dst_port = ":x:c"

  attrs = f'label="{text}"'

  if conn.attrs:
- attrs += ' ' + self._expand_attribs(conn.attrs)
+ attrs += " " + self._expand_attribs(conn.attrs)

  match conn.type:
  case model.FLOW:
  if conn.reversed:
- attrs += ' dir=back'
+ attrs += " dir=back"
  case model.BFLOW:
- attrs += ' dir=both'
+ attrs += " dir=both"
  case model.CFLOW:
  if conn.reversed:
- attrs += ' dir=back'
- attrs += ' arrowtail=normalnormal'
+ attrs += " dir=back"
+ attrs += " arrowtail=normalnormal"
  else:
- attrs += ' arrowhead=normalnormal'
+ attrs += " arrowhead=normalnormal"
  case model.UFLOW:
- attrs += ' dir=none'
+ attrs += " dir=none"
  case model.SIGNAL:
  if conn.reversed:
- attrs += ' dir=back'
- attrs += ' style=dashed'
+ attrs += " dir=back"
+ attrs += " style=dashed"
  case _:
  prefix = model.mk_err_prefix_from(conn.source)
  raise model.DfdException(
- f'{prefix}Unsupported connection type ' f'"{conn.type}"'
+ f"{prefix}Unsupported connection type " f'"{conn.type}"'
  )
  if conn.relaxed:
- attrs += ' constraint=false'
+ attrs += " constraint=false"

  line = f'"{src_name}"{src_port} -> "{dst_name}"{dst_port} [{attrs}]'
  self.append(line, conn)
@@ -229,17 +229,17 @@ class Generator:
  pass

  def generate_frame(self, frame: model.Frame) -> None:
- self.append(f'subgraph cluster_{self.frame_nr} {{', frame)
+ self.append(f"subgraph cluster_{self.frame_nr} {{", frame)
  self.frame_nr += 1

  self.lines.append(f' label="{frame.text}"')
  if frame.attrs:
  attrs = self._expand_attribs(frame.attrs)
- self.lines.append(f' {attrs}')
+ self.lines.append(f" {attrs}")

  for item in frame.items:
  self.lines.append(f' "{item}"')
- self.lines.append('}')
+ self.lines.append("}")

  def generate_dot_text(self, title: str) -> str:
  graph_params = []
@@ -250,16 +250,19 @@
  graph_params.append(TMPL.DOT_GRAPH_TITLE.format(title=title))

  if self.graph_options.is_vertical:
- graph_params.append('rankdir=TB')
+ graph_params.append("rankdir=TB")
  else:
- graph_params.append('rankdir=LR')
+ graph_params.append("rankdir=LR")

- block = '\n'.join(self.lines).replace('\n', '\n ')
+ if self.graph_options.is_rotated:
+ graph_params.append(f"rotate=90")
+
+ block = "\n".join(self.lines).replace("\n", "\n ")
  text = TMPL.DOT.format(
  title=title,
  block=block,
- graph_params='\n '.join(graph_params),
- ).replace('\n \n', '\n\n')
+ graph_params="\n ".join(graph_params),
+ ).replace("\n \n", "\n\n")
  # print(text)
  return text

@@ -273,7 +276,7 @@ def generate_dot(
  """Iterate over statements and generate a dot source file"""

  def get_item(name: str) -> Optional[model.Item]:
- return None if name == '*' else items_by_name[name]
+ return None if name == "*" else items_by_name[name]

  for statement in statements:
  match statement:
@@ -328,18 +331,22 @@ def handle_options(
  match statement:
  case model.Style() as style:
  match style.style:
- case 'vertical':
+ case "vertical":
  options.is_vertical = True
- case 'context':
+ case "context":
  options.is_context = True
- case 'horizontal':
+ case "horizontal":
  options.is_vertical = False
- case 'item-text-width':
+ case "rotated":
+ options.is_rotated = True
+ case "unrotated":
+ options.is_rotated = False
+ case "item-text-width":
  try:
  options.item_text_width = int(style.value)
  except ValueError as e:
  raise model.DfdException(f'{prefix}{e}"')
- case 'connection-text-width':
+ case "connection-text-width":
  try:
  options.connection_text_width = int(style.value)
  except ValueError as e:
@@ -347,7 +354,7 @@

  case _:
  raise model.DfdException(
- f'{prefix}Unsupported style ' f'"{style.style}"'
+ f"{prefix}Unsupported style " f'"{style.style}"'
  )

  continue

src/data_flow_diagram/dfd_dot_templates.py
@@ -54,7 +54,7 @@ digraph D {{
  )


- GRAPH_PARAMS_CONTEXT_DIAGRAM = 'edge [len=2.25]'
+ GRAPH_PARAMS_CONTEXT_DIAGRAM = "edge [len=2.25]"


- ITEM_EXTERNAL_ATTRS = 'fillcolor=white color=grey fontcolor=grey'
+ ITEM_EXTERNAL_ATTRS = "fillcolor=white color=grey fontcolor=grey"

src/data_flow_diagram/dot.py
@@ -11,23 +11,23 @@ def generate_image(
  graph_options: model.GraphOptions, text: str, output_path: str, format: str
  ) -> None:
  if graph_options.is_context:
- engine = 'neato' # circo is not as good
+ engine = "neato" # circo is not as good
  else:
- engine = 'dot'
+ engine = "dot"

- cmd = [engine, f'-T{format}', f'-o{output_path}']
+ cmd = [engine, f"-T{format}", f"-o{output_path}"]
  try:
- subprocess.run(cmd, input=text, encoding='utf-8', check=True)
+ subprocess.run(cmd, input=text, encoding="utf-8", check=True)
  except subprocess.CalledProcessError as e:
  for n, line in enumerate(text.splitlines()):
- print(f'{n+1:2}: {line}', file=sys.stderr)
- print_error(f'ERROR: {e}')
+ print(f"{n+1:2}: {line}", file=sys.stderr)
+ print_error(f"ERROR: {e}")
  sys.exit(1)
  # print('Generated:', output_path, file=sys.stderr)


  def check_installed() -> None:
- cmd = ['dot', '-V']
+ cmd = ["dot", "-V"]
  try:
  subprocess.run(cmd, stderr=subprocess.DEVNULL)
  except FileNotFoundError as e:

src/data_flow_diagram/error.py
@@ -3,5 +3,5 @@ import sys

  def print_error(text: str) -> None:
  if sys.stderr.isatty():
- text = f'\033[31m{text}\033[0m'
+ text = f"\033[31m{text}\033[0m"
  print(text, file=sys.stderr)

src/data_flow_diagram/markdown.py
@@ -21,18 +21,18 @@ SnippetContexts = list[SnippetContext]

  def extract_snippets(text: str) -> model.Snippets:
  rx = re.compile(
- r'^```(?P<head>\s*)'
- r'data-flow-diagram\s+'
- r'(?P<output>.*?)\s*'
- r'^(?P<src>.*?)^\s*```',
+ r"^```(?P<head>\s*)"
+ r"data-flow-diagram\s+"
+ r"(?P<output>.*?)\s*"
+ r"^(?P<src>.*?)^\s*```",
  re.DOTALL | re.M,
  )

  return [
  model.Snippet(
- text=match['head'] + match['src'],
- name=os.path.splitext(match['output'])[0],
- output=match['output'],
+ text=match["head"] + match["src"],
+ name=os.path.splitext(match["output"])[0],
+ output=match["output"],
  line_nr=len(text[: match.start()].splitlines()),
  )
  for match in rx.finditer(text)
@@ -49,12 +49,12 @@ def make_snippets_params(

  for snippet in snippets:
  # snippet w/o output, maybe just as includee
- if snippet.output.startswith('#'):
+ if snippet.output.startswith("#"):
  continue

  # snippet with output
  input_fp = io.StringIO(snippet.text)
- snippet_provenance = f'{provenance}<snippet:{snippet.output}>'
+ snippet_provenance = f"{provenance}<snippet:{snippet.output}>"
  root = model.SourceLine(
  "", snippet_provenance, None, snippet.line_nr, is_container=True
  )
@@ -72,5 +72,5 @@ def check_snippets_unicity(provenance: str, snippets: model.Snippets) -> None:
  root = model.SourceLine("", provenance, None, None)
  error_prefix = model.mk_err_prefix_from(root)
  raise model.DfdException(
- f'{error_prefix}Snippets defined multiple ' f'times: {multiples}'
+ f"{error_prefix}Snippets defined multiple " f"times: {multiples}"
  )

src/data_flow_diagram/model.py
@@ -12,8 +12,8 @@ from . import config

  def repr(o: Any) -> str:
  name: str = o.__class__.__name__
- val: str = json.dumps(dataclasses.asdict(o), indent=' ')
- return f'{name} {val}'
+ val: str = json.dumps(dataclasses.asdict(o), indent=" ")
+ return f"{name} {val}"


  @dataclass
@@ -21,8 +21,8 @@ class Base:
  def __repr__(self) -> str:
  return (
  self.__class__.__name__
- + ' '
- + json.dumps(dataclasses.asdict(self), indent=' ')
+ + " "
+ + json.dumps(dataclasses.asdict(self), indent=" ")
  )


@@ -89,61 +89,62 @@ class Attrib(Statement):
  Attribs = dict[str, Attrib]


- STYLE = 'style'
+ STYLE = "style"

- PROCESS = 'process'
- CONTROL = 'control'
- ENTITY = 'entity'
- STORE = 'store'
- CHANNEL = 'channel'
- NONE = 'none'
+ PROCESS = "process"
+ CONTROL = "control"
+ ENTITY = "entity"
+ STORE = "store"
+ CHANNEL = "channel"
+ NONE = "none"

- FLOW = 'flow'
- BFLOW = 'bflow'
- CFLOW = 'cflow'
- UFLOW = 'uflow'
- SIGNAL = 'signal'
+ FLOW = "flow"
+ BFLOW = "bflow"
+ CFLOW = "cflow"
+ UFLOW = "uflow"
+ SIGNAL = "signal"

- FRAME = 'frame'
+ FRAME = "frame"

- ATTRIB = 'attrib'
+ ATTRIB = "attrib"


  @dataclass
  class GraphOptions:
  is_vertical: bool = False
  is_context: bool = False
+ is_rotated: bool = False
  item_text_width = config.DEFAULT_ITEM_TEXT_WIDTH
  connection_text_width = config.DEFAULT_CONNECTION_TEXT_WIDTH


  def pack(src_line: str | None) -> str:
  if src_line is None:
- return '<none>'
- return ' '.join(src_line.split())
+ return "<none>"
+ return " ".join(src_line.split())


  def mk_err_prefix_from(src: SourceLine) -> str:

  def _add_to_stack(stack: list[str], src: SourceLine) -> None:
  if src.line_nr is None:
- stack += [f' {pack(src.raw_text)}']
+ stack += [f" {pack(src.raw_text)}"]
  else:
  if src.parent and src.parent.is_container:
  nr = src.parent.line_nr + 1
  delta = src.line_nr + 1
  final = nr + delta
- stack += [f' line {final}: {pack(src.raw_text)}']
+ stack += [f" line {final}: {pack(src.raw_text)}"]
  else:
  nr = src.line_nr + 1
- stack += [f' line {nr}: {pack(src.raw_text)}']
+ stack += [f" line {nr}: {pack(src.raw_text)}"]
  if src.parent:
  _add_to_stack(stack, src.parent)

- stack: list[str] = ['(most recent first)']
+ stack: list[str] = ["(most recent first)"]
  _add_to_stack(stack, src)
- stack += ['']
- return '\n'.join(stack) + 'Error: '
+ stack += [""]
+ return "\n".join(stack) + "Error: "


  class DfdException(Exception):

src/data_flow_diagram/parser.py
@@ -31,7 +31,7 @@ def check(statements: model.Statements) -> dict[str, model.Item]:
  other_text = model.pack(other.source.text)
  raise model.DfdException(
  f'{error_prefix}Name "{name}" already exists '
- f'at line {other.source.line_nr+1}: {other_text}'
+ f"at line {other.source.line_nr+1}: {other_text}"
  )

  # check references and values of connections
@@ -50,7 +50,7 @@ def check(statements: model.Statements) -> dict[str, model.Item]:
  if point not in items_by_name:
  raise model.DfdException(
  f'{error_prefix}Connection "{conn.type}" links to "{point}", '
- f'which is not defined'
+ f"which is not defined"
  )
  if (
  items_by_name[point].type == model.CONTROL
@@ -64,8 +64,7 @@ def check(statements: model.Statements) -> dict[str, model.Item]:

  if nb_stars == 2:
  raise model.DfdException(
- f'{error_prefix}Connection "{conn.type}" may not link to two '
- f'stars'
+ f'{error_prefix}Connection "{conn.type}" may not link to two ' f"stars"
  )

  # check references of frames
@@ -78,16 +77,15 @@ def check(statements: model.Statements) -> dict[str, model.Item]:
  case _:
  continue
  if not frame.items:
- raise model.DfdException(f'{error_prefix}Frame is empty')
+ raise model.DfdException(f"{error_prefix}Frame is empty")
  for name in frame.items:
  if name not in items_by_name:
  raise model.DfdException(
- f'{error_prefix}Frame includes "{name}", '
- f'which is not defined'
+ f'{error_prefix}Frame includes "{name}", ' f"which is not defined"
  )
  if name in framed_items:
  raise model.DfdException(
- f'{error_prefix}Item "{name}", ' f'is in multiple frames'
+ f'{error_prefix}Item "{name}", ' f"is in multiple frames"
  )
  framed_items.add(name)

@@ -108,7 +106,7 @@ def parse(
  src_line = source.text

  src_line = src_line.strip()
- if not src_line or src_line.startswith('#'):
+ if not src_line or src_line.startswith("#"):
  continue
  error_prefix = model.mk_err_prefix_from(source)

@@ -131,25 +129,23 @@ def parse(
  model.BFLOW: parse_bflow,
  model.UFLOW: parse_uflow,
  model.SIGNAL: parse_signal,
- model.FLOW + '?': parse_flow_q,
- model.CFLOW + '?': parse_cflow_q,
- model.BFLOW + '?': parse_bflow_q,
- model.UFLOW + '?': parse_uflow_q,
- model.SIGNAL + '?': parse_signal_q,
- 'flow.r': parse_flow_r,
- 'cflow.r': parse_cflow_r,
- 'signal.r': parse_signal_r,
- 'flow.r?': parse_flow_r_q,
- 'cflow.r?': parse_cflow_r_q,
- 'signal.r?': parse_signal_r_q,
+ model.FLOW + "?": parse_flow_q,
+ model.CFLOW + "?": parse_cflow_q,
+ model.BFLOW + "?": parse_bflow_q,
+ model.UFLOW + "?": parse_uflow_q,
+ model.SIGNAL + "?": parse_signal_q,
+ "flow.r": parse_flow_r,
+ "cflow.r": parse_cflow_r,
+ "signal.r": parse_signal_r,
+ "flow.r?": parse_flow_r_q,
+ "cflow.r?": parse_cflow_r_q,
+ "signal.r?": parse_signal_r_q,
  model.FRAME: parse_frame,
  model.ATTRIB: parse_attrib,
  }.get(word)

  if f is None:
- raise model.DfdException(
- f'{error_prefix}Unrecognized keyword ' f'"{word}"'
- )
+ raise model.DfdException(f"{error_prefix}Unrecognized keyword " f'"{word}"')

  try:
  statement = f(source)
@@ -177,27 +173,25 @@ def parse(
  return statements, dependencies, attribs


- def split_args(
- dfd_line: str, n: int, last_is_optional: bool = False
- ) -> list[str]:
+ def split_args(dfd_line: str, n: int, last_is_optional: bool = False) -> list[str]:
  """Split DFD line into n (possibly n-1) tokens"""

  terms: list[str] = dfd_line.split(maxsplit=n)
  if len(terms) - 1 == n - 1 and last_is_optional:
- terms.append('')
+ terms.append("")

  if len(terms) - 1 != n:
  if not last_is_optional:
- raise model.DfdException(f'Expected {n} argument(s)')
+ raise model.DfdException(f"Expected {n} argument(s)")
  else:
- raise model.DfdException(f'Expected {n-1} or {n} argument')
+ raise model.DfdException(f"Expected {n-1} or {n} argument")

  return terms[1:]


  def parse_item_name(name: str) -> Tuple[str, bool]:
  """If name ends with ?, make it hidable"""
- if name.endswith('?'):
+ if name.endswith("?"):
  return name[:-1], True
  else:
  return name, False
@@ -308,57 +302,43 @@ def parse_signal_r(source: model.SourceLine) -> model.Statement:
  def parse_flow_q(source: model.SourceLine) -> model.Statement:
  """Parse directional flow statement"""
  src, dst, text = split_args(source.text, 3, True)
- return model.Connection(
- source, model.FLOW, text, "", src, dst, relaxed=True
- )
+ return model.Connection(source, model.FLOW, text, "", src, dst, relaxed=True)


  def parse_flow_r_q(source: model.SourceLine) -> model.Statement:
  """Parse directional reversed flow statement"""
  src, dst, text = split_args(source.text, 3, True)
- return model.Connection(
- source, model.FLOW, text, "", src, dst, True, relaxed=True
- )
+ return model.Connection(source, model.FLOW, text, "", src, dst, True, relaxed=True)


  def parse_cflow_q(source: model.SourceLine) -> model.Statement:
  """Parse continuous flow statement"""
  src, dst, text = split_args(source.text, 3, True)
- return model.Connection(
- source, model.CFLOW, text, "", src, dst, relaxed=True
- )
+ return model.Connection(source, model.CFLOW, text, "", src, dst, relaxed=True)


  def parse_cflow_r_q(source: model.SourceLine) -> model.Statement:
  """Parse continuous flow statement"""
  src, dst, text = split_args(source.text, 3, True)
- return model.Connection(
- source, model.CFLOW, text, "", src, dst, True, relaxed=True
- )
+ return model.Connection(source, model.CFLOW, text, "", src, dst, True, relaxed=True)


  def parse_bflow_q(source: model.SourceLine) -> model.Statement:
  """Parse bidirectional flow statement"""
  src, dst, text = split_args(source.text, 3, True)
- return model.Connection(
- source, model.BFLOW, text, "", src, dst, relaxed=True
- )
+ return model.Connection(source, model.BFLOW, text, "", src, dst, relaxed=True)


  def parse_uflow_q(source: model.SourceLine) -> model.Statement:
  """Parse undirected flow flow statement"""
  src, dst, text = split_args(source.text, 3, True)
- return model.Connection(
- source, model.UFLOW, text, "", src, dst, relaxed=True
- )
+ return model.Connection(source, model.UFLOW, text, "", src, dst, relaxed=True)


  def parse_signal_q(source: model.SourceLine) -> model.Statement:
  """Parse signal statement"""
  src, dst, text = split_args(source.text, 3, True)
- return model.Connection(
- source, model.SIGNAL, text, "", src, dst, relaxed=True
- )
+ return model.Connection(source, model.SIGNAL, text, "", src, dst, relaxed=True)


  def parse_signal_r_q(source: model.SourceLine) -> model.Statement:
@@ -381,45 +361,45 @@ def apply_syntactic_sugars(src_line: str) -> str:
  del array[2] # remove arrow
  if swap:
  array[1], array[2] = array[2], array[1]
- return '\t'.join(array)
+ return "\t".join(array)

- new_line = ''
- if re.fullmatch(r'-+>[?]?', op):
- q = '?' if op.endswith('?') else ''
+ new_line = ""
+ if re.fullmatch(r"-+>[?]?", op):
+ q = "?" if op.endswith("?") else ""
  parts = src_line.split(maxsplit=3)
- new_line = fmt('flow' + q, parts)
- elif re.fullmatch(r'<-+[?]?', op):
- q = '?' if op.endswith('?') else ''
+ new_line = fmt("flow" + q, parts)
+ elif re.fullmatch(r"<-+[?]?", op):
+ q = "?" if op.endswith("?") else ""
  parts = src_line.split(maxsplit=3)
- new_line = fmt('flow.r' + q, parts) # , swap=True)
+ new_line = fmt("flow.r" + q, parts) # , swap=True)

- if re.fullmatch(r'-+>>[?]?', op):
- q = '?' if op.endswith('?') else ''
+ if re.fullmatch(r"-+>>[?]?", op):
+ q = "?" if op.endswith("?") else ""
  parts = src_line.split(maxsplit=3)
- new_line = fmt('cflow' + q, parts)
- elif re.fullmatch(r'<<-+[?]?', op):
- q = '?' if op.endswith('?') else ''
+ new_line = fmt("cflow" + q, parts)
+ elif re.fullmatch(r"<<-+[?]?", op):
+ q = "?" if op.endswith("?") else ""
  parts = src_line.split(maxsplit=3)
- new_line = fmt('cflow.r' + q, parts) # , swap=True)
+ new_line = fmt("cflow.r" + q, parts) # , swap=True)

- elif re.fullmatch(r'<-+>[?]?', op):
- q = '?' if op.endswith('?') else ''
+ elif re.fullmatch(r"<-+>[?]?", op):
+ q = "?" if op.endswith("?") else ""
  parts = src_line.split(maxsplit=3)
- new_line = fmt('bflow' + q, parts)
+ new_line = fmt("bflow" + q, parts)

- elif re.fullmatch(r'--+[?]?', op):
- q = '?' if op.endswith('?') else ''
+ elif re.fullmatch(r"--+[?]?", op):
+ q = "?" if op.endswith("?") else ""
  parts = src_line.split(maxsplit=3)
- new_line = fmt('uflow' + q, parts)
+ new_line = fmt("uflow" + q, parts)

- elif re.fullmatch(r':+>[?]?', op):
- q = '?' if op.endswith('?') else ''
+ elif re.fullmatch(r":+>[?]?", op):
+ q = "?" if op.endswith("?") else ""
  parts = src_line.split(maxsplit=3)
- new_line = fmt('signal' + q, parts)
- elif re.fullmatch(r'<:+[?]?', op):
- q = '?' if op.endswith('?') else ''
+ new_line = fmt("signal" + q, parts)
+ elif re.fullmatch(r"<:+[?]?", op):
+ q = "?" if op.endswith("?") else ""
  parts = src_line.split(maxsplit=3)
- new_line = fmt('signal.r' + q, parts) # , swap=True)
+ new_line = fmt("signal.r" + q, parts) # , swap=True)

  if new_line:
  return new_line
@@ -428,8 +408,8 @@ def apply_syntactic_sugars(src_line: str) -> str:


  def parse_drawable_attrs(drawable: model.Drawable) -> None:
- if drawable.text and drawable.text.startswith('['):
- parts = drawable.text[1:].split(']', 1)
+ if drawable.text and drawable.text.startswith("["):
+ parts = drawable.text[1:].split("]", 1)
  drawable.attrs = parts[0]
  drawable.text = parts[1].strip()

@@ -441,7 +421,7 @@ def parse_drawable_attrs(drawable: model.Drawable) -> None:
  def parse_item_external(
  item: model.Item, dependencies: model.GraphDependencies
  ) -> None:
- parts = item.name.split(':', 1)
+ parts = item.name.split(":", 1)
  if len(parts) > 1:
  item.attrs = TMPL.ITEM_EXTERNAL_ATTRS
  if parts[-1]:
@@ -449,7 +429,7 @@ def parse_item_external(
  else:
  item.name = parts[-2]

- if item.name.startswith('#'):
+ if item.name.startswith("#"):
  item.name = item.name[1:]
  else:
  item.name = os.path.splitext(item.name)[0]
@@ -465,13 +445,13 @@ def parse_item_external(

  def parse_frame(source: model.SourceLine) -> model.Statement:
  """Parse frame statement"""
- parts = source.text.split('=', maxsplit=1)
+ parts = source.text.split("=", maxsplit=1)
  if len(parts) == 1:
- text = ''
+ text = ""
  else:
  text = parts[1].strip()

  items = parts[0].split()[1:]
- type = '' # so far there is only one type of frame
- attrs = 'style=dashed'
+ type = "" # so far there is only one type of frame
+ attrs = "style=dashed"
  return model.Frame(source, type, text, attrs, items)

src/data_flow_diagram/scanner.py
@@ -8,7 +8,7 @@ from . import model
  # def
  # into:
  # abcdef
- RX_LINE_CONT = re.compile('[\\\\]\\s*\n\\s*', re.MULTILINE)
+ RX_LINE_CONT = re.compile("[\\\\]\\s*\n\\s*", re.MULTILINE)


  def scan(
@@ -21,21 +21,21 @@ def scan(
  includes: set[str] = set()

  # stitch continuated lines
- input = RX_LINE_CONT.sub('', input)
+ input = RX_LINE_CONT.sub("", input)

  if provenance is None:
  provenance = model.SourceLine("", provenance, None, 0)
  _scan(input, provenance, output, snippet_by_name, includes)

  if debug:
- print('=' * 40)
+ print("=" * 40)
  print(provenance)
- print('----------')
+ print("----------")
  print(input)
- print('----------')
+ print("----------")
  for l in output:
  print(model.repr(l))
- print('=' * 40)
+ print("=" * 40)

  return output

@@ -52,7 +52,7 @@ def _scan(
  continue
  source_line = model.SourceLine(line, line, parent, nr)
  pair = line.split(maxsplit=1)
- if len(pair) == 2 and pair[0] == '#include':
+ if len(pair) == 2 and pair[0] == "#include":
  include(line, source_line, output, snippet_by_name, includes)
  else:
  output.append(source_line)
@@ -73,30 +73,25 @@ def include(
  raise model.DfdException(f'{prefix}Recursive include of "{name}"')
  includes.add(name)

- caller = model.SourceLine("", f'<snippet {name}>', parent, 0)
- if name.startswith('#'):
+ caller = model.SourceLine("", f"<snippet {name}>", parent, 0)
+ if name.startswith("#"):
  # include from MD snippet
  if not snippet_by_name:
  raise model.DfdException(
- f'{prefix}source is not markdown, '
- f'cannot include snippet "{name}".'
+ f"{prefix}source is not markdown, " f'cannot include snippet "{name}".'
  )
  name0 = name
  name = name[1:]
  snippet = snippet_by_name.get(name) or snippet_by_name.get(name0)
  if not snippet:
- raise model.DfdException(
- f'{prefix}included snippet "{name}" not found.'
- )
+ raise model.DfdException(f'{prefix}included snippet "{name}" not found.')

  _scan(snippet.text, caller, output, snippet_by_name, includes)

  else:
  # include from file
  if not os.path.exists(name):
- raise model.DfdException(
- f'{prefix}included file "{name}" not found.'
- )
- with open(name, encoding='utf-8') as f:
+ raise model.DfdException(f'{prefix}included file "{name}" not found.')
+ with open(name, encoding="utf-8") as f:
  text = f.read()
  _scan(text, caller, output, snippet_by_name, includes)

src/data_flow_diagram.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: data-flow-diagram
- Version: 1.12.1.post3
+ Version: 1.13.1
  Summary: Commandline tool to generate data flow diagrams from text
  Home-page: https://github.com/pbauermeister/dfd
  Author: Pascal Bauermeister