julee-0.1.3-py3-none-any.whl → julee-0.1.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- julee/api/tests/routers/test_documents.py +6 -6
- julee/docs/sphinx_hcd/__init__.py +4 -10
- julee/docs/sphinx_hcd/accelerators.py +277 -180
- julee/docs/sphinx_hcd/apps.py +78 -59
- julee/docs/sphinx_hcd/config.py +16 -16
- julee/docs/sphinx_hcd/epics.py +47 -42
- julee/docs/sphinx_hcd/integrations.py +53 -49
- julee/docs/sphinx_hcd/journeys.py +124 -110
- julee/docs/sphinx_hcd/personas.py +75 -53
- julee/docs/sphinx_hcd/stories.py +99 -71
- julee/docs/sphinx_hcd/utils.py +23 -18
- julee/domain/models/document/document.py +12 -21
- julee/domain/models/document/tests/test_document.py +14 -34
- julee/domain/use_cases/extract_assemble_data.py +1 -1
- julee/domain/use_cases/initialize_system_data.py +75 -21
- julee/fixtures/documents.yaml +4 -43
- julee/fixtures/knowledge_service_queries.yaml +9 -0
- julee/maintenance/release.py +85 -30
- julee/repositories/memory/document.py +19 -13
- julee/repositories/memory/tests/test_document.py +18 -18
- julee/repositories/minio/document.py +25 -22
- julee/repositories/minio/tests/test_document.py +16 -16
- {julee-0.1.3.dist-info → julee-0.1.4.dist-info}/METADATA +2 -3
- {julee-0.1.3.dist-info → julee-0.1.4.dist-info}/RECORD +27 -28
- julee/fixtures/assembly_specifications.yaml +0 -70
- {julee-0.1.3.dist-info → julee-0.1.4.dist-info}/WHEEL +0 -0
- {julee-0.1.3.dist-info → julee-0.1.4.dist-info}/licenses/LICENSE +0 -0
- {julee-0.1.3.dist-info → julee-0.1.4.dist-info}/top_level.txt +0 -0
julee/docs/sphinx_hcd/epics.py
CHANGED

@@ -8,8 +8,8 @@ Provides directives:
 """

 from docutils import nodes
-from sphinx.util.docutils import SphinxDirective
 from sphinx.util import logging
+from sphinx.util.docutils import SphinxDirective

 from .config import get_config
 from .utils import normalize_name, path_to_root
@@ -19,14 +19,14 @@ logger = logging.getLogger(__name__)

 def get_epic_registry(env):
     """Get or create the epic registry on the environment."""
-    if not hasattr(env,
+    if not hasattr(env, "epic_registry"):
         env.epic_registry = {}
     return env.epic_registry


 def get_current_epic(env):
     """Get or create the current epic tracker on the environment."""
-    if not hasattr(env,
+    if not hasattr(env, "epic_current"):
         env.epic_current = {}
     return env.epic_current

@@ -51,17 +51,17 @@ class DefineEpicDirective(SphinxDirective):
         docname = self.env.docname

         # Description is the directive content
-        description =
+        description = "\n".join(self.content).strip()

         # Register the epic in environment
         epic_registry = get_epic_registry(self.env)
         current_epic = get_current_epic(self.env)

         epic_data = {
-
-
-
-
+            "slug": epic_slug,
+            "description": description,
+            "stories": [],  # Will be populated by epic-story
+            "docname": docname,
         }
         epic_registry[epic_slug] = epic_data
         current_epic[docname] = epic_slug
@@ -76,8 +76,8 @@ class DefineEpicDirective(SphinxDirective):

         # Add a placeholder for stories (will be filled in doctree-resolved)
         stories_placeholder = nodes.container()
-        stories_placeholder[
-        stories_placeholder[
+        stories_placeholder["classes"].append("epic-stories-placeholder")
+        stories_placeholder["epic_slug"] = epic_slug
         result_nodes.append(stories_placeholder)

         return result_nodes
@@ -104,7 +104,7 @@ class EpicStoryDirective(SphinxDirective):

         epic_slug = current_epic.get(docname)
         if epic_slug and epic_slug in epic_registry:
-            epic_registry[epic_slug][
+            epic_registry[epic_slug]["stories"].append(story_title)

         # Return empty - rendering happens in doctree-resolved
         return []
@@ -126,6 +126,7 @@ class EpicIndexDirective(SphinxDirective):

 class EpicIndexPlaceholder(nodes.General, nodes.Element):
     """Placeholder node for epic index, replaced at doctree-resolved."""
+
     pass


@@ -143,12 +144,13 @@ class EpicsForPersonaDirective(SphinxDirective):
     def run(self):
         # Return placeholder - actual rendering in doctree-resolved
         node = EpicsForPersonaPlaceholder()
-        node[
+        node["persona"] = self.arguments[0]
         return [node]


 class EpicsForPersonaPlaceholder(nodes.General, nodes.Element):
     """Placeholder node for epics-for-persona, replaced at doctree-resolved."""
+
     pass


@@ -161,8 +163,7 @@ def clear_epic_state(app, env, docname):
         del current_epic[docname]

     # Remove epics defined in this document
-    to_remove = [slug for slug, e in epic_registry.items()
-                 if e['docname'] == docname]
+    to_remove = [slug for slug, e in epic_registry.items() if e["docname"] == docname]
     for slug in to_remove:
         del epic_registry[slug]

@@ -173,11 +174,11 @@ def validate_epics(app, env):

     epic_registry = get_epic_registry(env)
     _story_registry = stories.get_story_registry()
-    story_titles = {normalize_name(s[
+    story_titles = {normalize_name(s["feature"]) for s in _story_registry}

     for slug, epic in epic_registry.items():
         # Validate story references
-        for story_title in epic[
+        for story_title in epic["stories"]:
             if normalize_name(story_title) not in story_titles:
                 logger.warning(
                     f"Epic '{slug}' references unknown story: '{story_title}'"
@@ -187,16 +188,18 @@
 def get_personas_for_epic(epic: dict, story_registry: list) -> set[str]:
     """Get the set of personas for an epic based on its stories."""
     personas = set()
-    for story_title in epic[
+    for story_title in epic["stories"]:
         story_normalized = normalize_name(story_title)
         for story in story_registry:
-            if normalize_name(story[
-                personas.add(story[
+            if normalize_name(story["feature"]) == story_normalized:
+                personas.add(story["persona"])
                 break
     return personas


-def render_epic_stories(
+def render_epic_stories(
+    epic: dict, docname: str, story_registry: list, known_personas: set
+):
     """Render epic stories as a simple bullet list."""
     from . import stories

@@ -204,10 +207,10 @@ def render_epic_stories(epic: dict, docname: str, story_registry: list, known_pe
     _known_apps = stories.get_known_apps()

     stories_data = []
-    for story_title in epic[
+    for story_title in epic["stories"]:
         story_normalized = normalize_name(story_title)
         for story in story_registry:
-            if normalize_name(story[
+            if normalize_name(story["feature"]) == story_normalized:
                 stories_data.append(story)
                 break

@@ -227,7 +230,7 @@ def render_epic_stories(epic: dict, docname: str, story_registry: list, known_pe
     # Simple bullet list: "story name (App Name)"
     story_list = nodes.bullet_list()

-    for story in sorted(stories_data, key=lambda s: s[
+    for story in sorted(stories_data, key=lambda s: s["feature"].lower()):
         story_item = nodes.list_item()
         story_para = nodes.paragraph()

@@ -237,14 +240,14 @@ def render_epic_stories(epic: dict, docname: str, story_registry: list, known_pe
         # App in parentheses
         story_para += nodes.Text(" (")
         app_path = f"{prefix}{config.get_doc_path('applications')}/{story['app']}.html"
-        app_valid = story[
+        app_valid = story["app_normalized"] in _known_apps

         if app_valid:
             app_ref = nodes.reference("", "", refuri=app_path)
-            app_ref += nodes.Text(story[
+            app_ref += nodes.Text(story["app"].replace("-", " ").title())
             story_para += app_ref
         else:
-            story_para += nodes.Text(story[
+            story_para += nodes.Text(story["app"].replace("-", " ").title())

         story_para += nodes.Text(")")

@@ -260,7 +263,7 @@ def process_epic_placeholders(app, doctree, docname):
     """Replace epic placeholders with rendered content."""
     from . import stories

-
+    get_config()
     env = app.env
     epic_registry = get_epic_registry(env)
     current_epic = get_current_epic(env)
@@ -273,7 +276,7 @@ def process_epic_placeholders(app, doctree, docname):
         epic = epic_registry[epic_slug]

         for node in doctree.traverse(nodes.container):
-            if
+            if "epic-stories-placeholder" in node.get("classes", []):
                 stories_nodes = render_epic_stories(
                     epic, docname, _story_registry, _known_personas
                 )
@@ -290,10 +293,8 @@ def process_epic_placeholders(app, doctree, docname):

     # Process epics-for-persona placeholder
     for node in doctree.traverse(EpicsForPersonaPlaceholder):
-        persona = node[
-        epics_node = build_epics_for_persona(
-            env, docname, persona, _story_registry
-        )
+        persona = node["persona"]
+        epics_node = build_epics_for_persona(env, docname, persona, _story_registry)
         node.replace_self(epics_node)


@@ -316,7 +317,7 @@ def build_epic_index(env, docname: str, story_registry: list):
     # Collect all stories assigned to epics
     assigned_stories = set()
     for epic in epic_registry.values():
-        for story_title in epic[
+        for story_title in epic["stories"]:
             assigned_stories.add(normalize_name(story_title))

     for slug in sorted(epic_registry.keys()):
@@ -332,7 +333,7 @@ def build_epic_index(env, docname: str, story_registry: list):
         para += epic_ref

         # Story count
-        story_count = len(epic[
+        story_count = len(epic["stories"])
         para += nodes.Text(f" ({story_count} stories)")

         item += para
@@ -343,7 +344,7 @@ def build_epic_index(env, docname: str, story_registry: list):
     # Find unassigned stories
     unassigned_stories = []
     for story in story_registry:
-        if normalize_name(story[
+        if normalize_name(story["feature"]) not in assigned_stories:
             unassigned_stories.append(story)

     if unassigned_stories:
@@ -356,12 +357,14 @@ def build_epic_index(env, docname: str, story_registry: list):
         result_nodes.append(heading)

         intro = nodes.paragraph()
-        intro += nodes.Text(
+        intro += nodes.Text(
+            f"{len(unassigned_stories)} stories not yet assigned to an epic:"
+        )
         result_nodes.append(intro)

         # List unassigned stories
         unassigned_list = nodes.bullet_list()
-        for story in sorted(unassigned_stories, key=lambda s: s[
+        for story in sorted(unassigned_stories, key=lambda s: s["feature"].lower()):
             item = nodes.list_item()
             para = nodes.paragraph()

@@ -370,15 +373,17 @@ def build_epic_index(env, docname: str, story_registry: list):

             # App in parentheses
             para += nodes.Text(" (")
-            app_path =
-
+            app_path = (
+                f"{prefix}{config.get_doc_path('applications')}/{story['app']}.html"
+            )
+            app_valid = story["app_normalized"] in _known_apps

             if app_valid:
                 app_ref = nodes.reference("", "", refuri=app_path)
-                app_ref += nodes.Text(story[
+                app_ref += nodes.Text(story["app"].replace("-", " ").title())
                 para += app_ref
             else:
-                para += nodes.Text(story[
+                para += nodes.Text(story["app"].replace("-", " ").title())

             para += nodes.Text(")")

@@ -413,7 +418,7 @@ def build_epics_for_persona(env, docname: str, persona_arg: str, story_registry:

     bullet_list = nodes.bullet_list()

-    for slug,
+    for slug, _epic in sorted(matching_epics, key=lambda x: x[0]):
         item = nodes.list_item()
         para = nodes.paragraph()

julee/docs/sphinx_hcd/integrations.py
CHANGED

@@ -9,11 +9,11 @@ Provides directives:
 """

 import os
+
 import yaml
-from pathlib import Path
 from docutils import nodes
-from sphinx.util.docutils import SphinxDirective
 from sphinx.util import logging
+from sphinx.util.docutils import SphinxDirective

 from .config import get_config

@@ -30,7 +30,7 @@ def get_integration_registry() -> dict:

 def get_documented_integrations(env) -> set:
     """Get documented integrations set from env, creating if needed."""
-    if not hasattr(env,
+    if not hasattr(env, "documented_integrations"):
         env.documented_integrations = set()
     return env.documented_integrations

@@ -41,14 +41,16 @@ def scan_integration_manifests(app):
     _integration_registry = {}

     config = get_config()
-    integrations_dir = config.get_path(
+    integrations_dir = config.get_path("integration_manifests")

     if not integrations_dir.exists():
-        logger.info(
+        logger.info(
+            f"Integrations directory not found at {integrations_dir} - no integration manifests to index"
+        )
         return

     for int_dir in integrations_dir.iterdir():
-        if not int_dir.is_dir() or int_dir.name.startswith(
+        if not int_dir.is_dir() or int_dir.name.startswith("_"):
             continue

         manifest_path = int_dir / "integration.yaml"
@@ -64,15 +66,15 @@ def scan_integration_manifests(app):
             logger.warning(f"Could not read {manifest_path}: {e}")
             continue

-        slug = manifest.get(
+        slug = manifest.get("slug", module_name.replace("_", "-"))
         _integration_registry[slug] = {
-
-
-
-
-
-
-
+            "slug": slug,
+            "module": module_name,
+            "name": manifest.get("name", slug.replace("-", " ").title()),
+            "description": manifest.get("description", "").strip(),
+            "direction": manifest.get("direction", "bidirectional"),
+            "depends_on": manifest.get("depends_on", []),
+            "manifest_path": str(manifest_path),
         }

     logger.info(f"Indexed {len(_integration_registry)} integrations from manifests")
@@ -114,12 +116,13 @@ class DefineIntegrationDirective(SphinxDirective):
         get_documented_integrations(self.env).add(slug)

         node = DefineIntegrationPlaceholder()
-        node[
+        node["integration_slug"] = slug
         return [node]


 class DefineIntegrationPlaceholder(nodes.General, nodes.Element):
     """Placeholder node for define-integration, replaced at doctree-resolved."""
+
     pass


@@ -137,6 +140,7 @@ class IntegrationIndexDirective(SphinxDirective):

 class IntegrationIndexPlaceholder(nodes.General, nodes.Element):
     """Placeholder node for integration-index, replaced at doctree-resolved."""
+
     pass


@@ -153,9 +157,9 @@ def build_integration_content(slug, docname):
     result_nodes = []

     # Description
-    if data[
+    if data["description"]:
         desc_para = nodes.paragraph()
-        desc_para += nodes.Text(data[
+        desc_para += nodes.Text(data["description"])
         result_nodes.append(desc_para)

     # Seealso with metadata
@@ -163,13 +167,13 @@ def build_integration_content(slug, docname):

     # Direction
     direction_labels = {
-
-
-
+        "inbound": "Inbound (data source)",
+        "outbound": "Outbound (data sink)",
+        "bidirectional": "Bidirectional",
     }
     dir_para = nodes.paragraph()
     dir_para += nodes.strong(text="Direction: ")
-    dir_para += nodes.Text(direction_labels.get(data[
+    dir_para += nodes.Text(direction_labels.get(data["direction"], data["direction"]))
     seealso_node += dir_para

     # Module
@@ -179,17 +183,17 @@ def build_integration_content(slug, docname):
     seealso_node += mod_para

     # External dependencies
-    if data[
+    if data["depends_on"]:
         deps_para = nodes.paragraph()
         deps_para += nodes.strong(text="Depends On: ")
-        for i, dep in enumerate(data[
-            if dep.get(
-                ref = nodes.reference("", "", refuri=dep[
-                ref += nodes.Text(dep[
+        for i, dep in enumerate(data["depends_on"]):
+            if dep.get("url"):
+                ref = nodes.reference("", "", refuri=dep["url"])
+                ref += nodes.Text(dep["name"])
                 deps_para += ref
             else:
-                deps_para += nodes.Text(dep[
-            if i < len(data[
+                deps_para += nodes.Text(dep["name"])
+            if i < len(data["depends_on"]) - 1:
                 deps_para += nodes.Text(", ")
         seealso_node += deps_para

@@ -231,10 +235,10 @@ def build_integration_index(docname):
         int_id = slug.replace("-", "_")
         lines.append(f'component "{data["name"]}" as {int_id} <<integration>>')

-        for dep in data.get(
-            dep_id = dep[
-            dep_label = dep[
-            if dep.get(
+        for dep in data.get("depends_on", []):
+            dep_id = dep["name"].lower().replace(" ", "_").replace("-", "_")
+            dep_label = dep["name"]
+            if dep.get("description"):
                 dep_label += f"\\n({dep['description']})"
             lines.append(f'component "{dep_label}" as {dep_id} <<external>>')

@@ -243,34 +247,34 @@ def build_integration_index(docname):

     for slug, data in sorted(_integration_registry.items()):
         int_id = slug.replace("-", "_")
-        direction = data.get(
+        direction = data.get("direction", "bidirectional")

         # Core to/from integration
-        if direction ==
-            lines.append(f
-        elif direction ==
-            lines.append(f
+        if direction == "inbound":
+            lines.append(f"{int_id} --> core")
+        elif direction == "outbound":
+            lines.append(f"core --> {int_id}")
         else:
-            lines.append(f
+            lines.append(f"core <--> {int_id}")

         # Integration to external dependencies
-        for dep in data.get(
-            dep_id = dep[
-            if direction ==
-                lines.append(f
-            elif direction ==
-                lines.append(f
+        for dep in data.get("depends_on", []):
+            dep_id = dep["name"].lower().replace(" ", "_").replace("-", "_")
+            if direction == "inbound":
+                lines.append(f"{dep_id} --> {int_id}")
+            elif direction == "outbound":
+                lines.append(f"{int_id} --> {dep_id}")
             else:
-                lines.append(f
+                lines.append(f"{int_id} <--> {dep_id}")

         lines.append("")
     lines.append("@enduml")

     puml_source = "\n".join(lines)
     node = plantuml(puml_source)
-    node[
-    node[
-    node[
+    node["uml"] = puml_source
+    node["incdir"] = os.path.dirname(docname)
+    node["filename"] = os.path.basename(docname) + ".rst"
     result_nodes.append(node)

     return result_nodes
@@ -279,7 +283,7 @@ def build_integration_index(docname):
 def process_integration_placeholders(app, doctree, docname):
     """Replace integration placeholders after all documents are read."""
     for node in doctree.traverse(DefineIntegrationPlaceholder):
-        slug = node[
+        slug = node["integration_slug"]
         content = build_integration_content(slug, docname)
         node.replace_self(content)

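
Both modules above follow the same Sphinx extension pattern: a directive's run() emits a lightweight placeholder node, and a doctree-resolved handler later replaces that node with content rendered from a registry kept on the build environment, once every document has been read. The sketch below illustrates only that general pattern; the directive name "demo-index", the DemoPlaceholder node, the demo_registry attribute, and the setup() wiring are hypothetical stand-ins, not the package's actual registration code.

# Minimal sketch of the placeholder / doctree-resolved pattern used in epics.py
# and integrations.py. All names here (demo-index, DemoPlaceholder, demo_registry)
# are illustrative assumptions, not taken from the julee package.
from docutils import nodes
from sphinx.util.docutils import SphinxDirective


class DemoPlaceholder(nodes.General, nodes.Element):
    """Placeholder node, replaced at doctree-resolved."""

    pass


class DemoIndexDirective(SphinxDirective):
    """Emit a placeholder; real rendering happens after all documents are read."""

    def run(self):
        node = DemoPlaceholder()
        node["label"] = "demo"
        return [node]


def process_demo_placeholders(app, doctree, docname):
    """Swap each placeholder for content built from a registry held on the env."""
    registry = getattr(app.env, "demo_registry", {})
    for node in doctree.traverse(DemoPlaceholder):
        para = nodes.paragraph()
        para += nodes.Text(f"{len(registry)} items registered ({node['label']})")
        node.replace_self(para)


def setup(app):
    app.add_directive("demo-index", DemoIndexDirective)
    app.connect("doctree-resolved", process_demo_placeholders)
    return {"version": "0.1", "parallel_read_safe": True}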