julee 0.1.4__py3-none-any.whl → 0.1.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- julee/__init__.py +1 -1
- julee/api/tests/routers/test_assembly_specifications.py +2 -0
- julee/api/tests/routers/test_documents.py +2 -0
- julee/api/tests/routers/test_knowledge_service_configs.py +2 -0
- julee/api/tests/routers/test_knowledge_service_queries.py +2 -0
- julee/api/tests/routers/test_system.py +2 -0
- julee/api/tests/routers/test_workflows.py +2 -0
- julee/api/tests/test_app.py +2 -0
- julee/api/tests/test_dependencies.py +2 -0
- julee/api/tests/test_requests.py +2 -0
- julee/contrib/polling/__init__.py +22 -19
- julee/contrib/polling/apps/__init__.py +17 -0
- julee/contrib/polling/apps/worker/__init__.py +17 -0
- julee/contrib/polling/apps/worker/pipelines.py +288 -0
- julee/contrib/polling/domain/__init__.py +7 -9
- julee/contrib/polling/domain/models/__init__.py +6 -7
- julee/contrib/polling/domain/models/polling_config.py +18 -1
- julee/contrib/polling/domain/services/__init__.py +6 -5
- julee/contrib/polling/domain/services/poller.py +1 -1
- julee/contrib/polling/infrastructure/__init__.py +9 -8
- julee/contrib/polling/infrastructure/services/__init__.py +6 -5
- julee/contrib/polling/infrastructure/services/polling/__init__.py +6 -5
- julee/contrib/polling/infrastructure/services/polling/http/__init__.py +6 -5
- julee/contrib/polling/infrastructure/services/polling/http/http_poller_service.py +5 -2
- julee/contrib/polling/infrastructure/temporal/__init__.py +12 -12
- julee/contrib/polling/infrastructure/temporal/activities.py +1 -1
- julee/contrib/polling/infrastructure/temporal/manager.py +291 -0
- julee/contrib/polling/infrastructure/temporal/proxies.py +1 -1
- julee/contrib/polling/tests/unit/apps/worker/test_pipelines.py +580 -0
- julee/contrib/polling/tests/unit/infrastructure/services/polling/http/test_http_poller_service.py +40 -2
- julee/contrib/polling/tests/unit/infrastructure/temporal/__init__.py +7 -0
- julee/contrib/polling/tests/unit/infrastructure/temporal/test_manager.py +475 -0
- julee/docs/sphinx_hcd/__init__.py +146 -13
- julee/docs/sphinx_hcd/domain/__init__.py +5 -0
- julee/docs/sphinx_hcd/domain/models/__init__.py +32 -0
- julee/docs/sphinx_hcd/domain/models/accelerator.py +152 -0
- julee/docs/sphinx_hcd/domain/models/app.py +151 -0
- julee/docs/sphinx_hcd/domain/models/code_info.py +121 -0
- julee/docs/sphinx_hcd/domain/models/epic.py +79 -0
- julee/docs/sphinx_hcd/domain/models/integration.py +230 -0
- julee/docs/sphinx_hcd/domain/models/journey.py +222 -0
- julee/docs/sphinx_hcd/domain/models/persona.py +106 -0
- julee/docs/sphinx_hcd/domain/models/story.py +128 -0
- julee/docs/sphinx_hcd/domain/repositories/__init__.py +25 -0
- julee/docs/sphinx_hcd/domain/repositories/accelerator.py +98 -0
- julee/docs/sphinx_hcd/domain/repositories/app.py +57 -0
- julee/docs/sphinx_hcd/domain/repositories/base.py +89 -0
- julee/docs/sphinx_hcd/domain/repositories/code_info.py +69 -0
- julee/docs/sphinx_hcd/domain/repositories/epic.py +62 -0
- julee/docs/sphinx_hcd/domain/repositories/integration.py +79 -0
- julee/docs/sphinx_hcd/domain/repositories/journey.py +106 -0
- julee/docs/sphinx_hcd/domain/repositories/story.py +68 -0
- julee/docs/sphinx_hcd/domain/use_cases/__init__.py +64 -0
- julee/docs/sphinx_hcd/domain/use_cases/derive_personas.py +166 -0
- julee/docs/sphinx_hcd/domain/use_cases/resolve_accelerator_references.py +236 -0
- julee/docs/sphinx_hcd/domain/use_cases/resolve_app_references.py +144 -0
- julee/docs/sphinx_hcd/domain/use_cases/resolve_story_references.py +121 -0
- julee/docs/sphinx_hcd/parsers/__init__.py +48 -0
- julee/docs/sphinx_hcd/parsers/ast.py +150 -0
- julee/docs/sphinx_hcd/parsers/gherkin.py +155 -0
- julee/docs/sphinx_hcd/parsers/yaml.py +184 -0
- julee/docs/sphinx_hcd/repositories/__init__.py +4 -0
- julee/docs/sphinx_hcd/repositories/memory/__init__.py +25 -0
- julee/docs/sphinx_hcd/repositories/memory/accelerator.py +86 -0
- julee/docs/sphinx_hcd/repositories/memory/app.py +45 -0
- julee/docs/sphinx_hcd/repositories/memory/base.py +106 -0
- julee/docs/sphinx_hcd/repositories/memory/code_info.py +59 -0
- julee/docs/sphinx_hcd/repositories/memory/epic.py +54 -0
- julee/docs/sphinx_hcd/repositories/memory/integration.py +70 -0
- julee/docs/sphinx_hcd/repositories/memory/journey.py +96 -0
- julee/docs/sphinx_hcd/repositories/memory/story.py +63 -0
- julee/docs/sphinx_hcd/sphinx/__init__.py +28 -0
- julee/docs/sphinx_hcd/sphinx/adapters.py +116 -0
- julee/docs/sphinx_hcd/sphinx/context.py +163 -0
- julee/docs/sphinx_hcd/sphinx/directives/__init__.py +160 -0
- julee/docs/sphinx_hcd/sphinx/directives/accelerator.py +576 -0
- julee/docs/sphinx_hcd/sphinx/directives/app.py +349 -0
- julee/docs/sphinx_hcd/sphinx/directives/base.py +211 -0
- julee/docs/sphinx_hcd/sphinx/directives/epic.py +434 -0
- julee/docs/sphinx_hcd/sphinx/directives/integration.py +220 -0
- julee/docs/sphinx_hcd/sphinx/directives/journey.py +642 -0
- julee/docs/sphinx_hcd/sphinx/directives/persona.py +345 -0
- julee/docs/sphinx_hcd/sphinx/directives/story.py +575 -0
- julee/docs/sphinx_hcd/sphinx/event_handlers/__init__.py +16 -0
- julee/docs/sphinx_hcd/sphinx/event_handlers/builder_inited.py +31 -0
- julee/docs/sphinx_hcd/sphinx/event_handlers/doctree_read.py +27 -0
- julee/docs/sphinx_hcd/sphinx/event_handlers/doctree_resolved.py +43 -0
- julee/docs/sphinx_hcd/sphinx/event_handlers/env_purge_doc.py +42 -0
- julee/docs/sphinx_hcd/sphinx/initialization.py +139 -0
- julee/docs/sphinx_hcd/tests/__init__.py +9 -0
- julee/docs/sphinx_hcd/tests/conftest.py +6 -0
- julee/docs/sphinx_hcd/tests/domain/__init__.py +1 -0
- julee/docs/sphinx_hcd/tests/domain/models/__init__.py +1 -0
- julee/docs/sphinx_hcd/tests/domain/models/test_accelerator.py +266 -0
- julee/docs/sphinx_hcd/tests/domain/models/test_app.py +258 -0
- julee/docs/sphinx_hcd/tests/domain/models/test_code_info.py +231 -0
- julee/docs/sphinx_hcd/tests/domain/models/test_epic.py +163 -0
- julee/docs/sphinx_hcd/tests/domain/models/test_integration.py +327 -0
- julee/docs/sphinx_hcd/tests/domain/models/test_journey.py +249 -0
- julee/docs/sphinx_hcd/tests/domain/models/test_persona.py +172 -0
- julee/docs/sphinx_hcd/tests/domain/models/test_story.py +216 -0
- julee/docs/sphinx_hcd/tests/domain/use_cases/__init__.py +1 -0
- julee/docs/sphinx_hcd/tests/domain/use_cases/test_derive_personas.py +314 -0
- julee/docs/sphinx_hcd/tests/domain/use_cases/test_resolve_accelerator_references.py +476 -0
- julee/docs/sphinx_hcd/tests/domain/use_cases/test_resolve_app_references.py +265 -0
- julee/docs/sphinx_hcd/tests/domain/use_cases/test_resolve_story_references.py +229 -0
- julee/docs/sphinx_hcd/tests/integration/__init__.py +1 -0
- julee/docs/sphinx_hcd/tests/parsers/__init__.py +1 -0
- julee/docs/sphinx_hcd/tests/parsers/test_ast.py +298 -0
- julee/docs/sphinx_hcd/tests/parsers/test_gherkin.py +282 -0
- julee/docs/sphinx_hcd/tests/parsers/test_yaml.py +496 -0
- julee/docs/sphinx_hcd/tests/repositories/__init__.py +1 -0
- julee/docs/sphinx_hcd/tests/repositories/test_accelerator.py +298 -0
- julee/docs/sphinx_hcd/tests/repositories/test_app.py +218 -0
- julee/docs/sphinx_hcd/tests/repositories/test_base.py +151 -0
- julee/docs/sphinx_hcd/tests/repositories/test_code_info.py +253 -0
- julee/docs/sphinx_hcd/tests/repositories/test_epic.py +237 -0
- julee/docs/sphinx_hcd/tests/repositories/test_integration.py +268 -0
- julee/docs/sphinx_hcd/tests/repositories/test_journey.py +294 -0
- julee/docs/sphinx_hcd/tests/repositories/test_story.py +236 -0
- julee/docs/sphinx_hcd/tests/sphinx/__init__.py +1 -0
- julee/docs/sphinx_hcd/tests/sphinx/directives/__init__.py +1 -0
- julee/docs/sphinx_hcd/tests/sphinx/directives/test_base.py +160 -0
- julee/docs/sphinx_hcd/tests/sphinx/test_adapters.py +176 -0
- julee/docs/sphinx_hcd/tests/sphinx/test_context.py +257 -0
- julee/domain/models/assembly/tests/test_assembly.py +2 -0
- julee/domain/models/assembly_specification/tests/test_assembly_specification.py +2 -0
- julee/domain/models/assembly_specification/tests/test_knowledge_service_query.py +2 -0
- julee/domain/models/custom_fields/tests/test_custom_fields.py +2 -0
- julee/domain/models/document/tests/test_document.py +2 -0
- julee/domain/models/policy/tests/test_document_policy_validation.py +2 -0
- julee/domain/models/policy/tests/test_policy.py +2 -0
- julee/domain/use_cases/tests/test_extract_assemble_data.py +2 -0
- julee/domain/use_cases/tests/test_initialize_system_data.py +2 -0
- julee/domain/use_cases/tests/test_validate_document.py +2 -0
- julee/maintenance/release.py +10 -5
- julee/repositories/memory/tests/test_document.py +2 -0
- julee/repositories/memory/tests/test_document_policy_validation.py +2 -0
- julee/repositories/memory/tests/test_policy.py +2 -0
- julee/repositories/minio/tests/test_assembly.py +2 -0
- julee/repositories/minio/tests/test_assembly_specification.py +2 -0
- julee/repositories/minio/tests/test_client_protocol.py +3 -0
- julee/repositories/minio/tests/test_document.py +2 -0
- julee/repositories/minio/tests/test_document_policy_validation.py +2 -0
- julee/repositories/minio/tests/test_knowledge_service_config.py +2 -0
- julee/repositories/minio/tests/test_knowledge_service_query.py +2 -0
- julee/repositories/minio/tests/test_policy.py +2 -0
- julee/services/knowledge_service/anthropic/tests/test_knowledge_service.py +2 -0
- julee/services/knowledge_service/memory/test_knowledge_service.py +2 -0
- julee/services/knowledge_service/test_factory.py +2 -0
- julee/util/tests/test_decorators.py +2 -0
- julee-0.1.6.dist-info/METADATA +104 -0
- julee-0.1.6.dist-info/RECORD +288 -0
- julee/docs/sphinx_hcd/accelerators.py +0 -1175
- julee/docs/sphinx_hcd/apps.py +0 -518
- julee/docs/sphinx_hcd/epics.py +0 -453
- julee/docs/sphinx_hcd/integrations.py +0 -310
- julee/docs/sphinx_hcd/journeys.py +0 -797
- julee/docs/sphinx_hcd/personas.py +0 -457
- julee/docs/sphinx_hcd/stories.py +0 -960
- julee-0.1.4.dist-info/METADATA +0 -197
- julee-0.1.4.dist-info/RECORD +0 -196
- {julee-0.1.4.dist-info → julee-0.1.6.dist-info}/WHEEL +0 -0
- {julee-0.1.4.dist-info → julee-0.1.6.dist-info}/licenses/LICENSE +0 -0
- {julee-0.1.4.dist-info → julee-0.1.6.dist-info}/top_level.txt +0 -0
|
@@ -1,1175 +0,0 @@
|
|
|
1
|
-
"""Sphinx extension for accelerators with code introspection.
|
|
2
|
-
|
|
3
|
-
Provides directives that introspect src/{slug}/ for ADR 001-compliant code
|
|
4
|
-
structure and cross-reference with apps, stories, and journeys.
|
|
5
|
-
|
|
6
|
-
Stage 1: Introspects src/{slug}/ for entities, use cases, protocols.
|
|
7
|
-
Stage 2 (future): Will inspect apps/worker/pipelines/ for pipeline treatment.
|
|
8
|
-
|
|
9
|
-
Provides directives:
|
|
10
|
-
- define-accelerator: Define accelerator with metadata + introspected code
|
|
11
|
-
- accelerator-index: Generate index table grouped by status
|
|
12
|
-
- accelerators-for-app: List accelerators an app exposes
|
|
13
|
-
- dependent-accelerators: List accelerators that depend on/publish to an integration
|
|
14
|
-
- accelerator-dependency-diagram: Generate PlantUML component diagram
|
|
15
|
-
- accelerator-status: Show status, milestone, and acceptance info
|
|
16
|
-
- src-accelerator-backlinks: Generate seealso links from autodoc back to docs
|
|
17
|
-
- src-app-backlinks: Generate seealso links from app autodoc pages back to docs
|
|
18
|
-
"""
|
|
19
|
-
|
|
20
|
-
import ast
|
|
21
|
-
import os
|
|
22
|
-
from pathlib import Path
|
|
23
|
-
|
|
24
|
-
from docutils import nodes
|
|
25
|
-
from docutils.parsers.rst import directives
|
|
26
|
-
from sphinx.util import logging
|
|
27
|
-
from sphinx.util.docutils import SphinxDirective
|
|
28
|
-
|
|
29
|
-
from .config import get_config
|
|
30
|
-
from .utils import (
|
|
31
|
-
kebab_to_snake,
|
|
32
|
-
normalize_name,
|
|
33
|
-
parse_integration_options,
|
|
34
|
-
parse_list_option,
|
|
35
|
-
path_to_root,
|
|
36
|
-
)
|
|
37
|
-
|
|
38
|
-
logger = logging.getLogger(__name__)
|
|
39
|
-
|
|
40
|
-
# Global registry for code introspection (populated at builder-inited, doesn't change)
|
|
41
|
-
_code_registry: dict = {}
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
def get_accelerator_registry(env):
|
|
45
|
-
"""Get the accelerator registry from env, creating if needed."""
|
|
46
|
-
if not hasattr(env, "accelerator_registry"):
|
|
47
|
-
env.accelerator_registry = {}
|
|
48
|
-
return env.accelerator_registry
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
def get_documented_accelerators(env):
|
|
52
|
-
"""Get the set of documented accelerators from env, creating if needed."""
|
|
53
|
-
if not hasattr(env, "documented_accelerators"):
|
|
54
|
-
env.documented_accelerators = set()
|
|
55
|
-
return env.documented_accelerators
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
def scan_python_classes(directory: Path) -> list[dict]:
|
|
59
|
-
"""Extract class names from Python files in a directory using AST."""
|
|
60
|
-
if not directory.exists():
|
|
61
|
-
return []
|
|
62
|
-
|
|
63
|
-
classes = []
|
|
64
|
-
for py_file in directory.glob("*.py"):
|
|
65
|
-
if py_file.name.startswith("_"):
|
|
66
|
-
continue
|
|
67
|
-
|
|
68
|
-
try:
|
|
69
|
-
with open(py_file) as f:
|
|
70
|
-
tree = ast.parse(f.read(), filename=str(py_file))
|
|
71
|
-
|
|
72
|
-
for node in ast.walk(tree):
|
|
73
|
-
if isinstance(node, ast.ClassDef):
|
|
74
|
-
docstring = ast.get_docstring(node) or ""
|
|
75
|
-
first_line = docstring.split("\n")[0].strip() if docstring else ""
|
|
76
|
-
classes.append(
|
|
77
|
-
{
|
|
78
|
-
"name": node.name,
|
|
79
|
-
"docstring": first_line,
|
|
80
|
-
"file": py_file.name,
|
|
81
|
-
}
|
|
82
|
-
)
|
|
83
|
-
except Exception as e:
|
|
84
|
-
logger.warning(f"Could not parse {py_file}: {e}")
|
|
85
|
-
|
|
86
|
-
return sorted(classes, key=lambda c: c["name"])
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
def get_module_docstring(module_path: Path) -> tuple[str | None, str | None]:
|
|
90
|
-
"""Extract module docstring from a Python file using AST."""
|
|
91
|
-
if not module_path.exists():
|
|
92
|
-
return None, None
|
|
93
|
-
|
|
94
|
-
try:
|
|
95
|
-
with open(module_path) as f:
|
|
96
|
-
tree = ast.parse(f.read(), filename=str(module_path))
|
|
97
|
-
docstring = ast.get_docstring(tree)
|
|
98
|
-
if docstring:
|
|
99
|
-
first_line = docstring.split("\n")[0].strip()
|
|
100
|
-
return first_line, docstring
|
|
101
|
-
except Exception as e:
|
|
102
|
-
logger.warning(f"Could not parse {module_path}: {e}")
|
|
103
|
-
|
|
104
|
-
return None, None
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
def scan_bounded_context(slug: str, project_root: Path) -> dict | None:
|
|
108
|
-
"""Introspect src/{slug}/ for ADR 001-compliant code structure."""
|
|
109
|
-
snake_slug = kebab_to_snake(slug)
|
|
110
|
-
config = get_config()
|
|
111
|
-
src_dir = config.get_path("bounded_contexts")
|
|
112
|
-
context_dir = src_dir / snake_slug
|
|
113
|
-
|
|
114
|
-
if not context_dir.exists() and snake_slug != slug:
|
|
115
|
-
context_dir = src_dir / slug
|
|
116
|
-
if not context_dir.exists():
|
|
117
|
-
return None
|
|
118
|
-
elif not context_dir.exists():
|
|
119
|
-
return None
|
|
120
|
-
|
|
121
|
-
init_file = context_dir / "__init__.py"
|
|
122
|
-
objective, full_docstring = get_module_docstring(init_file)
|
|
123
|
-
|
|
124
|
-
return {
|
|
125
|
-
"entities": scan_python_classes(context_dir / "domain" / "models"),
|
|
126
|
-
"use_cases": scan_python_classes(context_dir / "use_cases"),
|
|
127
|
-
"repository_protocols": scan_python_classes(
|
|
128
|
-
context_dir / "domain" / "repositories"
|
|
129
|
-
),
|
|
130
|
-
"service_protocols": scan_python_classes(context_dir / "domain" / "services"),
|
|
131
|
-
"has_infrastructure": (context_dir / "infrastructure").exists(),
|
|
132
|
-
"code_dir": context_dir.name,
|
|
133
|
-
"objective": objective,
|
|
134
|
-
"docstring": full_docstring,
|
|
135
|
-
}
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
def scan_code_structure(app):
|
|
139
|
-
"""Scan src/ for all bounded contexts at build init."""
|
|
140
|
-
global _code_registry
|
|
141
|
-
_code_registry = {}
|
|
142
|
-
|
|
143
|
-
config = get_config()
|
|
144
|
-
src_dir = config.get_path("bounded_contexts")
|
|
145
|
-
|
|
146
|
-
if not src_dir.exists():
|
|
147
|
-
logger.info("src/ directory not found - no code to introspect yet")
|
|
148
|
-
return
|
|
149
|
-
|
|
150
|
-
for context_dir in src_dir.iterdir():
|
|
151
|
-
if not context_dir.is_dir():
|
|
152
|
-
continue
|
|
153
|
-
if context_dir.name.startswith((".", "_")):
|
|
154
|
-
continue
|
|
155
|
-
|
|
156
|
-
slug = context_dir.name
|
|
157
|
-
code_info = scan_bounded_context(slug, config.project_root)
|
|
158
|
-
if code_info:
|
|
159
|
-
_code_registry[slug] = code_info
|
|
160
|
-
logger.info(
|
|
161
|
-
f"Introspected bounded context '{slug}': "
|
|
162
|
-
f"{len(code_info['entities'])} entities, "
|
|
163
|
-
f"{len(code_info['use_cases'])} use cases"
|
|
164
|
-
)
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
def get_apps_for_accelerator(accelerator_slug: str) -> list[str]:
|
|
168
|
-
"""Get apps that expose this accelerator (from app manifests)."""
|
|
169
|
-
from . import apps
|
|
170
|
-
|
|
171
|
-
_app_registry = apps.get_app_registry()
|
|
172
|
-
|
|
173
|
-
result = []
|
|
174
|
-
for app_slug, app_data in _app_registry.items():
|
|
175
|
-
if accelerator_slug in app_data.get("accelerators", []):
|
|
176
|
-
result.append(app_slug)
|
|
177
|
-
return sorted(result)
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
def get_stories_for_accelerator(accelerator_slug: str) -> list[dict]:
|
|
181
|
-
"""Get stories for apps that use this accelerator."""
|
|
182
|
-
from . import stories
|
|
183
|
-
|
|
184
|
-
_story_registry = stories.get_story_registry()
|
|
185
|
-
|
|
186
|
-
app_slugs = get_apps_for_accelerator(accelerator_slug)
|
|
187
|
-
result = []
|
|
188
|
-
|
|
189
|
-
for story in _story_registry:
|
|
190
|
-
if story["app"] in app_slugs:
|
|
191
|
-
result.append(story)
|
|
192
|
-
|
|
193
|
-
return result
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
def get_journeys_for_accelerator(accelerator_slug: str, env) -> list[str]:
|
|
197
|
-
"""Get journeys that include stories from this accelerator's apps."""
|
|
198
|
-
from . import journeys
|
|
199
|
-
|
|
200
|
-
journey_registry = journeys.get_journey_registry(env)
|
|
201
|
-
|
|
202
|
-
story_list = get_stories_for_accelerator(accelerator_slug)
|
|
203
|
-
story_titles = {normalize_name(s["feature"]) for s in story_list}
|
|
204
|
-
|
|
205
|
-
result = []
|
|
206
|
-
for slug, journey in journey_registry.items():
|
|
207
|
-
for step in journey.get("steps", []):
|
|
208
|
-
if step.get("type") == "story":
|
|
209
|
-
if normalize_name(step["ref"]) in story_titles:
|
|
210
|
-
result.append(slug)
|
|
211
|
-
break
|
|
212
|
-
|
|
213
|
-
return sorted(set(result))
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
class DefineAcceleratorDirective(SphinxDirective):
|
|
217
|
-
"""Define an accelerator with metadata and introspected code.
|
|
218
|
-
|
|
219
|
-
Usage::
|
|
220
|
-
|
|
221
|
-
.. define-accelerator:: vocabulary
|
|
222
|
-
:status: alpha
|
|
223
|
-
:milestone: 2 (Nov 2025)
|
|
224
|
-
:acceptance: Reference environment deployed and accepted.
|
|
225
|
-
:sources_from: pilot-data-collection (Scheme documentation, standards materials)
|
|
226
|
-
:feeds_into: traceability, conformity
|
|
227
|
-
:publishes_to: reference-implementation (SVC artefacts)
|
|
228
|
-
|
|
229
|
-
Accelerate the creation of Sustainable Vocabulary Catalogs.
|
|
230
|
-
"""
|
|
231
|
-
|
|
232
|
-
required_arguments = 1
|
|
233
|
-
has_content = True
|
|
234
|
-
option_spec = {
|
|
235
|
-
"status": directives.unchanged,
|
|
236
|
-
"milestone": directives.unchanged,
|
|
237
|
-
"acceptance": directives.unchanged,
|
|
238
|
-
"sources_from": directives.unchanged,
|
|
239
|
-
"feeds_into": directives.unchanged,
|
|
240
|
-
"publishes_to": directives.unchanged,
|
|
241
|
-
"depends_on": directives.unchanged,
|
|
242
|
-
}
|
|
243
|
-
|
|
244
|
-
def run(self):
|
|
245
|
-
slug = self.arguments[0]
|
|
246
|
-
|
|
247
|
-
get_documented_accelerators(self.env).add(slug)
|
|
248
|
-
|
|
249
|
-
status = self.options.get("status", "").strip()
|
|
250
|
-
milestone = self.options.get("milestone", "").strip()
|
|
251
|
-
acceptance = self.options.get("acceptance", "").strip()
|
|
252
|
-
sources_from = parse_integration_options(self.options.get("sources_from", ""))
|
|
253
|
-
feeds_into = parse_list_option(self.options.get("feeds_into", ""))
|
|
254
|
-
publishes_to = parse_integration_options(self.options.get("publishes_to", ""))
|
|
255
|
-
depends_on = parse_list_option(self.options.get("depends_on", ""))
|
|
256
|
-
|
|
257
|
-
objective = "\n".join(self.content).strip()
|
|
258
|
-
|
|
259
|
-
get_accelerator_registry(self.env)[slug] = {
|
|
260
|
-
"slug": slug,
|
|
261
|
-
"status": status,
|
|
262
|
-
"milestone": milestone,
|
|
263
|
-
"acceptance": acceptance,
|
|
264
|
-
"objective": objective,
|
|
265
|
-
"sources_from": sources_from,
|
|
266
|
-
"feeds_into": feeds_into,
|
|
267
|
-
"publishes_to": publishes_to,
|
|
268
|
-
"depends_on": depends_on,
|
|
269
|
-
"docname": self.env.docname,
|
|
270
|
-
}
|
|
271
|
-
|
|
272
|
-
node = DefineAcceleratorPlaceholder()
|
|
273
|
-
node["accelerator_slug"] = slug
|
|
274
|
-
return [node]
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
class DefineAcceleratorPlaceholder(nodes.General, nodes.Element):
|
|
278
|
-
pass
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
class AcceleratorIndexDirective(SphinxDirective):
|
|
282
|
-
"""Generate index table grouped by status."""
|
|
283
|
-
|
|
284
|
-
def run(self):
|
|
285
|
-
node = AcceleratorIndexPlaceholder()
|
|
286
|
-
return [node]
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
class AcceleratorIndexPlaceholder(nodes.General, nodes.Element):
|
|
290
|
-
pass
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
class AcceleratorStatusDirective(SphinxDirective):
|
|
294
|
-
"""Show status, milestone, and acceptance for an accelerator."""
|
|
295
|
-
|
|
296
|
-
required_arguments = 1
|
|
297
|
-
|
|
298
|
-
def run(self):
|
|
299
|
-
node = AcceleratorStatusPlaceholder()
|
|
300
|
-
node["accelerator_slug"] = self.arguments[0]
|
|
301
|
-
return [node]
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
class AcceleratorStatusPlaceholder(nodes.General, nodes.Element):
|
|
305
|
-
pass
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
class AcceleratorsForAppDirective(SphinxDirective):
|
|
309
|
-
"""List accelerators an app exposes."""
|
|
310
|
-
|
|
311
|
-
required_arguments = 1
|
|
312
|
-
|
|
313
|
-
def run(self):
|
|
314
|
-
node = AcceleratorsForAppPlaceholder()
|
|
315
|
-
node["app_slug"] = self.arguments[0]
|
|
316
|
-
return [node]
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
class AcceleratorsForAppPlaceholder(nodes.General, nodes.Element):
|
|
320
|
-
pass
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
class DependentAcceleratorsDirective(SphinxDirective):
|
|
324
|
-
"""List accelerators that depend on or publish to an integration."""
|
|
325
|
-
|
|
326
|
-
option_spec = {
|
|
327
|
-
"relationship": directives.unchanged_required,
|
|
328
|
-
}
|
|
329
|
-
|
|
330
|
-
def run(self):
|
|
331
|
-
relationship = self.options.get("relationship", "").strip()
|
|
332
|
-
if relationship not in ("sources_from", "publishes_to"):
|
|
333
|
-
error = self.state_machine.reporter.error(
|
|
334
|
-
f"Invalid relationship '{relationship}'. "
|
|
335
|
-
f"Must be 'sources_from' or 'publishes_to'.",
|
|
336
|
-
line=self.lineno,
|
|
337
|
-
)
|
|
338
|
-
return [error]
|
|
339
|
-
|
|
340
|
-
docname = self.env.docname
|
|
341
|
-
integration_slug = docname.split("/")[-1]
|
|
342
|
-
|
|
343
|
-
node = DependentAcceleratorsPlaceholder()
|
|
344
|
-
node["integration_slug"] = integration_slug
|
|
345
|
-
node["relationship"] = relationship
|
|
346
|
-
return [node]
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
class DependentAcceleratorsPlaceholder(nodes.General, nodes.Element):
|
|
350
|
-
pass
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
class AcceleratorDependencyDiagramDirective(SphinxDirective):
|
|
354
|
-
"""Generate a PlantUML component diagram showing accelerator dependencies."""
|
|
355
|
-
|
|
356
|
-
required_arguments = 1
|
|
357
|
-
|
|
358
|
-
def run(self):
|
|
359
|
-
node = AcceleratorDependencyDiagramPlaceholder()
|
|
360
|
-
node["accelerator_slug"] = self.arguments[0]
|
|
361
|
-
return [node]
|
|
362
|
-
|
|
363
|
-
|
|
364
|
-
class AcceleratorDependencyDiagramPlaceholder(nodes.General, nodes.Element):
|
|
365
|
-
pass
|
|
366
|
-
|
|
367
|
-
|
|
368
|
-
class SrcAcceleratorBacklinksDirective(SphinxDirective):
|
|
369
|
-
"""Generate seealso links from autodoc pages back to documentation."""
|
|
370
|
-
|
|
371
|
-
required_arguments = 1
|
|
372
|
-
|
|
373
|
-
def run(self):
|
|
374
|
-
node = SrcAcceleratorBacklinksPlaceholder()
|
|
375
|
-
node["accelerator_slug"] = self.arguments[0]
|
|
376
|
-
return [node]
|
|
377
|
-
|
|
378
|
-
|
|
379
|
-
class SrcAcceleratorBacklinksPlaceholder(nodes.General, nodes.Element):
|
|
380
|
-
pass
|
|
381
|
-
|
|
382
|
-
|
|
383
|
-
class SrcAppBacklinksDirective(SphinxDirective):
|
|
384
|
-
"""Generate seealso links from app autodoc pages back to documentation."""
|
|
385
|
-
|
|
386
|
-
required_arguments = 1
|
|
387
|
-
|
|
388
|
-
def run(self):
|
|
389
|
-
node = SrcAppBacklinksPlaceholder()
|
|
390
|
-
node["app_slug"] = self.arguments[0]
|
|
391
|
-
return [node]
|
|
392
|
-
|
|
393
|
-
|
|
394
|
-
class SrcAppBacklinksPlaceholder(nodes.General, nodes.Element):
|
|
395
|
-
pass
|
|
396
|
-
|
|
397
|
-
|
|
398
|
-
def build_accelerator_status(slug: str, env) -> list:
|
|
399
|
-
"""Build status/milestone/acceptance block for an accelerator."""
|
|
400
|
-
accelerator_registry = get_accelerator_registry(env)
|
|
401
|
-
|
|
402
|
-
if slug not in accelerator_registry:
|
|
403
|
-
para = nodes.paragraph()
|
|
404
|
-
para += nodes.problematic(text=f"Accelerator '{slug}' not defined")
|
|
405
|
-
return [para]
|
|
406
|
-
|
|
407
|
-
accel = accelerator_registry[slug]
|
|
408
|
-
result_nodes = []
|
|
409
|
-
|
|
410
|
-
if accel["status"] or accel["milestone"]:
|
|
411
|
-
status_para = nodes.paragraph()
|
|
412
|
-
if accel["status"]:
|
|
413
|
-
status_para += nodes.strong(text="Status: ")
|
|
414
|
-
status_para += nodes.Text(accel["status"].title())
|
|
415
|
-
if accel["status"] and accel["milestone"]:
|
|
416
|
-
status_para += nodes.Text(" | ")
|
|
417
|
-
if accel["milestone"]:
|
|
418
|
-
status_para += nodes.strong(text="Milestone: ")
|
|
419
|
-
status_para += nodes.Text(accel["milestone"])
|
|
420
|
-
result_nodes.append(status_para)
|
|
421
|
-
|
|
422
|
-
if accel["acceptance"]:
|
|
423
|
-
accept_para = nodes.paragraph()
|
|
424
|
-
accept_para += nodes.strong(text="Acceptance: ")
|
|
425
|
-
accept_para += nodes.Text(accel["acceptance"])
|
|
426
|
-
result_nodes.append(accept_para)
|
|
427
|
-
|
|
428
|
-
return result_nodes
|
|
429
|
-
|
|
430
|
-
|
|
431
|
-
def build_accelerator_content(slug: str, docname: str, env) -> list:
|
|
432
|
-
"""Build the content nodes for an accelerator page."""
|
|
433
|
-
from sphinx.addnodes import seealso
|
|
434
|
-
|
|
435
|
-
config = get_config()
|
|
436
|
-
accelerator_registry = get_accelerator_registry(env)
|
|
437
|
-
|
|
438
|
-
if slug not in accelerator_registry:
|
|
439
|
-
para = nodes.paragraph()
|
|
440
|
-
para += nodes.problematic(text=f"Accelerator '{slug}' not defined")
|
|
441
|
-
return [para]
|
|
442
|
-
|
|
443
|
-
accel = accelerator_registry[slug]
|
|
444
|
-
result_nodes = []
|
|
445
|
-
|
|
446
|
-
prefix = path_to_root(docname)
|
|
447
|
-
|
|
448
|
-
snake_slug = kebab_to_snake(slug)
|
|
449
|
-
code_info = _code_registry.get(slug) or _code_registry.get(snake_slug)
|
|
450
|
-
|
|
451
|
-
objective = None
|
|
452
|
-
if code_info and code_info.get("objective"):
|
|
453
|
-
objective = code_info["objective"]
|
|
454
|
-
elif accel["objective"]:
|
|
455
|
-
objective = accel["objective"]
|
|
456
|
-
|
|
457
|
-
if objective:
|
|
458
|
-
obj_para = nodes.paragraph()
|
|
459
|
-
obj_para += nodes.Text(objective)
|
|
460
|
-
result_nodes.append(obj_para)
|
|
461
|
-
|
|
462
|
-
seealso_items = []
|
|
463
|
-
|
|
464
|
-
if code_info:
|
|
465
|
-
code_dir = code_info.get("code_dir", snake_slug)
|
|
466
|
-
autodoc_path = f"{prefix}source/_autosummary/rba.{code_dir}.html"
|
|
467
|
-
seealso_items.append(("Source", [(autodoc_path, f"rba.{code_dir}", True)]))
|
|
468
|
-
else:
|
|
469
|
-
seealso_items.append(
|
|
470
|
-
(
|
|
471
|
-
"Source",
|
|
472
|
-
[
|
|
473
|
-
(
|
|
474
|
-
None,
|
|
475
|
-
f"No implementation yet — expecting code at src/{snake_slug}/",
|
|
476
|
-
False,
|
|
477
|
-
)
|
|
478
|
-
],
|
|
479
|
-
)
|
|
480
|
-
)
|
|
481
|
-
|
|
482
|
-
apps = get_apps_for_accelerator(slug)
|
|
483
|
-
if apps:
|
|
484
|
-
app_links = []
|
|
485
|
-
for app_slug in apps:
|
|
486
|
-
app_path = f"{prefix}{config.get_doc_path('applications')}/{app_slug}.html"
|
|
487
|
-
app_links.append((app_path, app_slug.replace("-", " ").title(), False))
|
|
488
|
-
seealso_items.append(("Exposed By", app_links))
|
|
489
|
-
|
|
490
|
-
journeys = get_journeys_for_accelerator(slug, env)
|
|
491
|
-
if journeys:
|
|
492
|
-
journey_links = []
|
|
493
|
-
for journey_slug in journeys:
|
|
494
|
-
journey_path = (
|
|
495
|
-
f"{prefix}{config.get_doc_path('journeys')}/{journey_slug}.html"
|
|
496
|
-
)
|
|
497
|
-
journey_links.append(
|
|
498
|
-
(journey_path, journey_slug.replace("-", " ").title(), False)
|
|
499
|
-
)
|
|
500
|
-
seealso_items.append(("Journeys", journey_links))
|
|
501
|
-
|
|
502
|
-
if accel["depends_on"]:
|
|
503
|
-
accel_links = []
|
|
504
|
-
for dep_slug in accel["depends_on"]:
|
|
505
|
-
if dep_slug in accelerator_registry:
|
|
506
|
-
accel_path = (
|
|
507
|
-
f"{prefix}{config.get_doc_path('accelerators')}/{dep_slug}.html"
|
|
508
|
-
)
|
|
509
|
-
accel_links.append(
|
|
510
|
-
(accel_path, dep_slug.replace("-", " ").title(), False)
|
|
511
|
-
)
|
|
512
|
-
else:
|
|
513
|
-
accel_links.append(
|
|
514
|
-
(None, f"{dep_slug.replace('-', ' ').title()} [not found]", False)
|
|
515
|
-
)
|
|
516
|
-
seealso_items.append(("Depends On", accel_links))
|
|
517
|
-
|
|
518
|
-
if accel["feeds_into"]:
|
|
519
|
-
accel_links = []
|
|
520
|
-
for feed_slug in accel["feeds_into"]:
|
|
521
|
-
if feed_slug in accelerator_registry:
|
|
522
|
-
accel_path = (
|
|
523
|
-
f"{prefix}{config.get_doc_path('accelerators')}/{feed_slug}.html"
|
|
524
|
-
)
|
|
525
|
-
accel_links.append(
|
|
526
|
-
(accel_path, feed_slug.replace("-", " ").title(), False)
|
|
527
|
-
)
|
|
528
|
-
else:
|
|
529
|
-
accel_links.append(
|
|
530
|
-
(None, f"{feed_slug.replace('-', ' ').title()} [not found]", False)
|
|
531
|
-
)
|
|
532
|
-
seealso_items.append(("Feeds Into", accel_links))
|
|
533
|
-
|
|
534
|
-
if accel["sources_from"]:
|
|
535
|
-
int_links = []
|
|
536
|
-
for source in accel["sources_from"]:
|
|
537
|
-
int_path = (
|
|
538
|
-
f"{prefix}{config.get_doc_path('integrations')}/{source['slug']}.html"
|
|
539
|
-
)
|
|
540
|
-
label = source["slug"].replace("-", " ").title()
|
|
541
|
-
int_links.append((int_path, label, False))
|
|
542
|
-
seealso_items.append(("Sources From", int_links))
|
|
543
|
-
|
|
544
|
-
if accel["publishes_to"]:
|
|
545
|
-
int_links = []
|
|
546
|
-
for target in accel["publishes_to"]:
|
|
547
|
-
int_path = (
|
|
548
|
-
f"{prefix}{config.get_doc_path('integrations')}/{target['slug']}.html"
|
|
549
|
-
)
|
|
550
|
-
label = target["slug"].replace("-", " ").title()
|
|
551
|
-
int_links.append((int_path, label, False))
|
|
552
|
-
seealso_items.append(("Publishes To", int_links))
|
|
553
|
-
|
|
554
|
-
if seealso_items:
|
|
555
|
-
seealso_node = seealso()
|
|
556
|
-
|
|
557
|
-
for label, links in seealso_items:
|
|
558
|
-
para = nodes.paragraph()
|
|
559
|
-
para += nodes.strong(text=f"{label}: ")
|
|
560
|
-
|
|
561
|
-
for i, (path, text, is_code) in enumerate(links):
|
|
562
|
-
if path:
|
|
563
|
-
ref = nodes.reference("", "", refuri=path)
|
|
564
|
-
if is_code:
|
|
565
|
-
ref += nodes.literal(text=text)
|
|
566
|
-
else:
|
|
567
|
-
ref += nodes.Text(text)
|
|
568
|
-
para += ref
|
|
569
|
-
else:
|
|
570
|
-
if is_code:
|
|
571
|
-
para += nodes.literal(text=text)
|
|
572
|
-
else:
|
|
573
|
-
para += nodes.emphasis(text=text)
|
|
574
|
-
|
|
575
|
-
if i < len(links) - 1:
|
|
576
|
-
para += nodes.Text(", ")
|
|
577
|
-
|
|
578
|
-
seealso_node += para
|
|
579
|
-
|
|
580
|
-
result_nodes.append(seealso_node)
|
|
581
|
-
|
|
582
|
-
return result_nodes
|
|
583
|
-
|
|
584
|
-
|
|
585
|
-
def build_accelerator_index(docname: str, env) -> list:
    """Render the accelerator index, grouped into status sections.

    Accelerators are bucketed by their ``status`` field (alpha, production,
    future; anything unrecognized falls into "other") and each non-empty
    bucket is rendered as a strong heading followed by a bullet list.
    """
    registry = get_accelerator_registry(env)

    if not registry:
        empty = nodes.paragraph()
        empty += nodes.emphasis(text="No accelerators defined")
        return [empty]

    # Bucket every accelerator by its (lower-cased) status.
    buckets = {"alpha": [], "future": [], "production": [], "other": []}
    for accel_slug, data in registry.items():
        key = data.get("status", "").lower()
        target = buckets[key] if key in buckets else buckets["other"]
        target.append((accel_slug, data))

    output = []

    # Display order of the sections (deliberately not alphabetical).
    sections = [
        ("alpha", "Alpha Phase"),
        ("production", "Production"),
        ("future", "Future"),
        ("other", "Other"),
    ]

    for key, label in sections:
        entries = buckets.get(key, [])
        if not entries:
            continue

        header = nodes.paragraph()
        header += nodes.strong(text=label)
        output.append(header)

        listing = nodes.bullet_list()

        for accel_slug, data in sorted(entries, key=lambda pair: pair[0]):
            entry = nodes.list_item()
            line = nodes.paragraph()

            link = nodes.reference("", "", refuri=f"{accel_slug}.html")
            link += nodes.Text(accel_slug.replace("-", " ").title())
            line += link

            if data.get("milestone"):
                line += nodes.Text(f" — {data['milestone']}")

            # Mark accelerators that already have code scaffolding.
            if accel_slug in _code_registry:
                line += nodes.Text(" [code]")

            entry += line

            if data.get("objective"):
                summary = nodes.paragraph()
                text = data["objective"]
                if len(text) > 100:
                    text = text[:100] + "..."
                summary += nodes.Text(text)
                entry += summary

            listing += entry

        output.append(listing)

    return output
|
|
652
|
-
|
|
653
|
-
|
|
654
|
-
def build_accelerators_for_app(app_slug: str, docname: str, env) -> list:
    """Build a bullet list of accelerator links for an app.

    Each entry links to the accelerator's page under the configured
    accelerators doc path, optionally followed by a short excerpt of the
    accelerator's objective from the registry.

    Args:
        app_slug: Slug of the app whose accelerators are listed.
        docname: Current document name, used for relative path prefixes.
        env: Sphinx build environment (holds the accelerator registry).

    Returns:
        A list of docutils nodes: a bullet list, or a single paragraph
        when the app is unknown or has no accelerators.
    """
    from . import apps

    config = get_config()
    accelerator_registry = get_accelerator_registry(env)
    _app_registry = apps.get_app_registry()

    prefix = path_to_root(docname)

    app_data = _app_registry.get(app_slug)

    if not app_data:
        para = nodes.paragraph()
        para += nodes.emphasis(text=f"App '{app_slug}' not found")
        return [para]

    accel_slugs = app_data.get("accelerators", [])
    if not accel_slugs:
        para = nodes.paragraph()
        para += nodes.emphasis(text="No accelerators")
        return [para]

    bullet_list = nodes.bullet_list()

    for slug in sorted(accel_slugs):
        item = nodes.list_item()
        para = nodes.paragraph()

        accel_path = f"{prefix}{config.get_doc_path('accelerators')}/{slug}.html"
        ref = nodes.reference("", "", refuri=accel_path)
        ref += nodes.Text(slug.replace("-", " ").title())
        para += ref

        if slug in accelerator_registry:
            objective = accelerator_registry[slug].get("objective", "")
            if objective:
                # Bug fix: only append an ellipsis when the objective was
                # actually truncated (the original unconditionally appended
                # "..." even to objectives shorter than 60 characters).
                excerpt = (
                    objective[:60] + "..." if len(objective) > 60 else objective
                )
                para += nodes.Text(f" — {excerpt}")

        item += para
        bullet_list += item

    return [bullet_list]
|
|
697
|
-
|
|
698
|
-
|
|
699
|
-
def build_dependent_accelerators(
    integration_slug: str, relationship: str, docname: str, env
) -> list:
    """Build table of accelerators that depend on or publish to an integration.

    Scans the accelerator registry for accelerators whose ``relationship``
    list (``sources_from`` or ``publishes_to``) mentions ``integration_slug``
    and renders them as a two-column table: accelerator link, description.
    """
    config = get_config()
    registry = get_accelerator_registry(env)

    prefix = path_to_root(docname)

    # For each accelerator, take the first relationship entry that points
    # at this integration (mirrors the original early-break behavior).
    matches = []
    for accel_slug, accel in registry.items():
        hit = next(
            (
                rel
                for rel in accel.get(relationship, [])
                if rel["slug"] == integration_slug
            ),
            None,
        )
        if hit is not None:
            matches.append(
                {"slug": accel_slug, "description": hit.get("description")}
            )

    if not matches:
        para = nodes.paragraph()
        para += nodes.emphasis(text="No accelerators found")
        return [para]

    table = nodes.table()
    tgroup = nodes.tgroup(cols=2)
    table += tgroup

    tgroup += nodes.colspec(colwidth=30)
    tgroup += nodes.colspec(colwidth=70)

    head = nodes.thead()
    tgroup += head
    head_row = nodes.row()
    head += head_row

    first_col = nodes.entry()
    first_col += nodes.paragraph(text="Accelerator")
    head_row += first_col

    second_col = nodes.entry()
    if relationship == "sources_from":
        second_col += nodes.paragraph(text="What it sources")
    else:
        second_col += nodes.paragraph(text="What it publishes")
    head_row += second_col

    body = nodes.tbody()
    tgroup += body

    for match in sorted(matches, key=lambda m: m["slug"]):
        row = nodes.row()
        body += row

        name_cell = nodes.entry()
        name_para = nodes.paragraph()
        href = f"{prefix}{config.get_doc_path('accelerators')}/{match['slug']}.html"
        link = nodes.reference("", "", refuri=href)
        link += nodes.strong(text=match["slug"].replace("-", " ").title())
        name_para += link
        name_cell += name_para
        row += name_cell

        info_cell = nodes.entry()
        info_para = nodes.paragraph()
        if match["description"]:
            info_para += nodes.Text(match["description"])
        else:
            info_para += nodes.emphasis(text="(not specified)")
        info_cell += info_para
        row += info_cell

    return [table]
|
|
777
|
-
|
|
778
|
-
|
|
779
|
-
def build_accelerator_dependency_diagram(slug: str, docname: str, env) -> list:
    """Build PlantUML component diagram for an accelerator.

    Renders the accelerator, the apps that expose it, and its integration
    dependencies (sources and publish targets) as a single PlantUML node.
    """
    from sphinxcontrib.plantuml import plantuml

    registry = get_accelerator_registry(env)

    if slug not in registry:
        para = nodes.paragraph()
        para += nodes.problematic(text=f"Accelerator '{slug}' not defined")
        return [para]

    accel = registry[slug]
    apps = get_apps_for_accelerator(slug)
    sources_from = accel.get("sources_from", [])
    publishes_to = accel.get("publishes_to", [])

    def safe_id(name):
        # PlantUML identifiers may not contain dashes or spaces.
        return name.replace("-", "_").replace(" ", "_")

    def edge_label(rel, fallback):
        # Prefer the relationship description (clipped to 30 chars),
        # falling back to a generic verb phrase.
        desc = rel.get("description")
        if not desc:
            return fallback
        return desc[:27] + "..." if len(desc) > 30 else desc

    lines = [
        "@startuml",
        "skinparam componentStyle rectangle",
        "skinparam defaultTextAlignment center",
        "skinparam component {",
        " BackgroundColor<<accelerator>> LightBlue",
        " BackgroundColor<<app>> LightGreen",
        " BackgroundColor<<integration>> LightYellow",
        "}",
        "",
    ]

    accel_title = slug.replace("-", " ").title()
    lines += [f'title "{accel_title}" Dependencies', ""]

    if apps:
        lines.append("' Applications that expose this accelerator")
        for app_slug in apps:
            app_name = app_slug.replace("-", " ").title()
            lines.append(f'component "{app_name}" as {safe_id(app_slug)} <<app>>')
        lines.append("")

    lines.append("' The accelerator (bounded context)")
    accel_id = safe_id(slug)
    lines.append(f'component "{accel_title}" as {accel_id} <<accelerator>>')
    lines.append("")

    if sources_from or publishes_to:
        lines.append("' Integration dependencies")

        for source in sources_from:
            source_name = source["slug"].replace("-", " ").title()
            lines.append(
                f'component "{source_name}" as {safe_id(source["slug"])} <<integration>>'
            )

        # Don't re-declare components already declared as sources.
        declared = {s["slug"] for s in sources_from}
        for target in publishes_to:
            if target["slug"] not in declared:
                target_name = target["slug"].replace("-", " ").title()
                lines.append(
                    f'component "{target_name}" as {safe_id(target["slug"])} <<integration>>'
                )

        lines.append("")

    lines.append("' Dependencies")

    for app_slug in apps:
        lines.append(f"{safe_id(app_slug)} --> {accel_id} : exposes")

    for source in sources_from:
        label = edge_label(source, "sources from")
        lines.append(f'{accel_id} --> {safe_id(source["slug"])} : "{label}"')

    for target in publishes_to:
        label = edge_label(target, "publishes to")
        lines.append(f'{accel_id} --> {safe_id(target["slug"])} : "{label}"')

    lines += ["", "@enduml"]

    puml_source = "\n".join(lines)

    node = plantuml(puml_source)
    node["uml"] = puml_source
    node["incdir"] = os.path.dirname(docname)
    node["filename"] = os.path.basename(docname) + ".rst"

    return [node]
|
|
884
|
-
|
|
885
|
-
|
|
886
|
-
def build_accelerator_backlinks(slug: str, docname: str, env) -> nodes.Element:
    """Build seealso node with backlinks for an accelerator.

    Links back to the accelerator page itself, every app that exposes it,
    and the story pages of (at most) the first two of those apps.
    """
    from sphinx.addnodes import seealso

    from . import apps

    config = get_config()
    registry = get_accelerator_registry(env)
    app_registry = apps.get_app_registry()

    prefix = path_to_root(docname)

    def clip(text):
        # Keep descriptions to at most 120 characters.
        return text[:117] + "..." if len(text) > 120 else text

    entries = []

    accel_href = f"{prefix}{config.get_doc_path('accelerators')}/{slug}.html"
    objective = registry.get(slug, {}).get("objective", "").strip()
    if not objective:
        objective = f"Business accelerator for {slug.replace('-', ' ')} capabilities"
    entries.append(
        (accel_href, f"{slug.replace('-', ' ').title()} Accelerator", clip(objective))
    )

    exposing_apps = get_apps_for_accelerator(slug)
    for app_slug in exposing_apps:
        app_href = f"{prefix}{config.get_doc_path('applications')}/{app_slug}.html"
        app_data = app_registry.get(app_slug, {})
        entries.append(
            (
                app_href,
                app_data.get("name", app_slug.replace("-", " ").title()),
                clip(app_data.get("description", "Application documentation")),
            )
        )

    # Story links only for the first two exposing apps.
    for app_slug in exposing_apps[:2]:
        story_href = f"{prefix}{config.get_doc_path('stories')}/{app_slug}.html"
        entries.append(
            (
                story_href,
                f"{app_slug.replace('-', ' ').title()} Stories",
                "User stories",
            )
        )

    result = seealso()
    listing = nodes.definition_list()
    for href, title, description in entries:
        row = nodes.definition_list_item()

        term = nodes.term()
        link = nodes.reference("", "", refuri=href)
        link += nodes.Text(title)
        term += link
        row += term

        body = nodes.definition()
        para = nodes.paragraph()
        para += nodes.Text(description)
        body += para
        row += body

        listing += row

    result += listing
    return result
|
|
959
|
-
|
|
960
|
-
|
|
961
|
-
def build_app_backlinks(app_slug: str, docname: str, env) -> nodes.Element:
    """Build seealso node with backlinks for an app.

    Links back to the app page, up to four of its accelerators, the app's
    story page (when stories exist for it), and up to three journeys that
    reference one of the app's stories.
    """
    from sphinx.addnodes import seealso

    from . import apps, journeys, stories

    config = get_config()
    accel_registry = get_accelerator_registry(env)
    app_registry = apps.get_app_registry()
    apps_with_stories = stories.get_apps_with_stories()

    prefix = path_to_root(docname)

    def clip(text):
        # Keep descriptions to at most 120 characters.
        return text[:117] + "..." if len(text) > 120 else text

    entries = []

    app_data = app_registry.get(app_slug)

    if app_data:
        app_href = f"{prefix}{config.get_doc_path('applications')}/{app_slug}.html"
        entries.append(
            (
                app_href,
                app_data.get("name", app_slug.replace("-", " ").title()),
                clip(app_data.get("description", "Application documentation")),
            )
        )

        # Link up to four accelerators exposed by this app.
        for accel_slug in app_data.get("accelerators", [])[:4]:
            accel_href = (
                f"{prefix}{config.get_doc_path('accelerators')}/{accel_slug}.html"
            )
            objective = (
                accel_registry.get(accel_slug, {}).get("objective", "").strip()
            )
            if not objective:
                objective = f"Business accelerator for {accel_slug.replace('-', ' ')} capabilities"
            entries.append(
                (
                    accel_href,
                    f"{accel_slug.replace('-', ' ').title()} Accelerator",
                    clip(objective),
                )
            )

    # Story page link, only when this app actually has stories.
    if normalize_name(app_slug) in {normalize_name(a) for a in apps_with_stories}:
        story_href = f"{prefix}{config.get_doc_path('stories')}/{app_slug}.html"
        entries.append(
            (
                story_href,
                f"{app_slug.replace('-', ' ').title()} Stories",
                "User stories",
            )
        )

    def journeys_for_app(slug):
        # A journey qualifies when any of its story steps references a
        # feature belonging to this app's stories.
        journey_registry = journeys.get_journey_registry(env)
        story_registry = stories.get_story_registry()
        features = {
            normalize_name(s["feature"])
            for s in story_registry
            if normalize_name(s["app"]) == normalize_name(slug)
        }

        found = []
        for j_slug, journey in journey_registry.items():
            for step in journey.get("steps", []):
                if step.get("type") == "story":
                    if normalize_name(step["ref"]) in features:
                        found.append(j_slug)
                        break
        return sorted(set(found))

    for journey_slug in journeys_for_app(app_slug)[:3]:
        journey_href = f"{prefix}{config.get_doc_path('journeys')}/{journey_slug}.html"
        entries.append(
            (journey_href, f"{journey_slug.replace('-', ' ').title()}", "User journey")
        )

    result = seealso()
    listing = nodes.definition_list()
    for href, title, description in entries:
        row = nodes.definition_list_item()

        term = nodes.term()
        link = nodes.reference("", "", refuri=href)
        link += nodes.Text(title)
        term += link
        row += term

        body = nodes.definition()
        para = nodes.paragraph()
        para += nodes.Text(description)
        body += para
        row += body

        listing += row

    result += listing
    return result
|
|
1069
|
-
|
|
1070
|
-
|
|
1071
|
-
def validate_accelerators(app, env):
    """Validate accelerator coverage after all documents are read.

    Warns when an app manifest references an accelerator with no docs
    page, and logs (at info level) documented accelerators that have no
    code scaffolding yet under src/.
    """
    from . import apps

    app_registry = apps.get_app_registry()
    documented = get_documented_accelerators(env)

    # Every accelerator slug referenced by any app manifest.
    referenced = {
        accel
        for app_data in app_registry.values()
        for accel in app_data.get("accelerators", [])
    }

    for accel in referenced:
        if accel not in documented:
            logger.warning(
                f"Accelerator '{accel}' in app manifest has no docs page. "
                f"Create domain/accelerators/{accel}.rst with '.. define-accelerator:: {accel}' "
                f"(or run 'make clean html' if the file exists)"
            )

    for slug in documented:
        snake_slug = kebab_to_snake(slug)
        # Code may live under either the kebab-case or snake_case slug.
        if slug not in _code_registry and snake_slug not in _code_registry:
            logger.info(
                f"Accelerator '{slug}' has no code yet (expected at src/{snake_slug}/)"
            )
|
|
1097
|
-
|
|
1098
|
-
|
|
1099
|
-
def process_accelerator_placeholders(app, doctree, docname):
    """Replace all accelerator placeholders after all documents are read.

    Each placeholder node type is paired with a builder that produces the
    replacement node list; traversal order matches the original per-type
    sweeps over the doctree.
    """
    env = app.env

    # (placeholder class, builder) pairs; each builder receives the
    # placeholder node and returns the replacement node(s).
    dispatch = [
        (
            DefineAcceleratorPlaceholder,
            lambda n: build_accelerator_content(n["accelerator_slug"], docname, env),
        ),
        (
            AcceleratorStatusPlaceholder,
            lambda n: build_accelerator_status(n["accelerator_slug"], env),
        ),
        (
            AcceleratorIndexPlaceholder,
            lambda n: build_accelerator_index(docname, env),
        ),
        (
            AcceleratorsForAppPlaceholder,
            lambda n: build_accelerators_for_app(n["app_slug"], docname, env),
        ),
        (
            DependentAcceleratorsPlaceholder,
            lambda n: build_dependent_accelerators(
                n["integration_slug"], n["relationship"], docname, env
            ),
        ),
        (
            AcceleratorDependencyDiagramPlaceholder,
            lambda n: build_accelerator_dependency_diagram(
                n["accelerator_slug"], docname, env
            ),
        ),
        (
            SrcAcceleratorBacklinksPlaceholder,
            lambda n: [
                build_accelerator_backlinks(n["accelerator_slug"], docname, env)
            ],
        ),
        (
            SrcAppBacklinksPlaceholder,
            lambda n: [build_app_backlinks(n["app_slug"], docname, env)],
        ),
    ]

    for placeholder_cls, builder in dispatch:
        for node in doctree.traverse(placeholder_cls):
            node.replace_self(builder(node))
|
|
1144
|
-
|
|
1145
|
-
|
|
1146
|
-
def setup(app):
    """Register event handlers, directives, and nodes for this extension.

    Returns the standard Sphinx extension metadata dict; parallel reads
    are disabled because the registries are mutated during the read phase.
    """
    app.connect("builder-inited", scan_code_structure)
    app.connect("env-check-consistency", validate_accelerators)
    app.connect("doctree-resolved", process_accelerator_placeholders)

    directives = {
        "define-accelerator": DefineAcceleratorDirective,
        "accelerator-index": AcceleratorIndexDirective,
        "accelerator-status": AcceleratorStatusDirective,
        "accelerators-for-app": AcceleratorsForAppDirective,
        "dependent-accelerators": DependentAcceleratorsDirective,
        "accelerator-dependency-diagram": AcceleratorDependencyDiagramDirective,
        "src-accelerator-backlinks": SrcAcceleratorBacklinksDirective,
        "src-app-backlinks": SrcAppBacklinksDirective,
    }
    for directive_name, directive_cls in directives.items():
        app.add_directive(directive_name, directive_cls)

    placeholder_nodes = (
        DefineAcceleratorPlaceholder,
        AcceleratorIndexPlaceholder,
        AcceleratorStatusPlaceholder,
        AcceleratorsForAppPlaceholder,
        DependentAcceleratorsPlaceholder,
        AcceleratorDependencyDiagramPlaceholder,
        SrcAcceleratorBacklinksPlaceholder,
        SrcAppBacklinksPlaceholder,
    )
    for node_cls in placeholder_nodes:
        app.add_node(node_cls)

    return {
        "version": "1.0",
        "parallel_read_safe": False,
        "parallel_write_safe": True,
    }
|