julee 0.1.4-py3-none-any.whl → 0.1.6-py3-none-any.whl
This diff shows the changes between publicly released package versions as they appear in their public registry. It is provided for informational purposes only.
- julee/__init__.py +1 -1
- julee/api/tests/routers/test_assembly_specifications.py +2 -0
- julee/api/tests/routers/test_documents.py +2 -0
- julee/api/tests/routers/test_knowledge_service_configs.py +2 -0
- julee/api/tests/routers/test_knowledge_service_queries.py +2 -0
- julee/api/tests/routers/test_system.py +2 -0
- julee/api/tests/routers/test_workflows.py +2 -0
- julee/api/tests/test_app.py +2 -0
- julee/api/tests/test_dependencies.py +2 -0
- julee/api/tests/test_requests.py +2 -0
- julee/contrib/polling/__init__.py +22 -19
- julee/contrib/polling/apps/__init__.py +17 -0
- julee/contrib/polling/apps/worker/__init__.py +17 -0
- julee/contrib/polling/apps/worker/pipelines.py +288 -0
- julee/contrib/polling/domain/__init__.py +7 -9
- julee/contrib/polling/domain/models/__init__.py +6 -7
- julee/contrib/polling/domain/models/polling_config.py +18 -1
- julee/contrib/polling/domain/services/__init__.py +6 -5
- julee/contrib/polling/domain/services/poller.py +1 -1
- julee/contrib/polling/infrastructure/__init__.py +9 -8
- julee/contrib/polling/infrastructure/services/__init__.py +6 -5
- julee/contrib/polling/infrastructure/services/polling/__init__.py +6 -5
- julee/contrib/polling/infrastructure/services/polling/http/__init__.py +6 -5
- julee/contrib/polling/infrastructure/services/polling/http/http_poller_service.py +5 -2
- julee/contrib/polling/infrastructure/temporal/__init__.py +12 -12
- julee/contrib/polling/infrastructure/temporal/activities.py +1 -1
- julee/contrib/polling/infrastructure/temporal/manager.py +291 -0
- julee/contrib/polling/infrastructure/temporal/proxies.py +1 -1
- julee/contrib/polling/tests/unit/apps/worker/test_pipelines.py +580 -0
- julee/contrib/polling/tests/unit/infrastructure/services/polling/http/test_http_poller_service.py +40 -2
- julee/contrib/polling/tests/unit/infrastructure/temporal/__init__.py +7 -0
- julee/contrib/polling/tests/unit/infrastructure/temporal/test_manager.py +475 -0
- julee/docs/sphinx_hcd/__init__.py +146 -13
- julee/docs/sphinx_hcd/domain/__init__.py +5 -0
- julee/docs/sphinx_hcd/domain/models/__init__.py +32 -0
- julee/docs/sphinx_hcd/domain/models/accelerator.py +152 -0
- julee/docs/sphinx_hcd/domain/models/app.py +151 -0
- julee/docs/sphinx_hcd/domain/models/code_info.py +121 -0
- julee/docs/sphinx_hcd/domain/models/epic.py +79 -0
- julee/docs/sphinx_hcd/domain/models/integration.py +230 -0
- julee/docs/sphinx_hcd/domain/models/journey.py +222 -0
- julee/docs/sphinx_hcd/domain/models/persona.py +106 -0
- julee/docs/sphinx_hcd/domain/models/story.py +128 -0
- julee/docs/sphinx_hcd/domain/repositories/__init__.py +25 -0
- julee/docs/sphinx_hcd/domain/repositories/accelerator.py +98 -0
- julee/docs/sphinx_hcd/domain/repositories/app.py +57 -0
- julee/docs/sphinx_hcd/domain/repositories/base.py +89 -0
- julee/docs/sphinx_hcd/domain/repositories/code_info.py +69 -0
- julee/docs/sphinx_hcd/domain/repositories/epic.py +62 -0
- julee/docs/sphinx_hcd/domain/repositories/integration.py +79 -0
- julee/docs/sphinx_hcd/domain/repositories/journey.py +106 -0
- julee/docs/sphinx_hcd/domain/repositories/story.py +68 -0
- julee/docs/sphinx_hcd/domain/use_cases/__init__.py +64 -0
- julee/docs/sphinx_hcd/domain/use_cases/derive_personas.py +166 -0
- julee/docs/sphinx_hcd/domain/use_cases/resolve_accelerator_references.py +236 -0
- julee/docs/sphinx_hcd/domain/use_cases/resolve_app_references.py +144 -0
- julee/docs/sphinx_hcd/domain/use_cases/resolve_story_references.py +121 -0
- julee/docs/sphinx_hcd/parsers/__init__.py +48 -0
- julee/docs/sphinx_hcd/parsers/ast.py +150 -0
- julee/docs/sphinx_hcd/parsers/gherkin.py +155 -0
- julee/docs/sphinx_hcd/parsers/yaml.py +184 -0
- julee/docs/sphinx_hcd/repositories/__init__.py +4 -0
- julee/docs/sphinx_hcd/repositories/memory/__init__.py +25 -0
- julee/docs/sphinx_hcd/repositories/memory/accelerator.py +86 -0
- julee/docs/sphinx_hcd/repositories/memory/app.py +45 -0
- julee/docs/sphinx_hcd/repositories/memory/base.py +106 -0
- julee/docs/sphinx_hcd/repositories/memory/code_info.py +59 -0
- julee/docs/sphinx_hcd/repositories/memory/epic.py +54 -0
- julee/docs/sphinx_hcd/repositories/memory/integration.py +70 -0
- julee/docs/sphinx_hcd/repositories/memory/journey.py +96 -0
- julee/docs/sphinx_hcd/repositories/memory/story.py +63 -0
- julee/docs/sphinx_hcd/sphinx/__init__.py +28 -0
- julee/docs/sphinx_hcd/sphinx/adapters.py +116 -0
- julee/docs/sphinx_hcd/sphinx/context.py +163 -0
- julee/docs/sphinx_hcd/sphinx/directives/__init__.py +160 -0
- julee/docs/sphinx_hcd/sphinx/directives/accelerator.py +576 -0
- julee/docs/sphinx_hcd/sphinx/directives/app.py +349 -0
- julee/docs/sphinx_hcd/sphinx/directives/base.py +211 -0
- julee/docs/sphinx_hcd/sphinx/directives/epic.py +434 -0
- julee/docs/sphinx_hcd/sphinx/directives/integration.py +220 -0
- julee/docs/sphinx_hcd/sphinx/directives/journey.py +642 -0
- julee/docs/sphinx_hcd/sphinx/directives/persona.py +345 -0
- julee/docs/sphinx_hcd/sphinx/directives/story.py +575 -0
- julee/docs/sphinx_hcd/sphinx/event_handlers/__init__.py +16 -0
- julee/docs/sphinx_hcd/sphinx/event_handlers/builder_inited.py +31 -0
- julee/docs/sphinx_hcd/sphinx/event_handlers/doctree_read.py +27 -0
- julee/docs/sphinx_hcd/sphinx/event_handlers/doctree_resolved.py +43 -0
- julee/docs/sphinx_hcd/sphinx/event_handlers/env_purge_doc.py +42 -0
- julee/docs/sphinx_hcd/sphinx/initialization.py +139 -0
- julee/docs/sphinx_hcd/tests/__init__.py +9 -0
- julee/docs/sphinx_hcd/tests/conftest.py +6 -0
- julee/docs/sphinx_hcd/tests/domain/__init__.py +1 -0
- julee/docs/sphinx_hcd/tests/domain/models/__init__.py +1 -0
- julee/docs/sphinx_hcd/tests/domain/models/test_accelerator.py +266 -0
- julee/docs/sphinx_hcd/tests/domain/models/test_app.py +258 -0
- julee/docs/sphinx_hcd/tests/domain/models/test_code_info.py +231 -0
- julee/docs/sphinx_hcd/tests/domain/models/test_epic.py +163 -0
- julee/docs/sphinx_hcd/tests/domain/models/test_integration.py +327 -0
- julee/docs/sphinx_hcd/tests/domain/models/test_journey.py +249 -0
- julee/docs/sphinx_hcd/tests/domain/models/test_persona.py +172 -0
- julee/docs/sphinx_hcd/tests/domain/models/test_story.py +216 -0
- julee/docs/sphinx_hcd/tests/domain/use_cases/__init__.py +1 -0
- julee/docs/sphinx_hcd/tests/domain/use_cases/test_derive_personas.py +314 -0
- julee/docs/sphinx_hcd/tests/domain/use_cases/test_resolve_accelerator_references.py +476 -0
- julee/docs/sphinx_hcd/tests/domain/use_cases/test_resolve_app_references.py +265 -0
- julee/docs/sphinx_hcd/tests/domain/use_cases/test_resolve_story_references.py +229 -0
- julee/docs/sphinx_hcd/tests/integration/__init__.py +1 -0
- julee/docs/sphinx_hcd/tests/parsers/__init__.py +1 -0
- julee/docs/sphinx_hcd/tests/parsers/test_ast.py +298 -0
- julee/docs/sphinx_hcd/tests/parsers/test_gherkin.py +282 -0
- julee/docs/sphinx_hcd/tests/parsers/test_yaml.py +496 -0
- julee/docs/sphinx_hcd/tests/repositories/__init__.py +1 -0
- julee/docs/sphinx_hcd/tests/repositories/test_accelerator.py +298 -0
- julee/docs/sphinx_hcd/tests/repositories/test_app.py +218 -0
- julee/docs/sphinx_hcd/tests/repositories/test_base.py +151 -0
- julee/docs/sphinx_hcd/tests/repositories/test_code_info.py +253 -0
- julee/docs/sphinx_hcd/tests/repositories/test_epic.py +237 -0
- julee/docs/sphinx_hcd/tests/repositories/test_integration.py +268 -0
- julee/docs/sphinx_hcd/tests/repositories/test_journey.py +294 -0
- julee/docs/sphinx_hcd/tests/repositories/test_story.py +236 -0
- julee/docs/sphinx_hcd/tests/sphinx/__init__.py +1 -0
- julee/docs/sphinx_hcd/tests/sphinx/directives/__init__.py +1 -0
- julee/docs/sphinx_hcd/tests/sphinx/directives/test_base.py +160 -0
- julee/docs/sphinx_hcd/tests/sphinx/test_adapters.py +176 -0
- julee/docs/sphinx_hcd/tests/sphinx/test_context.py +257 -0
- julee/domain/models/assembly/tests/test_assembly.py +2 -0
- julee/domain/models/assembly_specification/tests/test_assembly_specification.py +2 -0
- julee/domain/models/assembly_specification/tests/test_knowledge_service_query.py +2 -0
- julee/domain/models/custom_fields/tests/test_custom_fields.py +2 -0
- julee/domain/models/document/tests/test_document.py +2 -0
- julee/domain/models/policy/tests/test_document_policy_validation.py +2 -0
- julee/domain/models/policy/tests/test_policy.py +2 -0
- julee/domain/use_cases/tests/test_extract_assemble_data.py +2 -0
- julee/domain/use_cases/tests/test_initialize_system_data.py +2 -0
- julee/domain/use_cases/tests/test_validate_document.py +2 -0
- julee/maintenance/release.py +10 -5
- julee/repositories/memory/tests/test_document.py +2 -0
- julee/repositories/memory/tests/test_document_policy_validation.py +2 -0
- julee/repositories/memory/tests/test_policy.py +2 -0
- julee/repositories/minio/tests/test_assembly.py +2 -0
- julee/repositories/minio/tests/test_assembly_specification.py +2 -0
- julee/repositories/minio/tests/test_client_protocol.py +3 -0
- julee/repositories/minio/tests/test_document.py +2 -0
- julee/repositories/minio/tests/test_document_policy_validation.py +2 -0
- julee/repositories/minio/tests/test_knowledge_service_config.py +2 -0
- julee/repositories/minio/tests/test_knowledge_service_query.py +2 -0
- julee/repositories/minio/tests/test_policy.py +2 -0
- julee/services/knowledge_service/anthropic/tests/test_knowledge_service.py +2 -0
- julee/services/knowledge_service/memory/test_knowledge_service.py +2 -0
- julee/services/knowledge_service/test_factory.py +2 -0
- julee/util/tests/test_decorators.py +2 -0
- julee-0.1.6.dist-info/METADATA +104 -0
- julee-0.1.6.dist-info/RECORD +288 -0
- julee/docs/sphinx_hcd/accelerators.py +0 -1175
- julee/docs/sphinx_hcd/apps.py +0 -518
- julee/docs/sphinx_hcd/epics.py +0 -453
- julee/docs/sphinx_hcd/integrations.py +0 -310
- julee/docs/sphinx_hcd/journeys.py +0 -797
- julee/docs/sphinx_hcd/personas.py +0 -457
- julee/docs/sphinx_hcd/stories.py +0 -960
- julee-0.1.4.dist-info/METADATA +0 -197
- julee-0.1.4.dist-info/RECORD +0 -196
- {julee-0.1.4.dist-info → julee-0.1.6.dist-info}/WHEEL +0 -0
- {julee-0.1.4.dist-info → julee-0.1.6.dist-info}/licenses/LICENSE +0 -0
- {julee-0.1.4.dist-info → julee-0.1.6.dist-info}/top_level.txt +0 -0
julee/docs/sphinx_hcd/domain/use_cases/resolve_story_references.py
@@ -0,0 +1,121 @@
+"""Use case for resolving story references.
+
+Finds epics and journeys that reference a specific story.
+"""
+
+from ...utils import normalize_name
+from ..models.epic import Epic
+from ..models.journey import Journey
+from ..models.story import Story
+
+
+def get_epics_for_story(
+    story: Story,
+    epics: list[Epic],
+) -> list[Epic]:
+    """Get epics that contain a specific story.
+
+    Args:
+        story: Story to find epics for
+        epics: All Epic entities to search
+
+    Returns:
+        List of Epic entities containing this story, sorted by slug
+    """
+    story_normalized = normalize_name(story.feature_title)
+    matching = []
+
+    for epic in epics:
+        if any(normalize_name(ref) == story_normalized for ref in epic.story_refs):
+            matching.append(epic)
+
+    return sorted(matching, key=lambda e: e.slug)
+
+
+def get_journeys_for_story(
+    story: Story,
+    journeys: list[Journey],
+) -> list[Journey]:
+    """Get journeys that reference a specific story.
+
+    Args:
+        story: Story to find journeys for
+        journeys: All Journey entities to search
+
+    Returns:
+        List of Journey entities containing this story, sorted by slug
+    """
+    story_normalized = normalize_name(story.feature_title)
+    matching = []
+
+    for journey in journeys:
+        story_refs = journey.get_story_refs()
+        if any(normalize_name(ref) == story_normalized for ref in story_refs):
+            matching.append(journey)
+
+    return sorted(matching, key=lambda j: j.slug)
+
+
+def get_related_stories(
+    story: Story,
+    stories: list[Story],
+    epics: list[Epic],
+) -> list[Story]:
+    """Get stories related to a story via shared epics.
+
+    Finds other stories that are in the same epic(s) as the given story.
+
+    Args:
+        story: Story to find related stories for
+        stories: All Story entities
+        epics: All Epic entities
+
+    Returns:
+        List of related Story entities (excluding the input story), sorted by feature_title
+    """
+    # Find epics containing this story
+    story_epics = get_epics_for_story(story, epics)
+
+    # Collect all story refs from those epics
+    related_refs: set[str] = set()
+    for epic in story_epics:
+        for ref in epic.story_refs:
+            related_refs.add(normalize_name(ref))
+
+    # Remove the original story
+    story_normalized = normalize_name(story.feature_title)
+    related_refs.discard(story_normalized)
+
+    # Find matching stories
+    related = []
+    for s in stories:
+        if normalize_name(s.feature_title) in related_refs:
+            related.append(s)
+
+    return sorted(related, key=lambda s: s.feature_title)
+
+
+def get_story_cross_references(
+    story: Story,
+    stories: list[Story],
+    epics: list[Epic],
+    journeys: list[Journey],
+) -> dict:
+    """Get all cross-references for a story.
+
+    Convenience function to get all related entities at once.
+
+    Args:
+        story: Story to find references for
+        stories: All Story entities
+        epics: All Epic entities
+        journeys: All Journey entities
+
+    Returns:
+        Dict with keys: epics, journeys, related_stories
+    """
+    return {
+        "epics": get_epics_for_story(story, epics),
+        "journeys": get_journeys_for_story(story, journeys),
+        "related_stories": get_related_stories(story, stories, epics),
+    }
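A hedged usage sketch of the new use case: the functions above only read story.feature_title, epic.story_refs, epic.slug, journey.get_story_refs() and journey.slug, so the lookup can be illustrated with duck-typed stand-ins. The SimpleNamespace objects, slugs and titles below are invented for illustration and are not the real Epic/Story models from domain/models; the snippet assumes julee 0.1.6 (with its Sphinx dependencies) is importable.

    # Illustrative only: stand-ins expose just the attributes the use case reads.
    from types import SimpleNamespace

    from julee.docs.sphinx_hcd.domain.use_cases.resolve_story_references import (
        get_epics_for_story,
    )

    story = SimpleNamespace(feature_title="Poll HTTP Endpoint")
    epic_a = SimpleNamespace(slug="ingestion", story_refs=["Poll HTTP Endpoint"])
    epic_b = SimpleNamespace(slug="publishing", story_refs=["Publish Assembly"])

    # Matching compares normalize_name(...) of the story title against each
    # epic's story_refs, so only epic_a should match here.
    print([e.slug for e in get_epics_for_story(story, [epic_a, epic_b])])
    # expected: ['ingestion']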
julee/docs/sphinx_hcd/parsers/__init__.py
@@ -0,0 +1,48 @@
+"""Parsers for sphinx_hcd.
+
+Contains parsing logic for:
+- gherkin.py: Feature file parsing (.feature files)
+- yaml.py: App and integration manifest parsing
+- ast.py: Python code introspection for accelerators
+"""
+
+from .ast import (
+    parse_bounded_context,
+    parse_module_docstring,
+    parse_python_classes,
+    scan_bounded_contexts,
+)
+from .gherkin import (
+    ParsedFeature,
+    parse_feature_content,
+    parse_feature_file,
+    scan_feature_directory,
+)
+from .yaml import (
+    parse_app_manifest,
+    parse_integration_manifest,
+    parse_manifest_content,
+    scan_app_manifests,
+    scan_integration_manifests,
+)
+
+__all__ = [
+    # AST - Python introspection
+    "parse_bounded_context",
+    "parse_module_docstring",
+    "parse_python_classes",
+    "scan_bounded_contexts",
+    # Gherkin
+    "ParsedFeature",
+    "parse_feature_content",
+    "parse_feature_file",
+    "scan_feature_directory",
+    # YAML - Apps
+    "parse_app_manifest",
+    "scan_app_manifests",
+    # YAML - Integrations
+    "parse_integration_manifest",
+    "scan_integration_manifests",
+    # YAML - Common
+    "parse_manifest_content",
+]
julee/docs/sphinx_hcd/parsers/ast.py
@@ -0,0 +1,150 @@
+"""Python code introspection parser.
+
+Parses Python source files using AST to extract class information
+for ADR 001-compliant bounded contexts.
+"""
+
+import ast
+import logging
+from pathlib import Path
+
+from ..domain.models.code_info import BoundedContextInfo, ClassInfo
+
+logger = logging.getLogger(__name__)
+
+
+def parse_python_classes(directory: Path) -> list[ClassInfo]:
+    """Extract class information from Python files in a directory using AST.
+
+    Args:
+        directory: Directory to scan for .py files
+
+    Returns:
+        List of ClassInfo objects sorted by class name
+    """
+    if not directory.exists():
+        return []
+
+    classes = []
+    for py_file in directory.glob("*.py"):
+        if py_file.name.startswith("_"):
+            continue
+
+        try:
+            source = py_file.read_text()
+            tree = ast.parse(source, filename=str(py_file))
+
+            for node in ast.walk(tree):
+                if isinstance(node, ast.ClassDef):
+                    docstring = ast.get_docstring(node) or ""
+                    first_line = docstring.split("\n")[0].strip() if docstring else ""
+                    classes.append(
+                        ClassInfo(
+                            name=node.name,
+                            docstring=first_line,
+                            file=py_file.name,
+                        )
+                    )
+        except SyntaxError as e:
+            logger.warning(f"Syntax error in {py_file}: {e}")
+        except Exception as e:
+            logger.warning(f"Could not parse {py_file}: {e}")
+
+    return sorted(classes, key=lambda c: c.name)
+
+
+def parse_module_docstring(module_path: Path) -> tuple[str | None, str | None]:
+    """Extract module docstring from a Python file using AST.
+
+    Args:
+        module_path: Path to Python file
+
+    Returns:
+        Tuple of (first_line, full_docstring) or (None, None) if not found
+    """
+    if not module_path.exists():
+        return None, None
+
+    try:
+        source = module_path.read_text()
+        tree = ast.parse(source, filename=str(module_path))
+        docstring = ast.get_docstring(tree)
+        if docstring:
+            first_line = docstring.split("\n")[0].strip()
+            return first_line, docstring
+    except SyntaxError as e:
+        logger.warning(f"Syntax error in {module_path}: {e}")
+    except Exception as e:
+        logger.warning(f"Could not parse {module_path}: {e}")
+
+    return None, None
+
+
+def parse_bounded_context(context_dir: Path) -> BoundedContextInfo | None:
+    """Introspect a bounded context directory for ADR 001-compliant code structure.
+
+    Expected directory structure:
+    - context_dir/
+        - __init__.py (module docstring becomes objective)
+        - domain/
+            - models/ (entities)
+            - repositories/ (repository protocols)
+            - services/ (service protocols)
+        - use_cases/ (use case classes)
+        - infrastructure/ (optional)
+
+    Args:
+        context_dir: Path to the bounded context directory
+
+    Returns:
+        BoundedContextInfo if directory exists, None otherwise
+    """
+    if not context_dir.exists() or not context_dir.is_dir():
+        return None
+
+    init_file = context_dir / "__init__.py"
+    objective, full_docstring = parse_module_docstring(init_file)
+
+    return BoundedContextInfo(
+        slug=context_dir.name,
+        entities=parse_python_classes(context_dir / "domain" / "models"),
+        use_cases=parse_python_classes(context_dir / "use_cases"),
+        repository_protocols=parse_python_classes(
+            context_dir / "domain" / "repositories"
+        ),
+        service_protocols=parse_python_classes(context_dir / "domain" / "services"),
+        has_infrastructure=(context_dir / "infrastructure").exists(),
+        code_dir=context_dir.name,
+        objective=objective,
+        docstring=full_docstring,
+    )
+
+
+def scan_bounded_contexts(src_dir: Path) -> list[BoundedContextInfo]:
+    """Scan a source directory for all bounded contexts.
+
+    Args:
+        src_dir: Root source directory (e.g., project/src/)
+
+    Returns:
+        List of BoundedContextInfo objects for all discovered contexts
+    """
+    if not src_dir.exists():
+        logger.info(f"Source directory not found: {src_dir}")
+        return []
+
+    contexts = []
+    for context_dir in src_dir.iterdir():
+        if not context_dir.is_dir():
+            continue
+        if context_dir.name.startswith((".", "_")):
+            continue
+
+        context_info = parse_bounded_context(context_dir)
+        if context_info:
+            contexts.append(context_info)
+            logger.info(
+                f"Introspected bounded context '{context_info.slug}': {context_info.summary()}"
+            )
+
+    return contexts
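A rough usage sketch of the AST helpers (assumes julee 0.1.6 and its dependencies are importable; the temporary module written below is invented purely for illustration):

    import tempfile
    from pathlib import Path

    from julee.docs.sphinx_hcd.parsers import (
        parse_module_docstring,
        parse_python_classes,
    )

    with tempfile.TemporaryDirectory() as tmp:
        # Write a throwaway module with a module docstring and one class.
        module = Path(tmp) / "example.py"
        module.write_text(
            '"""Example module.\n\nLonger description."""\n\n'
            "class ExampleService:\n"
            '    """Coordinates example work."""\n'
        )

        first_line, full = parse_module_docstring(module)
        print(first_line)  # Example module.

        for info in parse_python_classes(Path(tmp)):
            # expected: ExampleService - Coordinates example work.
            print(info.name, "-", info.docstring)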
julee/docs/sphinx_hcd/parsers/gherkin.py
@@ -0,0 +1,155 @@
+"""Gherkin feature file parser.
+
+Parses .feature files to extract user story information.
+"""
+
+import logging
+import re
+from dataclasses import dataclass
+from pathlib import Path
+
+from ..domain.models.story import Story
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class ParsedFeature:
+    """Raw parsed data from a feature file.
+
+    This intermediate representation holds the extracted values
+    before creating a Story entity.
+    """
+
+    feature_title: str
+    persona: str
+    i_want: str
+    so_that: str
+    gherkin_snippet: str
+
+
+def parse_feature_content(content: str) -> ParsedFeature:
+    """Parse the content of a Gherkin feature file.
+
+    Extracts:
+    - Feature: <title>
+    - As a <persona>
+    - I want to <action>
+    - So that <benefit>
+    - The story header (everything before Scenario/Background)
+
+    Args:
+        content: The full text content of a .feature file
+
+    Returns:
+        ParsedFeature with extracted values (defaults for missing fields)
+    """
+    # Extract header components using regex
+    feature_match = re.search(r"^Feature:\s*(.+)$", content, re.MULTILINE)
+    as_a_match = re.search(r"^\s*As an?\s+(.+)$", content, re.MULTILINE)
+    i_want_match = re.search(r"^\s*I want to\s+(.+)$", content, re.MULTILINE)
+    so_that_match = re.search(r"^\s*So that\s+(.+)$", content, re.MULTILINE)
+
+    # Extract Gherkin snippet (story header only, stop before scenarios)
+    lines = content.split("\n")
+    snippet_lines = []
+    for line in lines:
+        stripped = line.strip()
+        # Stop at scenario markers or step keywords at start of line
+        if stripped.startswith(
+            ("Scenario", "Background", "@", "Given", "When", "Then", "And", "But")
+        ):
+            break
+        if stripped:
+            snippet_lines.append(line)
+    gherkin_snippet = "\n".join(snippet_lines)
+
+    return ParsedFeature(
+        feature_title=feature_match.group(1).strip() if feature_match else "Unknown",
+        persona=as_a_match.group(1).strip() if as_a_match else "unknown",
+        i_want=i_want_match.group(1).strip() if i_want_match else "do something",
+        so_that=so_that_match.group(1).strip() if so_that_match else "achieve a goal",
+        gherkin_snippet=gherkin_snippet,
+    )
+
+
+def parse_feature_file(
+    file_path: Path,
+    project_root: Path,
+    app_slug: str | None = None,
+) -> Story | None:
+    """Parse a single feature file and return a Story.
+
+    Args:
+        file_path: Absolute path to the .feature file
+        project_root: Project root for computing relative paths
+        app_slug: Optional app slug override. If None, extracted from path.
+
+    Returns:
+        Story entity, or None if parsing fails
+    """
+    try:
+        content = file_path.read_text()
+    except Exception as e:
+        logger.warning(f"Could not read {file_path}: {e}")
+        return None
+
+    # Parse the content
+    parsed = parse_feature_content(content)
+
+    # Compute relative path
+    try:
+        rel_path = file_path.relative_to(project_root)
+    except ValueError:
+        rel_path = file_path
+        logger.warning(f"Feature file {file_path} is not under project root")
+
+    # Extract app slug from path if not provided
+    # Expected: tests/e2e/{app}/features/{name}.feature
+    if app_slug is None:
+        parts = rel_path.parts
+        if len(parts) >= 4 and parts[2] != "features":
+            app_slug = parts[2]
+        else:
+            app_slug = "unknown"
+
+    return Story.from_feature_file(
+        feature_title=parsed.feature_title,
+        persona=parsed.persona,
+        i_want=parsed.i_want,
+        so_that=parsed.so_that,
+        app_slug=app_slug,
+        file_path=str(rel_path),
+        abs_path=str(file_path),
+        gherkin_snippet=parsed.gherkin_snippet,
+    )
+
+
+def scan_feature_directory(
+    feature_dir: Path,
+    project_root: Path,
+) -> list[Story]:
+    """Scan a directory tree for .feature files and parse them.
+
+    Args:
+        feature_dir: Root directory to scan (e.g., tests/e2e/)
+        project_root: Project root for computing relative paths
+
+    Returns:
+        List of parsed Story entities
+    """
+    stories = []
+
+    if not feature_dir.exists():
+        logger.info(
+            f"Feature files directory not found at {feature_dir} - no stories to index"
+        )
+        return stories
+
+    for feature_file in feature_dir.rglob("*.feature"):
+        story = parse_feature_file(feature_file, project_root)
+        if story:
+            stories.append(story)
+
+    logger.info(f"Indexed {len(stories)} Gherkin stories from {feature_dir}")
+    return stories
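A hedged sketch of how the header extraction behaves (the feature text below is invented for illustration; assumes julee 0.1.6 is importable):

    from julee.docs.sphinx_hcd.parsers import parse_feature_content

    # Built with join() so presentation indentation cannot leak into the content.
    FEATURE = "\n".join([
        "Feature: Poll an HTTP endpoint",
        "  As a platform operator",
        "  I want to capture documents from an HTTP endpoint on a schedule",
        "  So that new content flows into julee automatically",
        "",
        "  Scenario: Endpoint returns a new document",
        "    Given a polling config for the endpoint",
        "    When the poller runs",
        "    Then a document is captured",
    ])

    parsed = parse_feature_content(FEATURE)
    print(parsed.feature_title)  # Poll an HTTP endpoint
    print(parsed.persona)        # platform operator
    print(parsed.so_that)        # new content flows into julee automatically
    # parsed.gherkin_snippet holds only the four header lines above the Scenario.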
julee/docs/sphinx_hcd/parsers/yaml.py
@@ -0,0 +1,184 @@
+"""YAML manifest parsers.
+
+Parses YAML manifest files for apps and integrations.
+"""
+
+import logging
+from pathlib import Path
+
+import yaml
+
+from ..domain.models.app import App
+from ..domain.models.integration import Integration
+
+logger = logging.getLogger(__name__)
+
+
+def parse_app_manifest(manifest_path: Path, app_slug: str | None = None) -> App | None:
+    """Parse an app.yaml manifest file.
+
+    Args:
+        manifest_path: Path to the app.yaml file
+        app_slug: Optional app slug override. If None, extracted from directory name.
+
+    Returns:
+        App entity, or None if parsing fails
+    """
+    try:
+        content = manifest_path.read_text()
+    except Exception as e:
+        logger.warning(f"Could not read {manifest_path}: {e}")
+        return None
+
+    try:
+        manifest = yaml.safe_load(content)
+    except yaml.YAMLError as e:
+        logger.warning(f"Could not parse YAML in {manifest_path}: {e}")
+        return None
+
+    if manifest is None:
+        logger.warning(f"Empty manifest at {manifest_path}")
+        return None
+
+    # Extract app slug from directory name if not provided
+    if app_slug is None:
+        app_slug = manifest_path.parent.name
+
+    return App.from_manifest(
+        slug=app_slug,
+        manifest=manifest,
+        manifest_path=str(manifest_path),
+    )
+
+
+def scan_app_manifests(apps_dir: Path) -> list[App]:
+    """Scan a directory for app.yaml manifest files.
+
+    Expects structure: apps_dir/{app-slug}/app.yaml
+
+    Args:
+        apps_dir: Directory containing app subdirectories
+
+    Returns:
+        List of parsed App entities
+    """
+    apps = []
+
+    if not apps_dir.exists():
+        logger.info(
+            f"Apps directory not found at {apps_dir} - no app manifests to index"
+        )
+        return apps
+
+    for app_dir in apps_dir.iterdir():
+        if not app_dir.is_dir():
+            continue
+
+        manifest_path = app_dir / "app.yaml"
+        if not manifest_path.exists():
+            continue
+
+        app = parse_app_manifest(manifest_path)
+        if app:
+            apps.append(app)
+
+    logger.info(f"Indexed {len(apps)} apps from {apps_dir}")
+    return apps
+
+
+def parse_manifest_content(content: str) -> dict | None:
+    """Parse YAML content string.
+
+    A lower-level helper for testing and direct content parsing.
+
+    Args:
+        content: YAML content string
+
+    Returns:
+        Parsed dictionary, or None if parsing fails
+    """
+    try:
+        return yaml.safe_load(content)
+    except yaml.YAMLError as e:
+        logger.warning(f"Could not parse YAML content: {e}")
+        return None
+
+
+# Integration manifest parsing
+
+
+def parse_integration_manifest(
+    manifest_path: Path, module_name: str | None = None
+) -> Integration | None:
+    """Parse an integration.yaml manifest file.
+
+    Args:
+        manifest_path: Path to the integration.yaml file
+        module_name: Optional module name override. If None, extracted from directory name.
+
+    Returns:
+        Integration entity, or None if parsing fails
+    """
+    try:
+        content = manifest_path.read_text()
+    except Exception as e:
+        logger.warning(f"Could not read {manifest_path}: {e}")
+        return None
+
+    try:
+        manifest = yaml.safe_load(content)
+    except yaml.YAMLError as e:
+        logger.warning(f"Could not parse YAML in {manifest_path}: {e}")
+        return None
+
+    if manifest is None:
+        logger.warning(f"Empty manifest at {manifest_path}")
+        return None
+
+    # Extract module name from directory name if not provided
+    if module_name is None:
+        module_name = manifest_path.parent.name
+
+    return Integration.from_manifest(
+        module_name=module_name,
+        manifest=manifest,
+        manifest_path=str(manifest_path),
+    )
+
+
+def scan_integration_manifests(integrations_dir: Path) -> list[Integration]:
+    """Scan a directory for integration.yaml manifest files.
+
+    Expects structure: integrations_dir/{module_name}/integration.yaml
+    Directories starting with '_' are skipped.
+
+    Args:
+        integrations_dir: Directory containing integration subdirectories
+
+    Returns:
+        List of parsed Integration entities
+    """
+    integrations = []
+
+    if not integrations_dir.exists():
+        logger.info(
+            f"Integrations directory not found at {integrations_dir} - "
+            "no integration manifests to index"
+        )
+        return integrations
+
+    for int_dir in integrations_dir.iterdir():
+        # Skip non-directories and directories starting with '_'
+        if not int_dir.is_dir() or int_dir.name.startswith("_"):
+            continue
+
+        manifest_path = int_dir / "integration.yaml"
+        if not manifest_path.exists():
+            continue
+
+        integration = parse_integration_manifest(manifest_path)
+        if integration:
+            integrations.append(integration)
+
+    logger.info(f"Indexed {len(integrations)} integrations from {integrations_dir}")
+    return integrations
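A hedged sketch of the lower-level YAML helper (the manifest keys below are invented; the real app.yaml schema is defined by App.from_manifest, which this diff does not show; assumes julee 0.1.6 is importable):

    from julee.docs.sphinx_hcd.parsers import parse_manifest_content

    # Hypothetical manifest content for illustration only.
    MANIFEST = "name: Meeting Minutes\ndescription: Captures meetings and assembles minutes.\n"

    data = parse_manifest_content(MANIFEST)  # thin wrapper over yaml.safe_load
    print(data["name"])  # Meeting Minutes

    # Malformed YAML is logged and returns None instead of raising.
    print(parse_manifest_content("items: [unclosed"))  # None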
julee/docs/sphinx_hcd/repositories/memory/__init__.py
@@ -0,0 +1,25 @@
+"""Memory repository implementations for sphinx_hcd.
+
+In-memory implementations used during Sphinx builds. These repositories
+are populated at builder-inited and queried during doctree processing.
+"""
+
+from .accelerator import MemoryAcceleratorRepository
+from .app import MemoryAppRepository
+from .base import MemoryRepositoryMixin
+from .code_info import MemoryCodeInfoRepository
+from .epic import MemoryEpicRepository
+from .integration import MemoryIntegrationRepository
+from .journey import MemoryJourneyRepository
+from .story import MemoryStoryRepository
+
+__all__ = [
+    "MemoryAcceleratorRepository",
+    "MemoryAppRepository",
+    "MemoryCodeInfoRepository",
+    "MemoryEpicRepository",
+    "MemoryIntegrationRepository",
+    "MemoryJourneyRepository",
+    "MemoryRepositoryMixin",
+    "MemoryStoryRepository",
+]