julee 0.1.5__py3-none-any.whl → 0.1.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (105)
  1. julee/docs/sphinx_hcd/__init__.py +146 -13
  2. julee/docs/sphinx_hcd/domain/__init__.py +5 -0
  3. julee/docs/sphinx_hcd/domain/models/__init__.py +32 -0
  4. julee/docs/sphinx_hcd/domain/models/accelerator.py +152 -0
  5. julee/docs/sphinx_hcd/domain/models/app.py +151 -0
  6. julee/docs/sphinx_hcd/domain/models/code_info.py +121 -0
  7. julee/docs/sphinx_hcd/domain/models/epic.py +79 -0
  8. julee/docs/sphinx_hcd/domain/models/integration.py +230 -0
  9. julee/docs/sphinx_hcd/domain/models/journey.py +222 -0
  10. julee/docs/sphinx_hcd/domain/models/persona.py +106 -0
  11. julee/docs/sphinx_hcd/domain/models/story.py +128 -0
  12. julee/docs/sphinx_hcd/domain/repositories/__init__.py +25 -0
  13. julee/docs/sphinx_hcd/domain/repositories/accelerator.py +98 -0
  14. julee/docs/sphinx_hcd/domain/repositories/app.py +57 -0
  15. julee/docs/sphinx_hcd/domain/repositories/base.py +89 -0
  16. julee/docs/sphinx_hcd/domain/repositories/code_info.py +69 -0
  17. julee/docs/sphinx_hcd/domain/repositories/epic.py +62 -0
  18. julee/docs/sphinx_hcd/domain/repositories/integration.py +79 -0
  19. julee/docs/sphinx_hcd/domain/repositories/journey.py +106 -0
  20. julee/docs/sphinx_hcd/domain/repositories/story.py +68 -0
  21. julee/docs/sphinx_hcd/domain/use_cases/__init__.py +64 -0
  22. julee/docs/sphinx_hcd/domain/use_cases/derive_personas.py +166 -0
  23. julee/docs/sphinx_hcd/domain/use_cases/resolve_accelerator_references.py +236 -0
  24. julee/docs/sphinx_hcd/domain/use_cases/resolve_app_references.py +144 -0
  25. julee/docs/sphinx_hcd/domain/use_cases/resolve_story_references.py +121 -0
  26. julee/docs/sphinx_hcd/parsers/__init__.py +48 -0
  27. julee/docs/sphinx_hcd/parsers/ast.py +150 -0
  28. julee/docs/sphinx_hcd/parsers/gherkin.py +155 -0
  29. julee/docs/sphinx_hcd/parsers/yaml.py +184 -0
  30. julee/docs/sphinx_hcd/repositories/__init__.py +4 -0
  31. julee/docs/sphinx_hcd/repositories/memory/__init__.py +25 -0
  32. julee/docs/sphinx_hcd/repositories/memory/accelerator.py +86 -0
  33. julee/docs/sphinx_hcd/repositories/memory/app.py +45 -0
  34. julee/docs/sphinx_hcd/repositories/memory/base.py +106 -0
  35. julee/docs/sphinx_hcd/repositories/memory/code_info.py +59 -0
  36. julee/docs/sphinx_hcd/repositories/memory/epic.py +54 -0
  37. julee/docs/sphinx_hcd/repositories/memory/integration.py +70 -0
  38. julee/docs/sphinx_hcd/repositories/memory/journey.py +96 -0
  39. julee/docs/sphinx_hcd/repositories/memory/story.py +63 -0
  40. julee/docs/sphinx_hcd/sphinx/__init__.py +28 -0
  41. julee/docs/sphinx_hcd/sphinx/adapters.py +116 -0
  42. julee/docs/sphinx_hcd/sphinx/context.py +163 -0
  43. julee/docs/sphinx_hcd/sphinx/directives/__init__.py +160 -0
  44. julee/docs/sphinx_hcd/sphinx/directives/accelerator.py +576 -0
  45. julee/docs/sphinx_hcd/sphinx/directives/app.py +349 -0
  46. julee/docs/sphinx_hcd/sphinx/directives/base.py +211 -0
  47. julee/docs/sphinx_hcd/sphinx/directives/epic.py +434 -0
  48. julee/docs/sphinx_hcd/sphinx/directives/integration.py +220 -0
  49. julee/docs/sphinx_hcd/sphinx/directives/journey.py +642 -0
  50. julee/docs/sphinx_hcd/sphinx/directives/persona.py +345 -0
  51. julee/docs/sphinx_hcd/sphinx/directives/story.py +575 -0
  52. julee/docs/sphinx_hcd/sphinx/event_handlers/__init__.py +16 -0
  53. julee/docs/sphinx_hcd/sphinx/event_handlers/builder_inited.py +31 -0
  54. julee/docs/sphinx_hcd/sphinx/event_handlers/doctree_read.py +27 -0
  55. julee/docs/sphinx_hcd/sphinx/event_handlers/doctree_resolved.py +43 -0
  56. julee/docs/sphinx_hcd/sphinx/event_handlers/env_purge_doc.py +42 -0
  57. julee/docs/sphinx_hcd/sphinx/initialization.py +139 -0
  58. julee/docs/sphinx_hcd/tests/__init__.py +9 -0
  59. julee/docs/sphinx_hcd/tests/conftest.py +6 -0
  60. julee/docs/sphinx_hcd/tests/domain/__init__.py +1 -0
  61. julee/docs/sphinx_hcd/tests/domain/models/__init__.py +1 -0
  62. julee/docs/sphinx_hcd/tests/domain/models/test_accelerator.py +266 -0
  63. julee/docs/sphinx_hcd/tests/domain/models/test_app.py +258 -0
  64. julee/docs/sphinx_hcd/tests/domain/models/test_code_info.py +231 -0
  65. julee/docs/sphinx_hcd/tests/domain/models/test_epic.py +163 -0
  66. julee/docs/sphinx_hcd/tests/domain/models/test_integration.py +327 -0
  67. julee/docs/sphinx_hcd/tests/domain/models/test_journey.py +249 -0
  68. julee/docs/sphinx_hcd/tests/domain/models/test_persona.py +172 -0
  69. julee/docs/sphinx_hcd/tests/domain/models/test_story.py +216 -0
  70. julee/docs/sphinx_hcd/tests/domain/use_cases/__init__.py +1 -0
  71. julee/docs/sphinx_hcd/tests/domain/use_cases/test_derive_personas.py +314 -0
  72. julee/docs/sphinx_hcd/tests/domain/use_cases/test_resolve_accelerator_references.py +476 -0
  73. julee/docs/sphinx_hcd/tests/domain/use_cases/test_resolve_app_references.py +265 -0
  74. julee/docs/sphinx_hcd/tests/domain/use_cases/test_resolve_story_references.py +229 -0
  75. julee/docs/sphinx_hcd/tests/integration/__init__.py +1 -0
  76. julee/docs/sphinx_hcd/tests/parsers/__init__.py +1 -0
  77. julee/docs/sphinx_hcd/tests/parsers/test_ast.py +298 -0
  78. julee/docs/sphinx_hcd/tests/parsers/test_gherkin.py +282 -0
  79. julee/docs/sphinx_hcd/tests/parsers/test_yaml.py +496 -0
  80. julee/docs/sphinx_hcd/tests/repositories/__init__.py +1 -0
  81. julee/docs/sphinx_hcd/tests/repositories/test_accelerator.py +298 -0
  82. julee/docs/sphinx_hcd/tests/repositories/test_app.py +218 -0
  83. julee/docs/sphinx_hcd/tests/repositories/test_base.py +151 -0
  84. julee/docs/sphinx_hcd/tests/repositories/test_code_info.py +253 -0
  85. julee/docs/sphinx_hcd/tests/repositories/test_epic.py +237 -0
  86. julee/docs/sphinx_hcd/tests/repositories/test_integration.py +268 -0
  87. julee/docs/sphinx_hcd/tests/repositories/test_journey.py +294 -0
  88. julee/docs/sphinx_hcd/tests/repositories/test_story.py +236 -0
  89. julee/docs/sphinx_hcd/tests/sphinx/__init__.py +1 -0
  90. julee/docs/sphinx_hcd/tests/sphinx/directives/__init__.py +1 -0
  91. julee/docs/sphinx_hcd/tests/sphinx/directives/test_base.py +160 -0
  92. julee/docs/sphinx_hcd/tests/sphinx/test_adapters.py +176 -0
  93. julee/docs/sphinx_hcd/tests/sphinx/test_context.py +257 -0
  94. {julee-0.1.5.dist-info → julee-0.1.6.dist-info}/METADATA +2 -1
  95. {julee-0.1.5.dist-info → julee-0.1.6.dist-info}/RECORD +98 -13
  96. julee/docs/sphinx_hcd/accelerators.py +0 -1175
  97. julee/docs/sphinx_hcd/apps.py +0 -518
  98. julee/docs/sphinx_hcd/epics.py +0 -453
  99. julee/docs/sphinx_hcd/integrations.py +0 -310
  100. julee/docs/sphinx_hcd/journeys.py +0 -797
  101. julee/docs/sphinx_hcd/personas.py +0 -457
  102. julee/docs/sphinx_hcd/stories.py +0 -960
  103. {julee-0.1.5.dist-info → julee-0.1.6.dist-info}/WHEEL +0 -0
  104. {julee-0.1.5.dist-info → julee-0.1.6.dist-info}/licenses/LICENSE +0 -0
  105. {julee-0.1.5.dist-info → julee-0.1.6.dist-info}/top_level.txt +0 -0
julee/docs/sphinx_hcd/parsers/yaml.py
@@ -0,0 +1,184 @@
+ """YAML manifest parsers.
+
+ Parses YAML manifest files for apps and integrations.
+ """
+
+ import logging
+ from pathlib import Path
+
+ import yaml
+
+ from ..domain.models.app import App
+ from ..domain.models.integration import Integration
+
+ logger = logging.getLogger(__name__)
+
+
+ def parse_app_manifest(manifest_path: Path, app_slug: str | None = None) -> App | None:
+     """Parse an app.yaml manifest file.
+
+     Args:
+         manifest_path: Path to the app.yaml file
+         app_slug: Optional app slug override. If None, extracted from directory name.
+
+     Returns:
+         App entity, or None if parsing fails
+     """
+     try:
+         content = manifest_path.read_text()
+     except Exception as e:
+         logger.warning(f"Could not read {manifest_path}: {e}")
+         return None
+
+     try:
+         manifest = yaml.safe_load(content)
+     except yaml.YAMLError as e:
+         logger.warning(f"Could not parse YAML in {manifest_path}: {e}")
+         return None
+
+     if manifest is None:
+         logger.warning(f"Empty manifest at {manifest_path}")
+         return None
+
+     # Extract app slug from directory name if not provided
+     if app_slug is None:
+         app_slug = manifest_path.parent.name
+
+     return App.from_manifest(
+         slug=app_slug,
+         manifest=manifest,
+         manifest_path=str(manifest_path),
+     )
+
+
+ def scan_app_manifests(apps_dir: Path) -> list[App]:
+     """Scan a directory for app.yaml manifest files.
+
+     Expects structure: apps_dir/{app-slug}/app.yaml
+
+     Args:
+         apps_dir: Directory containing app subdirectories
+
+     Returns:
+         List of parsed App entities
+     """
+     apps = []
+
+     if not apps_dir.exists():
+         logger.info(
+             f"Apps directory not found at {apps_dir} - no app manifests to index"
+         )
+         return apps
+
+     for app_dir in apps_dir.iterdir():
+         if not app_dir.is_dir():
+             continue
+
+         manifest_path = app_dir / "app.yaml"
+         if not manifest_path.exists():
+             continue
+
+         app = parse_app_manifest(manifest_path)
+         if app:
+             apps.append(app)
+
+     logger.info(f"Indexed {len(apps)} apps from {apps_dir}")
+     return apps
+
+
+ def parse_manifest_content(content: str) -> dict | None:
+     """Parse YAML content string.
+
+     A lower-level helper for testing and direct content parsing.
+
+     Args:
+         content: YAML content string
+
+     Returns:
+         Parsed dictionary, or None if parsing fails
+     """
+     try:
+         return yaml.safe_load(content)
+     except yaml.YAMLError as e:
+         logger.warning(f"Could not parse YAML content: {e}")
+         return None
+
+
+ # Integration manifest parsing
+
+
+ def parse_integration_manifest(
+     manifest_path: Path, module_name: str | None = None
+ ) -> Integration | None:
+     """Parse an integration.yaml manifest file.
+
+     Args:
+         manifest_path: Path to the integration.yaml file
+         module_name: Optional module name override. If None, extracted from directory name.
+
+     Returns:
+         Integration entity, or None if parsing fails
+     """
+     try:
+         content = manifest_path.read_text()
+     except Exception as e:
+         logger.warning(f"Could not read {manifest_path}: {e}")
+         return None
+
+     try:
+         manifest = yaml.safe_load(content)
+     except yaml.YAMLError as e:
+         logger.warning(f"Could not parse YAML in {manifest_path}: {e}")
+         return None
+
+     if manifest is None:
+         logger.warning(f"Empty manifest at {manifest_path}")
+         return None
+
+     # Extract module name from directory name if not provided
+     if module_name is None:
+         module_name = manifest_path.parent.name
+
+     return Integration.from_manifest(
+         module_name=module_name,
+         manifest=manifest,
+         manifest_path=str(manifest_path),
+     )
+
+
+ def scan_integration_manifests(integrations_dir: Path) -> list[Integration]:
+     """Scan a directory for integration.yaml manifest files.
+
+     Expects structure: integrations_dir/{module_name}/integration.yaml
+     Directories starting with '_' are skipped.
+
+     Args:
+         integrations_dir: Directory containing integration subdirectories
+
+     Returns:
+         List of parsed Integration entities
+     """
+     integrations = []
+
+     if not integrations_dir.exists():
+         logger.info(
+             f"Integrations directory not found at {integrations_dir} - "
+             "no integration manifests to index"
+         )
+         return integrations
+
+     for int_dir in integrations_dir.iterdir():
+         # Skip non-directories and directories starting with '_'
+         if not int_dir.is_dir() or int_dir.name.startswith("_"):
+             continue
+
+         manifest_path = int_dir / "integration.yaml"
+         if not manifest_path.exists():
+             continue
+
+         integration = parse_integration_manifest(manifest_path)
+         if integration:
+             integrations.append(integration)
+
+     logger.info(f"Indexed {len(integrations)} integrations from {integrations_dir}")
+     return integrations
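For orientation, a minimal usage sketch of the new parser module follows. It is an illustration, not code from the package: the import path is inferred from the wheel layout, and the docs/apps and docs/integrations directories are placeholder paths.

    from pathlib import Path

    from julee.docs.sphinx_hcd.parsers.yaml import (
        scan_app_manifests,
        scan_integration_manifests,
    )

    # Assumed layout: docs/apps/<slug>/app.yaml and
    # docs/integrations/<module>/integration.yaml
    docs_root = Path("docs")
    apps = scan_app_manifests(docs_root / "apps")
    integrations = scan_integration_manifests(docs_root / "integrations")
    print(f"Found {len(apps)} apps and {len(integrations)} integrations")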
julee/docs/sphinx_hcd/repositories/__init__.py
@@ -0,0 +1,4 @@
+ """Repository implementations for sphinx_hcd.
+
+ Contains memory repository implementations following julee patterns.
+ """
julee/docs/sphinx_hcd/repositories/memory/__init__.py
@@ -0,0 +1,25 @@
+ """Memory repository implementations for sphinx_hcd.
+
+ In-memory implementations used during Sphinx builds. These repositories
+ are populated at builder-inited and queried during doctree processing.
+ """
+
+ from .accelerator import MemoryAcceleratorRepository
+ from .app import MemoryAppRepository
+ from .base import MemoryRepositoryMixin
+ from .code_info import MemoryCodeInfoRepository
+ from .epic import MemoryEpicRepository
+ from .integration import MemoryIntegrationRepository
+ from .journey import MemoryJourneyRepository
+ from .story import MemoryStoryRepository
+
+ __all__ = [
+     "MemoryAcceleratorRepository",
+     "MemoryAppRepository",
+     "MemoryCodeInfoRepository",
+     "MemoryEpicRepository",
+     "MemoryIntegrationRepository",
+     "MemoryJourneyRepository",
+     "MemoryRepositoryMixin",
+     "MemoryStoryRepository",
+ ]
julee/docs/sphinx_hcd/repositories/memory/accelerator.py
@@ -0,0 +1,86 @@
+ """Memory implementation of AcceleratorRepository."""
+
+ import logging
+
+ from ...domain.models.accelerator import Accelerator
+ from ...domain.repositories.accelerator import AcceleratorRepository
+ from .base import MemoryRepositoryMixin
+
+ logger = logging.getLogger(__name__)
+
+
+ class MemoryAcceleratorRepository(
+     MemoryRepositoryMixin[Accelerator], AcceleratorRepository
+ ):
+     """In-memory implementation of AcceleratorRepository.
+
+     Accelerators are stored in a dictionary keyed by slug. This implementation
+     is used during Sphinx builds where accelerators are populated during doctree
+     processing and support incremental builds via docname tracking.
+     """
+
+     def __init__(self) -> None:
+         """Initialize with empty storage."""
+         self.storage: dict[str, Accelerator] = {}
+         self.entity_name = "Accelerator"
+         self.id_field = "slug"
+
+     async def get_by_status(self, status: str) -> list[Accelerator]:
+         """Get all accelerators with a specific status."""
+         status_normalized = status.lower().strip()
+         return [
+             accel
+             for accel in self.storage.values()
+             if accel.status_normalized == status_normalized
+         ]
+
+     async def get_by_docname(self, docname: str) -> list[Accelerator]:
+         """Get all accelerators defined in a specific document."""
+         return [accel for accel in self.storage.values() if accel.docname == docname]
+
+     async def clear_by_docname(self, docname: str) -> int:
+         """Remove all accelerators defined in a specific document."""
+         to_remove = [
+             slug for slug, accel in self.storage.items() if accel.docname == docname
+         ]
+         for slug in to_remove:
+             del self.storage[slug]
+         return len(to_remove)
+
+     async def get_by_integration(
+         self, integration_slug: str, relationship: str
+     ) -> list[Accelerator]:
+         """Get accelerators that have a relationship with an integration."""
+         result = []
+         for accel in self.storage.values():
+             if relationship == "sources_from":
+                 if integration_slug in accel.get_sources_from_slugs():
+                     result.append(accel)
+             elif relationship == "publishes_to":
+                 if integration_slug in accel.get_publishes_to_slugs():
+                     result.append(accel)
+         return result
+
+     async def get_dependents(self, accelerator_slug: str) -> list[Accelerator]:
+         """Get accelerators that depend on a specific accelerator."""
+         return [
+             accel
+             for accel in self.storage.values()
+             if accelerator_slug in accel.depends_on
+         ]
+
+     async def get_fed_by(self, accelerator_slug: str) -> list[Accelerator]:
+         """Get accelerators that feed into a specific accelerator."""
+         return [
+             accel
+             for accel in self.storage.values()
+             if accelerator_slug in accel.feeds_into
+         ]
+
+     async def get_all_statuses(self) -> set[str]:
+         """Get all unique statuses across all accelerators."""
+         return {
+             accel.status_normalized
+             for accel in self.storage.values()
+             if accel.status_normalized
+         }
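The docname bookkeeping above exists to support incremental Sphinx rebuilds: when a document is purged, everything it defined is dropped so its directives can repopulate the repository on the next read. A minimal sketch of that flow, with a hypothetical docname and hand-rolled wiring rather than the extension's own event handlers:

    import asyncio

    from julee.docs.sphinx_hcd.repositories.memory import MemoryAcceleratorRepository

    async def purge_doc(repo: MemoryAcceleratorRepository, docname: str) -> None:
        # Mirrors what an env-purge-doc handler would do (wiring here is assumed):
        # drop the document's accelerators before the document is re-read.
        removed = await repo.clear_by_docname(docname)
        print(f"Purged {removed} accelerators defined in {docname}")

    asyncio.run(purge_doc(MemoryAcceleratorRepository(), "hcd/accelerators"))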
julee/docs/sphinx_hcd/repositories/memory/app.py
@@ -0,0 +1,45 @@
+ """Memory implementation of AppRepository."""
+
+ import logging
+
+ from ...domain.models.app import App, AppType
+ from ...domain.repositories.app import AppRepository
+ from ...utils import normalize_name
+ from .base import MemoryRepositoryMixin
+
+ logger = logging.getLogger(__name__)
+
+
+ class MemoryAppRepository(MemoryRepositoryMixin[App], AppRepository):
+     """In-memory implementation of AppRepository.
+
+     Apps are stored in a dictionary keyed by slug. This implementation
+     is used during Sphinx builds where apps are populated at builder-inited
+     and queried during doctree processing.
+     """
+
+     def __init__(self) -> None:
+         """Initialize with empty storage."""
+         self.storage: dict[str, App] = {}
+         self.entity_name = "App"
+         self.id_field = "slug"
+
+     async def get_by_type(self, app_type: AppType) -> list[App]:
+         """Get all apps of a specific type."""
+         return [app for app in self.storage.values() if app.app_type == app_type]
+
+     async def get_by_name(self, name: str) -> App | None:
+         """Get an app by its display name (case-insensitive)."""
+         name_normalized = normalize_name(name)
+         for app in self.storage.values():
+             if app.name_normalized == name_normalized:
+                 return app
+         return None
+
+     async def get_all_types(self) -> set[AppType]:
+         """Get all unique app types that have apps."""
+         return {app.app_type for app in self.storage.values()}
+
+     async def get_apps_with_accelerators(self) -> list[App]:
+         """Get all apps that have accelerators defined."""
+         return [app for app in self.storage.values() if app.accelerators]
julee/docs/sphinx_hcd/repositories/memory/base.py
@@ -0,0 +1,106 @@
+ """Memory repository base classes and mixins for sphinx_hcd.
+
+ Provides common functionality for in-memory repository implementations,
+ following julee patterns but simplified for sphinx_hcd's needs.
+ """
+
+ import logging
+ from typing import Any, Generic, TypeVar
+
+ from pydantic import BaseModel
+
+ T = TypeVar("T", bound=BaseModel)
+
+ logger = logging.getLogger(__name__)
+
+
+ class MemoryRepositoryMixin(Generic[T]):
+     """Mixin providing common repository patterns for memory implementations.
+
+     Encapsulates common functionality used across all memory repository
+     implementations:
+     - Dictionary-based entity storage and retrieval
+     - Standardized logging patterns
+     - Generic CRUD operations
+
+     Classes using this mixin must provide:
+     - self.storage: dict[str, T] for entity storage
+     - self.entity_name: str for logging
+     - self.id_field: str naming the entity's ID field
+     """
+
+     storage: dict[str, T]
+     entity_name: str
+     id_field: str
+
+     def _get_entity_id(self, entity: T) -> str:
+         """Extract the entity ID from an entity instance."""
+         return getattr(entity, self.id_field)
+
+     async def get(self, entity_id: str) -> T | None:
+         """Retrieve an entity by ID."""
+         entity = self.storage.get(entity_id)
+         if entity is None:
+             logger.debug(
+                 f"Memory{self.entity_name}Repository: {self.entity_name} not found",
+                 extra={f"{self.entity_name.lower()}_id": entity_id},
+             )
+         return entity
+
+     async def get_many(self, entity_ids: list[str]) -> dict[str, T | None]:
+         """Retrieve multiple entities by ID."""
+         result: dict[str, T | None] = {}
+         for entity_id in entity_ids:
+             result[entity_id] = self.storage.get(entity_id)
+         return result
+
+     async def save(self, entity: T) -> None:
+         """Save an entity to storage."""
+         entity_id = self._get_entity_id(entity)
+         self.storage[entity_id] = entity
+         logger.debug(
+             f"Memory{self.entity_name}Repository: Saved {self.entity_name}",
+             extra={f"{self.entity_name.lower()}_id": entity_id},
+         )
+
+     async def list_all(self) -> list[T]:
+         """List all entities."""
+         return list(self.storage.values())
+
+     async def delete(self, entity_id: str) -> bool:
+         """Delete an entity by ID."""
+         if entity_id in self.storage:
+             del self.storage[entity_id]
+             logger.debug(
+                 f"Memory{self.entity_name}Repository: Deleted {self.entity_name}",
+                 extra={f"{self.entity_name.lower()}_id": entity_id},
+             )
+             return True
+         return False
+
+     async def clear(self) -> None:
+         """Remove all entities from storage."""
+         count = len(self.storage)
+         self.storage.clear()
+         logger.debug(
+             f"Memory{self.entity_name}Repository: Cleared {count} entities",
+         )
+
+     # Additional query methods that subclasses can use
+
+     async def find_by_field(self, field: str, value: Any) -> list[T]:
+         """Find all entities where field equals value."""
+         return [
+             entity
+             for entity in self.storage.values()
+             if getattr(entity, field, None) == value
+         ]
+
+     async def find_by_field_in(self, field: str, values: list[Any]) -> list[T]:
+         """Find all entities where field is in values."""
+         value_set = set(values)
+         return [
+             entity
+             for entity in self.storage.values()
+             if getattr(entity, field, None) in value_set
+         ]
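The mixin contract is small: a subclass supplies storage, entity_name, and id_field and inherits the generic async CRUD helpers. A minimal sketch with an invented Note entity (not part of this package; the concrete repositories above also inherit their abstract repository interfaces, which is omitted here):

    import asyncio

    from pydantic import BaseModel

    from julee.docs.sphinx_hcd.repositories.memory.base import MemoryRepositoryMixin

    class Note(BaseModel):  # hypothetical entity for illustration only
        slug: str
        text: str

    class MemoryNoteRepository(MemoryRepositoryMixin[Note]):
        def __init__(self) -> None:
            self.storage: dict[str, Note] = {}
            self.entity_name = "Note"
            self.id_field = "slug"

    async def demo() -> None:
        repo = MemoryNoteRepository()
        await repo.save(Note(slug="greeting", text="hello"))
        assert await repo.get("greeting") is not None
        assert await repo.find_by_field("text", "hello")

    asyncio.run(demo())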
julee/docs/sphinx_hcd/repositories/memory/code_info.py
@@ -0,0 +1,59 @@
+ """Memory implementation of CodeInfoRepository."""
+
+ import logging
+
+ from ...domain.models.code_info import BoundedContextInfo
+ from ...domain.repositories.code_info import CodeInfoRepository
+ from .base import MemoryRepositoryMixin
+
+ logger = logging.getLogger(__name__)
+
+
+ class MemoryCodeInfoRepository(
+     MemoryRepositoryMixin[BoundedContextInfo], CodeInfoRepository
+ ):
+     """In-memory implementation of CodeInfoRepository.
+
+     Bounded context info is stored in a dictionary keyed by slug. This implementation
+     is used during Sphinx builds where code info is populated at builder-inited
+     by scanning src/ directories.
+     """
+
+     def __init__(self) -> None:
+         """Initialize with empty storage."""
+         self.storage: dict[str, BoundedContextInfo] = {}
+         self.entity_name = "BoundedContextInfo"
+         self.id_field = "slug"
+
+     async def get_by_code_dir(self, code_dir: str) -> BoundedContextInfo | None:
+         """Get bounded context info by its code directory name."""
+         for info in self.storage.values():
+             if info.code_dir == code_dir:
+                 return info
+         return None
+
+     async def get_with_entities(self) -> list[BoundedContextInfo]:
+         """Get all bounded contexts that have domain entities."""
+         return [info for info in self.storage.values() if info.has_entities]
+
+     async def get_with_use_cases(self) -> list[BoundedContextInfo]:
+         """Get all bounded contexts that have use cases."""
+         return [info for info in self.storage.values() if info.has_use_cases]
+
+     async def get_with_infrastructure(self) -> list[BoundedContextInfo]:
+         """Get all bounded contexts that have infrastructure."""
+         return [info for info in self.storage.values() if info.has_infrastructure]
+
+     async def get_all_entity_names(self) -> set[str]:
+         """Get all unique entity class names across all bounded contexts."""
+         names: set[str] = set()
+         for info in self.storage.values():
+             names.update(info.get_entity_names())
+         return names
+
+     async def get_all_use_case_names(self) -> set[str]:
+         """Get all unique use case class names across all bounded contexts."""
+         names: set[str] = set()
+         for info in self.storage.values():
+             names.update(info.get_use_case_names())
+         return names
julee/docs/sphinx_hcd/repositories/memory/epic.py
@@ -0,0 +1,54 @@
+ """Memory implementation of EpicRepository."""
+
+ import logging
+
+ from ...domain.models.epic import Epic
+ from ...domain.repositories.epic import EpicRepository
+ from ...utils import normalize_name
+ from .base import MemoryRepositoryMixin
+
+ logger = logging.getLogger(__name__)
+
+
+ class MemoryEpicRepository(MemoryRepositoryMixin[Epic], EpicRepository):
+     """In-memory implementation of EpicRepository.
+
+     Epics are stored in a dictionary keyed by slug. This implementation
+     is used during Sphinx builds where epics are populated during doctree
+     processing and support incremental builds via docname tracking.
+     """
+
+     def __init__(self) -> None:
+         """Initialize with empty storage."""
+         self.storage: dict[str, Epic] = {}
+         self.entity_name = "Epic"
+         self.id_field = "slug"
+
+     async def get_by_docname(self, docname: str) -> list[Epic]:
+         """Get all epics defined in a specific document."""
+         return [epic for epic in self.storage.values() if epic.docname == docname]
+
+     async def clear_by_docname(self, docname: str) -> int:
+         """Remove all epics defined in a specific document."""
+         to_remove = [
+             slug for slug, epic in self.storage.items() if epic.docname == docname
+         ]
+         for slug in to_remove:
+             del self.storage[slug]
+         return len(to_remove)
+
+     async def get_with_story_ref(self, story_title: str) -> list[Epic]:
+         """Get epics that contain a specific story."""
+         story_normalized = normalize_name(story_title)
+         return [
+             epic
+             for epic in self.storage.values()
+             if any(normalize_name(ref) == story_normalized for ref in epic.story_refs)
+         ]
+
+     async def get_all_story_refs(self) -> set[str]:
+         """Get all unique story references across all epics."""
+         refs: set[str] = set()
+         for epic in self.storage.values():
+             refs.update(normalize_name(ref) for ref in epic.story_refs)
+         return refs
julee/docs/sphinx_hcd/repositories/memory/integration.py
@@ -0,0 +1,70 @@
+ """Memory implementation of IntegrationRepository."""
+
+ import logging
+
+ from ...domain.models.integration import Direction, Integration
+ from ...domain.repositories.integration import IntegrationRepository
+ from ...utils import normalize_name
+ from .base import MemoryRepositoryMixin
+
+ logger = logging.getLogger(__name__)
+
+
+ class MemoryIntegrationRepository(
+     MemoryRepositoryMixin[Integration], IntegrationRepository
+ ):
+     """In-memory implementation of IntegrationRepository.
+
+     Integrations are stored in a dictionary keyed by slug. This implementation
+     is used during Sphinx builds where integrations are populated at builder-inited
+     and queried during doctree processing.
+     """
+
+     def __init__(self) -> None:
+         """Initialize with empty storage."""
+         self.storage: dict[str, Integration] = {}
+         self.entity_name = "Integration"
+         self.id_field = "slug"
+
+     async def get_by_direction(self, direction: Direction) -> list[Integration]:
+         """Get all integrations with a specific direction."""
+         return [
+             integration
+             for integration in self.storage.values()
+             if integration.direction == direction
+         ]
+
+     async def get_by_module(self, module: str) -> Integration | None:
+         """Get an integration by its module name."""
+         for integration in self.storage.values():
+             if integration.module == module:
+                 return integration
+         return None
+
+     async def get_by_name(self, name: str) -> Integration | None:
+         """Get an integration by its display name (case-insensitive)."""
+         name_normalized = normalize_name(name)
+         for integration in self.storage.values():
+             if integration.name_normalized == name_normalized:
+                 return integration
+         return None
+
+     async def get_all_directions(self) -> set[Direction]:
+         """Get all unique directions that have integrations."""
+         return {integration.direction for integration in self.storage.values()}
+
+     async def get_with_dependencies(self) -> list[Integration]:
+         """Get all integrations that have external dependencies."""
+         return [
+             integration
+             for integration in self.storage.values()
+             if integration.depends_on
+         ]
+
+     async def get_by_dependency(self, dep_name: str) -> list[Integration]:
+         """Get all integrations that depend on a specific external system."""
+         return [
+             integration
+             for integration in self.storage.values()
+             if integration.has_dependency(dep_name)
+         ]