ethernium-continuity-pro 2.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ethernium_continuity_pro-2.1.0/LICENSE +22 -0
- ethernium_continuity_pro-2.1.0/PKG-INFO +100 -0
- ethernium_continuity_pro-2.1.0/README.md +37 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/__init__.py +1 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/archive_manager.py +77 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/automation_common.py +339 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/bootstrap_context.py +36 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/bootstrap_project.py +167 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/context_loader.py +160 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/continuity_status.py +76 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/continuity_suggest.py +64 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/decision_engine.py +136 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/discover_project.py +109 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/doc_parity_check.py +127 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/encoding_sanitizer.py +135 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/heal_parity.py +78 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/hook_utils.py +83 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/memory_graph_lite.py +64 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/notify_webhook.py +143 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/run_continuity_cycle.py +211 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/secret_detector.py +67 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/summarize_memory.py +58 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/sync_external_dev_context.py +232 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/sync_translations.py +306 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/system_membership_check.py +69 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/update_memory.py +81 -0
- ethernium_continuity_pro-2.1.0/continuity_pro/continuity_legacy/vector_store_lite.py +69 -0
- ethernium_continuity_pro-2.1.0/ethernium_continuity_pro.egg-info/PKG-INFO +100 -0
- ethernium_continuity_pro-2.1.0/ethernium_continuity_pro.egg-info/SOURCES.txt +35 -0
- ethernium_continuity_pro-2.1.0/ethernium_continuity_pro.egg-info/dependency_links.txt +1 -0
- ethernium_continuity_pro-2.1.0/ethernium_continuity_pro.egg-info/entry_points.txt +2 -0
- ethernium_continuity_pro-2.1.0/ethernium_continuity_pro.egg-info/requires.txt +12 -0
- ethernium_continuity_pro-2.1.0/ethernium_continuity_pro.egg-info/top_level.txt +1 -0
- ethernium_continuity_pro-2.1.0/pyproject.toml +40 -0
- ethernium_continuity_pro-2.1.0/setup.cfg +4 -0
- ethernium_continuity_pro-2.1.0/tests/test_logic.py +101 -0
- ethernium_continuity_pro-2.1.0/tests/test_parity_logic.py +44 -0
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2024 Ethernium (X: @Steveblackbeard)
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
22
|
+
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: ethernium-continuity-pro
|
|
3
|
+
Version: 2.1.0
|
|
4
|
+
Summary: Continuity Legacy (Pro): Industrial-grade AI continuity framework with security audits and global synchronization.
|
|
5
|
+
Author: SteveBlackbeard
|
|
6
|
+
Project-URL: Homepage, https://github.com/SteveBlackbeard/CONTINUITY-LEGACY-by-Ethernium
|
|
7
|
+
Project-URL: Bug Tracker, https://github.com/SteveBlackbeard/CONTINUITY-LEGACY-by-Ethernium/issues
|
|
8
|
+
Classifier: Programming Language :: Python :: 3
|
|
9
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
10
|
+
Classifier: Operating System :: OS Independent
|
|
11
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
12
|
+
Requires-Python: >=3.8
|
|
13
|
+
Description-Content-Type: text/markdown
|
|
14
|
+
License-File: LICENSE
|
|
15
|
+
Requires-Dist: typer[all]>=0.9.0
|
|
16
|
+
Requires-Dist: rich>=13.0.0
|
|
17
|
+
Provides-Extra: vector
|
|
18
|
+
Requires-Dist: chromadb>=0.4.0; extra == "vector"
|
|
19
|
+
Requires-Dist: sentence-transformers; extra == "vector"
|
|
20
|
+
Provides-Extra: all
|
|
21
|
+
Requires-Dist: chromadb>=0.4.0; extra == "all"
|
|
22
|
+
Requires-Dist: sentence-transformers; extra == "all"
|
|
23
|
+
Requires-Dist: networkx; extra == "all"
|
|
24
|
+
Requires-Dist: rdflib; extra == "all"
|
|
25
|
+
Dynamic: license-file
|
|
26
|
+
|
|
27
|
+
# Continuity Pro (v2.1.0) - Tactical Evolution Engine
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
[](https://github.com/SteveBlackbeard/CONTINUITY-LEGACY-by-Ethernium)
|
|
32
|
+
|
|
33
|
+
[](https://github.com/SteveBlackbeard/CONTINUITY-LEGACY-by-Ethernium)
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
**Pro** is the industrial-grade engine for tactical context synchronization and architectural auditing.
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
---
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
## Installation (Quick)
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
```bash
|
|
50
|
+
|
|
51
|
+
# Install the Pro edition from its folder
|
|
52
|
+
|
|
53
|
+
pip install -e .
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
# Setup the Git DNA Guardian Entry Point
|
|
58
|
+
|
|
59
|
+
continuity-pro --hook
|
|
60
|
+
|
|
61
|
+
```
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
---
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
## Minimal Usage
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
```bash
|
|
74
|
+
|
|
75
|
+
# Run the tactical audit cycle in strict mode
|
|
76
|
+
|
|
77
|
+
continuity-pro --strict
|
|
78
|
+
|
|
79
|
+
```
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
---
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
## Hardware Profile
|
|
88
|
+
|
|
89
|
+
- **CPU**: Standard Multicore.
|
|
90
|
+
|
|
91
|
+
- **RAM**: 4GB+ Recommended.
|
|
92
|
+
|
|
93
|
+
- **Python**: 3.9+ (External packages for sync/sanitization).
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
---
|
|
98
|
+
|
|
99
|
+
*Continuity: Protecting the logical lineage of your software.*
|
|
100
|
+
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
# Continuity Pro (v2.1.0) - Tactical Evolution Engine
|
|
2
|
+
|
|
3
|
+
[](https://github.com/SteveBlackbeard/CONTINUITY-LEGACY-by-Ethernium)
|
|
4
|
+
[](https://github.com/SteveBlackbeard/CONTINUITY-LEGACY-by-Ethernium)
|
|
5
|
+
|
|
6
|
+
**Pro** is the industrial-grade engine for tactical context synchronization and architectural auditing.
|
|
7
|
+
|
|
8
|
+
---
|
|
9
|
+
|
|
10
|
+
## Installation (Quick)
|
|
11
|
+
|
|
12
|
+
```bash
|
|
13
|
+
# Install the Pro edition from its folder
|
|
14
|
+
pip install -e .
|
|
15
|
+
|
|
16
|
+
# Setup the Git DNA Guardian Entry Point
|
|
17
|
+
continuity-pro --hook
|
|
18
|
+
```
|
|
19
|
+
|
|
20
|
+
---
|
|
21
|
+
|
|
22
|
+
## Minimal Usage
|
|
23
|
+
|
|
24
|
+
```bash
|
|
25
|
+
# Run the tactical audit cycle in strict mode
|
|
26
|
+
continuity-pro --strict
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
---
|
|
30
|
+
|
|
31
|
+
## Hardware Profile
|
|
32
|
+
- **CPU**: Standard Multicore.
|
|
33
|
+
- **RAM**: 4GB+ Recommended.
|
|
34
|
+
- **Python**: 3.9+ (External packages for sync/sanitization).
|
|
35
|
+
|
|
36
|
+
---
|
|
37
|
+
*Continuity: Protecting the logical lineage of your software.*
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""Core helpers for Continuity Legacy."""
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
archive_manager.py — CONTINUITY LEGACY Pro
|
|
4
|
+
==========================================
|
|
5
|
+
Systemic Log Rotation & Context Archiving.
|
|
6
|
+
|
|
7
|
+
Moves historical outputs and oversized log files from .continuity/
|
|
8
|
+
to .continuity/archive/ to prevent context window saturation while
|
|
9
|
+
preserving the project's 'Genetic Code'.
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
import os
|
|
13
|
+
import json
|
|
14
|
+
import shutil
|
|
15
|
+
import datetime
|
|
16
|
+
from pathlib import Path
|
|
17
|
+
|
|
18
|
+
# Config
MAX_LOG_SIZE_BYTES = 50 * 1024  # 50KB threshold for rotation
ARCHIVE_DIR_NAME = "archive"

def rotate_logs(repo_root: Path):
    """Rotate oversized continuity logs and archive historical cycle reports.

    Markdown logs under ``<repo_root>/.continuity`` that exceed
    MAX_LOG_SIZE_BYTES are copied to ``.continuity/archive`` with a
    timestamped name, and the active file is reset to a short header that
    points at the archive copy.  JSON reports under ``outputs/continuity``
    are pruned to the ten most recent; older ones move to
    ``archive/reports``.

    Args:
        repo_root: Root directory of the project being maintained.
    """
    print(f"[*] Archive Manager: Evaluating log health in {repo_root}...")

    continuity_dir = repo_root / ".continuity"
    archive_dir = continuity_dir / ARCHIVE_DIR_NAME

    if not continuity_dir.exists():
        print("[!] No .continuity directory found. Skipping archiving.")
        return

    archive_dir.mkdir(parents=True, exist_ok=True)

    # Files to track for rotation
    targets = [
        "DECISIONS_LOG.md",
        "TIMELINE.md",
    ]

    for target in targets:
        file_path = continuity_dir / target
        if not file_path.exists():
            continue
        size = file_path.stat().st_size  # stat once; reused in the log line below
        if size <= MAX_LOG_SIZE_BYTES:
            continue

        timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        archive_name = f"{target.replace('.md', '')}_{timestamp}.md"
        dest_path = archive_dir / archive_name

        print(f"  [>] Rotating {target} (Size: {size} bytes)")
        shutil.copy2(file_path, dest_path)

        # Reset the active file with a link to the archive
        with open(file_path, "w", encoding="utf-8") as f:
            f.write(f"# {target.replace('.md', '').replace('_', ' ')}\n")
            f.write(f"> [!] LOG ROTATED on {timestamp}. See archive/{archive_name} for history.\n\n")

    # Archive historical cycle reports, keeping only the 10 most recent.
    outputs_dir = repo_root / "outputs" / "continuity"
    if outputs_dir.exists():
        report_archive = archive_dir / "reports"
        report_archive.mkdir(exist_ok=True)

        all_reports = sorted(outputs_dir.glob("*.json"), key=os.path.getmtime)
        if len(all_reports) > 10:
            to_archive = all_reports[:-10]
            print(f"  [>] Archiving {len(to_archive)} historical reports...")
            for report in to_archive:
                shutil.move(str(report), str(report_archive / report.name))

    print("[✔] Archiving Cycle Complete.")
|
|
71
|
+
|
|
72
|
+
# CLI entry point: rotate/archive logs for the given (or current) project root.
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description="Rotate and archive continuity logs.")
    parser.add_argument("--repo-root", default=".", help="Root directory of the project")
    args = parser.parse_args()
    rotate_logs(Path(args.repo_root).resolve())
|
|
@@ -0,0 +1,339 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from copy import deepcopy
|
|
5
|
+
from datetime import datetime, timezone
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class Color:
    """Terminal color constants for standardized elite CLI feedback."""
    # ANSI SGR escape sequences; combine by concatenation (e.g. BOLD + RED).
    PURPLE = "\033[95m"
    CYAN = "\033[96m"
    DARKCYAN = "\033[36m"
    BLUE = "\033[94m"
    GREEN = "\033[92m"
    YELLOW = "\033[93m"
    RED = "\033[91m"
    WHITE = "\033[97m"
    BOLD = "\033[1m"
    UNDERLINE = "\033[4m"
    END = "\033[0m"  # resets all attributes back to the terminal default
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def echo(text: str, color: str = "") -> None:
    """Print *text*, optionally wrapped in an ANSI color sequence.

    Args:
        text: The string to print.
        color: ANSI prefix (e.g. ``Color.GREEN``); empty means plain output.
    """
    suffix = Color.END if color else ""
    print(f"{color}{text}{suffix}")
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
# Baseline configuration merged under any user-provided continuity-legacy.json.
DEFAULT_CONFIG = {
    "template_name": "CONTINUITY LEGACY",
    "project_name": "YOUR_PROJECT",
    "project_slug": "your_project",
    "context_file": "PROJECT_CONTEXT.md",
    "state_file": "STATE.json",
    "roadmap_file": "ROADMAP.md",
    "continuity_dir": ".continuity",
    "outputs_dir": "outputs/continuity",
    "external_docs": {
        "enabled": False,
        "folder_name": "YOUR_PROJECTDEV",
        "root_override": "",
    },
    "metadata": {
        "generated_by": "Continuity Legacy by Ethernium",
        # NOTE(review): "5.0.0" disagrees with the package version 2.1.0 — confirm intended.
        "tool_version": "5.0.0",
        "creator": "@Steveblackbeard",
        "include_in_reports": True,
    },
}

# Valid values for an entry's membership status in the system registry.
ALLOWED_MEMBERSHIP_STATUSES = [
    "canonical",
    "bridge",
    "archive_source",
    "external_optional",
]

# v2.1.1 Crystallization: Governance Thresholds
ENTROPY_THRESHOLD = 6.5  # Bits per character (Shannon Entropy)
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def utc_now_iso() -> str:
    """Return the current UTC moment as an ISO-8601 timestamp string."""
    moment = datetime.now(timezone.utc)
    return moment.isoformat()
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def resolve_repo_root(repo_root: str | Path | None, current_file: str | Path) -> Path:
    """Resolve the project root, preferring explicit input over discovery.

    Args:
        repo_root: Explicit root; when set it is resolved and returned as-is.
        current_file: File from which upward discovery starts when no
            explicit root is given.

    Returns:
        The first ancestor directory (including the starting directory)
        carrying a continuity marker, or a fallback ancestor otherwise.
    """
    if repo_root:
        return Path(repo_root).resolve()

    # v2.1.1: Intelligent Upward Discovery to protect hierarchy sovereignty —
    # walk toward the filesystem root and stop at the first marker.
    start_path = Path(current_file).resolve().parent
    for parent in [start_path] + list(start_path.parents):
        if (parent / "continuity-legacy.json").exists() or (parent / ".continuity").exists():
            return parent

    # Fallback: two levels above the start path, clamped so shallow paths
    # (e.g. files directly under the filesystem root) no longer raise
    # IndexError as `start_path.parents[2]` did.
    parents = start_path.parents
    if not parents:
        return start_path
    return parents[min(2, len(parents) - 1)]
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def load_config(repo_root: Path) -> dict:
    """Load continuity-legacy.json and merge it over the built-in defaults.

    Args:
        repo_root: The filesystem path to the root of the project.

    Returns:
        The merged project configuration (defaults overlaid with any
        user-provided values).
    """
    config_file = repo_root / "continuity-legacy.json"
    overrides = (
        json.loads(config_file.read_text(encoding="utf-8"))
        if config_file.exists()
        else {}
    )
    return _deep_merge(DEFAULT_CONFIG, overrides)
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
def save_config(repo_root: Path, config: dict) -> None:
    """Persist *config* as continuity-legacy.json at the repository root.

    Args:
        repo_root: The filesystem path to the root of the project.
        config: The configuration dictionary to persist.
    """
    serialized = json.dumps(config, indent=2)
    target = repo_root / "continuity-legacy.json"
    target.write_text(serialized, encoding="utf-8")
|
115
|
+
|
|
116
|
+
|
|
117
|
+
def config_path(repo_root: str | Path) -> Path:
    """Return the canonical location of the project's continuity config file.

    Bug fix: this previously returned ``continuity_legacy.json``
    (underscore), which disagreed with the ``continuity-legacy.json``
    (hyphen) filename used by load_config, save_config, and
    resolve_repo_root — so the path it produced was never the file the
    rest of the module reads and writes.
    """
    return Path(repo_root) / "continuity-legacy.json"
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
def read_text(path: str | Path) -> str:
    """Read a file as UTF-8 text, silently dropping undecodable bytes."""
    source = Path(path)
    return source.read_text(encoding="utf-8", errors="ignore")
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
def write_text(path: str | Path, content: str) -> None:
    """Write *content* to *path* (UTF-8), creating parent directories.

    Trailing whitespace is trimmed and exactly one final newline is kept.
    """
    destination = Path(path)
    destination.parent.mkdir(parents=True, exist_ok=True)
    normalized = content.rstrip() + "\n"
    destination.write_text(normalized, encoding="utf-8")
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def read_json(path: str | Path, default: Any | None = None) -> Any:
    """Load JSON from *path*; return a deep copy of *default* when absent.

    The copy protects the caller's shared default object from being
    mutated through the returned value.
    """
    source = Path(path)
    if source.exists():
        return json.loads(source.read_text(encoding="utf-8"))
    return deepcopy(default)
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
def write_json(path: str | Path, payload: Any) -> None:
    """Serialize *payload* as ASCII-safe, indented JSON at *path*.

    Parent directories are created as needed.
    """
    destination = Path(path)
    destination.parent.mkdir(parents=True, exist_ok=True)
    text = json.dumps(payload, indent=2, ensure_ascii=True)
    destination.write_text(text, encoding="utf-8")
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
def _deep_merge(base: dict[str, Any], override: dict[str, Any]) -> dict[str, Any]:
|
|
145
|
+
merged = deepcopy(base)
|
|
146
|
+
for key, value in override.items():
|
|
147
|
+
if isinstance(value, dict) and isinstance(merged.get(key), dict):
|
|
148
|
+
merged[key] = _deep_merge(merged[key], value)
|
|
149
|
+
else:
|
|
150
|
+
merged[key] = value
|
|
151
|
+
return merged
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
def build_context_snapshot(repo_root: Path, external_root_override: Path | None = None) -> dict:
    """Builds a comprehensive snapshot of the project context from multiple sources.

    Args:
        repo_root: The root directory of the project.
        external_root_override: Optional path to an external documentation root.
            NOTE(review): accepted but never used anywhere in this body —
            confirm whether external docs were meant to feed the snapshot.

    Returns:
        A dictionary containing the aggregated 'truth' of the project.
    """
    config = load_config(repo_root)
    state = read_json(state_path(repo_root, config), {})
    context_text = read_text(context_path(repo_root, config))
    roadmap_text = read_text(roadmap_path(repo_root, config))

    # Summaries are truncated to the first 2000 characters to keep the
    # snapshot compact.
    return {
        "project_name": config.get("project_name"),
        "project_slug": config.get("project_slug"),
        "phase": state.get("status", "unknown"),
        "next_actions": state.get("next_actions", []),
        "context_summary": context_text[:2000],
        "roadmap_summary": roadmap_text[:2000],
        "last_decision": state.get("last_decision", "none"),
        "timestamp": utc_now_iso(),
    }
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
def context_path(repo_root: str | Path, config: dict[str, Any] | None = None) -> Path:
    """Return the path of the project context document."""
    cfg = config or load_config(repo_root)
    return Path(repo_root) / cfg["context_file"]
|
|
184
|
+
|
|
185
|
+
|
|
186
|
+
def state_path(repo_root: str | Path, config: dict[str, Any] | None = None) -> Path:
    """Return the path of the project state JSON file."""
    cfg = config or load_config(repo_root)
    return Path(repo_root) / cfg["state_file"]
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
def roadmap_path(repo_root: str | Path, config: dict[str, Any] | None = None) -> Path:
    """Return the path of the project roadmap document."""
    cfg = config or load_config(repo_root)
    return Path(repo_root) / cfg["roadmap_file"]
|
|
194
|
+
|
|
195
|
+
|
|
196
|
+
def continuity_dir(repo_root: str | Path, config: dict[str, Any] | None = None) -> Path:
    """Return the project's continuity working directory."""
    cfg = config or load_config(repo_root)
    return Path(repo_root) / cfg["continuity_dir"]
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
def continuity_doc_path(repo_root: str | Path, name: str, config: dict[str, Any] | None = None) -> Path:
    """Return the path of a named document inside the continuity directory."""
    return continuity_dir(repo_root, config) / name
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
def outputs_dir(repo_root: str | Path, config: dict[str, Any] | None = None) -> Path:
    """Return the directory where continuity outputs/reports are written."""
    cfg = config or load_config(repo_root)
    return Path(repo_root) / cfg["outputs_dir"]
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
def bootstrap_output_path(repo_root: str | Path, config: dict[str, Any] | None = None) -> Path:
    """Return the default output path for the bootstrap context summary."""
    return outputs_dir(repo_root, config) / "context_bootstrap_summary.json"
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
def continuity_report_path(repo_root: str | Path, config: dict[str, Any] | None = None) -> Path:
    """Return the output path for the continuity cycle report."""
    return outputs_dir(repo_root, config) / "continuity_cycle_report.json"
|
|
216
|
+
|
|
217
|
+
|
|
218
|
+
def dependency_map_path(repo_root: str | Path, config: dict[str, Any] | None = None) -> Path:
    """Return the registry path of the document dependency map."""
    return continuity_dir(repo_root, config) / "registry" / "document_dependency_map.json"
|
|
220
|
+
|
|
221
|
+
|
|
222
|
+
def membership_registry_path(repo_root: str | Path, config: dict[str, Any] | None = None) -> Path:
    """Return the registry path of the system membership registry."""
    return continuity_dir(repo_root, config) / "registry" / "system_membership_registry.json"
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
def load_dependency_map(repo_root: str | Path, config: dict[str, Any] | None = None) -> dict[str, Any]:
    """Load the document dependency map, falling back to an empty map.

    The trailing ``or`` guards against a registry file that parses to a
    falsy value (e.g. ``null`` or ``{}``) so callers always get a
    well-formed structure.
    """
    return read_json(dependency_map_path(repo_root, config), {"version": "1.0", "documents": []}) or {
        "version": "1.0",
        "documents": [],
    }
|
|
231
|
+
|
|
232
|
+
|
|
233
|
+
def load_membership_registry(repo_root: str | Path, config: dict[str, Any] | None = None) -> dict[str, Any]:
    """Load the system membership registry, falling back to an empty one.

    Guards against both a missing file and a file that parses to a falsy
    value, so callers always receive a well-formed registry dict.
    """
    empty_registry = {
        "version": "1.0",
        "allowed_statuses": ALLOWED_MEMBERSHIP_STATUSES,
        "entries": [],
    }
    loaded = read_json(membership_registry_path(repo_root, config), empty_registry)
    return loaded or empty_registry
|
|
246
|
+
|
|
247
|
+
|
|
248
|
+
def is_ignored(repo_root: str | Path, rel_path: str) -> bool:
    """Check *rel_path* against the glob patterns in ``.continuityignore``.

    Blank lines and lines starting with ``#`` are treated as comments.
    Returns False when no ignore file exists.
    """
    ignore_file = Path(repo_root) / ".continuityignore"
    if not ignore_file.exists():
        return False

    import fnmatch
    raw_lines = ignore_file.read_text(encoding="utf-8").splitlines()
    patterns = [ln.strip() for ln in raw_lines if ln.strip() and not ln.startswith("#")]
    return any(fnmatch.fnmatch(rel_path, pattern) for pattern in patterns)
|
|
259
|
+
|
|
260
|
+
|
|
261
|
+
def external_root(
    repo_root: str | Path,
    config: dict[str, Any] | None = None,
    override: str | Path | None = None,
) -> Path | None:
    """Determine the external documentation root for the project.

    Precedence: explicit *override* argument, then the configured
    ``root_override``, then the conventional sibling folder — the latter
    only when external docs are enabled.  Returns None when disabled.
    """
    cfg = config or load_config(repo_root)
    docs_cfg = cfg.get("external_docs", {})

    # An explicit call-site override wins over everything.
    if override:
        return Path(override).resolve()

    # Next, a configured path override.
    configured = str(docs_cfg.get("root_override") or "").strip()
    if configured:
        return Path(configured).resolve()

    # Finally, the conventional sibling folder — only when enabled.
    if not docs_cfg.get("enabled"):
        return None
    folder = str(docs_cfg.get("folder_name") or "PROJECTDEV")
    return Path(repo_root).resolve().parent / folder
|
|
276
|
+
|
|
277
|
+
|
|
278
|
+
def calculate_sha256(path: str | Path) -> str:
    """Return the hex SHA-256 digest of a file, or "" when it does not exist.

    Used for high-fidelity DNA synthesis of tracked documents.
    """
    import hashlib

    target = Path(path)
    if not target.exists():
        return ""
    digest = hashlib.sha256(target.read_bytes())
    return digest.hexdigest()
|
|
285
|
+
|
|
286
|
+
|
|
287
|
+
def build_merkle_tree(hashes: list[str]) -> str:
    """RFC 6962 compliant Merkle root with leaf/node prefix hardening.

    Leaves are taken in deterministic (sorted) order and hashed as
    H(0x00 || data); internal nodes are H(0x01 || left || right).  The
    distinct prefixes prevent second-preimage attacks where a different
    input set could reproduce the same root hash.  An empty input yields
    the all-zero sentinel root.
    """
    import hashlib

    if not hashes:
        return "0" * 64

    sha = hashlib.sha256
    # Leaf layer: H(0x00 || data) over deterministically sorted inputs.
    level = [sha(b"\x00" + leaf.encode("utf-8")).hexdigest() for leaf in sorted(hashes)]

    while len(level) > 1:
        if len(level) % 2:
            level.append(level[-1])  # duplicate the last node on odd-sized levels
        # Internal layer: H(0x01 || left || right) over adjacent pairs.
        level = [
            sha(b"\x01" + (level[i] + level[i + 1]).encode("utf-8")).hexdigest()
            for i in range(0, len(level), 2)
        ]

    return level[0]

# Alias for backward compatibility
build_merkle_root = build_merkle_tree
|
|
319
|
+
|
|
320
|
+
|
|
321
|
+
def calculate_context_entropy(text: str) -> float:
    """Return the Shannon entropy (bits per character) of *text*.

    Measures context volatility; an empty string yields 0.0.
    """
    import math

    if not text:
        return 0.0

    # Character frequency table.
    counts: dict[str, int] = {}
    for ch in text:
        counts[ch] = counts.get(ch, 0) + 1

    # Shannon entropy: -sum(p * log2(p)) over character probabilities.
    total = len(text)
    return -sum((c / total) * math.log2(c / total) for c in counts.values())
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import argparse
|
|
4
|
+
import json
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
from core.automation_common import bootstrap_output_path, resolve_repo_root
|
|
8
|
+
from core.context_loader import build_context_snapshot, summarize_snapshot
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def parse_args() -> argparse.Namespace:
    """Parse CLI arguments for the snapshot bootstrapper.

    Flags:
        --repo-root: project root (auto-discovered when omitted).
        --output-json: override for the snapshot output file path.
        --external-root: optional external documentation root.
        --no-print: suppress the human-readable summary on stdout.
    """
    parser = argparse.ArgumentParser(description="Build a continuity snapshot for the current project.")
    parser.add_argument("--repo-root", default=None)
    parser.add_argument("--output-json", default=None)
    parser.add_argument("--external-root", default=None)
    parser.add_argument("--no-print", action="store_true")
    return parser.parse_args()
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def main() -> None:
    """Build the project context snapshot and persist it as JSON.

    Resolves the repo root, builds the snapshot, writes it to the default
    bootstrap output path or the --output-json override, and prints a
    summary unless --no-print was given.
    """
    args = parse_args()
    repo_root = resolve_repo_root(args.repo_root, __file__)
    snapshot = build_context_snapshot(repo_root, args.external_root)
    output_path = bootstrap_output_path(repo_root)
    if args.output_json:
        output_path = Path(args.output_json)
        if not output_path.is_absolute():
            # Anchor relative overrides at the repo root, not the CWD.
            output_path = repo_root / output_path
    output_path.parent.mkdir(parents=True, exist_ok=True)
    output_path.write_text(json.dumps(snapshot, indent=2, ensure_ascii=True), encoding="utf-8")
    if not args.no_print:
        print(summarize_snapshot(snapshot))
|
33
|
+
|
|
34
|
+
|
|
35
|
+
# Script entry point: build and write the snapshot when executed directly.
if __name__ == "__main__":
    main()
|