loki-mode 6.81.1 → 6.83.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/SKILL.md +2 -2
- package/VERSION +1 -1
- package/autonomy/completion-council.sh +58 -0
- package/autonomy/run.sh +334 -44
- package/dashboard/__init__.py +1 -1
- package/docs/INSTALLATION.md +1 -1
- package/mcp/__init__.py +1 -1
- package/mcp/requirements.txt +1 -0
- package/mcp/server.py +105 -0
- package/memory/managed_memory/__init__.py +113 -0
- package/memory/managed_memory/_beta.py +11 -0
- package/memory/managed_memory/client.py +210 -0
- package/memory/managed_memory/events.py +79 -0
- package/memory/managed_memory/fakes.py +120 -0
- package/memory/managed_memory/retrieve.py +347 -0
- package/memory/managed_memory/shadow_write.py +350 -0
- package/package.json +2 -2
package/mcp/server.py
CHANGED
|
@@ -1005,6 +1005,111 @@ async def loki_consolidate_memory(since_hours: int = 24) -> str:
|
|
|
1005
1005
|
return json.dumps({"error": str(e)})
|
|
1006
1006
|
|
|
1007
1007
|
|
|
1008
|
+
@mcp.tool()
async def loki_complete_task(
    completion_statement: str,
    evidence: str,
    confidence: str = "medium",
) -> str:
    """
    Declare that the current PRD / task is complete.

    Structured replacement for the legacy 'COMPLETION PROMISE FULFILLED: ...'
    prose marker. The orchestrator (run.sh) watches for the signal file this
    tool drops and ends the iteration loop gracefully.

    Args:
        completion_statement: Short statement of what is complete (for example,
            "PRD requirements implemented, all tests passing, checklist 100%").
        evidence: Concrete evidence backing the claim -- tests that passed,
            checklist items verified, files created/modified, metrics hit.
        confidence: One of 'high', 'medium', 'low' (default 'medium').
            'low' signals the orchestrator should still run the completion
            council.

    Returns:
        JSON: {"recorded": true, "path": ".loki/events.jsonl"} on success,
        {"error": "..."} otherwise.
    """
    _emit_tool_event_async(
        'loki_complete_task', 'start',
        parameters={
            'confidence': confidence,
            'statement_len': len(completion_statement or ''),
            'evidence_len': len(evidence or ''),
        },
    )

    # Reject empty / whitespace-only inputs up front.
    for field_name, field_value in (
        ("completion_statement", completion_statement),
        ("evidence", evidence),
    ):
        if not field_value or not field_value.strip():
            _emit_tool_event_async(
                'loki_complete_task', 'complete',
                result_status='error', error=f'{field_name} required')
            return json.dumps({"error": f"{field_name} is required"})

    # Unknown confidence values silently degrade to 'medium'.
    level = (confidence or 'medium').strip().lower()
    if level not in ('high', 'medium', 'low'):
        level = 'medium'

    claimed_at = datetime.now(timezone.utc).isoformat().replace('+00:00', 'Z')

    claim = {
        'type': 'task_completion_claim',
        'statement': completion_statement.strip(),
        'evidence': evidence.strip(),
        'confidence': level,
        'timestamp': claimed_at,
    }

    # The outer envelope repeats timestamp/type so this record matches the
    # shape of every other event in .loki/events.jsonl.
    envelope = {
        'timestamp': claimed_at,
        'type': 'task_completion_claim',
        'data': claim,
    }

    try:
        # Ensure .loki/ and .loki/signals/ exist under the project root.
        os.makedirs(safe_path_join('.loki'), exist_ok=True)
        os.makedirs(safe_path_join('.loki', 'signals'), exist_ok=True)

        with safe_open(safe_path_join('.loki', 'events.jsonl'), 'a') as fh:
            fh.write(json.dumps(envelope) + '\n')

        with safe_open(
                safe_path_join('.loki', 'signals', 'TASK_COMPLETION_CLAIMED'),
                'w') as fh:
            fh.write(json.dumps(claim, indent=2))

        _emit_tool_event_async(
            'loki_complete_task', 'complete', result_status='success')
        return json.dumps({
            "recorded": True,
            "path": ".loki/events.jsonl",
            "signal": ".loki/signals/TASK_COMPLETION_CLAIMED",
            "confidence": level,
        })
    except PathTraversalError as exc:
        logger.error(f"Path traversal attempt blocked in loki_complete_task: {exc}")
        _emit_tool_event_async(
            'loki_complete_task', 'complete',
            result_status='error', error='Access denied')
        return json.dumps({"error": "Access denied"})
    except Exception as exc:
        logger.error(f"loki_complete_task failed: {exc}")
        _emit_tool_event_async(
            'loki_complete_task', 'complete',
            result_status='error', error=str(exc))
        return json.dumps({"error": str(exc)})
|
1008
1113
|
# ============================================================
|
|
1009
1114
|
# RESOURCES - Data that can be read
|
|
1010
1115
|
# ============================================================
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Loki Managed Agents Memory package (v6.83.0 Phase 1).
|
|
3
|
+
|
|
4
|
+
Opt-in integration with Claude Managed Agents memory stores. All behavior in
|
|
5
|
+
this package is gated on the two environment variables:
|
|
6
|
+
|
|
7
|
+
LOKI_MANAGED_AGENTS parent switch (default: false)
|
|
8
|
+
LOKI_MANAGED_MEMORY child switch (default: false)
|
|
9
|
+
|
|
10
|
+
Both must be "true" for any API call to be issued. If the child is "true" while
|
|
11
|
+
the parent is "false", the loki runner fails fast at startup (see
|
|
12
|
+
autonomy/run.sh). If the flags are off, every exported function is a cheap
|
|
13
|
+
no-op -- importing this package will NOT trigger any network or SDK import
|
|
14
|
+
side-effects.
|
|
15
|
+
|
|
16
|
+
This package is intentionally the ONLY place in the codebase that imports the
|
|
17
|
+
`anthropic` SDK. A CI test (tests/managed_memory/test_sdk_isolation.sh)
|
|
18
|
+
enforces that invariant.
|
|
19
|
+
|
|
20
|
+
Exports:
|
|
21
|
+
is_enabled() - bool, True iff both flags are on
|
|
22
|
+
ManagedDisabled - raised when callers try to force an op while off
|
|
23
|
+
shadow_write_verdict(path) - shadow-write a council verdict JSON to the store
|
|
24
|
+
shadow_write_pattern(obj) - shadow-write a semantic pattern dict
|
|
25
|
+
retrieve_related_verdicts(q, top_k=3, store_id=None)
|
|
26
|
+
- return list of related prior verdicts
|
|
27
|
+
hydrate_patterns(mtime_floor)
|
|
28
|
+
- pull recent patterns and merge locally
|
|
29
|
+
probe_beta_header() - return the active beta header string
|
|
30
|
+
emit_managed_event(type, payload)
|
|
31
|
+
- low-level event writer
|
|
32
|
+
|
|
33
|
+
None of these functions raise on SDK or network errors. They return empty /
|
|
34
|
+
None and log one WARN line. Real SDK errors surface through
|
|
35
|
+
`.loki/managed/events.ndjson`.
|
|
36
|
+
"""
|
|
37
|
+
|
|
38
|
+
from __future__ import annotations
|
|
39
|
+
|
|
40
|
+
import os
|
|
41
|
+
from typing import Optional
|
|
42
|
+
|
|
43
|
+
from ._beta import BETA_HEADER
|
|
44
|
+
from .events import emit_managed_event
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class ManagedDisabled(Exception):
    """Raised on attempts to use managed-memory operations while the flags are off."""


def is_enabled() -> bool:
    """True iff LOKI_MANAGED_AGENTS=true AND LOKI_MANAGED_MEMORY=true."""
    # Both the parent and child switch must read exactly "true"
    # (case-insensitive, surrounding whitespace ignored).
    return all(
        os.environ.get(flag, "").strip().lower() == "true"
        for flag in ("LOKI_MANAGED_AGENTS", "LOKI_MANAGED_MEMORY")
    )
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def probe_beta_header() -> str:
    """Expose the pinned managed-agents beta header string."""
    # Trivial accessor kept as a function so callers never import _beta directly.
    return BETA_HEADER
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
# Lazy re-exports. Importing the top-level package MUST NOT import the
|
|
64
|
+
# anthropic SDK, so we defer the real imports until the functions are called.
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def shadow_write_verdict(verdict_json_path: str) -> None:
    """Shadow-write a council verdict file to the managed store (opt-in).

    No-op unless both managed-memory flags are enabled.
    """
    if not is_enabled():
        return None
    # Deferred import: keeps the SDK out of the flag-off path.
    from . import shadow_write as _shadow
    return _shadow.shadow_write_verdict(verdict_json_path)


def shadow_write_pattern(pattern: dict) -> None:
    """Shadow-write a semantic pattern dict to the managed store (opt-in).

    No-op unless both managed-memory flags are enabled.
    """
    if not is_enabled():
        return None
    # Deferred import: keeps the SDK out of the flag-off path.
    from . import shadow_write as _shadow
    return _shadow.shadow_write_pattern(pattern)
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def retrieve_related_verdicts(
    query: str,
    top_k: int = 3,
    store_id: Optional[str] = None,
):
    """Return related prior verdicts from the managed store.

    Yields [] when the feature flags are off (retrieval is opt-in); errors
    inside the retrieval layer also surface as empty results.
    """
    if not is_enabled():
        return []
    # Deferred import: keeps the SDK out of the flag-off path.
    from . import retrieve as _retrieval
    return _retrieval.retrieve_related_verdicts(query, top_k=top_k, store_id=store_id)


def hydrate_patterns(local_mtime_floor: float):
    """Pull semantic patterns updated after `local_mtime_floor` and merge locally.

    No-op (returns None) unless both managed-memory flags are enabled.
    """
    if not is_enabled():
        return None
    # Deferred import: keeps the SDK out of the flag-off path.
    from . import retrieve as _retrieval
    return _retrieval.hydrate_patterns(local_mtime_floor)
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
# Explicit public API of the managed_memory package (kept alphabetical).
__all__ = [
    "BETA_HEADER",
    "ManagedDisabled",
    "emit_managed_event",
    "hydrate_patterns",
    "is_enabled",
    "probe_beta_header",
    "retrieve_related_verdicts",
    "shadow_write_pattern",
    "shadow_write_verdict",
]
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Loki Managed Agents Memory - Beta Header (v6.83.0 Phase 1).
|
|
3
|
+
|
|
4
|
+
Single source of truth for the anthropic-beta header required by Claude
|
|
5
|
+
Managed Agents. All callers in memory/managed_memory/ import BETA_HEADER
|
|
6
|
+
from here. Update this constant to roll to a new beta.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
# Pin to the public Managed Agents beta channel current as of v6.83.0.
|
|
10
|
+
# This value is sent as the `anthropic-beta` HTTP header on every request.
|
|
11
|
+
BETA_HEADER = "managed-agents-2026-04-01"
|
|
@@ -0,0 +1,210 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Loki Managed Agents Memory - Client wrapper (v6.83.0 Phase 1).
|
|
3
|
+
|
|
4
|
+
Thin wrapper around the `anthropic` SDK. This is the ONLY file in the codebase
|
|
5
|
+
that imports `anthropic`. A CI invariant test enforces that.
|
|
6
|
+
|
|
7
|
+
The wrapper:
|
|
8
|
+
- Sets anthropic-beta: managed-agents-2026-04-01 on every request.
|
|
9
|
+
- Reads ANTHROPIC_API_KEY from env. Absence raises ManagedDisabled.
|
|
10
|
+
- Wraps every SDK call in a 10s hard timeout. Timeouts are treated as
|
|
11
|
+
recoverable: the caller decides whether to fall back.
|
|
12
|
+
- Never retries inside the client (no retry-storm). Callers implement
|
|
13
|
+
bounded retry (e.g. 409 precondition merge-and-retry-once).
|
|
14
|
+
|
|
15
|
+
NOTE on API surface: the exact Managed Agents memory endpoints are under a
|
|
16
|
+
beta channel. This wrapper implements a minimal, forward-compatible subset --
|
|
17
|
+
stores_list, stores_get_or_create, memory_create, memory_read, memories_list.
|
|
18
|
+
If the SDK version installed does not expose `beta.memory`, calls raise an
|
|
19
|
+
AttributeError which the callers translate into a ManagedDisabled/fallback.
|
|
20
|
+
|
|
21
|
+
Not tested end-to-end against a live ANTHROPIC_API_KEY in CI. Automated tests
|
|
22
|
+
use memory/managed_memory/fakes.py.
|
|
23
|
+
"""
|
|
24
|
+
|
|
25
|
+
from __future__ import annotations
|
|
26
|
+
|
|
27
|
+
import hashlib
|
|
28
|
+
import os
|
|
29
|
+
import threading
|
|
30
|
+
from typing import Any, Dict, List, Optional
|
|
31
|
+
|
|
32
|
+
from . import ManagedDisabled
|
|
33
|
+
from ._beta import BETA_HEADER
|
|
34
|
+
|
|
35
|
+
# Hard per-call timeout applied to every SDK request.
_DEFAULT_TIMEOUT = 10.0  # seconds


def _check_flags_or_raise() -> None:
    """Raise ManagedDisabled unless both managed-memory flags read 'true'."""
    flags_on = all(
        os.environ.get(flag, "").strip().lower() == "true"
        for flag in ("LOKI_MANAGED_AGENTS", "LOKI_MANAGED_MEMORY")
    )
    if not flags_on:
        raise ManagedDisabled(
            "managed memory flags are off "
            "(LOKI_MANAGED_AGENTS and LOKI_MANAGED_MEMORY must both be 'true')"
        )


def _require_api_key() -> str:
    """Return ANTHROPIC_API_KEY from the environment, or raise ManagedDisabled."""
    api_key = os.environ.get("ANTHROPIC_API_KEY", "").strip()
    if api_key:
        return api_key
    raise ManagedDisabled("ANTHROPIC_API_KEY is not set")


def compute_sha256(content: str) -> str:
    """Stable content hash used as an optimistic precondition on writes."""
    hasher = hashlib.sha256()
    hasher.update(content.encode("utf-8"))
    return hasher.hexdigest()
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
class ManagedClient:
    """
    Thin SDK wrapper. Instantiating this class imports anthropic and validates
    credentials; callers should construct it lazily inside flag-gated paths.

    Invariants:
      - every request carries the pinned `anthropic-beta` header,
      - every SDK call shares one hard timeout (default 10s),
      - no internal retries; callers implement bounded retry themselves
        (e.g. the 409 precondition merge-and-retry-once).
    """

    def __init__(self, timeout: float = _DEFAULT_TIMEOUT) -> None:
        """Validate flags and credentials, then build the SDK client.

        Raises:
            ManagedDisabled: if the feature flags are off, ANTHROPIC_API_KEY
                is unset, or the anthropic SDK is not installed.
        """
        _check_flags_or_raise()
        api_key = _require_api_key()
        # Import lazily so the top-level package stays SDK-free.
        try:
            import anthropic  # noqa: F401 (imported for side-effects + symbol)
        except ImportError as e:  # pragma: no cover - import surface
            # Chain the cause so the original ImportError stays inspectable.
            raise ManagedDisabled(f"anthropic SDK not installed: {e}") from e

        self._anthropic = anthropic
        self._client = anthropic.Anthropic(
            api_key=api_key,
            timeout=timeout,
            default_headers={"anthropic-beta": BETA_HEADER},
        )
        self._timeout = timeout

    # ---------- helpers -------------------------------------------------

    def _beta(self):
        """Return the beta namespace, if the SDK exposes it.

        Newer SDK versions expose `client.beta.memory.*`. If the attribute
        path is missing we raise ManagedDisabled so callers can fall back.
        """
        beta = getattr(self._client, "beta", None)
        if beta is None:
            raise ManagedDisabled("anthropic SDK missing `beta` namespace")
        return beta

    # ---------- stores --------------------------------------------------

    def stores_list(self) -> List[Dict[str, Any]]:
        """List managed memory stores on this account (may be empty).

        Raises:
            ManagedDisabled: if this SDK build has no stores API.
        """
        beta = self._beta()
        stores = getattr(beta, "memory_stores", None) or getattr(beta, "stores", None)
        if stores is None or not hasattr(stores, "list"):
            raise ManagedDisabled("memory_stores API not available in SDK")
        result = stores.list()
        # SDK returns a pydantic model; normalize to list of dicts.
        data = getattr(result, "data", result)
        return [self._to_dict(x) for x in (data or [])]

    def stores_get_or_create(
        self, name: str, description: str = "", scope: str = "project"
    ) -> Dict[str, Any]:
        """Return existing store with `name` or create it.

        NOTE(review): list-then-create is not atomic; a concurrent creator can
        win the race and produce a duplicate-name store.
        """
        existing = [s for s in self.stores_list() if s.get("name") == name]
        if existing:
            return existing[0]
        beta = self._beta()
        stores = getattr(beta, "memory_stores", None) or getattr(beta, "stores", None)
        if stores is None or not hasattr(stores, "create"):
            raise ManagedDisabled("memory_stores.create not available in SDK")
        created = stores.create(name=name, description=description, scope=scope)
        return self._to_dict(created)

    # ---------- memories ------------------------------------------------

    def memory_create(
        self,
        store_id: str,
        path: str,
        content: str,
        sha256_precondition: Optional[str] = None,
    ) -> Dict[str, Any]:
        """
        Create a memory entry at `path` in `store_id`.

        When sha256_precondition is supplied, this is an optimistic
        concurrency hint: if the store already holds a different hash the
        SDK is expected to surface a 409-shaped error. Callers handle the
        409 by re-reading, merging, and retrying once.
        """
        beta = self._beta()
        memories = getattr(beta, "memories", None)
        if memories is None or not hasattr(memories, "create"):
            raise ManagedDisabled("memories.create not available in SDK")
        kwargs: Dict[str, Any] = {
            "store_id": store_id,
            "path": path,
            "content": content,
        }
        if sha256_precondition:
            kwargs["if_match_sha256"] = sha256_precondition
        created = memories.create(**kwargs)
        return self._to_dict(created)

    def memory_read(self, store_id: str, memory_id: str) -> Dict[str, Any]:
        """Fetch one memory entry by id from `store_id`."""
        beta = self._beta()
        memories = getattr(beta, "memories", None)
        if memories is None or not hasattr(memories, "retrieve"):
            raise ManagedDisabled("memories.retrieve not available in SDK")
        got = memories.retrieve(store_id=store_id, memory_id=memory_id)
        return self._to_dict(got)

    def memories_list(
        self, store_id: str, path_prefix: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        """List entries in `store_id`, optionally filtered by path prefix."""
        beta = self._beta()
        memories = getattr(beta, "memories", None)
        if memories is None or not hasattr(memories, "list"):
            raise ManagedDisabled("memories.list not available in SDK")
        kwargs: Dict[str, Any] = {"store_id": store_id}
        if path_prefix:
            kwargs["path_prefix"] = path_prefix
        result = memories.list(**kwargs)
        data = getattr(result, "data", result)
        return [self._to_dict(x) for x in (data or [])]

    # ---------- internal ------------------------------------------------

    @staticmethod
    def _to_dict(obj: Any) -> Dict[str, Any]:
        """Best-effort pydantic-or-dict to dict conversion."""
        if isinstance(obj, dict):
            return obj
        to_dict = getattr(obj, "model_dump", None) or getattr(obj, "dict", None)
        if callable(to_dict):
            try:
                return to_dict()
            except TypeError:
                # BUG FIX: the original except-branch retried the identical
                # call, which simply re-raised the same TypeError and made
                # the string fallback unreachable. Fall through instead.
                pass
        return {"raw": str(obj)}
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
# Optional helper for callers that want a thread-safe singleton.
_singleton: Optional[ManagedClient] = None
_singleton_lock = threading.Lock()


def get_client() -> ManagedClient:
    """Return a lazily-constructed singleton. Raises ManagedDisabled if off."""
    global _singleton
    with _singleton_lock:
        client = _singleton
        if client is None:
            client = ManagedClient()
            _singleton = client
    return client


def reset_client() -> None:
    """Test hook: drop the cached singleton so tests can swap implementations."""
    global _singleton
    with _singleton_lock:
        _singleton = None
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Loki Managed Agents Memory - Event emission (v6.83.0 Phase 1).
|
|
3
|
+
|
|
4
|
+
Appends structured JSONL events to .loki/managed/events.ndjson. Single-writer
|
|
5
|
+
convention: only code in memory/managed_memory/ writes to this file. Rotates
|
|
6
|
+
when the file exceeds 10MB.
|
|
7
|
+
|
|
8
|
+
Events are used to record fallbacks, shadow-write successes/failures, and
|
|
9
|
+
retrieve hits. The file is safe to tail for observability during development.
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
from __future__ import annotations
|
|
13
|
+
|
|
14
|
+
import json
|
|
15
|
+
import os
|
|
16
|
+
from datetime import datetime, timezone
|
|
17
|
+
from pathlib import Path
|
|
18
|
+
from typing import Any, Dict, Optional
|
|
19
|
+
|
|
20
|
+
# Rotate the events file once it reaches 10 MB. Rotation is a plain rename
# that appends a UTC "%Y%m%d-%H%M%S" suffix to the current file name.
_ROTATE_BYTES = 10 * 1024 * 1024


def _events_dir(target_dir: Optional[str] = None) -> Path:
    """Resolve the .loki/managed directory under the project root."""
    root = target_dir or os.environ.get("LOKI_TARGET_DIR") or os.getcwd()
    return Path(root) / ".loki" / "managed"


def _maybe_rotate(path: Path) -> None:
    """Rename the events file aside once it has grown past the threshold."""
    try:
        oversized = path.exists() and path.stat().st_size >= _ROTATE_BYTES
    except OSError:
        # stat failed: skip rotation; the next write retries.
        return
    if not oversized:
        return
    stamp = datetime.now(timezone.utc).strftime("%Y%m%d-%H%M%S")
    try:
        # Best-effort rename; if another writer got there first, move on.
        path.rename(path.with_suffix(path.suffix + f".{stamp}"))
    except OSError:
        pass


def emit_managed_event(
    event_type: str,
    payload: Dict[str, Any],
    target_dir: Optional[str] = None,
) -> None:
    """
    Append a managed-memory event to .loki/managed/events.ndjson.

    Never raises: any I/O error makes this a silent no-op, which callers rely
    on to keep the main RARV-C loop unblocked.

    Args:
        event_type: short tag, e.g. "managed_agents_fallback",
            "managed_memory_retrieve", "managed_memory_shadow_write".
        payload: JSON-serializable context for the event.
        target_dir: optional project root override. Defaults to
            LOKI_TARGET_DIR env or cwd.
    """
    try:
        managed_dir = _events_dir(target_dir)
        managed_dir.mkdir(parents=True, exist_ok=True)
        events_file = managed_dir / "events.ndjson"
        _maybe_rotate(events_file)

        line = json.dumps(
            {
                "ts": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
                "type": event_type,
                "payload": payload,
            },
            default=str,
        )
        # Append one JSONL record per event.
        with open(events_file, "a", encoding="utf-8") as fh:
            fh.write(line + "\n")
    except Exception:
        # Never raise from the event emitter.
        return
|
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Loki Managed Agents Memory - FakeManagedClient for CI tests (v6.83.0 Phase 1).
|
|
3
|
+
|
|
4
|
+
Implements the same surface as ManagedClient but keeps state in memory and
|
|
5
|
+
returns deterministic responses keyed on input path. Used by
|
|
6
|
+
tests/managed_memory/test_*_mock.py so CI does not call the real API.
|
|
7
|
+
|
|
8
|
+
This file is importable without the `anthropic` SDK installed.
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from __future__ import annotations
|
|
12
|
+
|
|
13
|
+
import hashlib
|
|
14
|
+
from typing import Any, Dict, List, Optional
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class _Conflict(Exception):
    """Simulated HTTP 409, raised by memory_create on a sha256 mismatch."""

    # Mirrors the status attribute callers would inspect on a real SDK error.
    status_code = 409
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class FakeManagedClient:
|
|
24
|
+
"""In-memory fake matching ManagedClient's public interface."""
|
|
25
|
+
|
|
26
|
+
def __init__(self) -> None:
|
|
27
|
+
self.stores: Dict[str, Dict[str, Any]] = {}
|
|
28
|
+
# memories keyed by (store_id, path) -> dict(content, sha, version)
|
|
29
|
+
self.memories: Dict[tuple, Dict[str, Any]] = {}
|
|
30
|
+
self.calls: List[Dict[str, Any]] = []
|
|
31
|
+
|
|
32
|
+
# ---------- stores --------------------------------------------------
|
|
33
|
+
|
|
34
|
+
def stores_list(self) -> List[Dict[str, Any]]:
|
|
35
|
+
self.calls.append({"op": "stores_list"})
|
|
36
|
+
return list(self.stores.values())
|
|
37
|
+
|
|
38
|
+
def stores_get_or_create(
|
|
39
|
+
self, name: str, description: str = "", scope: str = "project"
|
|
40
|
+
) -> Dict[str, Any]:
|
|
41
|
+
self.calls.append({"op": "stores_get_or_create", "name": name})
|
|
42
|
+
for s in self.stores.values():
|
|
43
|
+
if s.get("name") == name:
|
|
44
|
+
return s
|
|
45
|
+
store_id = f"store_{len(self.stores) + 1:04d}"
|
|
46
|
+
store = {
|
|
47
|
+
"id": store_id,
|
|
48
|
+
"name": name,
|
|
49
|
+
"description": description,
|
|
50
|
+
"scope": scope,
|
|
51
|
+
}
|
|
52
|
+
self.stores[store_id] = store
|
|
53
|
+
return store
|
|
54
|
+
|
|
55
|
+
# ---------- memories ------------------------------------------------
|
|
56
|
+
|
|
57
|
+
def memory_create(
|
|
58
|
+
self,
|
|
59
|
+
store_id: str,
|
|
60
|
+
path: str,
|
|
61
|
+
content: str,
|
|
62
|
+
sha256_precondition: Optional[str] = None,
|
|
63
|
+
) -> Dict[str, Any]:
|
|
64
|
+
self.calls.append(
|
|
65
|
+
{
|
|
66
|
+
"op": "memory_create",
|
|
67
|
+
"store_id": store_id,
|
|
68
|
+
"path": path,
|
|
69
|
+
"sha256_precondition": sha256_precondition,
|
|
70
|
+
}
|
|
71
|
+
)
|
|
72
|
+
key = (store_id, path)
|
|
73
|
+
sha = hashlib.sha256(content.encode("utf-8")).hexdigest()
|
|
74
|
+
existing = self.memories.get(key)
|
|
75
|
+
if existing is not None and sha256_precondition is not None:
|
|
76
|
+
if existing["sha"] != sha256_precondition:
|
|
77
|
+
# Simulated 409: caller must re-read + merge + retry.
|
|
78
|
+
raise _Conflict(
|
|
79
|
+
f"sha256 mismatch for {path}: "
|
|
80
|
+
f"have={existing['sha']}, want={sha256_precondition}"
|
|
81
|
+
)
|
|
82
|
+
version = (existing.get("version", 0) + 1) if existing else 1
|
|
83
|
+
entry = {
|
|
84
|
+
"id": f"mem_{abs(hash(key)) % 10**8:08d}",
|
|
85
|
+
"store_id": store_id,
|
|
86
|
+
"path": path,
|
|
87
|
+
"content": content,
|
|
88
|
+
"sha": sha,
|
|
89
|
+
"version": version,
|
|
90
|
+
}
|
|
91
|
+
self.memories[key] = entry
|
|
92
|
+
return entry
|
|
93
|
+
|
|
94
|
+
def memory_read(self, store_id: str, memory_id: str) -> Dict[str, Any]:
|
|
95
|
+
self.calls.append({"op": "memory_read", "memory_id": memory_id})
|
|
96
|
+
for entry in self.memories.values():
|
|
97
|
+
if entry.get("id") == memory_id:
|
|
98
|
+
return entry
|
|
99
|
+
raise KeyError(memory_id)
|
|
100
|
+
|
|
101
|
+
def memories_list(
|
|
102
|
+
self, store_id: str, path_prefix: Optional[str] = None
|
|
103
|
+
) -> List[Dict[str, Any]]:
|
|
104
|
+
self.calls.append(
|
|
105
|
+
{"op": "memories_list", "store_id": store_id, "path_prefix": path_prefix}
|
|
106
|
+
)
|
|
107
|
+
out = []
|
|
108
|
+
for (sid, path), entry in self.memories.items():
|
|
109
|
+
if sid != store_id:
|
|
110
|
+
continue
|
|
111
|
+
if path_prefix and not path.startswith(path_prefix):
|
|
112
|
+
continue
|
|
113
|
+
out.append(entry)
|
|
114
|
+
return out
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
# Helper exported for tests that need to simulate the 409 without
# importing the private _Conflict class directly.
def make_conflict_error() -> Exception:
    """Return a fresh simulated-409 exception instance."""
    return _Conflict("forced conflict for tests")
|