@sesamespace/hivemind 0.1.0
- package/README.md +86 -0
- package/config/TEAM-CHARTER.md +87 -0
- package/config/default.toml +39 -0
- package/dist/__tests__/fleet-integration.test.d.ts +9 -0
- package/dist/__tests__/fleet-integration.test.d.ts.map +1 -0
- package/dist/__tests__/fleet-integration.test.js +201 -0
- package/dist/__tests__/fleet-integration.test.js.map +1 -0
- package/dist/__tests__/fleet.test.d.ts +7 -0
- package/dist/__tests__/fleet.test.d.ts.map +1 -0
- package/dist/__tests__/fleet.test.js +171 -0
- package/dist/__tests__/fleet.test.js.map +1 -0
- package/dist/__tests__/integration.test.d.ts +2 -0
- package/dist/__tests__/integration.test.d.ts.map +1 -0
- package/dist/__tests__/integration.test.js +348 -0
- package/dist/__tests__/integration.test.js.map +1 -0
- package/dist/agent.d.ts +27 -0
- package/dist/agent.d.ts.map +1 -0
- package/dist/agent.js +217 -0
- package/dist/agent.js.map +1 -0
- package/dist/commands/fleet.d.ts +13 -0
- package/dist/commands/fleet.d.ts.map +1 -0
- package/dist/commands/fleet.js +193 -0
- package/dist/commands/fleet.js.map +1 -0
- package/dist/commands/init.d.ts +2 -0
- package/dist/commands/init.d.ts.map +1 -0
- package/dist/commands/init.js +170 -0
- package/dist/commands/init.js.map +1 -0
- package/dist/commands/start.d.ts +2 -0
- package/dist/commands/start.d.ts.map +1 -0
- package/dist/commands/start.js +39 -0
- package/dist/commands/start.js.map +1 -0
- package/dist/config.d.ts +44 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +73 -0
- package/dist/config.js.map +1 -0
- package/dist/context.d.ts +50 -0
- package/dist/context.d.ts.map +1 -0
- package/dist/context.js +153 -0
- package/dist/context.js.map +1 -0
- package/dist/fleet/fleet-manager.d.ts +86 -0
- package/dist/fleet/fleet-manager.d.ts.map +1 -0
- package/dist/fleet/fleet-manager.js +298 -0
- package/dist/fleet/fleet-manager.js.map +1 -0
- package/dist/fleet/memory-sync.d.ts +91 -0
- package/dist/fleet/memory-sync.d.ts.map +1 -0
- package/dist/fleet/memory-sync.js +292 -0
- package/dist/fleet/memory-sync.js.map +1 -0
- package/dist/fleet/primary-client.d.ts +49 -0
- package/dist/fleet/primary-client.d.ts.map +1 -0
- package/dist/fleet/primary-client.js +222 -0
- package/dist/fleet/primary-client.js.map +1 -0
- package/dist/fleet/worker-protocol.d.ts +125 -0
- package/dist/fleet/worker-protocol.d.ts.map +1 -0
- package/dist/fleet/worker-protocol.js +27 -0
- package/dist/fleet/worker-protocol.js.map +1 -0
- package/dist/fleet/worker-server.d.ts +53 -0
- package/dist/fleet/worker-server.d.ts.map +1 -0
- package/dist/fleet/worker-server.js +191 -0
- package/dist/fleet/worker-server.js.map +1 -0
- package/dist/index.d.ts +26 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +16 -0
- package/dist/index.js.map +1 -0
- package/dist/llm-client.d.ts +24 -0
- package/dist/llm-client.d.ts.map +1 -0
- package/dist/llm-client.js +40 -0
- package/dist/llm-client.js.map +1 -0
- package/dist/main.d.ts +3 -0
- package/dist/main.d.ts.map +1 -0
- package/dist/main.js +41 -0
- package/dist/main.js.map +1 -0
- package/dist/memory-client.d.ts +88 -0
- package/dist/memory-client.d.ts.map +1 -0
- package/dist/memory-client.js +185 -0
- package/dist/memory-client.js.map +1 -0
- package/dist/pipeline.d.ts +2 -0
- package/dist/pipeline.d.ts.map +1 -0
- package/dist/pipeline.js +125 -0
- package/dist/pipeline.js.map +1 -0
- package/dist/prompt.d.ts +6 -0
- package/dist/prompt.d.ts.map +1 -0
- package/dist/prompt.js +75 -0
- package/dist/prompt.js.map +1 -0
- package/dist/sesame.d.ts +33 -0
- package/dist/sesame.d.ts.map +1 -0
- package/dist/sesame.js +67 -0
- package/dist/sesame.js.map +1 -0
- package/dist/start.d.ts +3 -0
- package/dist/start.d.ts.map +1 -0
- package/dist/start.js +20 -0
- package/dist/start.js.map +1 -0
- package/dist/task-engine.d.ts +32 -0
- package/dist/task-engine.d.ts.map +1 -0
- package/dist/task-engine.js +80 -0
- package/dist/task-engine.js.map +1 -0
- package/dist/worker.d.ts +73 -0
- package/dist/worker.d.ts.map +1 -0
- package/dist/worker.js +279 -0
- package/dist/worker.js.map +1 -0
- package/install.sh +186 -0
- package/package.json +36 -0
- package/packages/memory/Cargo.lock +6480 -0
- package/packages/memory/Cargo.toml +21 -0
- package/packages/memory/src/src/context.rs +179 -0
- package/packages/memory/src/src/embeddings.rs +51 -0
- package/packages/memory/src/src/main.rs +626 -0
- package/packages/memory/src/src/promotion.rs +637 -0
- package/packages/memory/src/src/scoring.rs +131 -0
- package/packages/memory/src/src/store.rs +460 -0
- package/packages/memory/src/src/tasks.rs +321 -0
package/packages/memory/src/src/main.rs
@@ -0,0 +1,626 @@
mod context;
mod embeddings;
mod promotion;
mod scoring;
mod store;
mod tasks;

use axum::{
    extract::{Path, Query, State},
    http::StatusCode,
    routing::{delete, get, patch, post},
    Json, Router,
};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use tower_http::trace::TraceLayer;
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};

use crate::context::ContextStore;
use crate::embeddings::OllamaClient;
use crate::promotion::PromotionEngine;
use crate::store::{Episode, EpisodeInput, MemoryStore};
use crate::tasks::{TaskInput, TaskRecord, TaskStore, TaskUpdate};

use crate::scoring::ScoringConfig;

#[derive(Clone)]
struct AppState {
    store: Arc<MemoryStore>,
    ollama: Arc<OllamaClient>,
    context_store: Arc<ContextStore>,
    promotion_engine: Arc<PromotionEngine>,
    task_store: Arc<TaskStore>,
    scoring_config: Arc<ScoringConfig>,
}

#[derive(Deserialize)]
struct SearchQuery {
    q: String,
    context: Option<String>,
    limit: Option<usize>,
}

#[derive(Serialize)]
struct SearchResult {
    episodes: Vec<ScoredEpisode>,
}

#[derive(Serialize)]
struct ScoredEpisode {
    #[serde(flatten)]
    episode: Episode,
    score: f64,
    #[serde(skip_serializing_if = "Option::is_none")]
    source_context: Option<String>,
}

#[derive(Deserialize)]
struct CreateContextRequest {
    name: String,
    #[serde(default)]
    description: String,
}

#[derive(Serialize)]
struct HealthResponse {
    status: String,
    version: String,
}

#[derive(Serialize)]
struct ContextInfoResponse {
    name: String,
    description: String,
    created_at: String,
    episode_count: usize,
}

#[derive(Serialize)]
struct ListContextsResponse {
    contexts: Vec<ContextInfoResponse>,
}

#[derive(Serialize)]
struct CrossContextResult {
    context: String,
    episodes: Vec<ScoredEpisode>,
}

#[derive(Serialize)]
struct CrossContextResponse {
    results: Vec<CrossContextResult>,
}

#[derive(Deserialize)]
struct ShareEpisodeRequest {
    target_context: String,
}

#[derive(Deserialize)]
struct CoAccessRequest {
    episode_ids: Vec<String>,
}

#[derive(Serialize)]
struct PromotionResponse {
    promoted_count: usize,
    episode_ids: Vec<String>,
}

#[derive(Deserialize)]
struct PromotionQuery {
    context: Option<String>,
}

#[derive(Deserialize)]
struct TaskQuery {
    context: String,
    status: Option<String>,
}

#[derive(Deserialize)]
struct NextTaskQuery {
    context: String,
}

#[derive(Serialize)]
struct ListTasksResponse {
    tasks: Vec<TaskRecord>,
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    tracing_subscriber::registry()
        .with(
            tracing_subscriber::EnvFilter::try_from_default_env()
                .unwrap_or_else(|_| "hivemind_memory=debug,tower_http=debug".into()),
        )
        .with(tracing_subscriber::fmt::layer())
        .init();

    let ollama_url =
        std::env::var("OLLAMA_URL").unwrap_or_else(|_| "http://localhost:11434".to_string());
    let embedding_model =
        std::env::var("EMBEDDING_MODEL").unwrap_or_else(|_| "nomic-embed-text".to_string());
    let db_path = std::env::var("DB_PATH").unwrap_or_else(|_| "./data/lancedb".to_string());
    let port: u16 = std::env::var("PORT")
        .unwrap_or_else(|_| "3434".to_string())
        .parse()?;

    let store = MemoryStore::new(&db_path).await?;
    let ollama = OllamaClient::new(&ollama_url, &embedding_model);
    let context_store = ContextStore::new(store.get_connection()).await?;
    let promotion_engine = PromotionEngine::new(store.get_connection()).await?;
    let task_store = TaskStore::new(store.get_connection()).await?;

    let scoring_config = ScoringConfig::new();

    let state = AppState {
        store: Arc::new(store),
        ollama: Arc::new(ollama),
        context_store: Arc::new(context_store),
        promotion_engine: Arc::new(promotion_engine),
        task_store: Arc::new(task_store),
        scoring_config: Arc::new(scoring_config),
    };

    let app = Router::new()
        // Health
        .route("/health", get(health))
        // Episodes
        .route("/episodes", post(create_episode))
        .route("/episodes/{id}/access", post(record_access))
        .route("/episodes/{id}/share", post(share_episode))
        .route("/episodes/co-access", post(record_co_access))
        // Search
        .route("/search", get(search_episodes))
        .route("/search/cross-context", get(search_cross_context))
        // Contexts
        .route("/contexts", get(list_contexts))
        .route("/contexts", post(create_context))
        .route("/contexts/{name}", get(get_context_episodes))
        .route("/contexts/{name}", delete(delete_context))
        // Scoring config
        .route("/contexts/{name}/scoring", post(set_context_scoring))
        // Promotion
        .route("/promotion/run", post(run_promotion))
        .route("/promotion/l3", get(get_l3_knowledge))
        // Tasks
        .route("/tasks", get(list_tasks))
        .route("/tasks", post(create_task))
        .route("/tasks/next", get(get_next_task))
        .route("/tasks/{id}", patch(update_task))
        .layer(TraceLayer::new_for_http())
        .with_state(state);

    let listener = tokio::net::TcpListener::bind(format!("0.0.0.0:{}", port)).await?;
    tracing::info!("Memory daemon listening on port {}", port);
    axum::serve(listener, app).await?;

    Ok(())
}

async fn health() -> Json<HealthResponse> {
    Json(HealthResponse {
        status: "ok".to_string(),
        version: env!("CARGO_PKG_VERSION").to_string(),
    })
}

// === Episode endpoints ===

async fn create_episode(
    State(state): State<AppState>,
    Json(input): Json<EpisodeInput>,
) -> Result<Json<Episode>, (StatusCode, String)> {
    let content_for_check = input.content.clone();
    let context_for_check = input.context_name.clone().unwrap_or_else(|| "global".to_string());

    let embedding = state
        .ollama
        .embed(&input.content)
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Embedding failed: {e}")))?;

    let episode = state
        .store
        .insert_episode(input, embedding.clone())
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Store failed: {e}")))?;

    // Check for auto-promotion to Global: if similar content exists in 3+ contexts
    if context_for_check != "global" {
        let state_clone = state.clone();
        let content = content_for_check;
        let embed = embedding;
        tokio::spawn(async move {
            if let Ok(contexts) = state_clone
                .promotion_engine
                .check_cross_context_promotion(&content)
                .await
            {
                let unique_contexts: std::collections::HashSet<&str> =
                    contexts.iter().map(|s| s.as_str()).collect();
                if unique_contexts.len() >= 3 {
                    // Auto-promote to global context
                    let global_input = EpisodeInput {
                        context_name: Some("global".to_string()),
                        role: "system".to_string(),
                        content: content.clone(),
                    };
                    if let Err(e) = state_clone.store.insert_episode(global_input, embed).await {
                        tracing::warn!("Auto-promotion to global failed: {}", e);
                    } else {
                        tracing::info!(
                            "Auto-promoted episode to global (referenced in {} contexts)",
                            unique_contexts.len()
                        );
                    }
                }
            }
        });
    }

    Ok(Json(episode))
}

async fn record_access(
    State(state): State<AppState>,
    Path(id): Path<String>,
) -> Result<StatusCode, (StatusCode, String)> {
    state
        .promotion_engine
        .record_access(&id)
        .await
        .map_err(|e| {
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                format!("Record access failed: {e}"),
            )
        })?;

    Ok(StatusCode::NO_CONTENT)
}

async fn share_episode(
    State(state): State<AppState>,
    Path(id): Path<String>,
    Json(req): Json<ShareEpisodeRequest>,
) -> Result<StatusCode, (StatusCode, String)> {
    // Get the original episode
    let episode = state
        .store
        .get_by_id(&id)
        .await
        .map_err(|e| {
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                format!("Get episode failed: {e}"),
            )
        })?
        .ok_or_else(|| (StatusCode::NOT_FOUND, "Episode not found".to_string()))?;

    // Re-embed and store in target context
    let embedding = state
        .ollama
        .embed(&episode.content)
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Embedding failed: {e}")))?;

    let input = EpisodeInput {
        context_name: Some(req.target_context),
        role: episode.role,
        content: episode.content,
    };

    state
        .store
        .insert_episode(input, embedding)
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Store failed: {e}")))?;

    Ok(StatusCode::CREATED)
}

async fn record_co_access(
    State(state): State<AppState>,
    Json(req): Json<CoAccessRequest>,
) -> Result<StatusCode, (StatusCode, String)> {
    state
        .promotion_engine
        .record_co_access(&req.episode_ids)
        .await
        .map_err(|e| {
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                format!("Co-access failed: {e}"),
            )
        })?;

    Ok(StatusCode::NO_CONTENT)
}

// === Search endpoints ===

async fn search_episodes(
    State(state): State<AppState>,
    Query(query): Query<SearchQuery>,
) -> Result<Json<SearchResult>, (StatusCode, String)> {
    let embedding = state
        .ollama
        .embed(&query.q)
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Embedding failed: {e}")))?;

    let limit = query.limit.unwrap_or(10);
    let context = query.context.as_deref().unwrap_or("global");

    let results = state
        .store
        .search(embedding, context, limit)
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Search failed: {e}")))?;

    let half_life = state.scoring_config.get_half_life(context);
    let scored: Vec<ScoredEpisode> = results
        .into_iter()
        .map(|(episode, distance)| {
            let score = scoring::combined_score(&episode.timestamp, distance, half_life);
            ScoredEpisode {
                episode,
                score,
                source_context: None,
            }
        })
        .collect();

    Ok(Json(SearchResult { episodes: scored }))
}

async fn search_cross_context(
    State(state): State<AppState>,
    Query(query): Query<SearchQuery>,
) -> Result<Json<CrossContextResponse>, (StatusCode, String)> {
    let embedding = state
        .ollama
        .embed(&query.q)
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Embedding failed: {e}")))?;

    let limit = query.limit.unwrap_or(10);

    // Search across all contexts
    let results = state
        .store
        .search_all(embedding, limit)
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Search failed: {e}")))?;

    // Group by context
    let mut by_context: std::collections::HashMap<String, Vec<ScoredEpisode>> =
        std::collections::HashMap::new();

    for (episode, distance) in results {
        let ctx = episode.context_name.clone();
        let half_life = state.scoring_config.get_half_life(&ctx);
        let score = scoring::combined_score(&episode.timestamp, distance, half_life);
        by_context
            .entry(ctx.clone())
            .or_default()
            .push(ScoredEpisode {
                episode,
                score,
                source_context: Some(ctx),
            });
    }

    let results: Vec<CrossContextResult> = by_context
        .into_iter()
        .map(|(context, episodes)| CrossContextResult { context, episodes })
        .collect();

    Ok(Json(CrossContextResponse { results }))
}

// === Context endpoints ===

async fn list_contexts(
    State(state): State<AppState>,
) -> Result<Json<ListContextsResponse>, (StatusCode, String)> {
    let contexts = state
        .context_store
        .list_contexts()
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("List failed: {e}")))?;

    let mut infos = Vec::new();
    for ctx in contexts {
        let count = state
            .store
            .count_by_context(&ctx.name)
            .await
            .unwrap_or(0);
        infos.push(ContextInfoResponse {
            name: ctx.name,
            description: ctx.description,
            created_at: ctx.created_at,
            episode_count: count,
        });
    }

    Ok(Json(ListContextsResponse { contexts: infos }))
}

async fn create_context(
    State(state): State<AppState>,
    Json(req): Json<CreateContextRequest>,
) -> Result<(StatusCode, Json<serde_json::Value>), (StatusCode, String)> {
    state
        .context_store
        .create_context(&req.name, &req.description)
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Failed: {e}")))?;

    Ok((
        StatusCode::CREATED,
        Json(serde_json::json!({ "name": req.name, "created": true })),
    ))
}

async fn get_context_episodes(
    State(state): State<AppState>,
    Path(name): Path<String>,
) -> Result<Json<Vec<Episode>>, (StatusCode, String)> {
    let episodes = state
        .store
        .get_by_context(&name)
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Query failed: {e}")))?;

    Ok(Json(episodes))
}

async fn delete_context(
    State(state): State<AppState>,
    Path(name): Path<String>,
) -> Result<StatusCode, (StatusCode, String)> {
    state
        .context_store
        .delete_context(&name)
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Delete failed: {e}")))?;

    // Also delete episodes in that context
    state
        .store
        .delete_by_context(&name)
        .await
        .map_err(|e| {
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                format!("Delete episodes failed: {e}"),
            )
        })?;

    Ok(StatusCode::NO_CONTENT)
}

// === Scoring config endpoint ===

#[derive(Deserialize)]
struct ScoringConfigRequest {
    half_life_hours: f64,
}

async fn set_context_scoring(
    State(state): State<AppState>,
    Path(name): Path<String>,
    Json(req): Json<ScoringConfigRequest>,
) -> StatusCode {
    state.scoring_config.set_half_life(&name, req.half_life_hours);
    tracing::info!("Set half-life for context '{}' to {} hours", name, req.half_life_hours);
    StatusCode::NO_CONTENT
}

// === Promotion endpoint ===

async fn run_promotion(
    State(state): State<AppState>,
    Query(query): Query<PromotionQuery>,
) -> Result<Json<PromotionResponse>, (StatusCode, String)> {
    let promoted = state
        .promotion_engine
        .run_promotion(query.context.as_deref())
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Promotion failed: {e}")))?;

    Ok(Json(PromotionResponse {
        promoted_count: promoted.len(),
        episode_ids: promoted,
    }))
}

#[derive(Deserialize)]
struct L3Query {
    context: String,
}

#[derive(Serialize)]
struct L3Response {
    entries: Vec<promotion::L3Entry>,
}

async fn get_l3_knowledge(
    State(state): State<AppState>,
    Query(query): Query<L3Query>,
) -> Result<Json<L3Response>, (StatusCode, String)> {
    let entries = state
        .promotion_engine
        .get_l3_entries(&query.context)
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("L3 query failed: {e}")))?;

    Ok(Json(L3Response { entries }))
}

// === Task endpoints ===

async fn create_task(
    State(state): State<AppState>,
    Json(input): Json<TaskInput>,
) -> Result<(StatusCode, Json<TaskRecord>), (StatusCode, String)> {
    let task = state
        .task_store
        .create_task(input)
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Create task failed: {e}")))?;

    Ok((StatusCode::CREATED, Json(task)))
}

async fn list_tasks(
    State(state): State<AppState>,
    Query(query): Query<TaskQuery>,
) -> Result<Json<ListTasksResponse>, (StatusCode, String)> {
    let tasks = state
        .task_store
        .list_tasks(&query.context, query.status.as_deref())
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("List tasks failed: {e}")))?;

    Ok(Json(ListTasksResponse { tasks }))
}

async fn get_next_task(
    State(state): State<AppState>,
    Query(query): Query<NextTaskQuery>,
) -> Result<Json<TaskRecord>, (StatusCode, String)> {
    let task = state
        .task_store
        .get_next_task(&query.context)
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Get next task failed: {e}")))?;

    match task {
        Some(t) => Ok(Json(t)),
        None => Err((StatusCode::NOT_FOUND, "No available tasks".to_string())),
    }
}

async fn update_task(
    State(state): State<AppState>,
    Path(id): Path<String>,
    Json(update): Json<TaskUpdate>,
) -> Result<Json<TaskRecord>, (StatusCode, String)> {
    let task = state
        .task_store
        .update_task(&id, update)
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Update task failed: {e}")))?;

    match task {
        Some(t) => Ok(Json(t)),
        None => Err((StatusCode::NOT_FOUND, "Task not found".to_string())),
    }
}
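
The routes registered in main.rs above form a small HTTP API. The following is a minimal client-side sketch, not part of the published package, showing how the /health, /episodes, and /search endpoints could be exercised. It assumes the daemon is running locally on its default port 3434 and that the reqwest (with the json feature), tokio, and serde_json crates are available in the caller's own project; field names in the request bodies mirror EpisodeInput and SearchQuery as defined above.

// Hypothetical client sketch; endpoint paths and fields taken from main.rs,
// dependencies (reqwest, tokio, serde_json) assumed on the client side.
use serde_json::json;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let base = "http://localhost:3434";
    let client = reqwest::Client::new();

    // Liveness check against GET /health
    let health: serde_json::Value = client
        .get(format!("{base}/health"))
        .send()
        .await?
        .json()
        .await?;
    println!("health: {health}");

    // Store an episode in a named context via POST /episodes
    let created: serde_json::Value = client
        .post(format!("{base}/episodes"))
        .json(&json!({
            "context_name": "demo",
            "role": "user",
            "content": "Deploys go out every Tuesday after standup."
        }))
        .send()
        .await?
        .json()
        .await?;
    println!("created episode: {created}");

    // Semantic search within that context via GET /search
    let results: serde_json::Value = client
        .get(format!("{base}/search"))
        .query(&[("q", "when do deploys happen"), ("context", "demo"), ("limit", "5")])
        .send()
        .await?
        .json()
        .await?;
    println!("search results: {results}");

    Ok(())
}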