@sesamespace/hivemind 0.2.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. package/PLANNING.md +383 -0
  2. package/TASKS.md +60 -0
  3. package/install.sh +187 -0
  4. package/npm-package.json +28 -0
  5. package/package.json +13 -20
  6. package/packages/cli/package.json +23 -0
  7. package/{dist/chunk-DVR2KBL7.js → packages/cli/src/commands/fleet.ts} +50 -30
  8. package/packages/cli/src/commands/init.ts +230 -0
  9. package/{dist/chunk-MBS5A6BZ.js → packages/cli/src/commands/service.ts} +51 -42
  10. package/{dist/chunk-RNK5Q5GR.js → packages/cli/src/commands/start.ts} +12 -14
  11. package/{dist/main.js → packages/cli/src/main.ts} +12 -18
  12. package/packages/cli/tsconfig.json +8 -0
  13. package/packages/memory/Cargo.lock +6480 -0
  14. package/packages/memory/Cargo.toml +21 -0
  15. package/packages/memory/src/context.rs +179 -0
  16. package/packages/memory/src/embeddings.rs +51 -0
  17. package/packages/memory/src/main.rs +626 -0
  18. package/packages/memory/src/promotion.rs +637 -0
  19. package/packages/memory/src/scoring.rs +131 -0
  20. package/packages/memory/src/store.rs +460 -0
  21. package/packages/memory/src/tasks.rs +321 -0
  22. package/packages/runtime/package.json +24 -0
  23. package/packages/runtime/src/__tests__/fleet-integration.test.ts +235 -0
  24. package/packages/runtime/src/__tests__/fleet.test.ts +207 -0
  25. package/packages/runtime/src/__tests__/integration.test.ts +434 -0
  26. package/packages/runtime/src/agent.ts +255 -0
  27. package/packages/runtime/src/config.ts +130 -0
  28. package/packages/runtime/src/context.ts +192 -0
  29. package/packages/runtime/src/fleet/fleet-manager.ts +399 -0
  30. package/packages/runtime/src/fleet/memory-sync.ts +362 -0
  31. package/packages/runtime/src/fleet/primary-client.ts +285 -0
  32. package/packages/runtime/src/fleet/worker-protocol.ts +158 -0
  33. package/packages/runtime/src/fleet/worker-server.ts +246 -0
  34. package/packages/runtime/src/index.ts +57 -0
  35. package/packages/runtime/src/llm-client.ts +65 -0
  36. package/packages/runtime/src/memory-client.ts +309 -0
  37. package/packages/runtime/src/pipeline.ts +151 -0
  38. package/packages/runtime/src/prompt.ts +173 -0
  39. package/packages/runtime/src/sesame.ts +174 -0
  40. package/{dist/start.js → packages/runtime/src/start.ts} +7 -9
  41. package/packages/runtime/src/task-engine.ts +113 -0
  42. package/packages/runtime/src/worker.ts +339 -0
  43. package/packages/runtime/tsconfig.json +8 -0
  44. package/pnpm-workspace.yaml +2 -0
  45. package/run-aidan.sh +23 -0
  46. package/scripts/bootstrap.sh +196 -0
  47. package/scripts/build-npm.sh +94 -0
  48. package/scripts/com.hivemind.agent.plist +44 -0
  49. package/scripts/com.hivemind.memory.plist +31 -0
  50. package/tsconfig.json +22 -0
  51. package/tsup.config.ts +28 -0
  52. package/dist/chunk-2I2O6X5D.js +0 -1408
  53. package/dist/chunk-2I2O6X5D.js.map +0 -1
  54. package/dist/chunk-DVR2KBL7.js.map +0 -1
  55. package/dist/chunk-MBS5A6BZ.js.map +0 -1
  56. package/dist/chunk-NVJ424TB.js +0 -731
  57. package/dist/chunk-NVJ424TB.js.map +0 -1
  58. package/dist/chunk-RNK5Q5GR.js.map +0 -1
  59. package/dist/chunk-XNOWVLXD.js +0 -160
  60. package/dist/chunk-XNOWVLXD.js.map +0 -1
  61. package/dist/commands/fleet.js +0 -9
  62. package/dist/commands/fleet.js.map +0 -1
  63. package/dist/commands/init.js +0 -7
  64. package/dist/commands/init.js.map +0 -1
  65. package/dist/commands/service.js +0 -7
  66. package/dist/commands/service.js.map +0 -1
  67. package/dist/commands/start.js +0 -9
  68. package/dist/commands/start.js.map +0 -1
  69. package/dist/index.js +0 -41
  70. package/dist/index.js.map +0 -1
  71. package/dist/main.js.map +0 -1
  72. package/dist/start.js.map +0 -1
package/packages/memory/src/main.rs
@@ -0,0 +1,626 @@
+ mod context;
+ mod embeddings;
+ mod promotion;
+ mod scoring;
+ mod store;
+ mod tasks;
+
+ use axum::{
+     extract::{Path, Query, State},
+     http::StatusCode,
+     routing::{delete, get, patch, post},
+     Json, Router,
+ };
+ use serde::{Deserialize, Serialize};
+ use std::sync::Arc;
+ use tower_http::trace::TraceLayer;
+ use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
+
+ use crate::context::ContextStore;
+ use crate::embeddings::OllamaClient;
+ use crate::promotion::PromotionEngine;
+ use crate::store::{Episode, EpisodeInput, MemoryStore};
+ use crate::tasks::{TaskInput, TaskRecord, TaskStore, TaskUpdate};
+
+ use crate::scoring::ScoringConfig;
+
+ #[derive(Clone)]
+ struct AppState {
+     store: Arc<MemoryStore>,
+     ollama: Arc<OllamaClient>,
+     context_store: Arc<ContextStore>,
+     promotion_engine: Arc<PromotionEngine>,
+     task_store: Arc<TaskStore>,
+     scoring_config: Arc<ScoringConfig>,
+ }
+
+ #[derive(Deserialize)]
+ struct SearchQuery {
+     q: String,
+     context: Option<String>,
+     limit: Option<usize>,
+ }
+
+ #[derive(Serialize)]
+ struct SearchResult {
+     episodes: Vec<ScoredEpisode>,
+ }
+
+ #[derive(Serialize)]
+ struct ScoredEpisode {
+     #[serde(flatten)]
+     episode: Episode,
+     score: f64,
+     #[serde(skip_serializing_if = "Option::is_none")]
+     source_context: Option<String>,
+ }
+
+ #[derive(Deserialize)]
+ struct CreateContextRequest {
+     name: String,
+     #[serde(default)]
+     description: String,
+ }
+
+ #[derive(Serialize)]
+ struct HealthResponse {
+     status: String,
+     version: String,
+ }
+
+ #[derive(Serialize)]
+ struct ContextInfoResponse {
+     name: String,
+     description: String,
+     created_at: String,
+     episode_count: usize,
+ }
+
+ #[derive(Serialize)]
+ struct ListContextsResponse {
+     contexts: Vec<ContextInfoResponse>,
+ }
+
+ #[derive(Serialize)]
+ struct CrossContextResult {
+     context: String,
+     episodes: Vec<ScoredEpisode>,
+ }
+
+ #[derive(Serialize)]
+ struct CrossContextResponse {
+     results: Vec<CrossContextResult>,
+ }
+
+ #[derive(Deserialize)]
+ struct ShareEpisodeRequest {
+     target_context: String,
+ }
+
+ #[derive(Deserialize)]
+ struct CoAccessRequest {
+     episode_ids: Vec<String>,
+ }
+
+ #[derive(Serialize)]
+ struct PromotionResponse {
+     promoted_count: usize,
+     episode_ids: Vec<String>,
+ }
+
+ #[derive(Deserialize)]
+ struct PromotionQuery {
+     context: Option<String>,
+ }
+
+ #[derive(Deserialize)]
+ struct TaskQuery {
+     context: String,
+     status: Option<String>,
+ }
+
+ #[derive(Deserialize)]
+ struct NextTaskQuery {
+     context: String,
+ }
+
+ #[derive(Serialize)]
+ struct ListTasksResponse {
+     tasks: Vec<TaskRecord>,
+ }
+
+ #[tokio::main]
+ async fn main() -> anyhow::Result<()> {
+     tracing_subscriber::registry()
+         .with(
+             tracing_subscriber::EnvFilter::try_from_default_env()
+                 .unwrap_or_else(|_| "hivemind_memory=debug,tower_http=debug".into()),
+         )
+         .with(tracing_subscriber::fmt::layer())
+         .init();
+
+     let ollama_url =
+         std::env::var("OLLAMA_URL").unwrap_or_else(|_| "http://localhost:11434".to_string());
+     let embedding_model =
+         std::env::var("EMBEDDING_MODEL").unwrap_or_else(|_| "nomic-embed-text".to_string());
+     let db_path = std::env::var("DB_PATH").unwrap_or_else(|_| "./data/lancedb".to_string());
+     let port: u16 = std::env::var("PORT")
+         .unwrap_or_else(|_| "3434".to_string())
+         .parse()?;
+
+     let store = MemoryStore::new(&db_path).await?;
+     let ollama = OllamaClient::new(&ollama_url, &embedding_model);
+     let context_store = ContextStore::new(store.get_connection()).await?;
+     let promotion_engine = PromotionEngine::new(store.get_connection()).await?;
+     let task_store = TaskStore::new(store.get_connection()).await?;
+
+     let scoring_config = ScoringConfig::new();
+
+     let state = AppState {
+         store: Arc::new(store),
+         ollama: Arc::new(ollama),
+         context_store: Arc::new(context_store),
+         promotion_engine: Arc::new(promotion_engine),
+         task_store: Arc::new(task_store),
+         scoring_config: Arc::new(scoring_config),
+     };
+
+     let app = Router::new()
+         // Health
+         .route("/health", get(health))
+         // Episodes
+         .route("/episodes", post(create_episode))
+         .route("/episodes/{id}/access", post(record_access))
+         .route("/episodes/{id}/share", post(share_episode))
+         .route("/episodes/co-access", post(record_co_access))
+         // Search
+         .route("/search", get(search_episodes))
+         .route("/search/cross-context", get(search_cross_context))
+         // Contexts
+         .route("/contexts", get(list_contexts))
+         .route("/contexts", post(create_context))
+         .route("/contexts/{name}", get(get_context_episodes))
+         .route("/contexts/{name}", delete(delete_context))
+         // Scoring config
+         .route("/contexts/{name}/scoring", post(set_context_scoring))
+         // Promotion
+         .route("/promotion/run", post(run_promotion))
+         .route("/promotion/l3", get(get_l3_knowledge))
+         // Tasks
+         .route("/tasks", get(list_tasks))
+         .route("/tasks", post(create_task))
+         .route("/tasks/next", get(get_next_task))
+         .route("/tasks/{id}", patch(update_task))
+         .layer(TraceLayer::new_for_http())
+         .with_state(state);
+
+     let listener = tokio::net::TcpListener::bind(format!("0.0.0.0:{}", port)).await?;
+     tracing::info!("Memory daemon listening on port {}", port);
+     axum::serve(listener, app).await?;
+
+     Ok(())
+ }
+
+ async fn health() -> Json<HealthResponse> {
+     Json(HealthResponse {
+         status: "ok".to_string(),
+         version: env!("CARGO_PKG_VERSION").to_string(),
+     })
+ }
+
+ // === Episode endpoints ===
+
+ async fn create_episode(
+     State(state): State<AppState>,
+     Json(input): Json<EpisodeInput>,
+ ) -> Result<Json<Episode>, (StatusCode, String)> {
+     let content_for_check = input.content.clone();
+     let context_for_check = input.context_name.clone().unwrap_or_else(|| "global".to_string());
+
+     let embedding = state
+         .ollama
+         .embed(&input.content)
+         .await
+         .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Embedding failed: {e}")))?;
+
+     let episode = state
+         .store
+         .insert_episode(input, embedding.clone())
+         .await
+         .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Store failed: {e}")))?;
+
+     // Check for auto-promotion to Global: if similar content exists in 3+ contexts
+     if context_for_check != "global" {
+         let state_clone = state.clone();
+         let content = content_for_check;
+         let embed = embedding;
+         tokio::spawn(async move {
+             if let Ok(contexts) = state_clone
+                 .promotion_engine
+                 .check_cross_context_promotion(&content)
+                 .await
+             {
+                 let unique_contexts: std::collections::HashSet<&str> =
+                     contexts.iter().map(|s| s.as_str()).collect();
+                 if unique_contexts.len() >= 3 {
+                     // Auto-promote to global context
+                     let global_input = EpisodeInput {
+                         context_name: Some("global".to_string()),
+                         role: "system".to_string(),
+                         content: content.clone(),
+                     };
+                     if let Err(e) = state_clone.store.insert_episode(global_input, embed).await {
+                         tracing::warn!("Auto-promotion to global failed: {}", e);
+                     } else {
+                         tracing::info!(
+                             "Auto-promoted episode to global (referenced in {} contexts)",
+                             unique_contexts.len()
+                         );
+                     }
+                 }
+             }
+         });
+     }
+
+     Ok(Json(episode))
+ }
+
+ async fn record_access(
+     State(state): State<AppState>,
+     Path(id): Path<String>,
+ ) -> Result<StatusCode, (StatusCode, String)> {
+     state
+         .promotion_engine
+         .record_access(&id)
+         .await
+         .map_err(|e| {
+             (
+                 StatusCode::INTERNAL_SERVER_ERROR,
+                 format!("Record access failed: {e}"),
+             )
+         })?;
+
+     Ok(StatusCode::NO_CONTENT)
+ }
+
+ async fn share_episode(
+     State(state): State<AppState>,
+     Path(id): Path<String>,
+     Json(req): Json<ShareEpisodeRequest>,
+ ) -> Result<StatusCode, (StatusCode, String)> {
+     // Get the original episode
+     let episode = state
+         .store
+         .get_by_id(&id)
+         .await
+         .map_err(|e| {
+             (
+                 StatusCode::INTERNAL_SERVER_ERROR,
+                 format!("Get episode failed: {e}"),
+             )
+         })?
+         .ok_or_else(|| (StatusCode::NOT_FOUND, "Episode not found".to_string()))?;
+
+     // Re-embed and store in target context
+     let embedding = state
+         .ollama
+         .embed(&episode.content)
+         .await
+         .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Embedding failed: {e}")))?;
+
+     let input = EpisodeInput {
+         context_name: Some(req.target_context),
+         role: episode.role,
+         content: episode.content,
+     };
+
+     state
+         .store
+         .insert_episode(input, embedding)
+         .await
+         .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Store failed: {e}")))?;
+
+     Ok(StatusCode::CREATED)
+ }
+
+ async fn record_co_access(
+     State(state): State<AppState>,
+     Json(req): Json<CoAccessRequest>,
+ ) -> Result<StatusCode, (StatusCode, String)> {
+     state
+         .promotion_engine
+         .record_co_access(&req.episode_ids)
+         .await
+         .map_err(|e| {
+             (
+                 StatusCode::INTERNAL_SERVER_ERROR,
+                 format!("Co-access failed: {e}"),
+             )
+         })?;
+
+     Ok(StatusCode::NO_CONTENT)
+ }
+
+ // === Search endpoints ===
+
+ async fn search_episodes(
+     State(state): State<AppState>,
+     Query(query): Query<SearchQuery>,
+ ) -> Result<Json<SearchResult>, (StatusCode, String)> {
+     let embedding = state
+         .ollama
+         .embed(&query.q)
+         .await
+         .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Embedding failed: {e}")))?;
+
+     let limit = query.limit.unwrap_or(10);
+     let context = query.context.as_deref().unwrap_or("global");
+
+     let results = state
+         .store
+         .search(embedding, context, limit)
+         .await
+         .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Search failed: {e}")))?;
+
+     let half_life = state.scoring_config.get_half_life(context);
+     let scored: Vec<ScoredEpisode> = results
+         .into_iter()
+         .map(|(episode, distance)| {
+             let score = scoring::combined_score(&episode.timestamp, distance, half_life);
+             ScoredEpisode {
+                 episode,
+                 score,
+                 source_context: None,
+             }
+         })
+         .collect();
+
+     Ok(Json(SearchResult { episodes: scored }))
+ }
+
+ async fn search_cross_context(
+     State(state): State<AppState>,
+     Query(query): Query<SearchQuery>,
+ ) -> Result<Json<CrossContextResponse>, (StatusCode, String)> {
+     let embedding = state
+         .ollama
+         .embed(&query.q)
+         .await
+         .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Embedding failed: {e}")))?;
+
+     let limit = query.limit.unwrap_or(10);
+
+     // Search across all contexts
+     let results = state
+         .store
+         .search_all(embedding, limit)
+         .await
+         .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Search failed: {e}")))?;
+
+     // Group by context
+     let mut by_context: std::collections::HashMap<String, Vec<ScoredEpisode>> =
+         std::collections::HashMap::new();
+
+     for (episode, distance) in results {
+         let ctx = episode.context_name.clone();
+         let half_life = state.scoring_config.get_half_life(&ctx);
+         let score = scoring::combined_score(&episode.timestamp, distance, half_life);
+         by_context
+             .entry(ctx.clone())
+             .or_default()
+             .push(ScoredEpisode {
+                 episode,
+                 score,
+                 source_context: Some(ctx),
+             });
+     }
+
+     let results: Vec<CrossContextResult> = by_context
+         .into_iter()
+         .map(|(context, episodes)| CrossContextResult { context, episodes })
+         .collect();
+
+     Ok(Json(CrossContextResponse { results }))
+ }
+
+ // === Context endpoints ===
+
+ async fn list_contexts(
+     State(state): State<AppState>,
+ ) -> Result<Json<ListContextsResponse>, (StatusCode, String)> {
+     let contexts = state
+         .context_store
+         .list_contexts()
+         .await
+         .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("List failed: {e}")))?;
+
+     let mut infos = Vec::new();
+     for ctx in contexts {
+         let count = state
+             .store
+             .count_by_context(&ctx.name)
+             .await
+             .unwrap_or(0);
+         infos.push(ContextInfoResponse {
+             name: ctx.name,
+             description: ctx.description,
+             created_at: ctx.created_at,
+             episode_count: count,
+         });
+     }
+
+     Ok(Json(ListContextsResponse { contexts: infos }))
+ }
+
+ async fn create_context(
+     State(state): State<AppState>,
+     Json(req): Json<CreateContextRequest>,
+ ) -> Result<(StatusCode, Json<serde_json::Value>), (StatusCode, String)> {
+     state
+         .context_store
+         .create_context(&req.name, &req.description)
+         .await
+         .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Failed: {e}")))?;
+
+     Ok((
+         StatusCode::CREATED,
+         Json(serde_json::json!({ "name": req.name, "created": true })),
+     ))
+ }
+
+ async fn get_context_episodes(
+     State(state): State<AppState>,
+     Path(name): Path<String>,
+ ) -> Result<Json<Vec<Episode>>, (StatusCode, String)> {
+     let episodes = state
+         .store
+         .get_by_context(&name)
+         .await
+         .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Query failed: {e}")))?;
+
+     Ok(Json(episodes))
+ }
+
+ async fn delete_context(
+     State(state): State<AppState>,
+     Path(name): Path<String>,
+ ) -> Result<StatusCode, (StatusCode, String)> {
+     state
+         .context_store
+         .delete_context(&name)
+         .await
+         .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Delete failed: {e}")))?;
+
+     // Also delete episodes in that context
+     state
+         .store
+         .delete_by_context(&name)
+         .await
+         .map_err(|e| {
+             (
+                 StatusCode::INTERNAL_SERVER_ERROR,
+                 format!("Delete episodes failed: {e}"),
+             )
+         })?;
+
+     Ok(StatusCode::NO_CONTENT)
+ }
+
+ // === Scoring config endpoint ===
+
+ #[derive(Deserialize)]
+ struct ScoringConfigRequest {
+     half_life_hours: f64,
+ }
+
+ async fn set_context_scoring(
+     State(state): State<AppState>,
+     Path(name): Path<String>,
+     Json(req): Json<ScoringConfigRequest>,
+ ) -> StatusCode {
+     state.scoring_config.set_half_life(&name, req.half_life_hours);
+     tracing::info!("Set half-life for context '{}' to {} hours", name, req.half_life_hours);
+     StatusCode::NO_CONTENT
+ }
+
+ // === Promotion endpoint ===
+
+ async fn run_promotion(
+     State(state): State<AppState>,
+     Query(query): Query<PromotionQuery>,
+ ) -> Result<Json<PromotionResponse>, (StatusCode, String)> {
+     let promoted = state
+         .promotion_engine
+         .run_promotion(query.context.as_deref())
+         .await
+         .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Promotion failed: {e}")))?;
+
+     Ok(Json(PromotionResponse {
+         promoted_count: promoted.len(),
+         episode_ids: promoted,
+     }))
+ }
+
+ #[derive(Deserialize)]
+ struct L3Query {
+     context: String,
+ }
+
+ #[derive(Serialize)]
+ struct L3Response {
+     entries: Vec<promotion::L3Entry>,
+ }
+
+ async fn get_l3_knowledge(
+     State(state): State<AppState>,
+     Query(query): Query<L3Query>,
+ ) -> Result<Json<L3Response>, (StatusCode, String)> {
+     let entries = state
+         .promotion_engine
+         .get_l3_entries(&query.context)
+         .await
+         .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("L3 query failed: {e}")))?;
+
+     Ok(Json(L3Response { entries }))
+ }
+
+ // === Task endpoints ===
+
+ async fn create_task(
+     State(state): State<AppState>,
+     Json(input): Json<TaskInput>,
+ ) -> Result<(StatusCode, Json<TaskRecord>), (StatusCode, String)> {
+     let task = state
+         .task_store
+         .create_task(input)
+         .await
+         .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Create task failed: {e}")))?;
+
+     Ok((StatusCode::CREATED, Json(task)))
+ }
+
+ async fn list_tasks(
+     State(state): State<AppState>,
+     Query(query): Query<TaskQuery>,
+ ) -> Result<Json<ListTasksResponse>, (StatusCode, String)> {
+     let tasks = state
+         .task_store
+         .list_tasks(&query.context, query.status.as_deref())
+         .await
+         .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("List tasks failed: {e}")))?;
+
+     Ok(Json(ListTasksResponse { tasks }))
+ }
+
+ async fn get_next_task(
+     State(state): State<AppState>,
+     Query(query): Query<NextTaskQuery>,
+ ) -> Result<Json<TaskRecord>, (StatusCode, String)> {
+     let task = state
+         .task_store
+         .get_next_task(&query.context)
+         .await
+         .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Get next task failed: {e}")))?;
+
+     match task {
+         Some(t) => Ok(Json(t)),
+         None => Err((StatusCode::NOT_FOUND, "No available tasks".to_string())),
+     }
+ }
+
+ async fn update_task(
+     State(state): State<AppState>,
+     Path(id): Path<String>,
+     Json(update): Json<TaskUpdate>,
+ ) -> Result<Json<TaskRecord>, (StatusCode, String)> {
+     let task = state
+         .task_store
+         .update_task(&id, update)
+         .await
+         .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, format!("Update task failed: {e}")))?;
+
+     match task {
+         Some(t) => Ok(Json(t)),
+         None => Err((StatusCode::NOT_FOUND, "Task not found".to_string())),
+     }
+ }
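
Both search handlers defer ranking to scoring::combined_score(&episode.timestamp, distance, half_life), which lives in packages/memory/src/scoring.rs (file 19 in the list above) and is not part of this hunk. For orientation only, here is a minimal sketch of a recency-decay-times-similarity score consistent with that call site; the RFC 3339 timestamp format, the f32 cosine distance, the chrono dependency, and the exact decay shape are all assumptions, not the package's actual implementation:

use chrono::{DateTime, Utc};

// Hypothetical sketch: exponential recency decay blended with vector similarity.
// `timestamp` is assumed RFC 3339; `distance` is assumed to be a cosine distance
// where smaller means closer; `half_life` is in hours (see set_context_scoring).
pub fn combined_score(timestamp: &str, distance: f32, half_life: f64) -> f64 {
    let age_hours = DateTime::parse_from_rfc3339(timestamp)
        .map(|t| (Utc::now() - t.with_timezone(&Utc)).num_seconds() as f64 / 3600.0)
        .unwrap_or(0.0)
        .max(0.0);
    let recency = 0.5_f64.powf(age_hours / half_life); // halves every `half_life` hours
    let similarity = (1.0 - distance as f64).max(0.0); // map distance into a similarity
    recency * similarity
}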
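
ScoringConfig is likewise defined in scoring.rs, but its call sites in main.rs pin down its shape: ScoringConfig::new(), get_half_life(&str) -> f64, and a set_half_life that mutates through a shared Arc<ScoringConfig> in set_context_scoring, which implies interior mutability. A sketch consistent with those call sites; the RwLock and the 24-hour default are assumptions:

use std::collections::HashMap;
use std::sync::RwLock;

// Hypothetical sketch of ScoringConfig. `set_half_life` takes `&self`, so the
// per-context table needs interior mutability behind the Arc in AppState.
pub struct ScoringConfig {
    default_half_life_hours: f64,
    per_context: RwLock<HashMap<String, f64>>,
}

impl ScoringConfig {
    pub fn new() -> Self {
        Self {
            default_half_life_hours: 24.0, // assumed default
            per_context: RwLock::new(HashMap::new()),
        }
    }

    pub fn set_half_life(&self, context: &str, hours: f64) {
        self.per_context.write().unwrap().insert(context.to_string(), hours);
    }

    pub fn get_half_life(&self, context: &str) -> f64 {
        *self
            .per_context
            .read()
            .unwrap()
            .get(context)
            .unwrap_or(&self.default_half_life_hours)
    }
}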
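
Finally, a hypothetical smoke test against a locally running daemon (default port 3434), exercising the episode and search routes registered above. The request body follows the EpisodeInput fields visible in share_episode (context_name, role, content); the example assumes tokio, reqwest with its json feature, serde_json, and anyhow as dependencies:

// Hypothetical client sketch: store one episode, then search it back.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let client = reqwest::Client::new();
    let base = "http://localhost:3434";

    // POST /episodes with the fields EpisodeInput is shown to carry.
    client
        .post(format!("{base}/episodes"))
        .json(&serde_json::json!({
            "context_name": "demo",
            "role": "user",
            "content": "Prefer pnpm over npm for workspace installs"
        }))
        .send()
        .await?
        .error_for_status()?;

    // GET /search?q=...&context=demo returns scored episodes.
    let hits: serde_json::Value = client
        .get(format!("{base}/search"))
        .query(&[("q", "package manager preference"), ("context", "demo")])
        .send()
        .await?
        .json()
        .await?;
    println!("{hits:#}");
    Ok(())
}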