hindsight-api 0.1.5__py3-none-any.whl → 0.1.7__py3-none-any.whl

This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (64)
  1. hindsight_api/__init__.py +10 -9
  2. hindsight_api/alembic/env.py +5 -8
  3. hindsight_api/alembic/versions/5a366d414dce_initial_schema.py +266 -180
  4. hindsight_api/alembic/versions/b7c4d8e9f1a2_add_chunks_table.py +32 -32
  5. hindsight_api/alembic/versions/c8e5f2a3b4d1_add_retain_params_to_documents.py +11 -11
  6. hindsight_api/alembic/versions/d9f6a3b4c5e2_rename_bank_to_interactions.py +7 -12
  7. hindsight_api/alembic/versions/e0a1b2c3d4e5_disposition_to_3_traits.py +23 -15
  8. hindsight_api/alembic/versions/rename_personality_to_disposition.py +30 -21
  9. hindsight_api/api/__init__.py +10 -10
  10. hindsight_api/api/http.py +575 -593
  11. hindsight_api/api/mcp.py +30 -28
  12. hindsight_api/banner.py +13 -6
  13. hindsight_api/config.py +9 -13
  14. hindsight_api/engine/__init__.py +9 -9
  15. hindsight_api/engine/cross_encoder.py +22 -21
  16. hindsight_api/engine/db_utils.py +5 -4
  17. hindsight_api/engine/embeddings.py +22 -21
  18. hindsight_api/engine/entity_resolver.py +81 -75
  19. hindsight_api/engine/llm_wrapper.py +61 -79
  20. hindsight_api/engine/memory_engine.py +603 -625
  21. hindsight_api/engine/query_analyzer.py +100 -97
  22. hindsight_api/engine/response_models.py +105 -106
  23. hindsight_api/engine/retain/__init__.py +9 -16
  24. hindsight_api/engine/retain/bank_utils.py +34 -58
  25. hindsight_api/engine/retain/chunk_storage.py +4 -12
  26. hindsight_api/engine/retain/deduplication.py +9 -28
  27. hindsight_api/engine/retain/embedding_processing.py +4 -11
  28. hindsight_api/engine/retain/embedding_utils.py +3 -4
  29. hindsight_api/engine/retain/entity_processing.py +7 -17
  30. hindsight_api/engine/retain/fact_extraction.py +155 -165
  31. hindsight_api/engine/retain/fact_storage.py +11 -23
  32. hindsight_api/engine/retain/link_creation.py +11 -39
  33. hindsight_api/engine/retain/link_utils.py +166 -95
  34. hindsight_api/engine/retain/observation_regeneration.py +39 -52
  35. hindsight_api/engine/retain/orchestrator.py +72 -62
  36. hindsight_api/engine/retain/types.py +49 -43
  37. hindsight_api/engine/search/__init__.py +5 -5
  38. hindsight_api/engine/search/fusion.py +6 -15
  39. hindsight_api/engine/search/graph_retrieval.py +22 -23
  40. hindsight_api/engine/search/mpfp_retrieval.py +76 -92
  41. hindsight_api/engine/search/observation_utils.py +9 -16
  42. hindsight_api/engine/search/reranking.py +4 -7
  43. hindsight_api/engine/search/retrieval.py +87 -66
  44. hindsight_api/engine/search/scoring.py +5 -7
  45. hindsight_api/engine/search/temporal_extraction.py +8 -11
  46. hindsight_api/engine/search/think_utils.py +115 -39
  47. hindsight_api/engine/search/trace.py +68 -39
  48. hindsight_api/engine/search/tracer.py +44 -35
  49. hindsight_api/engine/search/types.py +20 -17
  50. hindsight_api/engine/task_backend.py +21 -26
  51. hindsight_api/engine/utils.py +25 -10
  52. hindsight_api/main.py +21 -40
  53. hindsight_api/mcp_local.py +190 -0
  54. hindsight_api/metrics.py +44 -30
  55. hindsight_api/migrations.py +10 -8
  56. hindsight_api/models.py +60 -72
  57. hindsight_api/pg0.py +22 -23
  58. hindsight_api/server.py +3 -6
  59. hindsight_api-0.1.7.dist-info/METADATA +178 -0
  60. hindsight_api-0.1.7.dist-info/RECORD +64 -0
  61. {hindsight_api-0.1.5.dist-info → hindsight_api-0.1.7.dist-info}/entry_points.txt +1 -0
  62. hindsight_api-0.1.5.dist-info/METADATA +0 -42
  63. hindsight_api-0.1.5.dist-info/RECORD +0 -63
  64. {hindsight_api-0.1.5.dist-info → hindsight_api-0.1.7.dist-info}/WHEEL +0 -0
hindsight_api/engine/search/tracer.py CHANGED
@@ -4,24 +4,25 @@ Search tracer for collecting detailed search execution traces.
 The SearchTracer collects comprehensive information about each step
 of the spreading activation search process for debugging and visualization.
 """
+
 import time
-from datetime import datetime, timezone
-from typing import List, Optional, Dict, Any, Literal
+from datetime import UTC, datetime
+from typing import Any, Literal

 from .trace import (
-    SearchTrace,
-    QueryInfo,
     EntryPoint,
-    NodeVisit,
-    WeightComponents,
     LinkInfo,
+    NodeVisit,
     PruningDecision,
-    SearchSummary,
-    SearchPhaseMetrics,
-    RetrievalResult,
+    QueryInfo,
+    RerankedResult,
     RetrievalMethodResults,
+    RetrievalResult,
     RRFMergeResult,
-    RerankedResult,
+    SearchPhaseMetrics,
+    SearchSummary,
+    SearchTrace,
+    WeightComponents,
 )


@@ -58,17 +59,17 @@ class SearchTracer:
         self.max_tokens = max_tokens

         # Trace data
-        self.query_embedding: Optional[List[float]] = None
-        self.start_time: Optional[float] = None
-        self.entry_points: List[EntryPoint] = []
-        self.visits: List[NodeVisit] = []
-        self.pruned: List[PruningDecision] = []
-        self.phase_metrics: List[SearchPhaseMetrics] = []
+        self.query_embedding: list[float] | None = None
+        self.start_time: float | None = None
+        self.entry_points: list[EntryPoint] = []
+        self.visits: list[NodeVisit] = []
+        self.pruned: list[PruningDecision] = []
+        self.phase_metrics: list[SearchPhaseMetrics] = []

         # New 4-way retrieval tracking
-        self.retrieval_results: List[RetrievalMethodResults] = []
-        self.rrf_merged: List[RRFMergeResult] = []
-        self.reranked: List[RerankedResult] = []
+        self.retrieval_results: list[RetrievalMethodResults] = []
+        self.rrf_merged: list[RRFMergeResult] = []
+        self.reranked: list[RerankedResult] = []

         # Tracking state
         self.current_step = 0
@@ -83,7 +84,7 @@ class SearchTracer:
         """Start timing the search."""
         self.start_time = time.time()

-    def record_query_embedding(self, embedding: List[float]):
+    def record_query_embedding(self, embedding: list[float]):
         """Record the query embedding."""
         self.query_embedding = embedding

@@ -117,9 +118,9 @@ class SearchTracer:
         event_date: datetime,
         access_count: int,
         is_entry_point: bool,
-        parent_node_id: Optional[str],
-        link_type: Optional[Literal["temporal", "semantic", "entity"]],
-        link_weight: Optional[float],
+        parent_node_id: str | None,
+        link_type: Literal["temporal", "semantic", "entity"] | None,
+        link_weight: float | None,
         activation: float,
         semantic_similarity: float,
         recency: float,
@@ -199,10 +200,10 @@ class SearchTracer:
         to_node_id: str,
         link_type: Literal["temporal", "semantic", "entity"],
         link_weight: float,
-        entity_id: Optional[str],
-        new_activation: Optional[float],
+        entity_id: str | None,
+        new_activation: float | None,
         followed: bool,
-        prune_reason: Optional[str] = None,
+        prune_reason: str | None = None,
         is_supplementary: bool = False,
     ):
         """
@@ -266,7 +267,7 @@ class SearchTracer:
             )
         )

-    def add_phase_metric(self, phase_name: str, duration_seconds: float, details: Optional[Dict[str, Any]] = None):
+    def add_phase_metric(self, phase_name: str, duration_seconds: float, details: dict[str, Any] | None = None):
         """
         Record metrics for a search phase.

@@ -286,11 +287,11 @@ class SearchTracer:
     def add_retrieval_results(
         self,
         method_name: Literal["semantic", "bm25", "graph", "temporal"],
-        results: List[tuple],  # List of (doc_id, data) tuples
+        results: list[tuple],  # List of (doc_id, data) tuples
         duration_seconds: float,
         score_field: str,  # e.g., "similarity", "bm25_score"
-        metadata: Optional[Dict[str, Any]] = None,
-        fact_type: Optional[str] = None
+        metadata: dict[str, Any] | None = None,
+        fact_type: str | None = None,
     ):
         """
         Record results from a single retrieval method.
@@ -331,7 +332,7 @@ class SearchTracer:
             )
         )

-    def add_rrf_merged(self, merged_results: List[tuple]):
+    def add_rrf_merged(self, merged_results: list[tuple]):
         """
         Record RRF merged results.

@@ -350,7 +351,7 @@ class SearchTracer:
             )
         )

-    def add_reranked(self, reranked_results: List[Dict[str, Any]], rrf_merged: List):
+    def add_reranked(self, reranked_results: list[dict[str, Any]], rrf_merged: list):
         """
         Record reranked results.

@@ -373,7 +374,15 @@ class SearchTracer:
         # Keys from ScoredResult.to_dict(): cross_encoder_score, cross_encoder_score_normalized,
         # rrf_normalized, temporal, recency, combined_score, weight
         score_components = {}
-        for key in ["cross_encoder_score", "cross_encoder_score_normalized", "rrf_score", "rrf_normalized", "temporal", "recency", "combined_score"]:
+        for key in [
+            "cross_encoder_score",
+            "cross_encoder_score_normalized",
+            "rrf_score",
+            "rrf_normalized",
+            "temporal",
+            "recency",
+            "combined_score",
+        ]:
             if key in result and result[key] is not None:
                 score_components[key] = result[key]

@@ -389,7 +398,7 @@ class SearchTracer:
             )
         )

-    def finalize(self, final_results: List[Dict[str, Any]]) -> SearchTrace:
+    def finalize(self, final_results: list[dict[str, Any]]) -> SearchTrace:
         """
         Finalize the trace and return the complete SearchTrace object.

@@ -416,7 +425,7 @@ class SearchTracer:
         query_info = QueryInfo(
             query_text=self.query_text,
             query_embedding=self.query_embedding or [],
-            timestamp=datetime.now(timezone.utc),
+            timestamp=datetime.now(UTC),
             budget=self.budget,
             max_tokens=self.max_tokens,
         )
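
Note: the recurring change in this file is a typing and datetime cleanup rather than a behavioral one. The following standalone sketch (not taken from the package) shows the style these hunks migrate to: built-in generics (PEP 585), X | None unions (PEP 604), and the datetime.UTC alias available since Python 3.11.

# Standalone sketch of the annotation style adopted above; not hindsight_api code.
from datetime import UTC, datetime


def summarize_embedding(embedding: list[float] | None) -> dict[str, float | None]:
    """Return a tiny trace-style record; None means no embedding was recorded."""
    return {
        "recorded_at": datetime.now(UTC).timestamp(),
        "dims": float(len(embedding)) if embedding is not None else None,
    }


print(summarize_embedding([0.1, 0.2, 0.3]))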
hindsight_api/engine/search/types.py CHANGED
@@ -6,8 +6,8 @@ providing type safety and making data flow explicit.
 """

 from dataclasses import dataclass, field
-from typing import Optional, List, Dict, Any
 from datetime import datetime
+from typing import Any


 @dataclass
@@ -17,28 +17,29 @@ class RetrievalResult:

     This represents a raw result from the database query, before merging or reranking.
     """
+
     id: str
     text: str
     fact_type: str
-    context: Optional[str] = None
-    event_date: Optional[datetime] = None
-    occurred_start: Optional[datetime] = None
-    occurred_end: Optional[datetime] = None
-    mentioned_at: Optional[datetime] = None
-    document_id: Optional[str] = None
-    chunk_id: Optional[str] = None
+    context: str | None = None
+    event_date: datetime | None = None
+    occurred_start: datetime | None = None
+    occurred_end: datetime | None = None
+    mentioned_at: datetime | None = None
+    document_id: str | None = None
+    chunk_id: str | None = None
     access_count: int = 0
-    embedding: Optional[List[float]] = None
+    embedding: list[float] | None = None

     # Retrieval-specific scores (only one will be set depending on retrieval method)
-    similarity: Optional[float] = None  # Semantic retrieval
-    bm25_score: Optional[float] = None  # BM25 retrieval
-    activation: Optional[float] = None  # Graph retrieval (spreading activation)
-    temporal_score: Optional[float] = None  # Temporal retrieval
-    temporal_proximity: Optional[float] = None  # Temporal retrieval
+    similarity: float | None = None  # Semantic retrieval
+    bm25_score: float | None = None  # BM25 retrieval
+    activation: float | None = None  # Graph retrieval (spreading activation)
+    temporal_score: float | None = None  # Temporal retrieval
+    temporal_proximity: float | None = None  # Temporal retrieval

     @classmethod
-    def from_db_row(cls, row: Dict[str, Any]) -> "RetrievalResult":
+    def from_db_row(cls, row: dict[str, Any]) -> "RetrievalResult":
         """Create from a database row (asyncpg Record converted to dict)."""
         return cls(
             id=str(row["id"]),
@@ -68,13 +69,14 @@ class MergedCandidate:

     Contains the original retrieval data plus RRF metadata.
     """
+
     # Original retrieval data
     retrieval: RetrievalResult

     # RRF metadata
     rrf_score: float
     rrf_rank: int = 0
-    source_ranks: Dict[str, int] = field(default_factory=dict)  # method_name -> rank
+    source_ranks: dict[str, int] = field(default_factory=dict)  # method_name -> rank

     @property
     def id(self) -> str:
@@ -89,6 +91,7 @@ class ScoredResult:

     Contains all retrieval/merge data plus reranking scores and combined score.
     """
+
     # Original merged candidate
     candidate: MergedCandidate

@@ -115,7 +118,7 @@ class ScoredResult:
         """Convenience property to access retrieval data."""
         return self.candidate.retrieval

-    def to_dict(self) -> Dict[str, Any]:
+    def to_dict(self) -> dict[str, Any]:
         """
         Convert to dict for backwards compatibility.

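MergedCandidate carries an rrf_score together with per-method source_ranks, which is the shape Reciprocal Rank Fusion produces. As a refresher, here is a minimal standalone RRF sketch; it is not the package's implementation, and the constant k=60 is only the conventional default, assumed here.

# Standalone Reciprocal Rank Fusion sketch: each method contributes 1 / (k + rank).
def rrf_merge(rankings: dict[str, list[str]], k: int = 60) -> list[tuple[str, float, dict[str, int]]]:
    scores: dict[str, float] = {}
    source_ranks: dict[str, dict[str, int]] = {}
    for method, ranked_ids in rankings.items():
        for rank, doc_id in enumerate(ranked_ids, start=1):
            scores[doc_id] = scores.get(doc_id, 0.0) + 1.0 / (k + rank)
            source_ranks.setdefault(doc_id, {})[method] = rank
    ordered = sorted(scores, key=scores.get, reverse=True)
    return [(doc_id, scores[doc_id], source_ranks[doc_id]) for doc_id in ordered]


print(rrf_merge({"semantic": ["a", "b", "c"], "bm25": ["b", "a"]}))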
hindsight_api/engine/task_backend.py CHANGED
@@ -6,10 +6,12 @@ This provides an abstraction that can be adapted to different execution models:
 - Pub/Sub architectures (future)
 - Message brokers (future)
 """
-from abc import ABC, abstractmethod
-from typing import Any, Dict, Optional, Callable, Awaitable
+
 import asyncio
 import logging
+from abc import ABC, abstractmethod
+from collections.abc import Awaitable, Callable
+from typing import Any

 logger = logging.getLogger(__name__)

@@ -29,10 +31,10 @@ class TaskBackend(ABC):

     def __init__(self):
         """Initialize the task backend."""
-        self._executor: Optional[Callable[[Dict[str, Any]], Awaitable[None]]] = None
+        self._executor: Callable[[dict[str, Any]], Awaitable[None]] | None = None
         self._initialized = False

-    def set_executor(self, executor: Callable[[Dict[str, Any]], Awaitable[None]]):
+    def set_executor(self, executor: Callable[[dict[str, Any]], Awaitable[None]]):
         """
         Set the executor callback for processing tasks.

@@ -49,7 +51,7 @@ class TaskBackend(ABC):
         pass

     @abstractmethod
-    async def submit_task(self, task_dict: Dict[str, Any]):
+    async def submit_task(self, task_dict: dict[str, Any]):
         """
         Submit a task for execution.

@@ -65,7 +67,7 @@ class TaskBackend(ABC):
         """
         pass

-    async def _execute_task(self, task_dict: Dict[str, Any]):
+    async def _execute_task(self, task_dict: dict[str, Any]):
         """
         Execute a task through the registered executor.

@@ -73,16 +75,17 @@ class TaskBackend(ABC):
             task_dict: Task dictionary to execute
         """
         if self._executor is None:
-            task_type = task_dict.get('type', 'unknown')
+            task_type = task_dict.get("type", "unknown")
             logger.warning(f"No executor registered, skipping task {task_type}")
             return

         try:
             await self._executor(task_dict)
         except Exception as e:
-            task_type = task_dict.get('type', 'unknown')
+            task_type = task_dict.get("type", "unknown")
             logger.error(f"Error executing task {task_type}: {e}")
             import traceback
+
             traceback.print_exc()


@@ -94,11 +97,7 @@ class AsyncIOQueueBackend(TaskBackend):
     and a periodic consumer worker.
     """

-    def __init__(
-        self,
-        batch_size: int = 100,
-        batch_interval: float = 1.0
-    ):
+    def __init__(self, batch_size: int = 100, batch_interval: float = 1.0):
         """
         Initialize AsyncIO queue backend.

@@ -107,9 +106,9 @@ class AsyncIOQueueBackend(TaskBackend):
             batch_interval: Maximum time (seconds) to wait before processing batch
         """
         super().__init__()
-        self._queue: Optional[asyncio.Queue] = None
-        self._worker_task: Optional[asyncio.Task] = None
-        self._shutdown_event: Optional[asyncio.Event] = None
+        self._queue: asyncio.Queue | None = None
+        self._worker_task: asyncio.Task | None = None
+        self._shutdown_event: asyncio.Event | None = None
         self._batch_size = batch_size
         self._batch_interval = batch_interval

@@ -124,7 +123,7 @@ class AsyncIOQueueBackend(TaskBackend):
         self._initialized = True
         logger.info("AsyncIOQueueBackend initialized")

-    async def submit_task(self, task_dict: Dict[str, Any]):
+    async def submit_task(self, task_dict: dict[str, Any]):
         """
         Submit a task by putting it in the queue.

@@ -135,8 +134,8 @@ class AsyncIOQueueBackend(TaskBackend):
             await self.initialize()

         await self._queue.put(task_dict)
-        task_type = task_dict.get('type', 'unknown')
-        task_id = task_dict.get('id')
+        task_type = task_dict.get("type", "unknown")
+        task_id = task_dict.get("id")

     async def wait_for_pending_tasks(self, timeout: float = 5.0):
         """
@@ -200,20 +199,16 @@ class AsyncIOQueueBackend(TaskBackend):
             while len(tasks) < self._batch_size and asyncio.get_event_loop().time() < deadline:
                 try:
                     remaining_time = max(0.1, deadline - asyncio.get_event_loop().time())
-                    task_dict = await asyncio.wait_for(
-                        self._queue.get(),
-                        timeout=remaining_time
-                    )
+                    task_dict = await asyncio.wait_for(self._queue.get(), timeout=remaining_time)
                     tasks.append(task_dict)
-                except asyncio.TimeoutError:
+                except TimeoutError:
                     break

             # Process batch
             if tasks:
                 # Execute tasks concurrently
                 await asyncio.gather(
-                    *[self._execute_task(task_dict) for task_dict in tasks],
-                    return_exceptions=True
+                    *[self._execute_task(task_dict) for task_dict in tasks], return_exceptions=True
                 )

         except asyncio.CancelledError:
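
The worker loop above drains the queue in batches: it collects up to batch_size tasks or waits at most batch_interval seconds, then processes the batch concurrently. Below is a standalone sketch of that same pattern (not the package's worker); names and values are illustrative, and the bare TimeoutError catch matches asyncio timeouts on Python 3.11+.

# Standalone batching sketch: drain a queue until batch_size items or the deadline.
import asyncio


async def drain_batch(queue: asyncio.Queue, batch_size: int = 100, batch_interval: float = 1.0) -> list:
    loop = asyncio.get_running_loop()
    deadline = loop.time() + batch_interval
    batch: list = []
    while len(batch) < batch_size:
        remaining = deadline - loop.time()
        if remaining <= 0:
            break
        try:
            batch.append(await asyncio.wait_for(queue.get(), timeout=remaining))
        except TimeoutError:  # on Python 3.11+ this also covers asyncio timeouts
            break
    return batch


async def demo() -> None:
    q: asyncio.Queue = asyncio.Queue()
    for i in range(3):
        q.put_nowait({"type": "retain", "id": i})
    print(await drain_batch(q, batch_size=2, batch_interval=0.1))


asyncio.run(demo())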
hindsight_api/engine/utils.py CHANGED
@@ -1,9 +1,10 @@
 """
 Utility functions for memory system.
 """
+
 import logging
 from datetime import datetime
-from typing import List, Dict, TYPE_CHECKING
+from typing import TYPE_CHECKING

 if TYPE_CHECKING:
     from .llm_wrapper import LLMConfig
@@ -12,7 +13,14 @@ if TYPE_CHECKING:
 from .retain.fact_extraction import extract_facts_from_text


-async def extract_facts(text: str, event_date: datetime, context: str = "", llm_config: 'LLMConfig' = None, agent_name: str = None, extract_opinions: bool = False) -> tuple[List['Fact'], List[tuple[str, int]]]:
+async def extract_facts(
+    text: str,
+    event_date: datetime,
+    context: str = "",
+    llm_config: "LLMConfig" = None,
+    agent_name: str = None,
+    extract_opinions: bool = False,
+) -> tuple[list["Fact"], list[tuple[str, int]]]:
     """
     Extract semantic facts from text using LLM.

@@ -41,16 +49,25 @@ async def extract_facts(text: str, event_date: datetime, context: str = "", llm_
     if not text or not text.strip():
         return [], []

-    facts, chunks = await extract_facts_from_text(text, event_date, context=context, llm_config=llm_config, agent_name=agent_name, extract_opinions=extract_opinions)
+    facts, chunks = await extract_facts_from_text(
+        text,
+        event_date,
+        context=context,
+        llm_config=llm_config,
+        agent_name=agent_name,
+        extract_opinions=extract_opinions,
+    )

     if not facts:
-        logging.warning(f"LLM extracted 0 facts from text of length {len(text)}. This may indicate the text contains no meaningful information, or the LLM failed to extract facts. Full text: {text}")
+        logging.warning(
+            f"LLM extracted 0 facts from text of length {len(text)}. This may indicate the text contains no meaningful information, or the LLM failed to extract facts. Full text: {text}"
+        )
         return [], chunks

     return facts, chunks


-def cosine_similarity(vec1: List[float], vec2: List[float]) -> float:
+def cosine_similarity(vec1: list[float], vec2: list[float]) -> float:
     """
     Calculate cosine similarity between two vectors.

@@ -100,6 +117,7 @@ def calculate_recency_weight(days_since: float, half_life_days: float = 365.0) -> float:
         Weight between 0 and 1
     """
     import math
+
     # Logarithmic decay: 1 / (1 + log(1 + days_since/half_life))
     # This decays much slower than exponential, giving better long-term differentiation
     normalized_age = days_since / half_life_days
@@ -121,6 +139,7 @@ def calculate_frequency_weight(access_count: int, max_boost: float = 2.0) -> float:
         Weight between 1.0 and max_boost
     """
     import math
+
     if access_count <= 0:
         return 1.0

@@ -158,11 +177,7 @@ def calculate_temporal_anchor(occurred_start: datetime, occurred_end: datetime)
     return midpoint


-def calculate_temporal_proximity(
-    anchor_a: datetime,
-    anchor_b: datetime,
-    half_life_days: float = 30.0
-) -> float:
+def calculate_temporal_proximity(anchor_a: datetime, anchor_b: datetime, half_life_days: float = 30.0) -> float:
     """
     Calculate temporal proximity between two temporal anchors.

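The comment inside calculate_recency_weight documents its decay formula: weight = 1 / (1 + log(1 + days_since / half_life)). A standalone sketch of that documented formula follows; the package's actual implementation may add clamping or other details not shown here.

# Standalone sketch of the documented logarithmic recency decay.
import math


def recency_weight(days_since: float, half_life_days: float = 365.0) -> float:
    normalized_age = days_since / half_life_days
    return 1.0 / (1.0 + math.log1p(normalized_age))  # log1p(x) == log(1 + x)


for days in (0, 30, 365, 3650):
    print(days, round(recency_weight(days), 3))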
hindsight_api/main.py CHANGED
@@ -6,6 +6,7 @@ Run the server with:

 Stop with Ctrl+C.
 """
+
 import argparse
 import asyncio
 import atexit
@@ -13,15 +14,14 @@ import os
 import signal
 import sys
 import warnings
-from typing import Optional

 import uvicorn

 from . import MemoryEngine
 from .api import create_app
-from .config import get_config, HindsightConfig
-
 from .banner import print_banner
+from .config import HindsightConfig, get_config
+
 print()
 print_banner()

@@ -33,7 +33,7 @@ warnings.filterwarnings("ignore", message="websockets.server.WebSocketServerProt
 os.environ["TOKENIZERS_PARALLELISM"] = "false"

 # Global reference for cleanup
-_memory: Optional[MemoryEngine] = None
+_memory: MemoryEngine | None = None


 def _cleanup():
@@ -70,59 +70,41 @@ def main():

     # Server options
     parser.add_argument(
-        "--host", default=config.host,
-        help=f"Host to bind to (default: {config.host}, env: HINDSIGHT_API_HOST)"
+        "--host", default=config.host, help=f"Host to bind to (default: {config.host}, env: HINDSIGHT_API_HOST)"
     )
     parser.add_argument(
-        "--port", type=int, default=config.port,
-        help=f"Port to bind to (default: {config.port}, env: HINDSIGHT_API_PORT)"
+        "--port",
+        type=int,
+        default=config.port,
+        help=f"Port to bind to (default: {config.port}, env: HINDSIGHT_API_PORT)",
     )
     parser.add_argument(
-        "--log-level", default=config.log_level,
+        "--log-level",
+        default=config.log_level,
         choices=["critical", "error", "warning", "info", "debug", "trace"],
-        help=f"Log level (default: {config.log_level}, env: HINDSIGHT_API_LOG_LEVEL)"
+        help=f"Log level (default: {config.log_level}, env: HINDSIGHT_API_LOG_LEVEL)",
     )

     # Development options
-    parser.add_argument(
-        "--reload", action="store_true",
-        help="Enable auto-reload on code changes (development only)"
-    )
-    parser.add_argument(
-        "--workers", type=int, default=1,
-        help="Number of worker processes (default: 1)"
-    )
+    parser.add_argument("--reload", action="store_true", help="Enable auto-reload on code changes (development only)")
+    parser.add_argument("--workers", type=int, default=1, help="Number of worker processes (default: 1)")

     # Access log options
-    parser.add_argument(
-        "--access-log", action="store_true",
-        help="Enable access log"
-    )
-    parser.add_argument(
-        "--no-access-log", dest="access_log", action="store_false",
-        help="Disable access log (default)"
-    )
+    parser.add_argument("--access-log", action="store_true", help="Enable access log")
+    parser.add_argument("--no-access-log", dest="access_log", action="store_false", help="Disable access log (default)")
     parser.set_defaults(access_log=False)

     # Proxy options
     parser.add_argument(
-        "--proxy-headers", action="store_true",
-        help="Enable X-Forwarded-Proto, X-Forwarded-For headers"
+        "--proxy-headers", action="store_true", help="Enable X-Forwarded-Proto, X-Forwarded-For headers"
     )
     parser.add_argument(
-        "--forwarded-allow-ips", default=None,
-        help="Comma separated list of IPs to trust with proxy headers"
+        "--forwarded-allow-ips", default=None, help="Comma separated list of IPs to trust with proxy headers"
     )

     # SSL options
-    parser.add_argument(
-        "--ssl-keyfile", default=None,
-        help="SSL key file"
-    )
-    parser.add_argument(
-        "--ssl-certfile", default=None,
-        help="SSL certificate file"
-    )
+    parser.add_argument("--ssl-keyfile", default=None, help="SSL key file")
+    parser.add_argument("--ssl-certfile", default=None, help="SSL certificate file")

     args = parser.parse_args()

@@ -188,9 +170,8 @@ def main():
     if args.ssl_certfile:
         uvicorn_config["ssl_certfile"] = args.ssl_certfile

-
-
     from .banner import print_startup_info
+
     print_startup_info(
         host=args.host,
         port=args.port,
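
The CLI flags above feed a uvicorn configuration inside main(). As a rough illustration only, the standalone sketch below shows how such flags typically map onto uvicorn.run; the import path "hindsight_api.server:app" and the literal values are assumptions, not taken from the package.

# Standalone sketch of mapping CLI-style options onto uvicorn.run.
import uvicorn

if __name__ == "__main__":
    uvicorn.run(
        "hindsight_api.server:app",  # assumed app path, for illustration only
        host="0.0.0.0",              # --host
        port=8000,                   # --port
        log_level="info",            # --log-level
        access_log=False,            # access log disabled by default
        proxy_headers=False,         # --proxy-headers not set
    )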