haiku.rag 0.11.4__py3-none-any.whl → 0.12.1__py3-none-any.whl

This diff shows the content of publicly released package versions as published to their public registries, and is provided for informational purposes only.

Potentially problematic release: this version of haiku.rag might be problematic.

haiku/rag/cli.py CHANGED
@@ -16,65 +16,6 @@ cli = typer.Typer(
 )
 
 
-def complete_document_ids(ctx: typer.Context, incomplete: str):
-    """Autocomplete document IDs from the selected DB."""
-    db_path = ctx.params.get("db") or (Config.DEFAULT_DATA_DIR / "haiku.rag.lancedb")
-
-    try:
-        from haiku.rag.client import HaikuRAG
-
-        async def _list_ids():
-            async with HaikuRAG(db_path) as client:
-                docs = await client.list_documents()
-                return [d.id for d in docs if d.id]
-
-        ids = asyncio.run(_list_ids())
-    except Exception:
-        return []
-
-    return [i for i in ids if i and i.startswith(incomplete)]
-
-
-def complete_local_paths(ctx: typer.Context, incomplete: str) -> list[str]:
-    """Autocomplete local filesystem paths.
-
-    Provides directory/file suggestions based on the current incomplete input.
-    Does not validate or restrict to specific extensions to keep it flexible
-    (URLs are still allowed to be typed manually).
-    """
-    try:
-        text = incomplete or ""
-
-        # Expand user home
-        from os.path import expanduser
-
-        expanded = expanduser(text)
-        p = Path(expanded)
-
-        # Choose directory to list and prefix to filter
-        if text == "" or text.endswith(("/", "\\")):
-            directory = p
-            prefix = ""
-        else:
-            directory = p.parent
-            prefix = p.name
-
-        if not directory.exists():
-            return []
-
-        suggestions: list[str] = []
-        for entry in directory.iterdir():
-            name = entry.name
-            if not prefix or name.startswith(prefix):
-                suggestion = str(directory / name)
-                if entry.is_dir():
-                    suggestion += "/"
-                suggestions.append(suggestion)
-        return suggestions
-    except Exception:
-        return []
-
-
 async def check_version():
     """Check if haiku.rag is up to date and show warning if not."""
     up_to_date, current_version, latest_version = await is_up_to_date()
@@ -187,11 +128,10 @@ def add_document_text(
     asyncio.run(app.add_document_from_text(text=text, metadata=metadata or None))
 
 
-@cli.command("add-src", help="Add a document from a file path or URL")
+@cli.command("add-src", help="Add a document from a file path, directory, or URL")
 def add_document_src(
     source: str = typer.Argument(
-        help="The file path or URL of the document to add",
-        autocompletion=complete_local_paths,
+        help="The file path, directory, or URL of the document(s) to add",
     ),
     title: str | None = typer.Option(
         None,
@@ -225,7 +165,6 @@ def add_document_src(
 def get_document(
     doc_id: str = typer.Argument(
         help="The ID of the document to get",
-        autocompletion=complete_document_ids,
     ),
     db: Path = typer.Option(
        Config.DEFAULT_DATA_DIR / "haiku.rag.lancedb",
@@ -243,7 +182,6 @@ def get_document(
 def delete_document(
     doc_id: str = typer.Argument(
         help="The ID of the document to delete",
-        autocompletion=complete_document_ids,
     ),
     db: Path = typer.Option(
        Config.DEFAULT_DATA_DIR / "haiku.rag.lancedb",
@@ -428,7 +366,8 @@ def download_models_cmd():
 
 
 @cli.command(
-    "serve", help="Start the haiku.rag MCP server (by default in streamable HTTP mode)"
+    "serve",
+    help="Start haiku.rag server. Use --monitor, --mcp, and/or --a2a to enable services.",
 )
 def serve(
     db: Path = typer.Option(
@@ -436,22 +375,71 @@ def serve(
         "--db",
         help="Path to the LanceDB database file",
     ),
+    monitor: bool = typer.Option(
+        False,
+        "--monitor",
+        help="Enable file monitoring",
+    ),
+    mcp: bool = typer.Option(
+        False,
+        "--mcp",
+        help="Enable MCP server",
+    ),
     stdio: bool = typer.Option(
         False,
         "--stdio",
-        help="Run MCP server on stdio Transport",
+        help="Run MCP server on stdio Transport (requires --mcp)",
+    ),
+    mcp_port: int = typer.Option(
+        8001,
+        "--mcp-port",
+        help="Port to bind MCP server to (ignored with --stdio)",
+    ),
+    a2a: bool = typer.Option(
+        False,
+        "--a2a",
+        help="Enable A2A (Agent-to-Agent) server",
+    ),
+    a2a_host: str = typer.Option(
+        "127.0.0.1",
+        "--a2a-host",
+        help="Host to bind A2A server to",
+    ),
+    a2a_port: int = typer.Option(
+        8000,
+        "--a2a-port",
+        help="Port to bind A2A server to",
     ),
 ) -> None:
-    """Start the MCP server."""
+    """Start the server with selected services."""
+    # Require at least one service flag
+    if not (monitor or mcp or a2a):
+        typer.echo(
+            "Error: At least one service flag (--monitor, --mcp, or --a2a) must be specified"
+        )
+        raise typer.Exit(1)
+
+    if stdio and not mcp:
+        typer.echo("Error: --stdio requires --mcp")
+        raise typer.Exit(1)
+
     from haiku.rag.app import HaikuRAGApp
 
     app = HaikuRAGApp(db_path=db)
 
-    transport = None
-    if stdio:
-        transport = "stdio"
+    transport = "stdio" if stdio else None
 
-    asyncio.run(app.serve(transport=transport))
+    asyncio.run(
+        app.serve(
+            enable_monitor=monitor,
+            enable_mcp=mcp,
+            mcp_transport=transport,
+            mcp_port=mcp_port,
+            enable_a2a=a2a,
+            a2a_host=a2a_host,
+            a2a_port=a2a_port,
+        )
+    )
 
 
 @cli.command("migrate", help="Migrate an SQLite database to LanceDB")
@@ -472,5 +460,27 @@ def migrate(
         raise typer.Exit(1)
 
 
+@cli.command(
+    "a2aclient", help="Run interactive client to chat with haiku.rag's A2A server"
+)
+def a2aclient(
+    url: str = typer.Option(
+        "http://localhost:8000",
+        "--url",
+        help="Base URL of the A2A server",
+    ),
+):
+    try:
+        from haiku.rag.a2a.client import run_interactive_client
+    except ImportError:
+        typer.echo(
+            "Error: A2A support requires the 'a2a' extra. "
+            "Install with: uv pip install 'haiku.rag[a2a]'"
+        )
+        raise typer.Exit(1)
+
+    asyncio.run(run_interactive_client(url=url))
+
+
 if __name__ == "__main__":
     cli()
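
As a quick orientation on the reworked CLI, here is a hedged sketch of invoking the new `serve` flags and the `a2aclient` command; the flag names and defaults come from the diff above, and the combinations shown are just examples.

```bash
# File monitoring plus the MCP server on its default port (8001).
haiku-rag serve --monitor --mcp

# MCP over stdio transport (note that --stdio now requires --mcp).
haiku-rag serve --mcp --stdio

# A2A server on a custom bind address, then chat with it from another terminal.
haiku-rag serve --a2a --a2a-host 0.0.0.0 --a2a-port 8000
haiku-rag a2aclient --url http://localhost:8000
```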
haiku/rag/client.py CHANGED
@@ -106,8 +106,8 @@ class HaikuRAG:
 
     async def create_document_from_source(
         self, source: str | Path, title: str | None = None, metadata: dict | None = None
-    ) -> Document:
-        """Create or update a document from a file path or URL.
+    ) -> Document | list[Document]:
+        """Create or update document(s) from a file path, directory, or URL.
 
         Checks if a document with the same URI already exists:
         - If MD5 is unchanged, returns existing document
@@ -115,11 +115,13 @@
         - If no document exists, creates a new one
 
         Args:
-            source: File path (as string or Path) or URL to parse
+            source: File path, directory (as string or Path), or URL to parse
+            title: Optional title (only used for single files, not directories)
             metadata: Optional metadata dictionary
 
         Returns:
-            Document instance (created, updated, or existing)
+            Document instance (created, updated, or existing) for single files/URLs
+            List of Document instances for directories
 
         Raises:
             ValueError: If the file/URL cannot be parsed or doesn't exist
@@ -142,6 +144,45 @@
         else:
             # Handle as regular file path
             source_path = Path(source) if isinstance(source, str) else source
+
+            # Handle directories
+            if source_path.is_dir():
+                documents = []
+                supported_extensions = set(FileReader.extensions)
+                for file_path in source_path.rglob("*"):
+                    if (
+                        file_path.is_file()
+                        and file_path.suffix.lower() in supported_extensions
+                    ):
+                        doc = await self._create_document_from_file(
+                            file_path, title=None, metadata=metadata
+                        )
+                        documents.append(doc)
+                return documents
+
+            # Handle single file
+            return await self._create_document_from_file(
+                source_path, title=title, metadata=metadata
+            )
+
+    async def _create_document_from_file(
+        self, source_path: Path, title: str | None = None, metadata: dict | None = None
+    ) -> Document:
+        """Create or update a document from a single file path.
+
+        Args:
+            source_path: Path to the file
+            title: Optional title
+            metadata: Optional metadata dictionary
+
+        Returns:
+            Document instance (created, updated, or existing)
+
+        Raises:
+            ValueError: If the file cannot be parsed or doesn't exist
+        """
+        metadata = metadata or {}
+
         if source_path.suffix.lower() not in FileReader.extensions:
             raise ValueError(f"Unsupported file extension: {source_path.suffix}")
 
@@ -592,6 +633,8 @@ class HaikuRAG:
                 new_doc = await self.create_document_from_source(
                     source=doc.uri, metadata=doc.metadata or {}
                 )
+                # URIs always point to single files/URLs, never directories
+                assert isinstance(new_doc, Document)
                 assert new_doc.id is not None, (
                     "New document ID should not be None"
                 )
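
A hedged usage sketch of the directory support added to `create_document_from_source`; the database path and directory below are placeholders, and the `isinstance` check reflects the new `Document | list[Document]` return type.

```python
import asyncio
from pathlib import Path

from haiku.rag.client import HaikuRAG


async def ingest(source: str) -> None:
    # Placeholder database path; point this at your own LanceDB directory.
    async with HaikuRAG(Path("haiku.rag.lancedb")) as client:
        result = await client.create_document_from_source(source)
        if isinstance(result, list):
            # Directories are walked recursively; every supported file yields a Document.
            print(f"Indexed {len(result)} documents")
        else:
            # Single files and URLs still return one Document.
            print(f"Indexed document {result.id}")


asyncio.run(ingest("docs/"))
```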
haiku/rag/config.py CHANGED
@@ -62,6 +62,10 @@ class AppConfig(BaseModel):
     # to allow concurrent connections to safely use recent versions.
     VACUUM_RETENTION_SECONDS: int = 60
 
+    # Maximum number of A2A contexts to keep in memory. When exceeded, least
+    # recently used contexts will be evicted. Default is 1000.
+    A2A_MAX_CONTEXTS: int = 1000
+
     @field_validator("MONITOR_DIRECTORIES", mode="before")
     @classmethod
     def parse_monitor_directories(cls, v):
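
The A2A_MAX_CONTEXTS comment above describes least-recently-used eviction; the sketch below only illustrates that policy and is not the code in haiku/rag/a2a/context.py (the class name and structure are invented for the example).

```python
from collections import OrderedDict


class LRUContexts:
    """Illustrative only: keeps at most max_contexts entries, evicting the least recently used."""

    def __init__(self, max_contexts: int = 1000) -> None:
        self.max_contexts = max_contexts
        self._contexts: OrderedDict[str, list] = OrderedDict()

    def get(self, context_id: str) -> list | None:
        if context_id not in self._contexts:
            return None
        # Accessing a context marks it as most recently used.
        self._contexts.move_to_end(context_id)
        return self._contexts[context_id]

    def put(self, context_id: str, history: list) -> None:
        self._contexts[context_id] = history
        self._contexts.move_to_end(context_id)
        # Evict the least recently used context once the cap is exceeded.
        while len(self._contexts) > self.max_contexts:
            self._contexts.popitem(last=False)
```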
haiku/rag/embeddings/base.py CHANGED
@@ -1,3 +1,5 @@
+from typing import overload
+
 from haiku.rag.config import Config
 
 
@@ -9,6 +11,12 @@ class EmbedderBase:
         self._model = model
         self._vector_dim = vector_dim
 
+    @overload
+    async def embed(self, text: str) -> list[float]: ...
+
+    @overload
+    async def embed(self, text: list[str]) -> list[list[float]]: ...
+
     async def embed(self, text: str | list[str]) -> list[float] | list[list[float]]:
         raise NotImplementedError(
             "Embedder is an abstract class. Please implement the embed method in a subclass."
haiku/rag/embeddings/ollama.py CHANGED
@@ -1,3 +1,5 @@
+from typing import overload
+
 from openai import AsyncOpenAI
 
 from haiku.rag.config import Config
@@ -5,6 +7,12 @@ from haiku.rag.embeddings.base import EmbedderBase
 
 
 class Embedder(EmbedderBase):
+    @overload
+    async def embed(self, text: str) -> list[float]: ...
+
+    @overload
+    async def embed(self, text: list[str]) -> list[list[float]]: ...
+
     async def embed(self, text: str | list[str]) -> list[float] | list[list[float]]:
         client = AsyncOpenAI(base_url=f"{Config.OLLAMA_BASE_URL}/v1", api_key="dummy")
         if not text:
haiku/rag/embeddings/openai.py CHANGED
@@ -1,9 +1,17 @@
+from typing import overload
+
 from openai import AsyncOpenAI
 
 from haiku.rag.embeddings.base import EmbedderBase
 
 
 class Embedder(EmbedderBase):
+    @overload
+    async def embed(self, text: str) -> list[float]: ...
+
+    @overload
+    async def embed(self, text: list[str]) -> list[list[float]]: ...
+
     async def embed(self, text: str | list[str]) -> list[float] | list[list[float]]:
         client = AsyncOpenAI()
         if not text:
haiku/rag/embeddings/vllm.py CHANGED
@@ -1,3 +1,5 @@
+from typing import overload
+
 from openai import AsyncOpenAI
 
 from haiku.rag.config import Config
@@ -5,6 +7,12 @@ from haiku.rag.embeddings.base import EmbedderBase
 
 
 class Embedder(EmbedderBase):
+    @overload
+    async def embed(self, text: str) -> list[float]: ...
+
+    @overload
+    async def embed(self, text: list[str]) -> list[list[float]]: ...
+
     async def embed(self, text: str | list[str]) -> list[float] | list[list[float]]:
         client = AsyncOpenAI(
             base_url=f"{Config.VLLM_EMBEDDINGS_BASE_URL}/v1", api_key="dummy"
haiku/rag/embeddings/voyageai.py CHANGED
@@ -1,9 +1,17 @@
 try:
+    from typing import overload
+
     from voyageai.client import Client  # type: ignore
 
     from haiku.rag.embeddings.base import EmbedderBase
 
     class Embedder(EmbedderBase):
+        @overload
+        async def embed(self, text: str) -> list[float]: ...
+
+        @overload
+        async def embed(self, text: list[str]) -> list[list[float]]: ...
+
         async def embed(self, text: str | list[str]) -> list[float] | list[list[float]]:
             client = Client()
             if not text:
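
The `@overload` pairs repeated across the embedders exist so type checkers can narrow `embed`'s return type from its argument; the helper below is illustrative (not part of the package) and shows what that narrowing buys callers.

```python
from haiku.rag.embeddings.base import EmbedderBase


async def index(embedder: EmbedderBase, texts: list[str]) -> None:
    # A str argument now narrows to list[float] ...
    query_vector: list[float] = await embedder.embed(texts[0])
    # ... and a list[str] argument narrows to list[list[float]],
    # so callers no longer need isinstance checks or casts to satisfy the type checker.
    batch_vectors: list[list[float]] = await embedder.embed(texts)
    print(len(query_vector), len(batch_vectors))
```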
haiku/rag/mcp.py CHANGED
@@ -5,6 +5,8 @@ from fastmcp import FastMCP
 from pydantic import BaseModel
 
 from haiku.rag.client import HaikuRAG
+from haiku.rag.config import Config
+from haiku.rag.research.models import ResearchReport
 
 
 class SearchResult(BaseModel):
@@ -153,4 +155,101 @@ def create_mcp_server(db_path: Path) -> FastMCP:
         except Exception:
             return False
 
+    @mcp.tool()
+    async def ask_question(
+        question: str,
+        cite: bool = False,
+        deep: bool = False,
+    ) -> str:
+        """Ask a question using the QA agent.
+
+        Args:
+            question: The question to ask.
+            cite: Whether to include citations in the response.
+            deep: Use deep multi-agent QA for complex questions that require decomposition.
+
+        Returns:
+            The answer as a string.
+        """
+        try:
+            async with HaikuRAG(db_path) as rag:
+                if deep:
+                    from haiku.rag.config import Config
+                    from haiku.rag.qa.deep.dependencies import DeepQAContext
+                    from haiku.rag.qa.deep.graph import build_deep_qa_graph
+                    from haiku.rag.qa.deep.nodes import DeepQAPlanNode
+                    from haiku.rag.qa.deep.state import DeepQADeps, DeepQAState
+
+                    graph = build_deep_qa_graph()
+                    context = DeepQAContext(
+                        original_question=question, use_citations=cite
+                    )
+                    state = DeepQAState(context=context)
+                    deps = DeepQADeps(client=rag)
+
+                    start_node = DeepQAPlanNode(
+                        provider=Config.QA_PROVIDER,
+                        model=Config.QA_MODEL,
+                    )
+
+                    result = await graph.run(
+                        start_node=start_node, state=state, deps=deps
+                    )
+                    answer = result.output.answer
+                else:
+                    answer = await rag.ask(question, cite=cite)
+                return answer
+        except Exception as e:
+            return f"Error answering question: {e!s}"
+
+    @mcp.tool()
+    async def research_question(
+        question: str,
+        max_iterations: int = 3,
+        confidence_threshold: float = 0.8,
+        max_concurrency: int = 1,
+    ) -> ResearchReport | None:
+        """Run multi-agent research to investigate a complex question.
+
+        The research process uses multiple agents to plan, search, evaluate, and synthesize
+        information iteratively until confidence threshold is met or max iterations reached.
+
+        Args:
+            question: The research question to investigate.
+            max_iterations: Maximum search/analyze iterations (default: 3).
+            confidence_threshold: Minimum confidence score (0-1) to stop early (default: 0.8).
+            max_concurrency: Maximum concurrent searches per iteration (default: 1).
+
+        Returns:
+            A research report with findings, or None if an error occurred.
+        """
+        try:
+            from haiku.rag.graph.nodes.plan import PlanNode
+            from haiku.rag.research.dependencies import ResearchContext
+            from haiku.rag.research.graph import build_research_graph
+            from haiku.rag.research.state import ResearchDeps, ResearchState
+
+            async with HaikuRAG(db_path) as rag:
+                graph = build_research_graph()
+                state = ResearchState(
+                    context=ResearchContext(original_question=question),
+                    max_iterations=max_iterations,
+                    confidence_threshold=confidence_threshold,
+                    max_concurrency=max_concurrency,
+                )
+                deps = ResearchDeps(client=rag)
+
+                result = await graph.run(
+                    PlanNode(
+                        provider=Config.RESEARCH_PROVIDER or Config.QA_PROVIDER,
+                        model=Config.RESEARCH_MODEL or Config.QA_MODEL,
+                    ),
+                    state=state,
+                    deps=deps,
+                )
+
+                return result.output
+        except Exception:
+            return None
+
     return mcp
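
A hedged sketch of calling the new `ask_question` and `research_question` tools; it assumes fastmcp's in-memory `Client` transport (passing the `FastMCP` instance directly) and uses a placeholder database path, so treat it as an outline rather than the project's documented usage.

```python
import asyncio
from pathlib import Path

from fastmcp import Client

from haiku.rag.mcp import create_mcp_server


async def main() -> None:
    server = create_mcp_server(Path("haiku.rag.lancedb"))  # placeholder DB path
    # Assumption: fastmcp's Client accepts a FastMCP instance for in-memory calls.
    async with Client(server) as client:
        answer = await client.call_tool(
            "ask_question", {"question": "What does haiku.rag index?", "cite": True}
        )
        print(answer)

        report = await client.call_tool(
            "research_question", {"question": "How are documents chunked?", "max_iterations": 2}
        )
        print(report)


asyncio.run(main())
```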
haiku/rag/qa/agent.py CHANGED
@@ -54,9 +54,6 @@ class QuestionAnswerAgent:
         limit: int = 3,
     ) -> list[SearchResult]:
         """Search the knowledge base for relevant documents."""
-
-        # Remove quotes from queries as this requires positional indexing in lancedb
-        query = query.replace('"', "")
         search_results = await ctx.deps.client.search(query, limit=limit)
         expanded_results = await ctx.deps.client.expand_context(search_results)
 
haiku_rag-0.11.4.dist-info/METADATA → haiku_rag-0.12.1.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: haiku.rag
-Version: 0.11.4
+Version: 0.12.1
 Summary: Agentic Retrieval Augmented Generation (RAG) with LanceDB
 Author-email: Yiorgis Gozadinos <ggozadinos@gmail.com>
 License: MIT
@@ -18,18 +18,20 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Typing :: Typed
 Requires-Python: >=3.12
-Requires-Dist: docling>=2.52.0
-Requires-Dist: fastmcp>=2.12.3
+Requires-Dist: docling>=2.56.1
+Requires-Dist: fastmcp>=2.12.4
 Requires-Dist: httpx>=0.28.1
-Requires-Dist: lancedb>=0.25.0
-Requires-Dist: pydantic-ai>=1.0.8
-Requires-Dist: pydantic-graph>=1.0.8
-Requires-Dist: pydantic>=2.11.9
+Requires-Dist: lancedb>=0.25.2
+Requires-Dist: pydantic-ai>=1.0.18
+Requires-Dist: pydantic-graph>=1.0.18
+Requires-Dist: pydantic>=2.12.2
 Requires-Dist: python-dotenv>=1.1.1
-Requires-Dist: rich>=14.1.0
-Requires-Dist: tiktoken>=0.11.0
-Requires-Dist: typer>=0.16.1
+Requires-Dist: rich>=14.2.0
+Requires-Dist: tiktoken>=0.12.0
+Requires-Dist: typer>=0.19.2
 Requires-Dist: watchfiles>=1.1.0
+Provides-Extra: a2a
+Requires-Dist: fasta2a>=0.1.0; extra == 'a2a'
 Provides-Extra: mxbai
 Requires-Dist: mxbai-rerank>=0.1.6; extra == 'mxbai'
 Provides-Extra: voyageai
@@ -56,6 +58,7 @@ Retrieval-Augmented Generation (RAG) library built on LanceDB.
 - **File monitoring**: Auto-index files when run as server
 - **40+ file formats**: PDF, DOCX, HTML, Markdown, code files, URLs
 - **MCP server**: Expose as tools for AI assistants
+- **A2A agent**: Conversational agent with context and multi-turn dialogue
 - **CLI & Python API**: Use from command line or Python
 
 ## Quick Start
@@ -181,6 +184,24 @@ haiku-rag serve --stdio
 
 Provides tools for document management and search directly in your AI assistant.
 
+## A2A Agent
+
+Run as a conversational agent with the Agent-to-Agent protocol:
+
+```bash
+# Start the A2A server
+haiku-rag serve --a2a
+
+# Connect with the interactive client (in another terminal)
+haiku-rag a2aclient
+```
+
+The A2A agent provides:
+- Multi-turn dialogue with context
+- Intelligent multi-search for complex questions
+- Source citations with titles and URIs
+- Full document retrieval on request
+
 ## Documentation
 
 Full documentation at: https://ggozad.github.io/haiku.rag/
@@ -190,4 +211,6 @@ Full documentation at: https://ggozad.github.io/haiku.rag/
 - [CLI](https://ggozad.github.io/haiku.rag/cli/) - Command reference
 - [Python API](https://ggozad.github.io/haiku.rag/python/) - Complete API docs
 - [Agents](https://ggozad.github.io/haiku.rag/agents/) - QA agent and multi-agent research
+- [MCP Server](https://ggozad.github.io/haiku.rag/mcp/) - Model Context Protocol integration
+- [A2A Agent](https://ggozad.github.io/haiku.rag/a2a/) - Agent-to-Agent protocol support
 - [Benchmarks](https://ggozad.github.io/haiku.rag/benchmarks/) - Performance Benchmarks
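
The new `a2a` extra shown in the metadata above pulls in fasta2a; installing it matches the hint the CLI prints when the import fails (plain pip works as well):

```bash
uv pip install 'haiku.rag[a2a]'
# or
pip install 'haiku.rag[a2a]'
```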
haiku_rag-0.11.4.dist-info/RECORD → haiku_rag-0.12.1.dist-info/RECORD
@@ -1,21 +1,29 @@
 haiku/rag/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-haiku/rag/app.py,sha256=B5BZaUgC9u3hz17tRKu_xKefPu5xqdZQPCxfa5K8_EI,19180
+haiku/rag/app.py,sha256=nrfg3iGWP_HJBfwUFPv60_1Do8YK3WQYWZhq22r001s,21548
 haiku/rag/chunker.py,sha256=PVe6ysv8UlacUd4Zb3_8RFWIaWDXnzBAy2VDJ4TaUsE,1555
-haiku/rag/cli.py,sha256=6GmWfs30uUytcnyBMv-OE4tCkkuKnfYeJFU5DIrZ_vU,13212
-haiku/rag/client.py,sha256=tk3BWa9u2mtUEMEOEcgF2ebUqdZRv4jSnu4S6Zho-Xg,24210
-haiku/rag/config.py,sha256=rJ-xi66gc94F_QA6BvgRgNq8CNiFTN0CCIaOv1TMMFw,2569
+haiku/rag/cli.py,sha256=ghmfvCmoitgySZsF6t5UQjsm3_rul0KUh0L774BzeuI,13196
+haiku/rag/client.py,sha256=GVXHq9weIaFdcZvO9a4YO1WnrroJJUXVVriDGdMxpH4,25855
+haiku/rag/config.py,sha256=FBsMMijl5PxIfPGifk_AJVRjL4omb03jfoZm0P_VqxI,2743
 haiku/rag/logging.py,sha256=dm65AwADpcQsH5OAPtRA-4hsw0w5DK-sGOvzYkj6jzw,1720
-haiku/rag/mcp.py,sha256=H7XibtSNUviFeaJVsXzHiRqUm0nJCpA7A1QHuBv6SKQ,5057
+haiku/rag/mcp.py,sha256=DZk-IJgVjAesu-vvqVd5BYnfDWKWNR6TQugKgdoFrvg,8976
 haiku/rag/migration.py,sha256=XldX0CTHPXNGrkdQ-gocr4kQGBsz-316WcE0ZDRfb48,11076
 haiku/rag/monitor.py,sha256=VP3bqY0mEodOP60eN4RMldgrL1ti5gMjuDuQ-_vBvFc,2759
 haiku/rag/reader.py,sha256=aW8LG0X31kVWS7kU2tKVpe8RqP3Ne_oIidd_X3UDLH0,3307
 haiku/rag/utils.py,sha256=dBzhKaOHI9KRiJqHErcXUnqtnXY2AgOK8PCLA3rhO0A,6115
+haiku/rag/a2a/__init__.py,sha256=4SlJBr9GUVZ0879o5VI6-qpcBKpieP2hW4hmNbm8NGg,5933
+haiku/rag/a2a/client.py,sha256=awuiHXgVHn1uzaEXE98RIqqKHj1JjszOvn9WI3Jtth8,8760
+haiku/rag/a2a/context.py,sha256=SofkFUZcGonoJcgZh-RGqHTh0UWT4J7Zl4Mz6WDkMl4,2053
+haiku/rag/a2a/models.py,sha256=XhGYj2g3rgVM4JoCDXlll0YjaysqdalybJrBqFXSwl4,689
+haiku/rag/a2a/prompts.py,sha256=yCla8x0hbOhKrkuaqVrF1upn-YjQM3-2NsE2TSnet0M,3030
+haiku/rag/a2a/skills.py,sha256=dwyD2Bn493eL3Vf4uQzmyxj_9IUSb66kQ-085FBAuCs,2701
+haiku/rag/a2a/storage.py,sha256=c8vmGCiZ3nuV9wUuTnwpoRD2HVVvK2JPySQOc5PVMvg,2759
+haiku/rag/a2a/worker.py,sha256=S9hiA1ncpJPdtN0eEmMjsvr5LQ4wMVN5R8CjYkTeohU,12367
 haiku/rag/embeddings/__init__.py,sha256=44IfDITGIFTflGT6UEmiYOwpWFVbYv5smLY59D0YeCs,1419
-haiku/rag/embeddings/base.py,sha256=BnSviKrlzjv3L0sZJs_T-pxfawd-bcTak-rsX-D2f3A,497
-haiku/rag/embeddings/ollama.py,sha256=c1BeKTgpymniZw1sm4iAIdK5vA0MYoRzHLcd2_pFA44,638
-haiku/rag/embeddings/openai.py,sha256=bwoUVlzu9UtbDpN7CtG6OPt0d5tfJNeje4lR81Btpl0,546
-haiku/rag/embeddings/vllm.py,sha256=7ocp9D9bD1R5rqRIC4-Vih9VlKQNuD429k8-9wu234E,669
-haiku/rag/embeddings/voyageai.py,sha256=I4kVdT2KPtwcbjxD22GWJmgcIQIEEHpkOY2_QbFh7mQ,712
+haiku/rag/embeddings/base.py,sha256=Aw4kjfVn2can0R17pdiAgpPRyk5BpdBgMXuor5mstDY,682
+haiku/rag/embeddings/ollama.py,sha256=KXq-eJ58co5rwYchIO3kpvIv0OBwMJkwMXq1xDsETz0,823
+haiku/rag/embeddings/openai.py,sha256=BfmPni567DH8KqwLCPiOmr3q-dpzpOJkvFFoUuTR5as,731
+haiku/rag/embeddings/vllm.py,sha256=wgul0nMWTn6Q1aKA4DJe03EktsRoBxEgtB7gfpWVOyQ,854
+haiku/rag/embeddings/voyageai.py,sha256=6vEuk6q510AJv-K2lL93P2dVrziAjELTOe_w_Zp5YT4,917
 haiku/rag/graph/__init__.py,sha256=BHfMchuUO_UhHKpjjGHjd6xPxNkrIwJzHn4YJiLqG1g,62
 haiku/rag/graph/base.py,sha256=DepZqLF9E64YCCkjmbqmgyp28oNp69WfJCXp614xzh0,819
 haiku/rag/graph/common.py,sha256=xTejucXei3x9tqbal3ZS_64lZAC6Bw3-QfXPniZcZEw,986
@@ -27,7 +35,7 @@ haiku/rag/graph/nodes/plan.py,sha256=Bb6Fva9vwArCU-5xBr24N4pM3wfLP-Vwufgss8HfXMQ
 haiku/rag/graph/nodes/search.py,sha256=DdHhEY7fmWUqis6Nk0bj-di56-ML262B51N9zytzKYk,3699
 haiku/rag/graph/nodes/synthesize.py,sha256=WF0D44SwLP1OK8C6ViOAhFOtGQ0mj3aO54z5bemJb4E,1828
 haiku/rag/qa/__init__.py,sha256=eFRV5GFwe1UsqniEqOLdzAMT2J6QhSiHq5_Li7c6Fs4,520
-haiku/rag/qa/agent.py,sha256=A4FrzoYP4pRzJOOJQGlNFp48yRWMSICH4d8JfxFabqk,3256
+haiku/rag/qa/agent.py,sha256=sN2SVpaQAxg5Hm47LhrHpbo3ELVi1ev9DxKu_ec1c-Y,3123
 haiku/rag/qa/prompts.py,sha256=Lqwn3m4zCsu_CJiC4s9cLsuPNbb9nq6j2PqEF3lw1eA,3380
 haiku/rag/qa/deep/__init__.py,sha256=SnCpWxWip-TaFzVKlFyrOgYeXEqT_gpIlaSItEEJ6r0,50
 haiku/rag/qa/deep/dependencies.py,sha256=AKFqcC1D3N1VPudnFmLH29K5eJWEC5wtwUGkO4FM4jc,998
@@ -61,8 +69,8 @@ haiku/rag/store/repositories/settings.py,sha256=ObrDrzxHn-yA1WcbgIoJoVmAbVvQHAFv
 haiku/rag/store/upgrades/__init__.py,sha256=RQ8A6rEXBASLb5PD9vdDnEas_m_GgRzzdVu4B88Snqc,1975
 haiku/rag/store/upgrades/v0_10_1.py,sha256=qNGnxj6hoHaHJ1rKTiALfw0c9NQOi0KAK-VZCD_073A,1959
 haiku/rag/store/upgrades/v0_9_3.py,sha256=NrjNilQSgDtFWRbL3ZUtzQzJ8tf9u0dDRJtnDFwwbdw,3322
-haiku_rag-0.11.4.dist-info/METADATA,sha256=YA7Fr6OnWYeOH139aZkLRP9Yj0S0KXEayzfKsgFaz08,6748
-haiku_rag-0.11.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-haiku_rag-0.11.4.dist-info/entry_points.txt,sha256=G1U3nAkNd5YDYd4v0tuYFbriz0i-JheCsFuT9kIoGCI,48
-haiku_rag-0.11.4.dist-info/licenses/LICENSE,sha256=eXZrWjSk9PwYFNK9yUczl3oPl95Z4V9UXH7bPN46iPo,1065
-haiku_rag-0.11.4.dist-info/RECORD,,
+haiku_rag-0.12.1.dist-info/METADATA,sha256=POFHzbGYiVj7UkX_1VSA8zUByIiQEG1dPePWO55T7nU,7477
+haiku_rag-0.12.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+haiku_rag-0.12.1.dist-info/entry_points.txt,sha256=G1U3nAkNd5YDYd4v0tuYFbriz0i-JheCsFuT9kIoGCI,48
+haiku_rag-0.12.1.dist-info/licenses/LICENSE,sha256=eXZrWjSk9PwYFNK9yUczl3oPl95Z4V9UXH7bPN46iPo,1065
+haiku_rag-0.12.1.dist-info/RECORD,,