haiku.rag-slim 0.16.0__py3-none-any.whl → 0.24.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of haiku.rag-slim might be problematic; review the changes listed below for details.

Files changed (94)
  1. haiku/rag/app.py +430 -72
  2. haiku/rag/chunkers/__init__.py +31 -0
  3. haiku/rag/chunkers/base.py +31 -0
  4. haiku/rag/chunkers/docling_local.py +164 -0
  5. haiku/rag/chunkers/docling_serve.py +179 -0
  6. haiku/rag/cli.py +207 -24
  7. haiku/rag/cli_chat.py +489 -0
  8. haiku/rag/client.py +1251 -266
  9. haiku/rag/config/__init__.py +16 -10
  10. haiku/rag/config/loader.py +5 -44
  11. haiku/rag/config/models.py +126 -17
  12. haiku/rag/converters/__init__.py +31 -0
  13. haiku/rag/converters/base.py +63 -0
  14. haiku/rag/converters/docling_local.py +193 -0
  15. haiku/rag/converters/docling_serve.py +229 -0
  16. haiku/rag/converters/text_utils.py +237 -0
  17. haiku/rag/embeddings/__init__.py +123 -24
  18. haiku/rag/embeddings/voyageai.py +175 -20
  19. haiku/rag/graph/__init__.py +0 -11
  20. haiku/rag/graph/agui/__init__.py +8 -2
  21. haiku/rag/graph/agui/cli_renderer.py +1 -1
  22. haiku/rag/graph/agui/emitter.py +219 -31
  23. haiku/rag/graph/agui/server.py +20 -62
  24. haiku/rag/graph/agui/stream.py +1 -2
  25. haiku/rag/graph/research/__init__.py +5 -2
  26. haiku/rag/graph/research/dependencies.py +12 -126
  27. haiku/rag/graph/research/graph.py +390 -135
  28. haiku/rag/graph/research/models.py +91 -112
  29. haiku/rag/graph/research/prompts.py +99 -91
  30. haiku/rag/graph/research/state.py +35 -27
  31. haiku/rag/inspector/__init__.py +8 -0
  32. haiku/rag/inspector/app.py +259 -0
  33. haiku/rag/inspector/widgets/__init__.py +6 -0
  34. haiku/rag/inspector/widgets/chunk_list.py +100 -0
  35. haiku/rag/inspector/widgets/context_modal.py +89 -0
  36. haiku/rag/inspector/widgets/detail_view.py +130 -0
  37. haiku/rag/inspector/widgets/document_list.py +75 -0
  38. haiku/rag/inspector/widgets/info_modal.py +209 -0
  39. haiku/rag/inspector/widgets/search_modal.py +183 -0
  40. haiku/rag/inspector/widgets/visual_modal.py +126 -0
  41. haiku/rag/mcp.py +106 -102
  42. haiku/rag/monitor.py +33 -9
  43. haiku/rag/providers/__init__.py +5 -0
  44. haiku/rag/providers/docling_serve.py +108 -0
  45. haiku/rag/qa/__init__.py +12 -10
  46. haiku/rag/qa/agent.py +43 -61
  47. haiku/rag/qa/prompts.py +35 -57
  48. haiku/rag/reranking/__init__.py +9 -6
  49. haiku/rag/reranking/base.py +1 -1
  50. haiku/rag/reranking/cohere.py +5 -4
  51. haiku/rag/reranking/mxbai.py +5 -2
  52. haiku/rag/reranking/vllm.py +3 -4
  53. haiku/rag/reranking/zeroentropy.py +6 -5
  54. haiku/rag/store/__init__.py +2 -1
  55. haiku/rag/store/engine.py +242 -42
  56. haiku/rag/store/exceptions.py +4 -0
  57. haiku/rag/store/models/__init__.py +8 -2
  58. haiku/rag/store/models/chunk.py +190 -0
  59. haiku/rag/store/models/document.py +46 -0
  60. haiku/rag/store/repositories/chunk.py +141 -121
  61. haiku/rag/store/repositories/document.py +25 -84
  62. haiku/rag/store/repositories/settings.py +11 -14
  63. haiku/rag/store/upgrades/__init__.py +19 -3
  64. haiku/rag/store/upgrades/v0_10_1.py +1 -1
  65. haiku/rag/store/upgrades/v0_19_6.py +65 -0
  66. haiku/rag/store/upgrades/v0_20_0.py +68 -0
  67. haiku/rag/store/upgrades/v0_23_1.py +100 -0
  68. haiku/rag/store/upgrades/v0_9_3.py +3 -3
  69. haiku/rag/utils.py +371 -146
  70. {haiku_rag_slim-0.16.0.dist-info → haiku_rag_slim-0.24.0.dist-info}/METADATA +15 -12
  71. haiku_rag_slim-0.24.0.dist-info/RECORD +78 -0
  72. {haiku_rag_slim-0.16.0.dist-info → haiku_rag_slim-0.24.0.dist-info}/WHEEL +1 -1
  73. haiku/rag/chunker.py +0 -65
  74. haiku/rag/embeddings/base.py +0 -25
  75. haiku/rag/embeddings/ollama.py +0 -28
  76. haiku/rag/embeddings/openai.py +0 -26
  77. haiku/rag/embeddings/vllm.py +0 -29
  78. haiku/rag/graph/agui/events.py +0 -254
  79. haiku/rag/graph/common/__init__.py +0 -5
  80. haiku/rag/graph/common/models.py +0 -42
  81. haiku/rag/graph/common/nodes.py +0 -265
  82. haiku/rag/graph/common/prompts.py +0 -46
  83. haiku/rag/graph/common/utils.py +0 -44
  84. haiku/rag/graph/deep_qa/__init__.py +0 -1
  85. haiku/rag/graph/deep_qa/dependencies.py +0 -27
  86. haiku/rag/graph/deep_qa/graph.py +0 -243
  87. haiku/rag/graph/deep_qa/models.py +0 -20
  88. haiku/rag/graph/deep_qa/prompts.py +0 -59
  89. haiku/rag/graph/deep_qa/state.py +0 -56
  90. haiku/rag/graph/research/common.py +0 -87
  91. haiku/rag/reader.py +0 -135
  92. haiku_rag_slim-0.16.0.dist-info/RECORD +0 -71
  93. {haiku_rag_slim-0.16.0.dist-info → haiku_rag_slim-0.24.0.dist-info}/entry_points.txt +0 -0
  94. {haiku_rag_slim-0.16.0.dist-info → haiku_rag_slim-0.24.0.dist-info}/licenses/LICENSE +0 -0
@@ -1,59 +0,0 @@
1
- """Deep QA specific prompts."""
2
-
3
- SYNTHESIS_PROMPT = """You are an expert at synthesizing information into clear, concise answers.
4
-
5
- Task:
6
- - Combine the gathered information from sub-questions into a single comprehensive answer
7
- - Answer the original question directly and completely
8
- - Base your answer strictly on the provided evidence
9
- - Be clear, accurate, and well-structured
10
-
11
- Output format:
12
- - answer: The complete answer to the original question (2-4 paragraphs)
13
- - sources: List of document titles/URIs used (extract from the sub-answers)
14
-
15
- Guidelines:
16
- - Start directly with the answer - no preamble like "Based on the research..."
17
- - Use a clear, professional tone
18
- - Organize information logically
19
- - If evidence is incomplete, state limitations clearly
20
- - Do not include any claims not supported by the gathered information"""
21
-
22
- SYNTHESIS_PROMPT_WITH_CITATIONS = """You are an expert at synthesizing information into clear, concise answers with proper citations.
23
-
24
- Task:
25
- - Combine the gathered information from sub-questions into a single comprehensive answer
26
- - Answer the original question directly and completely
27
- - Base your answer strictly on the provided evidence
28
- - Include inline citations using [Source Title] format
29
-
30
- Output format:
31
- - answer: The complete answer with inline citations (2-4 paragraphs)
32
- - sources: List of document titles/URIs used (extract from the sub-answers)
33
-
34
- Guidelines:
35
- - Start directly with the answer - no preamble like "Based on the research..."
36
- - Add citations after each claim: [Source Title]
37
- - Use a clear, professional tone
38
- - Organize information logically
39
- - If evidence is incomplete, state limitations clearly
40
- - Do not include any claims not supported by the gathered information"""
41
-
42
- DECISION_PROMPT = """You are an expert at evaluating whether gathered information is sufficient to answer a question.
43
-
44
- Task:
45
- - Review the original question and all gathered sub-question answers
46
- - Determine if we have enough information to provide a comprehensive answer
47
- - If insufficient, suggest specific new sub-questions to fill the gaps
48
-
49
- Output format:
50
- - is_sufficient: Boolean indicating if we can answer the question comprehensively
51
- - reasoning: Clear explanation of your assessment
52
- - new_questions: List of specific follow-up questions needed (empty if sufficient)
53
-
54
- Guidelines:
55
- - Be strict but reasonable in your assessment
56
- - Focus on whether core aspects of the question are addressed
57
- - New questions should be specific and distinct from what's been asked
58
- - Limit new questions to 2-3 maximum
59
- - Consider whether additional searches would meaningfully improve the answer"""
@@ -1,56 +0,0 @@
1
- import asyncio
2
- from dataclasses import dataclass
3
- from typing import TYPE_CHECKING
4
-
5
- from pydantic import BaseModel, Field
6
-
7
- from haiku.rag.client import HaikuRAG
8
- from haiku.rag.graph.deep_qa.dependencies import DeepQAContext
9
- from haiku.rag.graph.deep_qa.models import DeepQAAnswer
10
-
11
- if TYPE_CHECKING:
12
- from haiku.rag.config.models import AppConfig
13
- from haiku.rag.graph.agui.emitter import AGUIEmitter
14
-
15
-
16
- @dataclass
17
- class DeepQADeps:
18
- client: HaikuRAG
19
- agui_emitter: "AGUIEmitter[DeepQAState, DeepQAAnswer] | None" = None
20
- semaphore: asyncio.Semaphore | None = None
21
-
22
-
23
- class DeepQAState(BaseModel):
24
- """Deep QA state for multi-agent question answering."""
25
-
26
- model_config = {"arbitrary_types_allowed": True}
27
-
28
- context: DeepQAContext = Field(description="Shared QA context")
29
- max_sub_questions: int = Field(
30
- default=3, description="Maximum number of sub-questions"
31
- )
32
- max_iterations: int = Field(
33
- default=2, description="Maximum number of QA iterations"
34
- )
35
- max_concurrency: int = Field(
36
- default=1, description="Maximum parallel sub-question searches"
37
- )
38
- iterations: int = Field(default=0, description="Current iteration number")
39
-
40
- @classmethod
41
- def from_config(cls, context: DeepQAContext, config: "AppConfig") -> "DeepQAState":
42
- """Create a DeepQAState from an AppConfig.
43
-
44
- Args:
45
- context: The DeepQAContext containing the question and settings
46
- config: The AppConfig object (uses config.qa for state parameters)
47
-
48
- Returns:
49
- A configured DeepQAState instance
50
- """
51
- return cls(
52
- context=context,
53
- max_sub_questions=config.qa.max_sub_questions,
54
- max_iterations=config.qa.max_iterations,
55
- max_concurrency=config.qa.max_concurrency,
56
- )
@@ -1,87 +0,0 @@
1
- from pydantic_ai import format_as_xml
2
-
3
- from haiku.rag.graph.research.dependencies import ResearchContext
4
- from haiku.rag.graph.research.models import InsightAnalysis
5
-
6
-
7
- def format_context_for_prompt(context: ResearchContext) -> str:
8
- """Format the research context as XML for inclusion in prompts."""
9
-
10
- context_data = {
11
- "original_question": context.original_question,
12
- "unanswered_questions": context.sub_questions,
13
- "qa_responses": [
14
- {
15
- "question": qa.query,
16
- "answer": qa.answer,
17
- "context_snippets": qa.context,
18
- "sources": qa.sources, # pyright: ignore[reportAttributeAccessIssue]
19
- }
20
- for qa in context.qa_responses
21
- ],
22
- "insights": [
23
- {
24
- "id": insight.id,
25
- "summary": insight.summary,
26
- "status": insight.status.value,
27
- "supporting_sources": insight.supporting_sources,
28
- "originating_questions": insight.originating_questions,
29
- "notes": insight.notes,
30
- }
31
- for insight in context.insights
32
- ],
33
- "gaps": [
34
- {
35
- "id": gap.id,
36
- "description": gap.description,
37
- "severity": gap.severity.value,
38
- "blocking": gap.blocking,
39
- "resolved": gap.resolved,
40
- "resolved_by": gap.resolved_by,
41
- "supporting_sources": gap.supporting_sources,
42
- "notes": gap.notes,
43
- }
44
- for gap in context.gaps
45
- ],
46
- }
47
- return format_as_xml(context_data, root_tag="research_context")
48
-
49
-
50
- def format_analysis_for_prompt(
51
- analysis: InsightAnalysis | None,
52
- ) -> str:
53
- """Format the latest insight analysis as XML for prompts."""
54
-
55
- if analysis is None:
56
- return "<latest_analysis />"
57
-
58
- data = {
59
- "commentary": analysis.commentary,
60
- "highlights": [
61
- {
62
- "id": insight.id,
63
- "summary": insight.summary,
64
- "status": insight.status.value,
65
- "supporting_sources": insight.supporting_sources,
66
- "originating_questions": insight.originating_questions,
67
- "notes": insight.notes,
68
- }
69
- for insight in analysis.highlights
70
- ],
71
- "gap_assessments": [
72
- {
73
- "id": gap.id,
74
- "description": gap.description,
75
- "severity": gap.severity.value,
76
- "blocking": gap.blocking,
77
- "resolved": gap.resolved,
78
- "resolved_by": gap.resolved_by,
79
- "supporting_sources": gap.supporting_sources,
80
- "notes": gap.notes,
81
- }
82
- for gap in analysis.gap_assessments
83
- ],
84
- "resolved_gaps": analysis.resolved_gaps,
85
- "new_questions": analysis.new_questions,
86
- }
87
- return format_as_xml(data, root_tag="latest_analysis")
haiku/rag/reader.py DELETED
@@ -1,135 +0,0 @@
1
- from pathlib import Path
2
- from typing import ClassVar
3
-
4
- from docling_core.types.doc.document import DoclingDocument
5
-
6
- from haiku.rag.utils import text_to_docling_document
7
-
8
- # Check if docling is available
9
- try:
10
- import docling # noqa: F401
11
-
12
- DOCLING_AVAILABLE = True
13
- except ImportError:
14
- DOCLING_AVAILABLE = False
15
-
16
-
17
- class FileReader:
18
- # Extensions supported by docling
19
- docling_extensions: ClassVar[list[str]] = [
20
- ".adoc",
21
- ".asc",
22
- ".asciidoc",
23
- ".bmp",
24
- ".csv",
25
- ".docx",
26
- ".html",
27
- ".xhtml",
28
- ".jpeg",
29
- ".jpg",
30
- ".md",
31
- ".pdf",
32
- ".png",
33
- ".pptx",
34
- ".tiff",
35
- ".xlsx",
36
- ".xml",
37
- ".webp",
38
- ]
39
-
40
- # Plain text extensions that we'll read directly
41
- text_extensions: ClassVar[list[str]] = [
42
- ".astro",
43
- ".c",
44
- ".cpp",
45
- ".css",
46
- ".go",
47
- ".h",
48
- ".hpp",
49
- ".java",
50
- ".js",
51
- ".json",
52
- ".kt",
53
- ".mdx",
54
- ".mjs",
55
- ".php",
56
- ".py",
57
- ".rb",
58
- ".rs",
59
- ".svelte",
60
- ".swift",
61
- ".ts",
62
- ".tsx",
63
- ".txt",
64
- ".vue",
65
- ".yaml",
66
- ".yml",
67
- ]
68
-
69
- # Code file extensions with their markdown language identifiers for syntax highlighting
70
- code_markdown_identifier: ClassVar[dict[str, str]] = {
71
- ".astro": "astro",
72
- ".c": "c",
73
- ".cpp": "cpp",
74
- ".css": "css",
75
- ".go": "go",
76
- ".h": "c",
77
- ".hpp": "cpp",
78
- ".java": "java",
79
- ".js": "javascript",
80
- ".json": "json",
81
- ".kt": "kotlin",
82
- ".mjs": "javascript",
83
- ".php": "php",
84
- ".py": "python",
85
- ".rb": "ruby",
86
- ".rs": "rust",
87
- ".svelte": "svelte",
88
- ".swift": "swift",
89
- ".ts": "typescript",
90
- ".tsx": "tsx",
91
- ".vue": "vue",
92
- ".yaml": "yaml",
93
- ".yml": "yaml",
94
- }
95
-
96
- extensions: ClassVar[list[str]] = docling_extensions + text_extensions
97
-
98
- @staticmethod
99
- def parse_file(path: Path) -> DoclingDocument:
100
- try:
101
- file_extension = path.suffix.lower()
102
-
103
- if file_extension in FileReader.docling_extensions:
104
- # Use docling for complex document formats
105
- if not DOCLING_AVAILABLE:
106
- raise ImportError(
107
- "Docling is required for processing this file type. "
108
- "Install with: pip install haiku.rag-slim[docling]"
109
- )
110
- from docling.document_converter import DocumentConverter
111
-
112
- converter = DocumentConverter()
113
- result = converter.convert(path)
114
- return result.document
115
- elif file_extension in FileReader.text_extensions:
116
- # Read plain text files directly
117
- content = path.read_text(encoding="utf-8")
118
-
119
- # Wrap code files (but not plain txt) in markdown code blocks for better presentation
120
- if file_extension in FileReader.code_markdown_identifier:
121
- language = FileReader.code_markdown_identifier[file_extension]
122
- content = f"```{language}\n{content}\n```"
123
-
124
- # Convert text to DoclingDocument by wrapping as markdown
125
- return text_to_docling_document(content, name=f"{path.stem}.md")
126
- else:
127
- # Fallback: try to read as text and convert to DoclingDocument
128
- content = path.read_text(encoding="utf-8")
129
- return text_to_docling_document(content, name=f"{path.stem}.md")
130
- except ImportError:
131
- raise
132
- except Exception as e:
133
- raise ValueError(
134
- f"Failed to parse file: {path} - {type(e).__name__}: {e}"
135
- ) from e
@@ -1,71 +0,0 @@
1
- haiku/rag/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
- haiku/rag/app.py,sha256=pBauUXYs6Bme4c4qoeYvOVxQxuLL-BYmorA3TSeTZKQ,22363
3
- haiku/rag/chunker.py,sha256=V3Sm47BY9vAQlzXuGKJiUMA22-XvAZXLkHX37pwvXcY,1926
4
- haiku/rag/cli.py,sha256=eVj2Muo07TiI5Tssx6rYMxoMvrtfzvYvt2YX0Trj1a4,12593
5
- haiku/rag/client.py,sha256=cYaThPAf9sdBxdC-WusnC_2MMcjcFHHkEOTDMjTJHWs,27848
6
- haiku/rag/logging.py,sha256=dm65AwADpcQsH5OAPtRA-4hsw0w5DK-sGOvzYkj6jzw,1720
7
- haiku/rag/mcp.py,sha256=ld3xLkPDzLGcoo2tUmN-TXY6XIxfIky3PUZdA24VZPQ,8729
8
- haiku/rag/monitor.py,sha256=aM92Q36-RDYMabaOk-TK7ok3C865E9aB63ObvLOZkNU,6978
9
- haiku/rag/reader.py,sha256=dK3zayFyNQcay-ROB_M3n8r0s4sLj3Y2sNCFoF4Ot9I,3846
10
- haiku/rag/utils.py,sha256=VExKww-W2ZQU0peougb89JNfsU6JVr9goad1qUb66dc,6411
11
- haiku/rag/config/__init__.py,sha256=U12iAoS5V1oW7wOSkAOvuy_cB5bbSsajtsye57Hu5B8,1636
12
- haiku/rag/config/loader.py,sha256=mhvoPoTXYGPx0ASeJ22rvRl0vQGtNrTzcKm--vXGBfQ,2752
13
- haiku/rag/config/models.py,sha256=AwgEuIFiKkhxxZ6bR4oBRVbrjEc1bSjI1Wg0zIv2rkg,2701
14
- haiku/rag/embeddings/__init__.py,sha256=zwWRU9S5YGEJxlgPv5haHBgj3LUJMe-dEwr3LKLa9RY,1731
15
- haiku/rag/embeddings/base.py,sha256=kzca54e2HGzS_0YKt9OLESM9lrFKpBm_97V07jx0aas,783
16
- haiku/rag/embeddings/ollama.py,sha256=_uIIObbZX9QVU1lcgWQFooA3b-AeZRNncM7yQ2TxlEU,825
17
- haiku/rag/embeddings/openai.py,sha256=BfmPni567DH8KqwLCPiOmr3q-dpzpOJkvFFoUuTR5as,731
18
- haiku/rag/embeddings/vllm.py,sha256=IZFS3pbvLXkhvdT7pFVi2csFlNTSza5bpybwz7ud3Po,847
19
- haiku/rag/embeddings/voyageai.py,sha256=6vEuk6q510AJv-K2lL93P2dVrziAjELTOe_w_Zp5YT4,917
20
- haiku/rag/graph/__init__.py,sha256=rQJKwsr52NLhqjLvdGlA9Svpm7iAYIPutH_8w-aATWM,646
21
- haiku/rag/graph/agui/__init__.py,sha256=Mh3l373-LAT1sR7pubTbNQk5UOqkxWXlp-_VAb5bb_8,1351
22
- haiku/rag/graph/agui/cli_renderer.py,sha256=_IVkPZWUrFJsKtA2fNI_GRMx-tfOUmBfKFZwttqiZZY,5187
23
- haiku/rag/graph/agui/emitter.py,sha256=k5lEAASa0lC0Gthsxi-mNFWyI6GpR697EKkoL9E1SkE,6445
24
- haiku/rag/graph/agui/events.py,sha256=5XGVEfFX2KqywkrQOgW5v3dtluwGswb5LgDyEnIK1GQ,6033
25
- haiku/rag/graph/agui/server.py,sha256=muLq9D-ekejlfT7p1NpPy7RDFIX5CoE5fN87_wEFN28,10497
26
- haiku/rag/graph/agui/state.py,sha256=LkuuAY9w32pc0kkXkLJvyNGC0JzhXn05IfIVZzCXAv0,965
27
- haiku/rag/graph/agui/stream.py,sha256=Z3AVzgcPf2S89c7JBdd_p62VDj7AuMKKr3aroZahnsw,2693
28
- haiku/rag/graph/common/__init__.py,sha256=nqxh6QViWuUw-cq1E8CWfqWciAh17dbfufIABK1wPsw,127
29
- haiku/rag/graph/common/models.py,sha256=Nwzw8TZ_J0aEdenU5to14OIMyDROGYvAbBtVIlC4ZXM,1373
30
- haiku/rag/graph/common/nodes.py,sha256=r9GxXPaGPAF_442pQuMrD6P12GI1ZRTNxGg43hFrfmk,9119
31
- haiku/rag/graph/common/prompts.py,sha256=DTEk4QN1uRHUZRSPyAhGPJjQNLVASQOWkb4Rhc_gnv4,2276
32
- haiku/rag/graph/common/utils.py,sha256=QoOIkgIRxIJ8nwFrcbHGzJC7MlzzUb8hfVZlyzHa7dY,1458
33
- haiku/rag/graph/deep_qa/__init__.py,sha256=BajDKA9m47hoc-alGzQjhOPFirnl6z3vtJod8iYSLQg,56
34
- haiku/rag/graph/deep_qa/dependencies.py,sha256=Shdj7z9Q1iHpGsuZbiFXVMHjx5_lZA4agHHMOHd__Lk,937
35
- haiku/rag/graph/deep_qa/graph.py,sha256=2PpJgHQHsgZ4nsVWuG3ytGPs4tEiDRsWMXlh1y1NjIQ,8017
36
- haiku/rag/graph/deep_qa/models.py,sha256=siZMQXD21_3nk8kaLCv0BCuD9TydLYo-yC4-9CxQy3E,683
37
- haiku/rag/graph/deep_qa/prompts.py,sha256=XtESQvlAvMpJdE2TX9OZYY55yMFmsFT1h6V6QhnvTCA,2598
38
- haiku/rag/graph/deep_qa/state.py,sha256=EPUGHbLFUWeJzfx43koQctL7HiB9EsMtO8hfw3t3JzY,1823
39
- haiku/rag/graph/research/__init__.py,sha256=eNTG4ujgxnY9-7pTJ-fw7xcqOr4MHXQx-P_BH6cA15Q,220
40
- haiku/rag/graph/research/common.py,sha256=3QtESionc7hxwaInAwk7dFnNpqAoG7h4briTdCf3gDk,3000
41
- haiku/rag/graph/research/dependencies.py,sha256=6Swm-XJ37GBdJe8Ynd1ZwRjM4CJODmNSfNgrStC9yMo,6100
42
- haiku/rag/graph/research/graph.py,sha256=llka6emUBAyRUTsBCUX5Jwj7_JUFaUDW4e2kbFH3rH8,10408
43
- haiku/rag/graph/research/models.py,sha256=U68SdBKvNz8CSX9V3cj3PTevMyiShm6JyASUD0t2kCU,5375
44
- haiku/rag/graph/research/prompts.py,sha256=opz4MXjoDHH1wjG6bPyiqT0LVzk3pBA6y_a9zpBW8yM,4834
45
- haiku/rag/graph/research/state.py,sha256=fmuL30bNO5xY4YjZx8vmcr8N2DcoIBKJ6i5BDnO6MoU,2777
46
- haiku/rag/qa/__init__.py,sha256=Q18B5cjgYSuOdzwsJkXDeqcclAI2pu3tBIcWLcMTT5M,949
47
- haiku/rag/qa/agent.py,sha256=ReuvluxVzaH82PhrFLNAAM3rVrSj-sKHkhki266SsGI,3181
48
- haiku/rag/qa/prompts.py,sha256=Lqwn3m4zCsu_CJiC4s9cLsuPNbb9nq6j2PqEF3lw1eA,3380
49
- haiku/rag/reranking/__init__.py,sha256=cwkydVEJr7Tgs4uAWB057y9j5N3F1BDO-71YJNVkL-s,1900
50
- haiku/rag/reranking/base.py,sha256=Yji15nAR8LyIJGqZvEZifTWmortNQ4k_7ZHst_5mRYk,408
51
- haiku/rag/reranking/cohere.py,sha256=BhBPPnaSnDoVlkL_MHF74kegXQBrsZGKnWqC40ztiAk,1050
52
- haiku/rag/reranking/mxbai.py,sha256=qR55dmpaBz15lSN_wXD3-Z6Kqr_bmNKU9q4Pwef_wB8,911
53
- haiku/rag/reranking/vllm.py,sha256=Ip83qzV2RM7qXTj0mE2St66hvXykovoNW8Hu3AUebDc,1489
54
- haiku/rag/reranking/zeroentropy.py,sha256=bVW5gcdSEz8A97xVSD0jhWGN1l4lUZ10I-5vufINGKE,1913
55
- haiku/rag/store/__init__.py,sha256=R2IRcxtkFDxqa2sgMirqLq3l2-FPdWr6ydYStaqm5OQ,104
56
- haiku/rag/store/engine.py,sha256=LTv2_QXqkTJqwHTOKIwzvEBodBZAax3zkW9MtJciMjo,11696
57
- haiku/rag/store/models/__init__.py,sha256=kc7Ctf53Jr483tk4QTIrcgqBbXDz4ZoeYSkFXfPnpks,89
58
- haiku/rag/store/models/chunk.py,sha256=3EuZav4QekJIeHBCub48EM8SjNX8HEJ6wVDXGot4PEQ,421
59
- haiku/rag/store/models/document.py,sha256=cZXy_jEti-hnhq7FKhuhCfd99ccY9fIHMLovB_Thbb8,425
60
- haiku/rag/store/repositories/__init__.py,sha256=Olv5dLfBQINRV3HrsfUpjzkZ7Qm7goEYyMNykgo_DaY,291
61
- haiku/rag/store/repositories/chunk.py,sha256=wlC_D_Vu_CjHvr2dfXV6qG7lyQExERHSsOlJMtAwldk,15930
62
- haiku/rag/store/repositories/document.py,sha256=JKpDUQmYgSqbc2eUKPYz9MX0x--oOHCdI0tNEib1Yqw,8704
63
- haiku/rag/store/repositories/settings.py,sha256=15gS7Xj7cG4qetv_ioxZO_r31by7GuSqtpowOsMkHmc,6129
64
- haiku/rag/store/upgrades/__init__.py,sha256=RQ8A6rEXBASLb5PD9vdDnEas_m_GgRzzdVu4B88Snqc,1975
65
- haiku/rag/store/upgrades/v0_10_1.py,sha256=qNGnxj6hoHaHJ1rKTiALfw0c9NQOi0KAK-VZCD_073A,1959
66
- haiku/rag/store/upgrades/v0_9_3.py,sha256=NrjNilQSgDtFWRbL3ZUtzQzJ8tf9u0dDRJtnDFwwbdw,3322
67
- haiku_rag_slim-0.16.0.dist-info/METADATA,sha256=7BjfptrvrZgWin4NuyVLmZUGMwzIpeNfw5-_CakZJk4,4192
68
- haiku_rag_slim-0.16.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
69
- haiku_rag_slim-0.16.0.dist-info/entry_points.txt,sha256=G1U3nAkNd5YDYd4v0tuYFbriz0i-JheCsFuT9kIoGCI,48
70
- haiku_rag_slim-0.16.0.dist-info/licenses/LICENSE,sha256=eXZrWjSk9PwYFNK9yUczl3oPl95Z4V9UXH7bPN46iPo,1065
71
- haiku_rag_slim-0.16.0.dist-info/RECORD,,