haiku.rag 0.10.2__py3-none-any.whl → 0.11.0__py3-none-any.whl
This diff shows the changes between publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.
Potentially problematic release: this version of haiku.rag might be problematic.
- haiku/rag/app.py +15 -16
- haiku/rag/research/__init__.py +8 -0
- haiku/rag/research/common.py +71 -6
- haiku/rag/research/dependencies.py +179 -11
- haiku/rag/research/graph.py +5 -3
- haiku/rag/research/models.py +134 -1
- haiku/rag/research/nodes/analysis.py +181 -0
- haiku/rag/research/nodes/plan.py +16 -9
- haiku/rag/research/nodes/search.py +14 -11
- haiku/rag/research/nodes/synthesize.py +7 -3
- haiku/rag/research/prompts.py +67 -28
- haiku/rag/research/state.py +11 -4
- haiku/rag/research/stream.py +177 -0
- {haiku_rag-0.10.2.dist-info → haiku_rag-0.11.0.dist-info}/METADATA +32 -13
- {haiku_rag-0.10.2.dist-info → haiku_rag-0.11.0.dist-info}/RECORD +18 -17
- haiku/rag/research/nodes/evaluate.py +0 -80
- {haiku_rag-0.10.2.dist-info → haiku_rag-0.11.0.dist-info}/WHEEL +0 -0
- {haiku_rag-0.10.2.dist-info → haiku_rag-0.11.0.dist-info}/entry_points.txt +0 -0
- {haiku_rag-0.10.2.dist-info → haiku_rag-0.11.0.dist-info}/licenses/LICENSE +0 -0
{haiku_rag-0.10.2.dist-info → haiku_rag-0.11.0.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: haiku.rag
-Version: 0.10.2
+Version: 0.11.0
 Summary: Agentic Retrieval Augmented Generation (RAG) with LanceDB
 Author-email: Yiorgis Gozadinos <ggozadinos@gmail.com>
 License: MIT
@@ -102,11 +102,12 @@ haiku-rag serve
 ```python
 from haiku.rag.client import HaikuRAG
 from haiku.rag.research import (
+    PlanNode,
     ResearchContext,
     ResearchDeps,
     ResearchState,
     build_research_graph,
-
+    stream_research_graph,
 )
 
 async with HaikuRAG("database.lancedb") as client:
@@ -128,22 +129,40 @@ async with HaikuRAG("database.lancedb") as client:
 
     # Multi‑agent research pipeline (Plan → Search → Evaluate → Synthesize)
     graph = build_research_graph()
+    question = (
+        "What are the main drivers and trends of global temperature "
+        "anomalies since 1990?"
+    )
     state = ResearchState(
-
-            "What are the main drivers and trends of global temperature "
-            "anomalies since 1990?"
-        ),
-        context=ResearchContext(original_question="…"),
+        context=ResearchContext(original_question=question),
         max_iterations=2,
         confidence_threshold=0.8,
-        max_concurrency=
+        max_concurrency=2,
     )
     deps = ResearchDeps(client=client)
-
-
-
-
-
+
+    # Blocking run (final result only)
+    result = await graph.run(
+        PlanNode(provider="openai", model="gpt-4o-mini"),
+        state=state,
+        deps=deps,
+    )
+    print(result.output.title)
+
+    # Streaming progress (log/report/error events)
+    async for event in stream_research_graph(
+        graph,
+        PlanNode(provider="openai", model="gpt-4o-mini"),
+        state,
+        deps,
+    ):
+        if event.type == "log":
+            iteration = event.state.iterations if event.state else state.iterations
+            print(f"[{iteration}] {event.message}")
+        elif event.type == "report":
+            print("\nResearch complete!\n")
+            print(event.report.title)
+            print(event.report.executive_summary)
 ```
 
 ## MCP Server
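
For orientation alongside the README snippet in the METADATA diff above, here is a rough sketch of how the new streaming API composes end to end. This is not from the package: `run_streamed` is a hypothetical helper, and only `event.type`, `event.message`, `event.state`, and `event.report` appear in the packaged README, so the `error`-event field access below is an assumption.

```python
from haiku.rag.client import HaikuRAG
from haiku.rag.research import (
    PlanNode,
    ResearchContext,
    ResearchDeps,
    ResearchState,
    build_research_graph,
    stream_research_graph,
)


async def run_streamed(question: str, db_path: str = "database.lancedb"):
    """Hypothetical helper: stream the research graph and return the final report."""
    async with HaikuRAG(db_path) as client:
        graph = build_research_graph()
        state = ResearchState(
            context=ResearchContext(original_question=question),
            max_iterations=2,
            confidence_threshold=0.8,
            max_concurrency=2,
        )
        deps = ResearchDeps(client=client)
        async for event in stream_research_graph(
            graph, PlanNode(provider="openai", model="gpt-4o-mini"), state, deps
        ):
            if event.type == "log":
                # Progress messages, as in the README example.
                print(event.message)
            elif event.type == "error":
                # The README only names this event type; the attribute is assumed.
                print("error:", getattr(event, "message", event))
            elif event.type == "report":
                # The final report carries title and executive_summary per the README.
                return event.report
        return None
```

Before relying on these names, check haiku/rag/research/stream.py in the 0.11.0 wheel for the actual event model.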

{haiku_rag-0.10.2.dist-info → haiku_rag-0.11.0.dist-info}/RECORD

@@ -1,5 +1,5 @@
 haiku/rag/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-haiku/rag/app.py,sha256
+haiku/rag/app.py,sha256=TRFwMP9mzLaM7EPc7dhsPODKZxCDkSSgPCnGAdj65VU,17929
 haiku/rag/chunker.py,sha256=PVe6ysv8UlacUd4Zb3_8RFWIaWDXnzBAy2VDJ4TaUsE,1555
 haiku/rag/cli.py,sha256=wreAxyXSRnn7f09t9SGe4uAXQjlieUQIpNpOapJT7y8,12910
 haiku/rag/client.py,sha256=iUaa6YUac3CXFniIm8DsaaNsiyHsi4cp8-fPhF5XuVU,22925
@@ -24,17 +24,18 @@ haiku/rag/reranking/base.py,sha256=LM9yUSSJ414UgBZhFTgxGprlRqzfTe4I1vgjricz2JY,4
 haiku/rag/reranking/cohere.py,sha256=1iTdiaa8vvb6oHVB2qpWzUOVkyfUcimVSZp6Qr4aq4c,1049
 haiku/rag/reranking/mxbai.py,sha256=uveGFIdmNmepd2EQsvYr64wv0ra2_wB845hdSZXy5Cw,908
 haiku/rag/reranking/vllm.py,sha256=xVGH9ss-ISWdJ5SKUUHUbTqBo7PIEmA_SQv0ScdJ6XA,1479
-haiku/rag/research/__init__.py,sha256=
-haiku/rag/research/common.py,sha256=
-haiku/rag/research/dependencies.py,sha256=
-haiku/rag/research/graph.py,sha256=
-haiku/rag/research/models.py,sha256=
-haiku/rag/research/prompts.py,sha256=
-haiku/rag/research/state.py,sha256=
-haiku/rag/research/
-haiku/rag/research/nodes/
-haiku/rag/research/nodes/
-haiku/rag/research/nodes/
+haiku/rag/research/__init__.py,sha256=Ujci3u7yM11g10J3EzOYs6y4nG1W3CeG70pMRvPoSL4,708
+haiku/rag/research/common.py,sha256=E-7SN1XBbMoTp5sWGGcItBfnmOvFgbeybB0FnqSCp9I,3995
+haiku/rag/research/dependencies.py,sha256=GYtD2jkxBkxeHm44JAtrCGx0IMJbDCiVXnbQd_n6T0M,8118
+haiku/rag/research/graph.py,sha256=Zaqdjj3wmSTPdMoMl5CmhM2z1otwDq9kS-e5vYi-Y7k,879
+haiku/rag/research/models.py,sha256=LInrRtcL-VzCL_PA8LRBqpH5wVckEFuf8zkVnZg7wEg,6666
+haiku/rag/research/prompts.py,sha256=5lDXT874npJn-oXucLk6Z5jqvXEf0cGrnEeNE46iur8,7056
+haiku/rag/research/state.py,sha256=P8RXJMi3wA3l1j6yo8dsAyso6S27FgqS7fvZUUY447A,917
+haiku/rag/research/stream.py,sha256=amyGDimkNp_FHYUXCqtpbeDOx7sC1jQ-7DwoxuNOL1g,5576
+haiku/rag/research/nodes/analysis.py,sha256=-ydF7dpzyiOSwk6jwhWHqUtTXk_Bzr-dMWodIdBr9KA,6475
+haiku/rag/research/nodes/plan.py,sha256=8lsrDt70SjLcK-awnII7PyW6gaGZ-YLmP7nCqCxqkBo,2588
+haiku/rag/research/nodes/search.py,sha256=-5JgsaWVivFmt7jCdBradE0ZXf_i87o2fj9eLgRC1uE,3674
+haiku/rag/research/nodes/synthesize.py,sha256=SEQ6aDgSwvSVHyJxVK0248JnpfrfDoUbvlEUTAQll58,1803
 haiku/rag/store/__init__.py,sha256=R2IRcxtkFDxqa2sgMirqLq3l2-FPdWr6ydYStaqm5OQ,104
 haiku/rag/store/engine.py,sha256=BceAeTpDgV92B1A3GVcjsTwlD-c0cZPPvGiXW2Gola0,10215
 haiku/rag/store/models/__init__.py,sha256=kc7Ctf53Jr483tk4QTIrcgqBbXDz4ZoeYSkFXfPnpks,89
@@ -47,8 +48,8 @@ haiku/rag/store/repositories/settings.py,sha256=7XMBMavU8zRgdBoQzQg0Obfa7UKjuVnB
 haiku/rag/store/upgrades/__init__.py,sha256=RQ8A6rEXBASLb5PD9vdDnEas_m_GgRzzdVu4B88Snqc,1975
 haiku/rag/store/upgrades/v0_10_1.py,sha256=qNGnxj6hoHaHJ1rKTiALfw0c9NQOi0KAK-VZCD_073A,1959
 haiku/rag/store/upgrades/v0_9_3.py,sha256=NrjNilQSgDtFWRbL3ZUtzQzJ8tf9u0dDRJtnDFwwbdw,3322
-haiku_rag-0.
-haiku_rag-0.
-haiku_rag-0.
-haiku_rag-0.
-haiku_rag-0.
+haiku_rag-0.11.0.dist-info/METADATA,sha256=wT7Q8ZsLKwHpDoRTdT9Aa5gPTMaSRH-9bbK_tBCPl-8,6542
+haiku_rag-0.11.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+haiku_rag-0.11.0.dist-info/entry_points.txt,sha256=G1U3nAkNd5YDYd4v0tuYFbriz0i-JheCsFuT9kIoGCI,48
+haiku_rag-0.11.0.dist-info/licenses/LICENSE,sha256=eXZrWjSk9PwYFNK9yUczl3oPl95Z4V9UXH7bPN46iPo,1065
+haiku_rag-0.11.0.dist-info/RECORD,,

haiku/rag/research/nodes/evaluate.py

@@ -1,80 +0,0 @@
-from dataclasses import dataclass
-
-from pydantic_ai import Agent
-from pydantic_graph import BaseNode, GraphRunContext
-
-from haiku.rag.research.common import format_context_for_prompt, get_model, log
-from haiku.rag.research.dependencies import (
-    ResearchDependencies,
-)
-from haiku.rag.research.models import EvaluationResult, ResearchReport
-from haiku.rag.research.nodes.synthesize import SynthesizeNode
-from haiku.rag.research.prompts import EVALUATION_AGENT_PROMPT
-from haiku.rag.research.state import ResearchDeps, ResearchState
-
-
-@dataclass
-class EvaluateNode(BaseNode[ResearchState, ResearchDeps, ResearchReport]):
-    provider: str
-    model: str
-
-    async def run(
-        self, ctx: GraphRunContext[ResearchState, ResearchDeps]
-    ) -> BaseNode[ResearchState, ResearchDeps, ResearchReport]:
-        state = ctx.state
-        deps = ctx.deps
-
-        log(
-            deps.console,
-            "\n[bold cyan]📊 Analyzing and evaluating research progress...[/bold cyan]",
-        )
-
-        agent = Agent(
-            model=get_model(self.provider, self.model),
-            output_type=EvaluationResult,
-            instructions=EVALUATION_AGENT_PROMPT,
-            retries=3,
-            deps_type=ResearchDependencies,
-        )
-
-        context_xml = format_context_for_prompt(state.context)
-        prompt = (
-            "Analyze gathered information and evaluate completeness for the original question.\n\n"
-            f"{context_xml}"
-        )
-        agent_deps = ResearchDependencies(
-            client=deps.client, context=state.context, console=deps.console
-        )
-        eval_result = await agent.run(prompt, deps=agent_deps)
-        output = eval_result.output
-
-        for insight in output.key_insights:
-            state.context.add_insight(insight)
-        for new_q in output.new_questions:
-            if new_q not in state.sub_questions:
-                state.sub_questions.append(new_q)
-
-        state.last_eval = output
-        state.iterations += 1
-
-        if output.key_insights:
-            log(deps.console, " [bold]Key insights:[/bold]")
-            for ins in output.key_insights:
-                log(deps.console, f" • {ins}")
-        log(
-            deps.console,
-            f" Confidence: [yellow]{output.confidence_score:.1%}[/yellow]",
-        )
-        status = "[green]Yes[/green]" if output.is_sufficient else "[red]No[/red]"
-        log(deps.console, f" Sufficient: {status}")
-
-        from haiku.rag.research.nodes.search import SearchDispatchNode
-
-        if (
-            output.is_sufficient
-            and output.confidence_score >= state.confidence_threshold
-        ) or state.iterations >= state.max_iterations:
-            log(deps.console, "\n[bold green]✅ Stopping research.[/bold green]")
-            return SynthesizeNode(self.provider, self.model)
-
-        return SearchDispatchNode(self.provider, self.model)