haiku.rag 0.12.1__py3-none-any.whl → 0.13.0__py3-none-any.whl

This diff shows the changes between publicly available package versions that have been released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the differences between package versions as they appear in their respective public registries.

This is a potentially problematic release.


This version of haiku.rag might be problematic. Click here for more details.

@@ -1,6 +1,5 @@
1
1
  import json
2
2
 
3
- from haiku.rag.config import Config
4
3
  from haiku.rag.store.engine import SettingsRecord, Store
5
4
 
6
5
 
@@ -73,7 +72,7 @@ class SettingsRepository:
73
72
 
74
73
  def save_current_settings(self) -> None:
75
74
  """Save the current configuration to the database."""
76
- current_config = Config.model_dump(mode="json")
75
+ current_config = self.store._config.model_dump(mode="json")
77
76
 
78
77
  # Check if settings exist
79
78
  existing = list(
@@ -116,17 +115,28 @@ class SettingsRepository:
116
115
  self.save_current_settings()
117
116
  return
118
117
 
119
- current_config = Config.model_dump(mode="json")
118
+ current_config = self.store._config.model_dump(mode="json")
120
119
 
121
120
  # Check if embedding provider or model has changed
122
- stored_provider = stored_settings.get("EMBEDDINGS_PROVIDER")
123
- current_provider = current_config.get("EMBEDDINGS_PROVIDER")
121
+ # Support both old flat structure and new nested structure for backward compatibility
122
+ stored_embeddings = stored_settings.get("embeddings", {})
123
+ current_embeddings = current_config.get("embeddings", {})
124
124
 
125
- stored_model = stored_settings.get("EMBEDDINGS_MODEL")
126
- current_model = current_config.get("EMBEDDINGS_MODEL")
125
+ # Try nested structure first, fall back to flat for old databases
126
+ stored_provider = stored_embeddings.get("provider") or stored_settings.get(
127
+ "EMBEDDINGS_PROVIDER"
128
+ )
129
+ current_provider = current_embeddings.get("provider")
130
+
131
+ stored_model = stored_embeddings.get("model") or stored_settings.get(
132
+ "EMBEDDINGS_MODEL"
133
+ )
134
+ current_model = current_embeddings.get("model")
127
135
 
128
- stored_vector_dim = stored_settings.get("EMBEDDINGS_VECTOR_DIM")
129
- current_vector_dim = current_config.get("EMBEDDINGS_VECTOR_DIM")
136
+ stored_vector_dim = stored_embeddings.get("vector_dim") or stored_settings.get(
137
+ "EMBEDDINGS_VECTOR_DIM"
138
+ )
139
+ current_vector_dim = current_embeddings.get("vector_dim")
130
140
 
131
141
  # Check for incompatible changes
132
142
  incompatible_changes = []
haiku/rag/utils.py CHANGED
@@ -176,19 +176,19 @@ def prefetch_models():
176
176
 
177
177
  # Collect Ollama models from config
178
178
  required_models: set[str] = set()
179
- if Config.EMBEDDINGS_PROVIDER == "ollama":
180
- required_models.add(Config.EMBEDDINGS_MODEL)
181
- if Config.QA_PROVIDER == "ollama":
182
- required_models.add(Config.QA_MODEL)
183
- if Config.RESEARCH_PROVIDER == "ollama":
184
- required_models.add(Config.RESEARCH_MODEL)
185
- if Config.RERANK_PROVIDER == "ollama":
186
- required_models.add(Config.RERANK_MODEL)
179
+ if Config.embeddings.provider == "ollama":
180
+ required_models.add(Config.embeddings.model)
181
+ if Config.qa.provider == "ollama":
182
+ required_models.add(Config.qa.model)
183
+ if Config.research.provider == "ollama":
184
+ required_models.add(Config.research.model)
185
+ if Config.reranking.provider == "ollama":
186
+ required_models.add(Config.reranking.model)
187
187
 
188
188
  if not required_models:
189
189
  return
190
190
 
191
- base_url = Config.OLLAMA_BASE_URL
191
+ base_url = Config.providers.ollama.base_url
192
192
 
193
193
  with httpx.Client(timeout=None) as client:
194
194
  for model in sorted(required_models):
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: haiku.rag
3
- Version: 0.12.1
3
+ Version: 0.13.0
4
4
  Summary: Agentic Retrieval Augmented Generation (RAG) with LanceDB
5
5
  Author-email: Yiorgis Gozadinos <ggozadinos@gmail.com>
6
6
  License: MIT
@@ -13,9 +13,8 @@ Classifier: Operating System :: MacOS
13
13
  Classifier: Operating System :: Microsoft :: Windows :: Windows 10
14
14
  Classifier: Operating System :: Microsoft :: Windows :: Windows 11
15
15
  Classifier: Operating System :: POSIX :: Linux
16
- Classifier: Programming Language :: Python :: 3.10
17
- Classifier: Programming Language :: Python :: 3.11
18
16
  Classifier: Programming Language :: Python :: 3.12
17
+ Classifier: Programming Language :: Python :: 3.13
19
18
  Classifier: Typing :: Typed
20
19
  Requires-Python: >=3.12
21
20
  Requires-Dist: docling>=2.56.1
@@ -26,6 +25,7 @@ Requires-Dist: pydantic-ai>=1.0.18
26
25
  Requires-Dist: pydantic-graph>=1.0.18
27
26
  Requires-Dist: pydantic>=2.12.2
28
27
  Requires-Dist: python-dotenv>=1.1.1
28
+ Requires-Dist: pyyaml>=6.0.1
29
29
  Requires-Dist: rich>=14.2.0
30
30
  Requires-Dist: tiktoken>=0.12.0
31
31
  Requires-Dist: typer>=0.19.2
@@ -44,7 +44,7 @@ Retrieval-Augmented Generation (RAG) library built on LanceDB.
44
44
 
45
45
  `haiku.rag` is a Retrieval-Augmented Generation (RAG) library built to work with LanceDB as a local vector database. It uses LanceDB for storing embeddings and performs semantic (vector) search as well as full-text search combined through native hybrid search with Reciprocal Rank Fusion. Both open-source (Ollama) as well as commercial (OpenAI, VoyageAI) embedding providers are supported.
46
46
 
47
- > **Note**: Starting with version 0.7.0, haiku.rag uses LanceDB instead of SQLite. If you have an existing SQLite database, use `haiku-rag migrate old_database.sqlite` to migrate your data safely.
47
+ > **Note**: Configuration now uses YAML files instead of environment variables. If you're upgrading from an older version, run `haiku-rag init-config --from-env` to migrate your `.env` file to `haiku.rag.yaml`. See [Configuration](https://ggozad.github.io/haiku.rag/configuration/) for details.
48
48
 
49
49
  ## Features
50
50
 
@@ -65,6 +65,7 @@ Retrieval-Augmented Generation (RAG) library built on LanceDB.
65
65
 
66
66
  ```bash
67
67
  # Install
68
+ # Python 3.12 or newer required
68
69
  uv pip install haiku.rag
69
70
 
70
71
  # Add documents
@@ -98,14 +99,12 @@ haiku-rag research \
98
99
  # Rebuild database (re-chunk and re-embed all documents)
99
100
  haiku-rag rebuild
100
101
 
101
- # Migrate from SQLite to LanceDB
102
- haiku-rag migrate old_database.sqlite
103
-
104
102
  # Start server with file monitoring
105
- export MONITOR_DIRECTORIES="/path/to/docs"
106
- haiku-rag serve
103
+ haiku-rag serve --monitor
107
104
  ```
108
105
 
106
+ To customize settings, create a `haiku.rag.yaml` config file (see [Configuration](https://ggozad.github.io/haiku.rag/configuration/)).
107
+
109
108
  ## Python Usage
110
109
 
111
110
  ```python
@@ -197,17 +196,26 @@ haiku-rag a2aclient
197
196
  ```
198
197
 
199
198
  The A2A agent provides:
199
+
200
200
  - Multi-turn dialogue with context
201
201
  - Intelligent multi-search for complex questions
202
202
  - Source citations with titles and URIs
203
203
  - Full document retrieval on request
204
204
 
205
+ ## Examples
206
+
207
+ See the [examples directory](examples/) for working examples:
208
+
209
+ - **[Interactive Research Assistant](examples/ag-ui-research/)** - Full-stack research assistant with Pydantic AI and AG-UI featuring human-in-the-loop approval and real-time state synchronization
210
+ - **[Docker Setup](examples/docker/)** - Complete Docker deployment with file monitoring, MCP server, and A2A agent
211
+ - **[A2A Security](examples/a2a-security/)** - Authentication examples (API key, OAuth2, GitHub)
212
+
205
213
  ## Documentation
206
214
 
207
215
  Full documentation at: https://ggozad.github.io/haiku.rag/
208
216
 
209
217
  - [Installation](https://ggozad.github.io/haiku.rag/installation/) - Provider setup
210
- - [Configuration](https://ggozad.github.io/haiku.rag/configuration/) - Environment variables
218
+ - [Configuration](https://ggozad.github.io/haiku.rag/configuration/) - YAML configuration
211
219
  - [CLI](https://ggozad.github.io/haiku.rag/cli/) - Command reference
212
220
  - [Python API](https://ggozad.github.io/haiku.rag/python/) - Complete API docs
213
221
  - [Agents](https://ggozad.github.io/haiku.rag/agents/) - QA agent and multi-agent research
@@ -1,16 +1,14 @@
1
1
  haiku/rag/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
- haiku/rag/app.py,sha256=nrfg3iGWP_HJBfwUFPv60_1Do8YK3WQYWZhq22r001s,21548
3
- haiku/rag/chunker.py,sha256=PVe6ysv8UlacUd4Zb3_8RFWIaWDXnzBAy2VDJ4TaUsE,1555
4
- haiku/rag/cli.py,sha256=ghmfvCmoitgySZsF6t5UQjsm3_rul0KUh0L774BzeuI,13196
5
- haiku/rag/client.py,sha256=GVXHq9weIaFdcZvO9a4YO1WnrroJJUXVVriDGdMxpH4,25855
6
- haiku/rag/config.py,sha256=FBsMMijl5PxIfPGifk_AJVRjL4omb03jfoZm0P_VqxI,2743
2
+ haiku/rag/app.py,sha256=lNPYYWQLOPlOLYiwIR4yQuwzl-LrRNQn7n2nacgdq_k,21594
3
+ haiku/rag/chunker.py,sha256=pA0S0fFKAuvzGm2dGyp7FAkeFZA0YTCm_ata83Pnflw,1566
4
+ haiku/rag/cli.py,sha256=Y42tnlVFGvCZVjBcLWrIVgM0A7KjNYX9MAuk9-zQvvE,14523
5
+ haiku/rag/client.py,sha256=cG6DAhzJJ4vdo8QFn9p8iA6YTa0arMrTtIswoZc7sY0,26816
7
6
  haiku/rag/logging.py,sha256=dm65AwADpcQsH5OAPtRA-4hsw0w5DK-sGOvzYkj6jzw,1720
8
- haiku/rag/mcp.py,sha256=DZk-IJgVjAesu-vvqVd5BYnfDWKWNR6TQugKgdoFrvg,8976
9
- haiku/rag/migration.py,sha256=XldX0CTHPXNGrkdQ-gocr4kQGBsz-316WcE0ZDRfb48,11076
10
- haiku/rag/monitor.py,sha256=VP3bqY0mEodOP60eN4RMldgrL1ti5gMjuDuQ-_vBvFc,2759
7
+ haiku/rag/mcp.py,sha256=txuEnrUMWvs_shQBk15gEkJD7xNdSYzp3z75UUWaHFM,9328
8
+ haiku/rag/monitor.py,sha256=d92oRufhI8oYXH7oF6oYVf1_AcpFUafjM6tl4VhAupI,3322
11
9
  haiku/rag/reader.py,sha256=aW8LG0X31kVWS7kU2tKVpe8RqP3Ne_oIidd_X3UDLH0,3307
12
- haiku/rag/utils.py,sha256=dBzhKaOHI9KRiJqHErcXUnqtnXY2AgOK8PCLA3rhO0A,6115
13
- haiku/rag/a2a/__init__.py,sha256=4SlJBr9GUVZ0879o5VI6-qpcBKpieP2hW4hmNbm8NGg,5933
10
+ haiku/rag/utils.py,sha256=47ehVYJlLz6Of_Ua89qj94JclO5ZPBFU9eyonifvnVg,6131
11
+ haiku/rag/a2a/__init__.py,sha256=tY_jLSUM0zKzyBctMkjpqmDWpxWc9QVEK1qAsb-plGs,5933
14
12
  haiku/rag/a2a/client.py,sha256=awuiHXgVHn1uzaEXE98RIqqKHj1JjszOvn9WI3Jtth8,8760
15
13
  haiku/rag/a2a/context.py,sha256=SofkFUZcGonoJcgZh-RGqHTh0UWT4J7Zl4Mz6WDkMl4,2053
16
14
  haiku/rag/a2a/models.py,sha256=XhGYj2g3rgVM4JoCDXlll0YjaysqdalybJrBqFXSwl4,689
@@ -18,15 +16,18 @@ haiku/rag/a2a/prompts.py,sha256=yCla8x0hbOhKrkuaqVrF1upn-YjQM3-2NsE2TSnet0M,3030
18
16
  haiku/rag/a2a/skills.py,sha256=dwyD2Bn493eL3Vf4uQzmyxj_9IUSb66kQ-085FBAuCs,2701
19
17
  haiku/rag/a2a/storage.py,sha256=c8vmGCiZ3nuV9wUuTnwpoRD2HVVvK2JPySQOc5PVMvg,2759
20
18
  haiku/rag/a2a/worker.py,sha256=S9hiA1ncpJPdtN0eEmMjsvr5LQ4wMVN5R8CjYkTeohU,12367
21
- haiku/rag/embeddings/__init__.py,sha256=44IfDITGIFTflGT6UEmiYOwpWFVbYv5smLY59D0YeCs,1419
22
- haiku/rag/embeddings/base.py,sha256=Aw4kjfVn2can0R17pdiAgpPRyk5BpdBgMXuor5mstDY,682
23
- haiku/rag/embeddings/ollama.py,sha256=KXq-eJ58co5rwYchIO3kpvIv0OBwMJkwMXq1xDsETz0,823
19
+ haiku/rag/config/__init__.py,sha256=19sgLKkTB6tm26dY4V6s4QeIK8dWiniNe_U3sBt9EJk,1110
20
+ haiku/rag/config/loader.py,sha256=6Y1Wfu1-AFnpJDmOJIOUVHJsHbyWTGFXo5Xd8TQ1AR8,5360
21
+ haiku/rag/config/models.py,sha256=mNRFzvBC8OrLX0erHeZj2b8IpXtUhLD7oMPIdrPg3j4,2053
22
+ haiku/rag/embeddings/__init__.py,sha256=lA-IxsgLBhLQhcuictVOuaerKOMZsniVBTREG6CNDWM,1611
23
+ haiku/rag/embeddings/base.py,sha256=LRiP0e6YfVFMETVXEQnCi_7LsUteN-zaNlXnq9vU-GA,682
24
+ haiku/rag/embeddings/ollama.py,sha256=O8g8GsSs2-By0wAvmyGRZmlHnQmMFJx6uZQslPj58Ag,855
24
25
  haiku/rag/embeddings/openai.py,sha256=BfmPni567DH8KqwLCPiOmr3q-dpzpOJkvFFoUuTR5as,731
25
- haiku/rag/embeddings/vllm.py,sha256=wgul0nMWTn6Q1aKA4DJe03EktsRoBxEgtB7gfpWVOyQ,854
26
+ haiku/rag/embeddings/vllm.py,sha256=_zKvyqGIuwRv6TCyuOQn4YCvHGa_FQ8siNUhmLE4gTs,864
26
27
  haiku/rag/embeddings/voyageai.py,sha256=6vEuk6q510AJv-K2lL93P2dVrziAjELTOe_w_Zp5YT4,917
27
28
  haiku/rag/graph/__init__.py,sha256=BHfMchuUO_UhHKpjjGHjd6xPxNkrIwJzHn4YJiLqG1g,62
28
29
  haiku/rag/graph/base.py,sha256=DepZqLF9E64YCCkjmbqmgyp28oNp69WfJCXp614xzh0,819
29
- haiku/rag/graph/common.py,sha256=xTejucXei3x9tqbal3ZS_64lZAC6Bw3-QfXPniZcZEw,986
30
+ haiku/rag/graph/common.py,sha256=-Pdao6ZiTgv4ppNctrRpwLG0U6za-66aScQWZ0uCUjc,1016
30
31
  haiku/rag/graph/models.py,sha256=sgL5_wSbQJrNOITH615jryPBhTE8J3ZiZWVxO9Ty-JI,755
31
32
  haiku/rag/graph/prompts.py,sha256=xJgAHjUVczxCgk7YLPyy6DdQFi0lwj42vJqIFnPqcYw,2221
32
33
  haiku/rag/graph/nodes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -34,8 +35,8 @@ haiku/rag/graph/nodes/analysis.py,sha256=g-Aw3nPuCHWo0CXM96Ixa4vQI4TpI6tg6ooHT_J
34
35
  haiku/rag/graph/nodes/plan.py,sha256=Bb6Fva9vwArCU-5xBr24N4pM3wfLP-Vwufgss8HfXMQ,2622
35
36
  haiku/rag/graph/nodes/search.py,sha256=DdHhEY7fmWUqis6Nk0bj-di56-ML262B51N9zytzKYk,3699
36
37
  haiku/rag/graph/nodes/synthesize.py,sha256=WF0D44SwLP1OK8C6ViOAhFOtGQ0mj3aO54z5bemJb4E,1828
37
- haiku/rag/qa/__init__.py,sha256=eFRV5GFwe1UsqniEqOLdzAMT2J6QhSiHq5_Li7c6Fs4,520
38
- haiku/rag/qa/agent.py,sha256=sN2SVpaQAxg5Hm47LhrHpbo3ELVi1ev9DxKu_ec1c-Y,3123
38
+ haiku/rag/qa/__init__.py,sha256=Q18B5cjgYSuOdzwsJkXDeqcclAI2pu3tBIcWLcMTT5M,949
39
+ haiku/rag/qa/agent.py,sha256=ReuvluxVzaH82PhrFLNAAM3rVrSj-sKHkhki266SsGI,3181
39
40
  haiku/rag/qa/prompts.py,sha256=Lqwn3m4zCsu_CJiC4s9cLsuPNbb9nq6j2PqEF3lw1eA,3380
40
41
  haiku/rag/qa/deep/__init__.py,sha256=SnCpWxWip-TaFzVKlFyrOgYeXEqT_gpIlaSItEEJ6r0,50
41
42
  haiku/rag/qa/deep/dependencies.py,sha256=AKFqcC1D3N1VPudnFmLH29K5eJWEC5wtwUGkO4FM4jc,998
@@ -44,11 +45,11 @@ haiku/rag/qa/deep/models.py,sha256=siZMQXD21_3nk8kaLCv0BCuD9TydLYo-yC4-9CxQy3E,6
44
45
  haiku/rag/qa/deep/nodes.py,sha256=XbDujD_xg-NsJS2oYm3LqkfxeHZCzT2f9wBNhVh0wns,10442
45
46
  haiku/rag/qa/deep/prompts.py,sha256=t1fEvoD5Rdab92eAOvefv2wBVmkPFuR0BQ8Kh4X0-mY,2565
46
47
  haiku/rag/qa/deep/state.py,sha256=ICFIX0oRtBs6Sdq9YnmnP04BkGiQYwucfS3Mf8XLcCU,570
47
- haiku/rag/reranking/__init__.py,sha256=95ApqN51rcog9MLkTh_uNE69qOVozO1Z6KMbZZj8nH0,963
48
- haiku/rag/reranking/base.py,sha256=LM9yUSSJ414UgBZhFTgxGprlRqzfTe4I1vgjricz2JY,405
49
- haiku/rag/reranking/cohere.py,sha256=1iTdiaa8vvb6oHVB2qpWzUOVkyfUcimVSZp6Qr4aq4c,1049
50
- haiku/rag/reranking/mxbai.py,sha256=uveGFIdmNmepd2EQsvYr64wv0ra2_wB845hdSZXy5Cw,908
51
- haiku/rag/reranking/vllm.py,sha256=xVGH9ss-ISWdJ5SKUUHUbTqBo7PIEmA_SQv0ScdJ6XA,1479
48
+ haiku/rag/reranking/__init__.py,sha256=34xH2YZ7OCC3H8Yb-zyPuehTRQdijMSY9TC8AmZDOGQ,1296
49
+ haiku/rag/reranking/base.py,sha256=Yji15nAR8LyIJGqZvEZifTWmortNQ4k_7ZHst_5mRYk,408
50
+ haiku/rag/reranking/cohere.py,sha256=BhBPPnaSnDoVlkL_MHF74kegXQBrsZGKnWqC40ztiAk,1050
51
+ haiku/rag/reranking/mxbai.py,sha256=qR55dmpaBz15lSN_wXD3-Z6Kqr_bmNKU9q4Pwef_wB8,911
52
+ haiku/rag/reranking/vllm.py,sha256=Ip83qzV2RM7qXTj0mE2St66hvXykovoNW8Hu3AUebDc,1489
52
53
  haiku/rag/research/__init__.py,sha256=xfVzYkt8QETjZaP8v4LdK8MC2R_JmxKDD34LefvrJbo,201
53
54
  haiku/rag/research/common.py,sha256=mrKXolTnDxcONjodmSPVAtXYWqbU0Bie2W-4lOTclGY,2988
54
55
  haiku/rag/research/dependencies.py,sha256=hiGImk7HyJF4LRYnTmsLFGzY1QrGjAyPW5vb_2JQRDo,8148
@@ -58,19 +59,19 @@ haiku/rag/research/prompts.py,sha256=opz4MXjoDHH1wjG6bPyiqT0LVzk3pBA6y_a9zpBW8yM
58
59
  haiku/rag/research/state.py,sha256=P8RXJMi3wA3l1j6yo8dsAyso6S27FgqS7fvZUUY447A,917
59
60
  haiku/rag/research/stream.py,sha256=amyGDimkNp_FHYUXCqtpbeDOx7sC1jQ-7DwoxuNOL1g,5576
60
61
  haiku/rag/store/__init__.py,sha256=R2IRcxtkFDxqa2sgMirqLq3l2-FPdWr6ydYStaqm5OQ,104
61
- haiku/rag/store/engine.py,sha256=n2IxztyN2UpLLSUVXurjL-e_ANthKUpWyB1gdHfgBMM,11468
62
+ haiku/rag/store/engine.py,sha256=FP1-9LOxoEvQBswYcM2GS_E2RpvSZct49vVktL-oPlo,11697
62
63
  haiku/rag/store/models/__init__.py,sha256=kc7Ctf53Jr483tk4QTIrcgqBbXDz4ZoeYSkFXfPnpks,89
63
64
  haiku/rag/store/models/chunk.py,sha256=3EuZav4QekJIeHBCub48EM8SjNX8HEJ6wVDXGot4PEQ,421
64
65
  haiku/rag/store/models/document.py,sha256=cZXy_jEti-hnhq7FKhuhCfd99ccY9fIHMLovB_Thbb8,425
65
66
  haiku/rag/store/repositories/__init__.py,sha256=Olv5dLfBQINRV3HrsfUpjzkZ7Qm7goEYyMNykgo_DaY,291
66
- haiku/rag/store/repositories/chunk.py,sha256=B0CowrBNy0fd8GLnVJVfqDaLoWxEPPJK3SODya0I0OI,14093
67
- haiku/rag/store/repositories/document.py,sha256=EtgD5pDjghXf6dloBOOEVJp8DI9O_celc_FbYzOywAE,8125
68
- haiku/rag/store/repositories/settings.py,sha256=ObrDrzxHn-yA1WcbgIoJoVmAbVvQHAFvEdRyJFt5Opc,5685
67
+ haiku/rag/store/repositories/chunk.py,sha256=bXa-NBfLdLKJuFLGEKQhFlsLi-XNbojhQYVyBjwUxz8,14205
68
+ haiku/rag/store/repositories/document.py,sha256=UOC_5QEUl-3RnGPJzn92KjrCnhmh5TmWln4yd5cJ4Ss,8133
69
+ haiku/rag/store/repositories/settings.py,sha256=15gS7Xj7cG4qetv_ioxZO_r31by7GuSqtpowOsMkHmc,6129
69
70
  haiku/rag/store/upgrades/__init__.py,sha256=RQ8A6rEXBASLb5PD9vdDnEas_m_GgRzzdVu4B88Snqc,1975
70
71
  haiku/rag/store/upgrades/v0_10_1.py,sha256=qNGnxj6hoHaHJ1rKTiALfw0c9NQOi0KAK-VZCD_073A,1959
71
72
  haiku/rag/store/upgrades/v0_9_3.py,sha256=NrjNilQSgDtFWRbL3ZUtzQzJ8tf9u0dDRJtnDFwwbdw,3322
72
- haiku_rag-0.12.1.dist-info/METADATA,sha256=POFHzbGYiVj7UkX_1VSA8zUByIiQEG1dPePWO55T7nU,7477
73
- haiku_rag-0.12.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
74
- haiku_rag-0.12.1.dist-info/entry_points.txt,sha256=G1U3nAkNd5YDYd4v0tuYFbriz0i-JheCsFuT9kIoGCI,48
75
- haiku_rag-0.12.1.dist-info/licenses/LICENSE,sha256=eXZrWjSk9PwYFNK9yUczl3oPl95Z4V9UXH7bPN46iPo,1065
76
- haiku_rag-0.12.1.dist-info/RECORD,,
73
+ haiku_rag-0.13.0.dist-info/METADATA,sha256=d7r87GHCKyBiEBM_-3lSc8kVgumsiTX0TQJqi_4LJ3s,8101
74
+ haiku_rag-0.13.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
75
+ haiku_rag-0.13.0.dist-info/entry_points.txt,sha256=G1U3nAkNd5YDYd4v0tuYFbriz0i-JheCsFuT9kIoGCI,48
76
+ haiku_rag-0.13.0.dist-info/licenses/LICENSE,sha256=eXZrWjSk9PwYFNK9yUczl3oPl95Z4V9UXH7bPN46iPo,1065
77
+ haiku_rag-0.13.0.dist-info/RECORD,,
haiku/rag/config.py DELETED
@@ -1,90 +0,0 @@
1
- import os
2
- from pathlib import Path
3
-
4
- from dotenv import load_dotenv
5
- from pydantic import BaseModel, field_validator
6
-
7
- from haiku.rag.utils import get_default_data_dir
8
-
9
- load_dotenv()
10
-
11
-
12
- class AppConfig(BaseModel):
13
- ENV: str = "production"
14
-
15
- LANCEDB_API_KEY: str = ""
16
- LANCEDB_URI: str = ""
17
- LANCEDB_REGION: str = ""
18
-
19
- DEFAULT_DATA_DIR: Path = get_default_data_dir()
20
- MONITOR_DIRECTORIES: list[Path] = []
21
-
22
- EMBEDDINGS_PROVIDER: str = "ollama"
23
- EMBEDDINGS_MODEL: str = "qwen3-embedding"
24
- EMBEDDINGS_VECTOR_DIM: int = 4096
25
-
26
- RERANK_PROVIDER: str = ""
27
- RERANK_MODEL: str = ""
28
-
29
- QA_PROVIDER: str = "ollama"
30
- QA_MODEL: str = "gpt-oss"
31
-
32
- # Research defaults (fallback to QA if not provided via env)
33
- RESEARCH_PROVIDER: str = "ollama"
34
- RESEARCH_MODEL: str = "gpt-oss"
35
-
36
- CHUNK_SIZE: int = 256
37
- CONTEXT_CHUNK_RADIUS: int = 0
38
-
39
- # Optional dotted path or file path to a callable that preprocesses
40
- # markdown content before chunking. Examples:
41
- MARKDOWN_PREPROCESSOR: str = ""
42
-
43
- OLLAMA_BASE_URL: str = "http://localhost:11434"
44
-
45
- VLLM_EMBEDDINGS_BASE_URL: str = ""
46
- VLLM_RERANK_BASE_URL: str = ""
47
- VLLM_QA_BASE_URL: str = ""
48
- VLLM_RESEARCH_BASE_URL: str = ""
49
-
50
- # Provider keys
51
- VOYAGE_API_KEY: str = ""
52
- OPENAI_API_KEY: str = ""
53
- ANTHROPIC_API_KEY: str = ""
54
- COHERE_API_KEY: str = ""
55
-
56
- # If true, refuse to auto-create a new LanceDB database or tables
57
- # and error out when the database does not already exist.
58
- DISABLE_DB_AUTOCREATE: bool = False
59
-
60
- # Vacuum retention threshold in seconds. Only versions older than this
61
- # threshold will be removed during vacuum operations. Default is 60 seconds
62
- # to allow concurrent connections to safely use recent versions.
63
- VACUUM_RETENTION_SECONDS: int = 60
64
-
65
- # Maximum number of A2A contexts to keep in memory. When exceeded, least
66
- # recently used contexts will be evicted. Default is 1000.
67
- A2A_MAX_CONTEXTS: int = 1000
68
-
69
- @field_validator("MONITOR_DIRECTORIES", mode="before")
70
- @classmethod
71
- def parse_monitor_directories(cls, v):
72
- if isinstance(v, str):
73
- if not v.strip():
74
- return []
75
- return [
76
- Path(path.strip()).absolute() for path in v.split(",") if path.strip()
77
- ]
78
- return v
79
-
80
-
81
- # Expose Config object for app to import
82
- Config = AppConfig.model_validate(os.environ)
83
- if Config.OPENAI_API_KEY:
84
- os.environ["OPENAI_API_KEY"] = Config.OPENAI_API_KEY
85
- if Config.VOYAGE_API_KEY:
86
- os.environ["VOYAGE_API_KEY"] = Config.VOYAGE_API_KEY
87
- if Config.ANTHROPIC_API_KEY:
88
- os.environ["ANTHROPIC_API_KEY"] = Config.ANTHROPIC_API_KEY
89
- if Config.COHERE_API_KEY:
90
- os.environ["CO_API_KEY"] = Config.COHERE_API_KEY