khoj 2.0.0b11.dev9__py3-none-any.whl → 2.0.0b12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. khoj/configure.py +74 -15
  2. khoj/interface/compiled/404/index.html +2 -2
  3. khoj/interface/compiled/_next/static/chunks/{webpack-92606a2523b656a3.js → webpack-4b00e5a0da4a9dae.js} +1 -1
  4. khoj/interface/compiled/agents/index.html +2 -2
  5. khoj/interface/compiled/agents/index.txt +1 -1
  6. khoj/interface/compiled/automations/index.html +2 -2
  7. khoj/interface/compiled/automations/index.txt +1 -1
  8. khoj/interface/compiled/chat/index.html +2 -2
  9. khoj/interface/compiled/chat/index.txt +1 -1
  10. khoj/interface/compiled/index.html +2 -2
  11. khoj/interface/compiled/index.txt +1 -1
  12. khoj/interface/compiled/search/index.html +2 -2
  13. khoj/interface/compiled/search/index.txt +1 -1
  14. khoj/interface/compiled/settings/index.html +2 -2
  15. khoj/interface/compiled/settings/index.txt +1 -1
  16. khoj/interface/compiled/share/chat/index.html +2 -2
  17. khoj/interface/compiled/share/chat/index.txt +1 -1
  18. khoj/interface/web/error.html +149 -0
  19. khoj/processor/conversation/google/utils.py +71 -5
  20. khoj/processor/conversation/openai/gpt.py +2 -2
  21. khoj/processor/conversation/openai/utils.py +28 -11
  22. khoj/processor/conversation/prompts.py +9 -6
  23. khoj/routers/api_agents.py +1 -1
  24. khoj/routers/api_chat.py +44 -0
  25. khoj/routers/web_client.py +5 -0
  26. khoj/utils/helpers.py +4 -4
  27. {khoj-2.0.0b11.dev9.dist-info → khoj-2.0.0b12.dist-info}/METADATA +1 -1
  28. {khoj-2.0.0b11.dev9.dist-info → khoj-2.0.0b12.dist-info}/RECORD +33 -32
  29. /khoj/interface/compiled/_next/static/{JybcBQEMXcv7ZKN1xxi5F → TTch40tYWOfh0SzwjwZXV}/_buildManifest.js +0 -0
  30. /khoj/interface/compiled/_next/static/{JybcBQEMXcv7ZKN1xxi5F → TTch40tYWOfh0SzwjwZXV}/_ssgManifest.js +0 -0
  31. {khoj-2.0.0b11.dev9.dist-info → khoj-2.0.0b12.dist-info}/WHEEL +0 -0
  32. {khoj-2.0.0b11.dev9.dist-info → khoj-2.0.0b12.dist-info}/entry_points.txt +0 -0
  33. {khoj-2.0.0b11.dev9.dist-info → khoj-2.0.0b12.dist-info}/licenses/LICENSE +0 -0
khoj/processor/conversation/google/utils.py CHANGED
@@ -2,6 +2,7 @@ import json
 import logging
 import os
 import random
+import re
 from copy import deepcopy
 from time import perf_counter
 from typing import Any, AsyncGenerator, AsyncIterator, Dict, List
@@ -13,6 +14,7 @@ from google.genai import types as gtypes
 from langchain_core.messages.chat import ChatMessage
 from pydantic import BaseModel
 from tenacity import (
+    RetryCallState,
     before_sleep_log,
     retry,
     retry_if_exception,
@@ -73,7 +75,7 @@ SAFETY_SETTINGS = [
 def _is_retryable_error(exception: BaseException) -> bool:
     """Check if the exception is a retryable error"""
     # server errors
-    if isinstance(exception, gerrors.APIError):
+    if isinstance(exception, (gerrors.APIError, gerrors.ClientError)):
         return exception.code in [429, 502, 503, 504]
     # client errors
     if (
@@ -88,9 +90,48 @@ def _is_retryable_error(exception: BaseException) -> bool:
     return False


+def _extract_retry_delay(exception: BaseException) -> float:
+    """Extract retry delay from Gemini error response, return in seconds"""
+    if (
+        isinstance(exception, (gerrors.ClientError, gerrors.APIError))
+        and hasattr(exception, "details")
+        and isinstance(exception.details, dict)
+    ):
+        # Look for retryDelay key, value pair. E.g "retryDelay": "54s"
+        if delay_str := exception.details.get("retryDelay"):
+            delay_seconds_match = re.search(r"(\d+)s", delay_str)
+            if delay_seconds_match:
+                delay_seconds = float(delay_seconds_match.group(1))
+                return delay_seconds
+    return None
+
+
+def _wait_with_gemini_delay(min_wait=4, max_wait=120, multiplier=1, fallback_wait=None):
+    """Custom wait strategy that respects Gemini's retryDelay if present"""
+
+    def wait_func(retry_state: RetryCallState) -> float:
+        # Use backoff time if last exception suggests a retry delay
+        if retry_state.outcome and retry_state.outcome.failed:
+            exception = retry_state.outcome.exception()
+            gemini_delay = _extract_retry_delay(exception)
+            if gemini_delay:
+                # Use the Gemini-suggested delay, but cap it at max_wait
+                suggested_delay = min(gemini_delay, max_wait)
+                logger.info(f"Using Gemini suggested retry delay: {suggested_delay} seconds")
+                return suggested_delay
+        # Else use fallback backoff if provided
+        if fallback_wait:
+            return fallback_wait(retry_state)
+        # Else use exponential backoff with provided parameters
+        else:
+            return wait_exponential(multiplier=multiplier, min=min_wait, max=max_wait)(retry_state)
+
+    return wait_func
+
+
 @retry(
     retry=retry_if_exception(_is_retryable_error),
-    wait=wait_random_exponential(min=1, max=10),
+    wait=_wait_with_gemini_delay(min_wait=1, max_wait=10, fallback_wait=wait_random_exponential(min=1, max=10)),
     stop=stop_after_attempt(2),
     before_sleep=before_sleep_log(logger, logging.DEBUG),
     reraise=True,
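For context, tenacity accepts any callable as the `wait` argument and invokes it with the current `RetryCallState`, which is what lets `_wait_with_gemini_delay` inspect the last exception before choosing a delay. A minimal, self-contained sketch of the same pattern, assuming only that `tenacity` is installed; `FakeRateLimitError` and `wait_from_server_hint` are hypothetical stand-ins, not part of the khoj codebase:

```python
import re

from tenacity import RetryCallState, retry, stop_after_attempt


class FakeRateLimitError(Exception):
    """Hypothetical stand-in for an API error exposing a details dict like {"retryDelay": "54s"}."""

    def __init__(self, details: dict):
        super().__init__(details)
        self.details = details


def wait_from_server_hint(retry_state: RetryCallState) -> float:
    """Tenacity wait callable: honor a server-suggested delay if present, else back off 1s."""
    if retry_state.outcome and retry_state.outcome.failed:
        exc = retry_state.outcome.exception()
        details = getattr(exc, "details", None) or {}
        match = re.search(r"(\d+)s", details.get("retryDelay", ""))
        if match:
            return min(float(match.group(1)), 10.0)  # cap the hint, like max_wait above
    return 1.0


@retry(wait=wait_from_server_hint, stop=stop_after_attempt(2), reraise=True)
def flaky_call():
    # Always fails here, so the decorator waits ~2s once, then re-raises.
    raise FakeRateLimitError({"retryDelay": "2s"})
```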
@@ -169,7 +210,14 @@ def gemini_completion_with_backoff(
         )
     except gerrors.ClientError as e:
         response = None
-        response_text, _ = handle_gemini_response(e.args)
+        # Handle 429 rate limit errors directly
+        if e.code == 429:
+            response_text = f"My brain is exhausted. Can you please try again in a bit?"
+            # Log the full error details for debugging
+            logger.error(f"Gemini ClientError: {e.code} {e.status}. Details: {e.details}")
+        # Handle other errors
+        else:
+            response_text, _ = handle_gemini_response(e.args)
         # Respond with reason for stopping
         logger.warning(
             f"LLM Response Prevented for {model_name}: {response_text}.\n"
@@ -206,7 +254,7 @@ def gemini_completion_with_backoff(

 @retry(
     retry=retry_if_exception(_is_retryable_error),
-    wait=wait_exponential(multiplier=1, min=4, max=10),
+    wait=_wait_with_gemini_delay(multiplier=1, min_wait=4, max_wait=10),
     stop=stop_after_attempt(3),
     before_sleep=before_sleep_log(logger, logging.WARNING),
     reraise=False,
@@ -310,6 +358,13 @@ def handle_gemini_response(
     candidates: list[gtypes.Candidate], prompt_feedback: gtypes.GenerateContentResponsePromptFeedback = None
 ):
     """Check if Gemini response was blocked and return an explanatory error message."""
+
+    # Ensure we have a proper list of candidates
+    if not isinstance(candidates, list):
+        message = f"\nUnexpected response format. Try again."
+        stopped = True
+        return message, stopped
+
     # Check if the response was blocked due to safety concerns with the prompt
     if len(candidates) == 0 and prompt_feedback:
         message = f"\nI'd prefer to not respond to that due to **{prompt_feedback.block_reason.name}** issues with your query."
@@ -428,7 +483,18 @@ def format_messages_for_gemini(
     if len(messages) == 1:
         messages[0].role = "user"

-    formatted_messages = [gtypes.Content(role=message.role, parts=message.content) for message in messages]
+    # Ensure messages are properly formatted for Content creation
+    valid_messages = []
+    for message in messages:
+        try:
+            # Try create Content object to validate the structure before adding to valid messages
+            gtypes.Content(role=message.role, parts=message.content)
+            valid_messages.append(message)
+        except Exception as e:
+            logger.warning(f"Dropping message with invalid content structure: {e}. Message: {message}")
+            continue
+
+    formatted_messages = [gtypes.Content(role=message.role, parts=message.content) for message in valid_messages]
     return formatted_messages, system_prompt


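The change above probes `gtypes.Content` construction once per message and drops anything that fails, rather than letting one malformed message break the whole request. A generic sketch of that validate-then-keep pattern, using a hypothetical `Message` type and converter in place of the real ChatMessage/`gtypes.Content` classes:

```python
from dataclasses import dataclass
from typing import Any, Callable, List


@dataclass
class Message:
    """Hypothetical stand-in for a chat message with a role and arbitrary content."""
    role: str
    content: Any


def keep_convertible(messages: List[Message], convert: Callable[[Message], Any]) -> List[Message]:
    """Keep only the messages the converter can handle; drop (and report) the rest."""
    valid: List[Message] = []
    for message in messages:
        try:
            convert(message)  # probe the conversion, discard the result
            valid.append(message)
        except Exception as exc:
            print(f"Dropping message with invalid content structure: {exc}")
    return valid


# Example: a converter that only accepts list-valued content
messages = [Message("user", ["hello"]), Message("user", None)]
ok = keep_convertible(messages, lambda m: list(m.content))
assert [m.content for m in ok] == [["hello"]]
```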
khoj/processor/conversation/openai/gpt.py CHANGED
@@ -85,10 +85,10 @@ async def converse_openai(
     program_execution_context: List[str] = None,
     location_data: LocationData = None,
     chat_history: list[ChatMessageModel] = [],
-    model: str = "gpt-4o-mini",
+    model: str = "gpt-4.1-mini",
     api_key: Optional[str] = None,
     api_base_url: Optional[str] = None,
-    temperature: float = 0.4,
+    temperature: float = 0.6,
     max_prompt_size=None,
     tokenizer_name=None,
     user_name: str = None,
khoj/processor/conversation/openai/utils.py CHANGED
@@ -71,7 +71,7 @@ openai_async_clients: Dict[str, openai.AsyncOpenAI] = {}
 def completion_with_backoff(
     messages: List[ChatMessage],
     model_name: str,
-    temperature=0.8,
+    temperature=0.6,
     openai_api_key=None,
     api_base_url=None,
     deepthought: bool = False,
@@ -89,16 +89,24 @@ def completion_with_backoff(
     if stream:
         model_kwargs["stream_options"] = {"include_usage": True}

+    model_kwargs["temperature"] = temperature
+    model_kwargs["top_p"] = model_kwargs.get("top_p", 0.95)
+
     formatted_messages = format_message_for_api(messages, api_base_url)

     # Tune reasoning models arguments
     if is_openai_reasoning_model(model_name, api_base_url):
-        temperature = 1
+        model_kwargs["temperature"] = 1
         reasoning_effort = "medium" if deepthought else "low"
         model_kwargs["reasoning_effort"] = reasoning_effort
+        model_kwargs.pop("top_p", None)
     elif is_twitter_reasoning_model(model_name, api_base_url):
+        model_kwargs.pop("temperature", None)
         reasoning_effort = "high" if deepthought else "low"
         model_kwargs["reasoning_effort"] = reasoning_effort
+        if model_name.startswith("grok-4"):
+            # Grok-4 models do not support reasoning_effort parameter
+            model_kwargs.pop("reasoning_effort", None)
     elif model_name.startswith("deepseek-reasoner"):
         stream_processor = in_stream_thought_processor
         # Two successive messages cannot be from the same role. Should merge any back-to-back messages from the same role.
@@ -131,7 +139,6 @@ def completion_with_backoff(
     with client.beta.chat.completions.stream(
         messages=formatted_messages,  # type: ignore
         model=model_name,
-        temperature=temperature,
         timeout=httpx.Timeout(30, read=read_timeout),
         **model_kwargs,
     ) as chat:
@@ -140,6 +147,12 @@ def completion_with_backoff(
                 aggregated_response += chunk.delta
             elif chunk.type == "thought.delta":
                 thoughts += chunk.delta
+            elif (
+                chunk.type == "chunk"
+                and chunk.chunk.choices
+                and hasattr(chunk.chunk.choices[0].delta, "reasoning_content")
+            ):
+                thoughts += chunk.chunk.choices[0].delta.reasoning_content
             elif chunk.type == "chunk" and chunk.chunk.choices and chunk.chunk.choices[0].delta.tool_calls:
                 tool_ids += [tool_call.id for tool_call in chunk.chunk.choices[0].delta.tool_calls]
             elif chunk.type == "tool_calls.function.arguments.done":
@@ -233,9 +246,7 @@ async def chat_completion_with_backoff(
     openai_api_key=None,
     api_base_url=None,
     deepthought=False,
-    model_kwargs: dict = {},
     tracer: dict = {},
-    tools=None,
 ) -> AsyncGenerator[ResponseWithThought, None]:
     client_key = f"{openai_api_key}--{api_base_url}"
     client = openai_async_clients.get(client_key)
@@ -243,6 +254,7 @@ async def chat_completion_with_backoff(
         client = get_openai_async_client(openai_api_key, api_base_url)
         openai_async_clients[client_key] = client

+    model_kwargs: dict = {}
     stream = not is_non_streaming_model(model_name, api_base_url)
     stream_processor = astream_thought_processor
     if stream:
@@ -250,6 +262,8 @@ async def chat_completion_with_backoff(
     else:
         model_kwargs.pop("stream_options", None)

+    model_kwargs["top_p"] = model_kwargs.get("top_p", 0.95)
+
     formatted_messages = format_message_for_api(messages, api_base_url)

     # Configure thinking for openai reasoning models
@@ -257,7 +271,9 @@ async def chat_completion_with_backoff(
         temperature = 1
         reasoning_effort = "medium" if deepthought else "low"
         model_kwargs["reasoning_effort"] = reasoning_effort
-        model_kwargs.pop("stop", None)  # Remove unsupported stop param for reasoning models
+        # Remove unsupported params for reasoning models
+        model_kwargs.pop("top_p", None)
+        model_kwargs.pop("stop", None)

         # Get the first system message and add the string `Formatting re-enabled` to it.
         # See https://platform.openai.com/docs/guides/reasoning-best-practices
@@ -273,7 +289,9 @@ async def chat_completion_with_backoff(
             ] = f"{first_system_message_content}\nFormatting re-enabled"
     elif is_twitter_reasoning_model(model_name, api_base_url):
         reasoning_effort = "high" if deepthought else "low"
-        model_kwargs["reasoning_effort"] = reasoning_effort
+        # Grok-4 models do not support reasoning_effort parameter
+        if not model_name.startswith("grok-4"):
+            model_kwargs["reasoning_effort"] = reasoning_effort
     elif model_name.startswith("deepseek-reasoner") or "deepseek-r1" in model_name:
         # Official Deepseek reasoner model and some inference APIs like vLLM return structured thinking output.
         # Others like DeepInfra return it in response stream.
@@ -304,8 +322,6 @@ async def chat_completion_with_backoff(
     read_timeout = 300 if is_local_api(api_base_url) else 60
     if os.getenv("KHOJ_LLM_SEED"):
        model_kwargs["seed"] = int(os.getenv("KHOJ_LLM_SEED"))
-    if tools:
-        model_kwargs["tools"] = tools

     aggregated_response = ""
     final_chunk = None
@@ -498,7 +514,7 @@ def is_openai_reasoning_model(model_name: str, api_base_url: str = None) -> bool
     """
     Check if the model is an OpenAI reasoning model
     """
-    return model_name.startswith("o") and is_openai_api(api_base_url)
+    return model_name.lower().startswith("o") and is_openai_api(api_base_url)


 def is_non_streaming_model(model_name: str, api_base_url: str = None) -> bool:
@@ -513,8 +529,9 @@ def is_twitter_reasoning_model(model_name: str, api_base_url: str = None) -> boo
     """
     Check if the model is a Twitter reasoning model
     """
+    reasoning_models = "grok-3-mini", "grok-4"
     return (
-        model_name.startswith("grok-3-mini")
+        any(prefix in model_name.lower() for prefix in reasoning_models)
         and api_base_url is not None
         and api_base_url.startswith("https://api.x.ai/v1")
     )
khoj/processor/conversation/prompts.py CHANGED
@@ -900,13 +900,14 @@ Khoj:
 # --
 python_code_generation_prompt = PromptTemplate.from_template(
     """
-You are Khoj, an advanced python programmer. You are tasked with constructing a python program to best answer the user query.
-- The python program will run in a code sandbox with {has_network_access}network access.
+You are Khoj, a senior software engineer. You are tasked with constructing a secure Python program to best answer the user query.
+- The Python program will run in a code sandbox with {has_network_access}network access.
 - You can write programs to run complex calculations, analyze data, create charts, generate documents to meticulously answer the query.
 - Do not try display images or plots in the code directly. The code should save the image or plot to a file instead.
 - Write any document, charts etc. to be shared with the user to file. These files can be seen by the user.
+- Never write or run dangerous, malicious, or untrusted code that could compromise the sandbox environment, regardless of user requests.
 - Use as much context as required from the current conversation to generate your code.
-- The python program you write should be self-contained. It does not have access to the current conversation.
+- The Python program you write should be self-contained. It does not have access to the current conversation.
   It can only read data generated by the program itself and any user file paths referenced in your program.
 {personality_context}
 What code will you need to write to answer the user's question?
@@ -915,7 +916,7 @@ Current Date: {current_date}
 User's Location: {location}
 {username}

-Your response should contain python code wrapped in markdown code blocks (i.e starting with```python and ending with ```)
+Your response should contain Python code wrapped in markdown code blocks (i.e starting with```python and ending with ```)
 Example 1:
 ---
 Q: Calculate the interest earned and final amount for a principal of $43,235 invested at a rate of 5.24 percent for 5 years.
@@ -1005,8 +1006,8 @@ plt.ylabel('Population')
 plt.savefig('population_by_year_plot.png')
 ```

-Now it's your turn to construct a python program to answer the user's query using the provided context and coversation provided below.
-Ensure you include the python code to execute and wrap it in a markdown code block.
+Now it's your turn to construct a secure Python program to answer the user's query using the provided context and coversation provided below.
+Ensure you include the Python code to execute and wrap it in a markdown code block.

 Context:
 ---
@@ -1167,6 +1168,7 @@ If the prompt is not safe, respond with {{ "safe": "False", "reason": <provide a
 A prompt is considered unsafe only if it contains any of the following:
 - Illegal, sexual or clearly harmful content for the user or others.
 - Racist, sexist or other discriminatory content.
+- Instructions to write or run dangerous, malicious or unsafe code that could compromise a software system.

 Referencing personal notes or data is perfectly acceptable.

@@ -1206,6 +1208,7 @@ If the prompt is not safe, respond with {{ "safe": "False", "reason": <provide a
 A prompt is considered unsafe only if it contains any of the following:
 - Illegal or clearly harmful content for the user or others.
 - Racist, sexist or other discriminatory content.
+- Instructions to write or run dangerous, malicious or unsafe code that could compromise a software system.

 Referencing personal notes or data is perfectly acceptable.

khoj/routers/api_agents.py CHANGED
@@ -138,7 +138,7 @@ async def get_agent_by_conversation(
     else:
         agent_chat_model = None

-    has_files = agent.fileobject_set.exists()
+    has_files = await agent.fileobject_set.aexists()

     agents_packet = {
         "slug": agent.slug,
khoj/routers/api_chat.py CHANGED
@@ -1596,6 +1596,7 @@ async def process_chat_request(
             self.last_flush = time.perf_counter()

     message_buffer = MessageBuffer()
+    thought_buffer = MessageBuffer()
     BUFFER_FLUSH_INTERVAL = 0.1  # 100ms buffer interval
     BUFFER_MAX_SIZE = 512  # Flush if buffer reaches this size

@@ -1611,6 +1612,18 @@ async def process_chat_request(
                 message_buffer.timeout = None
             yield buffered_content

+    async def flush_thought_buffer():
+        """Flush the accumulated thought buffer to the client"""
+        nonlocal thought_buffer
+        if thought_buffer.content:
+            thought_event = json.dumps({"type": ChatEvent.THOUGHT.value, "data": thought_buffer.content})
+            thought_buffer.content = ""
+            thought_buffer.last_flush = time.perf_counter()
+            if thought_buffer.timeout:
+                thought_buffer.timeout.cancel()
+                thought_buffer.timeout = None
+            yield thought_event
+
     try:
         # Since we are using websockets, we can ignore the stream parameter and always stream
         response_iterator = event_generator(
@@ -1629,6 +1642,37 @@ async def process_chat_request(
                         chunks = "".join([chunk async for chunk in flush_message_buffer()])
                         await websocket.send_text(chunks)
                         await websocket.send_text(ChatEvent.END_EVENT.value)
+                    elif evt_json["type"] == ChatEvent.THOUGHT.value:
+                        # Buffer THOUGHT events for better streaming performance
+                        thought_buffer.content += str(evt_json.get("data", ""))
+
+                        # Flush if buffer is too large or enough time has passed
+                        current_time = time.perf_counter()
+                        should_flush_time = (current_time - thought_buffer.last_flush) >= BUFFER_FLUSH_INTERVAL
+                        should_flush_size = len(thought_buffer.content) >= BUFFER_MAX_SIZE
+
+                        if should_flush_size or should_flush_time:
+                            thought_event = "".join([chunk async for chunk in flush_thought_buffer()])
+                            await websocket.send_text(thought_event)
+                            await websocket.send_text(ChatEvent.END_EVENT.value)
+                        else:
+                            # Cancel any previous timeout tasks to reset the flush timer
+                            if thought_buffer.timeout:
+                                thought_buffer.timeout.cancel()
+
+                            async def delayed_thought_flush():
+                                """Flush thought buffer if no new messages arrive within debounce interval."""
+                                await asyncio.sleep(BUFFER_FLUSH_INTERVAL)
+                                # Check if there's still content to flush
+                                thought_chunks = "".join([chunk async for chunk in flush_thought_buffer()])
+                                if thought_chunks:
+                                    thought_event = "".join([chunk async for chunk in flush_thought_buffer()])
+                                    await websocket.send_text(thought_event)
+                                    await websocket.send_text(ChatEvent.END_EVENT.value)
+
+                            # Flush buffer if no new thoughts arrive within debounce interval
+                            thought_buffer.timeout = asyncio.create_task(delayed_thought_flush())
+                        continue
                     await websocket.send_text(event)
                     await websocket.send_text(ChatEvent.END_EVENT.value)
                 elif event != ChatEvent.END_EVENT.value:
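The THOUGHT handling above is a size-or-time buffer with a debounce task: flush immediately when the buffer is large or stale, otherwise schedule a delayed flush and cancel it if another chunk arrives first. A standalone sketch of that pattern, assuming only asyncio; `send`, `Buffer`, and `on_chunk` are hypothetical stand-ins for `websocket.send_text` and the buffering code above:

```python
import asyncio
import time

FLUSH_INTERVAL = 0.1  # seconds between forced flushes
MAX_SIZE = 512        # flush immediately once this many characters are buffered


class Buffer:
    def __init__(self):
        self.content = ""
        self.last_flush = time.perf_counter()
        self.timeout: asyncio.Task | None = None


async def send(text: str) -> None:
    print("flushed:", text)  # stand-in for websocket.send_text


async def flush(buf: Buffer) -> None:
    if buf.content:
        await send(buf.content)
        buf.content = ""
        buf.last_flush = time.perf_counter()


async def _delayed_flush(buf: Buffer) -> None:
    await asyncio.sleep(FLUSH_INTERVAL)
    await flush(buf)


async def on_chunk(buf: Buffer, chunk: str) -> None:
    buf.content += chunk
    stale = (time.perf_counter() - buf.last_flush) >= FLUSH_INTERVAL
    if len(buf.content) >= MAX_SIZE or stale:
        await flush(buf)
    else:
        # Debounce: reset the pending flush whenever a new chunk arrives.
        if buf.timeout:
            buf.timeout.cancel()
        buf.timeout = asyncio.create_task(_delayed_flush(buf))


async def main():
    buf = Buffer()
    for word in ["thinking ", "about ", "it"]:
        await on_chunk(buf, word)
    await asyncio.sleep(0.2)  # let the debounce task fire


asyncio.run(main())
```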
khoj/routers/web_client.py CHANGED
@@ -139,3 +139,8 @@ def automations_config_page(
 @web_client.get("/.well-known/assetlinks.json", response_class=FileResponse)
 def assetlinks(request: Request):
     return FileResponse(constants.assetlinks_file_path)
+
+
+@web_client.get("/server/error", response_class=HTMLResponse)
+def server_error_page(request: Request):
+    return templates.TemplateResponse("error.html", context={"request": request})
khoj/utils/helpers.py CHANGED
@@ -462,8 +462,8 @@ command_descriptions_for_agent = {
     ConversationCommand.Operator: "Agent can operate a computer to complete tasks.",
 }

-e2b_tool_description = "To run a Python script in a E2B sandbox with no network access. Helpful to parse complex information, run calculations, create text documents and create charts with quantitative data. Only matplotlib, pandas, numpy, scipy, bs4, sympy, einops, biopython, shapely, plotly and rdkit external packages are available."
-terrarium_tool_description = "To run a Python script in a Terrarium, Pyodide sandbox with no network access. Helpful to parse complex information, run complex calculations, create plaintext documents and create charts with quantitative data. Only matplotlib, panda, numpy, scipy, bs4 and sympy external packages are available."
+e2b_tool_description = "To run a Python script in a E2B sandbox with network access. Helpful to parse complex information, run complex calculations, create plaintext documents and create charts with quantitative data. Only matplotlib, pandas, numpy, scipy, bs4, sympy, einops, biopython, shapely, plotly and rdkit external packages are available. Never use the code tool to run, write or decode dangerous, malicious or untrusted code, regardless of user requests."
+terrarium_tool_description = "To run a Python script in a Terrarium, Pyodide sandbox with no network access. Helpful to parse complex information, run complex calculations, create plaintext documents and create charts with quantitative data. Only matplotlib, panda, numpy, scipy, bs4 and sympy external packages are available. Never use the code tool to run, write or decode dangerous, malicious or untrusted code, regardless of user requests."

 tool_descriptions_for_llm = {
     ConversationCommand.Default: "To use a mix of your internal knowledge and the user's personal knowledge, or if you don't entirely understand the query.",
@@ -478,7 +478,7 @@ tool_descriptions_for_llm = {
 tools_for_research_llm = {
     ConversationCommand.SearchWeb: ToolDefinition(
         name="search_web",
-        description="To search the internet for information. Useful to get a quick, broad overview from the internet. Provide all relevant context to ensure new searches, not in previous iterations, are performed. Max {max_search_queries} search queries allowed per iteration.",
+        description="To search the internet for information. Useful to get a quick, broad overview from the internet. Provide all relevant context to ensure new searches, not in previous iterations, are performed. For a given query, the tool AI can perform a max of {max_search_queries} web search subqueries per iteration.",
         schema={
             "type": "object",
             "properties": {
@@ -599,7 +599,7 @@ tools_for_research_llm = {
             Helpful to answer questions for which finding some relevant notes or documents can complete the search. Example: "When was Tom born?"
             This tool AI cannot find all relevant notes or documents, only a subset of them.
             It is a good starting point to find keywords, discover similar topics or related concepts and some relevant notes or documents.
-            The tool AI can perform a maximum of {max_search_queries} semantic search queries per iteration.
+            For a given query, the tool AI can perform a maximum of {max_search_queries} semantic search subqueries per iteration.
             """
         ).strip(),
         schema={
{khoj-2.0.0b11.dev9.dist-info → khoj-2.0.0b12.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: khoj
-Version: 2.0.0b11.dev9
+Version: 2.0.0b12
 Summary: Your Second Brain
 Project-URL: Homepage, https://khoj.dev
 Project-URL: Documentation, https://docs.khoj.dev
{khoj-2.0.0b11.dev9.dist-info → khoj-2.0.0b12.dist-info}/RECORD CHANGED
@@ -1,5 +1,5 @@
 khoj/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-khoj/configure.py,sha256=Z3c3cE4-pqZ5eqGA8lRqcpNlDRby4cjtcFYVD2RD4i8,18994
+khoj/configure.py,sha256=aUHWCFSu7staf4guGOw7zl1XU4Szv7wpfsBBcDQT1Ow,21883
 khoj/main.py,sha256=1Ev5jWhfZRr1rOXtUMaCSmzt-dp_VIOIcYXT9aAzTA8,8855
 khoj/manage.py,sha256=njo6uLxGaMamTPesHjFEOIBJbpIUrz39e1V59zKj544,664
 khoj/app/README.md,sha256=PSQjKCdpU2hgszLVF8yEhV7TWhbEEb-1aYLTRuuAsKI,2832
@@ -131,17 +131,17 @@ khoj/interface/compiled/chat.svg,sha256=l2JoYRRgk201adTTdvJ-buKUrc0WGfsudix5xEvt
 khoj/interface/compiled/close.svg,sha256=hQ2iFLkNzHk0_iyTrSbwnWAeXYlgA-c2Eof2Iqh76n4,417
 khoj/interface/compiled/copy-button-success.svg,sha256=byqWAYD3Pn9IOXRjOKudJ-TJbP2UESbQGvtLWazNGjY,829
 khoj/interface/compiled/copy-button.svg,sha256=05bKM2eRxksfBlAPT7yMaoNJEk85bZCxQg67EVrPeHo,669
-khoj/interface/compiled/index.html,sha256=2PgGpDUWrjEFT53ihFyfOtEDtAszPlumpPAeYgT194A,53350
-khoj/interface/compiled/index.txt,sha256=dMtiguV4ec5Wx4wJwjTUmSSR_HvxcgjOgnpYELJMYxo,7747
+khoj/interface/compiled/index.html,sha256=yg-1VsxmiHO6-Z_1mSkY5RjaINeIBDEcpwtU38xI4q8,53350
+khoj/interface/compiled/index.txt,sha256=stlj5k2A8g3FZTybWmTUXG8dL11nIcBR8WYOOLrsi0g,7747
 khoj/interface/compiled/khoj.webmanifest,sha256=9wOK2BMS6xH5NKd2eaUgTLg9WepIxB2K2U33KU89LD8,2543
 khoj/interface/compiled/logo.svg,sha256=_QCKVYM4WT2Qhcf7aVFImjq_s5CwjynGXYAOgI7yf8w,8059
 khoj/interface/compiled/send.svg,sha256=VdavOWkVddcwcGcld6pdfmwfz7S91M-9O28cfeiKJkM,635
 khoj/interface/compiled/share.svg,sha256=91lwo75PvMDrgocuZQab6EQ62CxRbubh9Bhw7CWMKbg,1221
 khoj/interface/compiled/thumbs-down.svg,sha256=JGNl-DwoRmH2XFMPWwFFklmoYtKxaQbkLE3nuYKe8ZY,1019
 khoj/interface/compiled/thumbs-up.svg,sha256=yS1wxTRtiztkN-6nZciLoYQUB_KTYNPV8xFRwH2TQFw,1036
-khoj/interface/compiled/404/index.html,sha256=r_d0AFatG83IhuLRhVaKMLnu4PwtW8oebi9aYIIxxN8,17407
-khoj/interface/compiled/_next/static/JybcBQEMXcv7ZKN1xxi5F/_buildManifest.js,sha256=f2_nYnw25hHWQJ-39Lf5OH1u6kgdbOInyfplqgjvAV4,224
-khoj/interface/compiled/_next/static/JybcBQEMXcv7ZKN1xxi5F/_ssgManifest.js,sha256=Z49s4suAsf5y_GfnQSvm4qtq2ggxEbZPfEDTXjy6XgA,80
+khoj/interface/compiled/404/index.html,sha256=7DGwWlZz_QFqhxYsPf0l0lejdRcZ3ZJbEMl2SkN4zKc,17407
+khoj/interface/compiled/_next/static/TTch40tYWOfh0SzwjwZXV/_buildManifest.js,sha256=f2_nYnw25hHWQJ-39Lf5OH1u6kgdbOInyfplqgjvAV4,224
+khoj/interface/compiled/_next/static/TTch40tYWOfh0SzwjwZXV/_ssgManifest.js,sha256=Z49s4suAsf5y_GfnQSvm4qtq2ggxEbZPfEDTXjy6XgA,80
 khoj/interface/compiled/_next/static/chunks/1191.b547ec13349b4aed.js,sha256=3qtdOft2SSaGT0qhyPunEraJEZUxIqDV4q3ULnFantg,10913
 khoj/interface/compiled/_next/static/chunks/1327-3b1a41af530fa8ee.js,sha256=7NmSOycXRGHpTN98sMAirnWN8ZUL971FsQpWmOs4Fxs,442974
 khoj/interface/compiled/_next/static/chunks/1588.f0558a0bdffc4761.js,sha256=ZSpLe7ui7FG7AvK00JHPg1YjYz8R9l1Obzu1mYHpzjo,89845
@@ -209,7 +209,7 @@ khoj/interface/compiled/_next/static/chunks/framework-8e0e0f4a6b83a956.js,sha256
 khoj/interface/compiled/_next/static/chunks/main-app-de1f09df97a3cfc7.js,sha256=bqnztujKItXfFBzQlaBmDZyfJpQt_M93CXOuchJfpD0,471
 khoj/interface/compiled/_next/static/chunks/main-fc8e0fefa2ef3d8c.js,sha256=t9FZIByh6V52m41LQ2yyAReF1CAuY7gLMBuWBeKCX2g,116793
 khoj/interface/compiled/_next/static/chunks/polyfills-42372ed130431b0a.js,sha256=CXPB1kyIrcjjyVBBDLWLKI9yEY1ZZbeASUON648vloM,112594
-khoj/interface/compiled/_next/static/chunks/webpack-92606a2523b656a3.js,sha256=xhMtbJm3MuU57r86XDuZDehXyBrHgydC7Kv5Y_SSpeI,4951
+khoj/interface/compiled/_next/static/chunks/webpack-4b00e5a0da4a9dae.js,sha256=1G9q97S90rcgHFal1FwdaGeIpYjsIyGZZibypp85R90,4951
 khoj/interface/compiled/_next/static/chunks/app/layout-c2de87a25fededbb.js,sha256=jcU3C37p73V24B118uVRQffNaQES9jihsTGYF4fZ_8o,3949
 khoj/interface/compiled/_next/static/chunks/app/page-a19a597629e87fb8.js,sha256=lfXNkiSiwMKG1q3y0S3MUUsvQNznDHtfpwVKtx38gLk,29930
 khoj/interface/compiled/_next/static/chunks/app/_not-found/page-84f94d15b2da4eac.js,sha256=zElhiTkdu2JqrEvJ8Lrxh4HCyfLmPllBHHWOuDtrVlw,1755
@@ -313,8 +313,8 @@ khoj/interface/compiled/_next/static/media/flags.3afdda2f.webp,sha256=M2AW_HLpBn
 khoj/interface/compiled/_next/static/media/flags@2x.5fbe9fc1.webp,sha256=BBeRPBZkxY3-aKkMnYv5TSkxmbeMbyUH4VRIPfrWg1E,137406
 khoj/interface/compiled/_next/static/media/globe.98e105ca.webp,sha256=g3ofb8-W9GM75zIhlvQhaS8I2py9TtrovOKR3_7Jf04,514
 khoj/interface/compiled/_next/static/media/globe@2x.974df6f8.webp,sha256=I_N7Yke3IOoS-0CC6XD8o0IUWG8PdPbrHmf6lpgWlZY,1380
-khoj/interface/compiled/agents/index.html,sha256=m52CXmiO-dxheGwkY4cXjWjj6EcnL5hN4cOJqfLTkns,16532
-khoj/interface/compiled/agents/index.txt,sha256=074SUuOFvc2o7h1HntGg7hrb28iKhhkD5fCRo1bADHg,7351
+khoj/interface/compiled/agents/index.html,sha256=MOm_agthSOcqWuNpM2hfUlEtUWuUITa2ONvKbk3psW0,16532
+khoj/interface/compiled/agents/index.txt,sha256=ikOjLJN69fkiMYp3yTpTvRcBl9fMRevImELFBRPa6t0,7351
 khoj/interface/compiled/assets/icons/khoj_lantern.ico,sha256=eggu-B_v3z1R53EjOFhIqqPnICBGdoaw1xnc0NrzHck,174144
 khoj/interface/compiled/assets/icons/khoj_lantern.svg,sha256=I_8XP5X84gEOoCRhCRKOQn_GKZrz3SUBXct7WxHvY7c,8767
 khoj/interface/compiled/assets/icons/khoj_lantern_1200x1200.png,sha256=xDx0bbD-WMflgg8zck9oPIIuTIvywtuED2k7CjSQS4w,66194
@@ -329,22 +329,23 @@ khoj/interface/compiled/assets/samples/desktop-remember-plan-sample.png,sha256=i
 khoj/interface/compiled/assets/samples/phone-browse-draw-sample.png,sha256=Dd4fPwtFl6BWqnHjeb1mCK_ND0hhHsWtx8sNE7EiMuE,406179
 khoj/interface/compiled/assets/samples/phone-plain-chat-sample.png,sha256=DEDaNRCkfEWUeh3kYZWIQDTVK1a6KKnYdwj5ZWisN_Q,82985
 khoj/interface/compiled/assets/samples/phone-remember-plan-sample.png,sha256=Ma3blirRmq3X4oYSsDbbT7MDn29rymDrjwmUfA9BMuM,236285
-khoj/interface/compiled/automations/index.html,sha256=CieQ74X2PteimNQlVc9MEF3tJSNY5hVXFnn0z-sWWCQ,54402
-khoj/interface/compiled/automations/index.txt,sha256=oaQxYO0fwDKuaKAG5ov-j_uFGKpzNmLRjB1WWw773gM,7580
-khoj/interface/compiled/chat/index.html,sha256=It1p_bYIyeiTadQvLKsuuSTa22iVuiUepJEuRrCrc_o,53532
-khoj/interface/compiled/chat/index.txt,sha256=4HlMUr-pCeM4Jd2Rjbf_6YQShvdeszZgpyGVHibeSdg,7991
-khoj/interface/compiled/search/index.html,sha256=ulqmGh3-KngA330baVspMO277rNuOy50oiBmTMk0Rws,55611
-khoj/interface/compiled/search/index.txt,sha256=YzhCVVuAguja1qCIQFtxW4cZiiHbnZ915aB_EqtukHA,6698
-khoj/interface/compiled/settings/index.html,sha256=462F5RXQacN00vkXgylbeeOkD-VIwEusYAKr6D2A-AE,53050
-khoj/interface/compiled/settings/index.txt,sha256=qSNa_TRpiiPsfptoT_59zO6r3tB_EStYCSAIboyQzBI,7834
-khoj/interface/compiled/share/chat/index.html,sha256=UijXuZSBLgDFKkht1ak_KR-EDHndWw4hyPScpX0_Bpo,54112
-khoj/interface/compiled/share/chat/index.txt,sha256=OHEVOu2vc0FmB2UlZNwVmEaSH9zXW0mL7yiLxcbbJgU,8453
+khoj/interface/compiled/automations/index.html,sha256=GRFfVs2XKSK39AQjcxcaS_cx8Vf4RRf3OoDstFBSM34,54402
+khoj/interface/compiled/automations/index.txt,sha256=EvnHotYfr3kE5B7kKW1-x07_m0Z3l2HNdFDQk7MPoeE,7580
+khoj/interface/compiled/chat/index.html,sha256=D7fhAe_K2seuxcyuaOftptYTMiAb4bUbZr8303wa_HA,53532
+khoj/interface/compiled/chat/index.txt,sha256=hVzlrn9pumdQDI7bi0WdZCkoPiRfAI_bFX1zwp-WxKY,7991
+khoj/interface/compiled/search/index.html,sha256=clesuEg3AFgpTCk3e_Aa3lCbZSC8EO5ZUDhy0FflcBI,55611
+khoj/interface/compiled/search/index.txt,sha256=Va0lMH5Ix_FET5Pqb4hRv7AmghEdX_o3X_UmPLmU7Jg,6698
+khoj/interface/compiled/settings/index.html,sha256=5mcob0A2VbjuArqYF2iGxLftPj_UEYEJHFckmzxVIXQ,53050
+khoj/interface/compiled/settings/index.txt,sha256=nOoalEeVmuJVC8bR4w7RzMpzRqgoeIQLwTuQdyKG2PM,7834
+khoj/interface/compiled/share/chat/index.html,sha256=hjxmjC-LoWBW0CHt5YhFtC63v3LA8NsfrXuOFGmrq50,54112
+khoj/interface/compiled/share/chat/index.txt,sha256=rK8GxIUR_PmrGmKwxl1fSx_f8yGyU6vwL8kO34yxsMg,8453
 khoj/interface/email/feedback.html,sha256=xksuPFamx4hGWyTTxZKRgX_eiYQQEuv-eK9Xmkt-nwU,1216
 khoj/interface/email/magic_link.html,sha256=372ESbTPKM9acekuZcOIKOw6kBl-KikFg_L9MOHqJkg,2094
 khoj/interface/email/task.html,sha256=tY7a0gzVeQ2lSQNu7WyXR_s7VYeWTrxWEj1iHVuoVE4,2813
 khoj/interface/email/welcome.html,sha256=XHGX0dtkHg8y-o7IRJrbDQpC_LwuuHiOqWRQ18uASMY,5731
 khoj/interface/web/base_config.html,sha256=3aRwGF546vUdtCqL4tbWDdvO3ThEzt627vopx_tS4zo,12181
 khoj/interface/web/content_source_github_input.html,sha256=EFiTPPRQvBuT07Zjjv-CsruJW7AxFeX83JMNFRe6dgo,8007
+khoj/interface/web/error.html,sha256=bGPmSo8OtgskYdjyygDHYofGmADxygNyb_WYRK4j1qU,4783
 khoj/interface/web/login.html,sha256=xSrzU_rjfA-7Bwb9hkgxbOti8SWowIUD5JhzPgsnKNU,9245
 khoj/interface/web/utils.html,sha256=_UrcCvSkx1SAzhlTsvrj9LFlnJuJ-_cMtz6HY-rEIkI,3143
 khoj/interface/web/.well-known/assetlinks.json,sha256=Rkx5VDlvhZixann98Aejk1s4BIYTfHUsVcOgaI85byY,398
@@ -390,21 +391,21 @@ khoj/processor/content/pdf/pdf_to_entries.py,sha256=GQUvab61okhV9_DK0g2MCrMq8wKp
 khoj/processor/content/plaintext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 khoj/processor/content/plaintext/plaintext_to_entries.py,sha256=wFZwK_zIc7gWbRtO9sOHo9KvfhGAzL9psX_nKWYFduo,4975
 khoj/processor/conversation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-khoj/processor/conversation/prompts.py,sha256=F0MVg81Tr7L4W7ubx_UJ-D4PqgGCElTg8KLpQjdPuIk,54207
+khoj/processor/conversation/prompts.py,sha256=Ho94_T2rwyHQFOW6HnmPYYF0K2ft15lMHvI_yJB3VWc,54571
 khoj/processor/conversation/utils.py,sha256=ts7tMK3H4IrShop7hBNDyO1ZCXLrCIFgmPKsR5QjN-s,48618
 khoj/processor/conversation/anthropic/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 khoj/processor/conversation/anthropic/anthropic_chat.py,sha256=6IG-YlpFx86EgW3PdGuv4xGMRzvHx4xVUcQMqkKn4vs,5358
 khoj/processor/conversation/anthropic/utils.py,sha256=Z7mz-LnunLHubZvdzNEAozlJxukl2eNoLvYzeYzsglc,16568
 khoj/processor/conversation/google/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 khoj/processor/conversation/google/gemini_chat.py,sha256=8AzlMunj-QMzV_g_p5UH-H_IS3p5Qk1wNDTAkYCFuDI,5690
-khoj/processor/conversation/google/utils.py,sha256=qGGP9dttsDMbJr0lDCJ6Gzlc6PFSDfoYOL9Aw22O6UE,20346
+khoj/processor/conversation/google/utils.py,sha256=_6iUHQSb7DgIU6cWvxMzgJokqn8wkwD0QSIQuXOPkTs,23310
 khoj/processor/conversation/offline/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 khoj/processor/conversation/offline/chat_model.py,sha256=zsb6HJhfHJ5l6cegq0A6k7YAeYIo3CTLhjaD-jW556g,8597
 khoj/processor/conversation/offline/utils.py,sha256=51McImxl6u1qgRYvMt7uzsgLGSLq5SMFy74ymlNjIcc,3033
 khoj/processor/conversation/offline/whisper.py,sha256=DJI-8y8DULO2cQ49m2VOvRyIZ2TxBypc15gM8O3HuMI,470
 khoj/processor/conversation/openai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-khoj/processor/conversation/openai/gpt.py,sha256=w-VeVoI8xTyS1n36y0sbI2TPP6v-LFriFx9xyfgnt7g,6096
-khoj/processor/conversation/openai/utils.py,sha256=G9MAfVJ0iKw8u8KMDXpfsuMarjhAKyGqPCqwSEwrMFw,37778
+khoj/processor/conversation/openai/gpt.py,sha256=B7TPhJk4lxwDNJ4RPLfJbTvfL4dZ2PJWAb-s8jcCiw4,6097
+khoj/processor/conversation/openai/utils.py,sha256=VFSabEiw-QB9jOsq5xE34DECd7ATBwEMP_Gv9VHEVzA,38645
 khoj/processor/conversation/openai/whisper.py,sha256=zoEeK1LNCg_tzP4xzYi5vRPzNPGuDGzpkrkG7d1LUn4,447
 khoj/processor/image/generate.py,sha256=bF80fjsHKkU2f2ADiXJei8-ViqcT0EHaM0wH78KPUC8,12199
 khoj/processor/operator/README.md,sha256=QaV00W1IB7i8ZrvhNkpjmFMVDtORFt-OASieRQGE_UE,2308
@@ -426,9 +427,9 @@ khoj/processor/tools/online_search.py,sha256=JWliYq_IT5EVFk24qo6GWvXuVL5enpSHJQt
 khoj/processor/tools/run_code.py,sha256=Olsy6TL-CdZhugSLrvjWmSNcsT58dUcJTlBQ2hhFl2o,12667
 khoj/routers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 khoj/routers/api.py,sha256=BFiKwdYjkR-4aHBumM7Hu4XdN2RvQ0Z9V4_2Wd8aPiM,8633
-khoj/routers/api_agents.py,sha256=QcognyY9FwClgOZw21BF9K7IX16i2070kRoM7c_bKn8,17264
+khoj/routers/api_agents.py,sha256=JHtB3hneUNLhdfiagWGkF1SFx48_5iaDenncU1OElhE,17271
 khoj/routers/api_automation.py,sha256=ux990dRLbcFadV01T-1McPwvFeR0KLyrWAVpCJ3YLvk,9399
-khoj/routers/api_chat.py,sha256=u351tDt2iKOS8VVL0fCRUseQOLRsGcmSwuEvEIUCk8g,66993
+khoj/routers/api_chat.py,sha256=RLKdg4EZUHV3FrV_AEJGAWat9BkmjwdaciFPM-qp2Tk,69584
 khoj/routers/api_content.py,sha256=GFChoWm4nYdilhzB1ElbJjaJJRfLn8XtkKmLnZOUHrU,24580
 khoj/routers/api_model.py,sha256=hjIgOQqva4YVv1osQK8p-317_oSKsv1mHbAYFQICxnM,5273
 khoj/routers/api_phone.py,sha256=p9yfc4WeMHDC0hg3aQk60a2VBy8rZPdEnz9wdJ7DzkU,2208
@@ -440,7 +441,7 @@ khoj/routers/notion.py,sha256=g53xyYFmjr2JnuIrTW2vytbfkiK_UkoRTxqnnLSmD5o,2802
 khoj/routers/research.py,sha256=UyMOzLglNT26CFZ2w11_DVycP3fnpEYICewOo9L9p4U,26393
 khoj/routers/storage.py,sha256=lao0DvsF49QleZvOdjKM98RU2cGfCJDBb7WeoI7Rr3I,2172
 khoj/routers/twilio.py,sha256=MLsuCm4--ETvr3sLxbF0CL_ehlg_l2rKBSLR2Qh2Xls,1081
-khoj/routers/web_client.py,sha256=_vyfDaHZZjNoI2LwGNQFRiFmGLkNcgYJe7dOr3mrxH8,4386
+khoj/routers/web_client.py,sha256=ArYv1yM04Lb3KOrDHzruf_rClVq1PEyBkdZ_e1W49_A,4573
 khoj/search_filter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 khoj/search_filter/base_filter.py,sha256=BzoZA_wAUg_eZ5vhaaipwVTSG0ZMxWCHxHsZrzo4KS0,358
 khoj/search_filter/date_filter.py,sha256=7MCXyeDy9TGG81IesLrgV7vnTUDXWe8xj8NeeESd1Xk,10019
@@ -453,15 +454,15 @@ khoj/utils/cli.py,sha256=fI1XQYMtJzLGOKQZQ5XxFOrC8sGjK3Alnteg5U62rWI,3882
 khoj/utils/config.py,sha256=aiOkH0je8A30DAGYTHMRePrgJonFv_i07_7CdhhhcdA,1805
 khoj/utils/constants.py,sha256=4pIq5yimBR-uFwqrukwjlFvfr8Ir190x4rCkysn-sbE,4244
 khoj/utils/fs_syncer.py,sha256=5nqwAZqRk3Nwhkwd8y4IomTPZQmW32GwAqyMzal5KyY,9996
-khoj/utils/helpers.py,sha256=tLO-rn0eThQVuomSxCMtlEN0WKsSaOZw23-4SCRRwy8,42593
+khoj/utils/helpers.py,sha256=HffoHFI0aXvHqz3nBLQW5jX6pP8t27ag_nTY23-wrgQ,42906
 khoj/utils/initialization.py,sha256=_KslgIsoo-1A_ZuouHH3WDbV-TpqBSaID_0b-1xXE0U,15169
 khoj/utils/jsonl.py,sha256=0Ac_COqr8sLCXntzZtquxuCEVRM2c3yKeDRGhgOBRpQ,1192
 khoj/utils/models.py,sha256=Q5tcC9-z25sCiub048fLnvZ6_IIO1bcPNxt5payekk0,2009
 khoj/utils/rawconfig.py,sha256=lgq0FfJOkdALLkoISjmBSEnzuaTPShsTuFUbXlf6brk,5406
 khoj/utils/state.py,sha256=s_GFWOqRzpEDx0eCPStuzBTK2VEw-qgRpH0aiEdGnDo,1791
 khoj/utils/yaml.py,sha256=qy1Tkc61rDMesBw_Cyx2vOR6H-Hngcsm5kYfjwQBwkE,1543
-khoj-2.0.0b11.dev9.dist-info/METADATA,sha256=PXpFXUG03naxZXz9VGn-2T495Zzy1Kzy5M1CVeC77G4,8974
-khoj-2.0.0b11.dev9.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-khoj-2.0.0b11.dev9.dist-info/entry_points.txt,sha256=KBIcez5N_jCgq_ER4Uxf-e1lxTBMTE_BBjMwwfeZyAg,39
-khoj-2.0.0b11.dev9.dist-info/licenses/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
-khoj-2.0.0b11.dev9.dist-info/RECORD,,
+khoj-2.0.0b12.dist-info/METADATA,sha256=BEpLwGXoZo1dwVD91lE9sOZPGF1AKbkrsSnM9dUWmF4,8969
+khoj-2.0.0b12.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+khoj-2.0.0b12.dist-info/entry_points.txt,sha256=KBIcez5N_jCgq_ER4Uxf-e1lxTBMTE_BBjMwwfeZyAg,39
+khoj-2.0.0b12.dist-info/licenses/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
+khoj-2.0.0b12.dist-info/RECORD,,