khoj 1.33.3.dev35__py3-none-any.whl → 1.33.3.dev51__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (108)
  1. khoj/configure.py +1 -1
  2. khoj/database/adapters/__init__.py +1 -1
  3. khoj/database/migrations/0080_speechtotextmodeloptions_ai_model_api.py +24 -0
  4. khoj/database/models/__init__.py +2 -0
  5. khoj/interface/compiled/404/index.html +1 -1
  6. khoj/interface/compiled/_next/static/chunks/0d8bff65.4b1018183a8be7c2.js +1 -0
  7. khoj/interface/compiled/_next/static/chunks/1243.ea2826fb35adb15a.js +1 -0
  8. khoj/interface/compiled/_next/static/chunks/133.392ae90c3b2a67f2.js +1 -0
  9. khoj/interface/compiled/_next/static/chunks/1592.b069bdb7aaddd2eb.js +5 -0
  10. khoj/interface/compiled/_next/static/chunks/2069.f060659791c0c484.js +1 -0
  11. khoj/interface/compiled/_next/static/chunks/2170.45431769330fa5b7.js +134 -0
  12. khoj/interface/compiled/_next/static/chunks/2327-17d5d7dc987c0645.js +1 -0
  13. khoj/interface/compiled/_next/static/chunks/3237.c4754f02f1a388cb.js +1 -0
  14. khoj/interface/compiled/_next/static/chunks/3305.1fc7297ba154ee95.js +1 -0
  15. khoj/interface/compiled/_next/static/chunks/3332.6313db5217bb1b67.js +29 -0
  16. khoj/interface/compiled/_next/static/chunks/3451.421db0b49fdbd881.js +252 -0
  17. khoj/interface/compiled/_next/static/chunks/3460.39c2b9b6a3898384.js +159 -0
  18. khoj/interface/compiled/_next/static/chunks/4270-28ab75d62e71521c.js +1 -0
  19. khoj/interface/compiled/_next/static/chunks/4299.2aaa569416cfc208.js +2 -0
  20. khoj/interface/compiled/_next/static/chunks/4327.238d15c09a039b8b.js +60 -0
  21. khoj/interface/compiled/_next/static/chunks/4415.e0c0da5eedae4dd7.js +47 -0
  22. khoj/interface/compiled/_next/static/chunks/4610.196691887afb7fea.js +1 -0
  23. khoj/interface/compiled/_next/static/chunks/4650.41f041da0ad365ad.js +215 -0
  24. khoj/interface/compiled/_next/static/chunks/4872.5422e8324272ab40.js +1 -0
  25. khoj/interface/compiled/_next/static/chunks/4986-24b37356a9642ebb.js +1 -0
  26. khoj/interface/compiled/_next/static/chunks/5061.79a660faed0e16a9.js +1 -0
  27. khoj/interface/compiled/_next/static/chunks/5279.85d537880b8041ae.js +24 -0
  28. khoj/interface/compiled/_next/static/chunks/5402.b9460f1b16b57e92.js +63 -0
  29. khoj/interface/compiled/_next/static/chunks/5555.f7cf9795d675de51.js +88 -0
  30. khoj/interface/compiled/_next/static/chunks/58.f4bd8a3c9d88f5fe.js +46 -0
  31. khoj/interface/compiled/_next/static/chunks/5978.ddc72cd73130aadc.js +1 -0
  32. khoj/interface/compiled/_next/static/chunks/6002-7e3115f142731625.js +78 -0
  33. khoj/interface/compiled/_next/static/chunks/6434.e6cd986d690f2cef.js +1 -0
  34. khoj/interface/compiled/_next/static/chunks/7140.4be689873f6a2d1f.js +116 -0
  35. khoj/interface/compiled/_next/static/chunks/7647-5a935022fa3da0a6.js +1 -0
  36. khoj/interface/compiled/_next/static/chunks/7836.86884157d53d23a1.js +117 -0
  37. khoj/interface/compiled/_next/static/chunks/7890.f681eb1d1f83bea2.js +1 -0
  38. khoj/interface/compiled/_next/static/chunks/7953.f6fc335a23efb959.js +1 -0
  39. khoj/interface/compiled/_next/static/chunks/8254.3145a4fbdcfdca46.js +1 -0
  40. khoj/interface/compiled/_next/static/chunks/8400.c95e4e79bcd79a56.js +78 -0
  41. khoj/interface/compiled/_next/static/chunks/8673.be44c4d9e4f1d084.js +56 -0
  42. khoj/interface/compiled/_next/static/chunks/8698.a088118fef40d302.js +1 -0
  43. khoj/interface/compiled/_next/static/chunks/9022.33974a513a281fd2.js +151 -0
  44. khoj/interface/compiled/_next/static/chunks/90542734.fecd4dc1a89ed166.js +1 -0
  45. khoj/interface/compiled/_next/static/chunks/9433.b1b5f5d050c43e3c.js +36 -0
  46. khoj/interface/compiled/_next/static/chunks/9822.34f293973c4e9b04.js +1 -0
  47. khoj/interface/compiled/_next/static/chunks/app/agents/layout-e00fb81dca656a10.js +1 -0
  48. khoj/interface/compiled/_next/static/chunks/app/agents/{page-f6b7a9ce4a8b2bf7.js → page-05e8780174a478e6.js} +1 -1
  49. khoj/interface/compiled/_next/static/chunks/app/automations/{page-f3c16eb399159215.js → page-a951c331320b870a.js} +1 -1
  50. khoj/interface/compiled/_next/static/chunks/app/chat/layout-33934fc2d6ae6838.js +1 -0
  51. khoj/interface/compiled/_next/static/chunks/app/chat/{page-2917c081fd43ac17.js → page-f68d20917788cbbc.js} +1 -1
  52. khoj/interface/compiled/_next/static/chunks/app/{page-07395c7f0d25d7d4.js → page-0c6346ba12c1b885.js} +1 -1
  53. khoj/interface/compiled/_next/static/chunks/app/search/layout-f5881c7ae3ba0795.js +1 -0
  54. khoj/interface/compiled/_next/static/chunks/app/search/{page-2a09ae0883b04242.js → page-62be289d4136a73a.js} +1 -1
  55. khoj/interface/compiled/_next/static/chunks/app/settings/{page-827fe7bea045fefe.js → page-c244a441045a593e.js} +1 -1
  56. khoj/interface/compiled/_next/static/chunks/app/share/chat/layout-abb6c5f4239ad7be.js +1 -0
  57. khoj/interface/compiled/_next/static/chunks/app/share/chat/{page-ce8299b35835ed71.js → page-075278404a3f37b8.js} +1 -1
  58. khoj/interface/compiled/_next/static/chunks/ebbfb5c1-a73c51b6a41a5ff6.js +10 -0
  59. khoj/interface/compiled/_next/static/chunks/webpack-63793f58b6bd33ee.js +1 -0
  60. khoj/interface/compiled/_next/static/css/37a73b87f02df402.css +1 -0
  61. khoj/interface/compiled/_next/static/css/70b2e9482d96d4c2.css +25 -0
  62. khoj/interface/compiled/_next/static/css/9c164d9727dd8092.css +1 -0
  63. khoj/interface/compiled/_next/static/css/a85b8d56fc9ae551.css +1 -0
  64. khoj/interface/compiled/_next/static/css/df4b47a2d0d85eae.css +1 -0
  65. khoj/interface/compiled/agents/index.html +1 -1
  66. khoj/interface/compiled/agents/index.txt +2 -2
  67. khoj/interface/compiled/automations/index.html +1 -1
  68. khoj/interface/compiled/automations/index.txt +2 -2
  69. khoj/interface/compiled/chat/index.html +1 -1
  70. khoj/interface/compiled/chat/index.txt +2 -2
  71. khoj/interface/compiled/index.html +1 -1
  72. khoj/interface/compiled/index.txt +2 -2
  73. khoj/interface/compiled/search/index.html +1 -1
  74. khoj/interface/compiled/search/index.txt +2 -2
  75. khoj/interface/compiled/settings/index.html +1 -1
  76. khoj/interface/compiled/settings/index.txt +2 -2
  77. khoj/interface/compiled/share/chat/index.html +1 -1
  78. khoj/interface/compiled/share/chat/index.txt +2 -2
  79. khoj/processor/conversation/prompts.py +118 -1
  80. khoj/processor/conversation/utils.py +9 -4
  81. khoj/processor/image/generate.py +9 -7
  82. khoj/routers/api.py +13 -3
  83. khoj/routers/api_chat.py +9 -9
  84. khoj/routers/helpers.py +135 -7
  85. khoj/utils/initialization.py +6 -6
  86. {khoj-1.33.3.dev35.dist-info → khoj-1.33.3.dev51.dist-info}/METADATA +1 -1
  87. {khoj-1.33.3.dev35.dist-info → khoj-1.33.3.dev51.dist-info}/RECORD +92 -55
  88. khoj/interface/compiled/_next/static/chunks/1010-8f39bb4648b5ba10.js +0 -1
  89. khoj/interface/compiled/_next/static/chunks/2581-455000f8aeb08fc3.js +0 -1
  90. khoj/interface/compiled/_next/static/chunks/3789-a09e37a819171a9d.js +0 -1
  91. khoj/interface/compiled/_next/static/chunks/4124-6c28322ce218d2d5.js +0 -1
  92. khoj/interface/compiled/_next/static/chunks/6293-469dd16402ea8a6f.js +0 -3
  93. khoj/interface/compiled/_next/static/chunks/app/agents/layout-e3d72f0edda6aa0c.js +0 -1
  94. khoj/interface/compiled/_next/static/chunks/app/chat/layout-d5ae861e1ade9d08.js +0 -1
  95. khoj/interface/compiled/_next/static/chunks/app/search/layout-4505b79deb734a30.js +0 -1
  96. khoj/interface/compiled/_next/static/chunks/app/share/chat/layout-95998f0bdc22bb13.js +0 -1
  97. khoj/interface/compiled/_next/static/chunks/webpack-fddb78e7c09fff16.js +0 -1
  98. khoj/interface/compiled/_next/static/css/0f0e5dbeda138161.css +0 -1
  99. khoj/interface/compiled/_next/static/css/3b65ca1e74e399bd.css +0 -1
  100. khoj/interface/compiled/_next/static/css/41d6fe14d2c6b14d.css +0 -1
  101. khoj/interface/compiled/_next/static/css/8d02837c730f8d13.css +0 -25
  102. khoj/interface/compiled/_next/static/css/ba1602594aa7828f.css +0 -1
  103. khoj/interface/compiled/_next/static/css/dac88c17aaee5fcf.css +0 -1
  104. /khoj/interface/compiled/_next/static/{oqqUYx3DaH2vt9Te5vFEg → s3dazBmeqIlcdf6ERGzFs}/_buildManifest.js +0 -0
  105. /khoj/interface/compiled/_next/static/{oqqUYx3DaH2vt9Te5vFEg → s3dazBmeqIlcdf6ERGzFs}/_ssgManifest.js +0 -0
  106. {khoj-1.33.3.dev35.dist-info → khoj-1.33.3.dev51.dist-info}/WHEEL +0 -0
  107. {khoj-1.33.3.dev35.dist-info → khoj-1.33.3.dev51.dist-info}/entry_points.txt +0 -0
  108. {khoj-1.33.3.dev35.dist-info → khoj-1.33.3.dev51.dist-info}/licenses/LICENSE +0 -0
khoj/processor/conversation/prompts.py CHANGED
@@ -194,7 +194,7 @@ Limit your response to 3 sentences max. Be succinct, clear, and informative.
  ## Diagram Generation
  ## --

- improve_diagram_description_prompt = PromptTemplate.from_template(
+ improve_excalidraw_diagram_description_prompt = PromptTemplate.from_template(
  """
  You are an architect working with a novice digital artist using a diagramming software.
  {personality_context}
@@ -338,6 +338,123 @@ Diagram Description: {query}
  """.strip()
  )

+ improve_mermaid_js_diagram_description_prompt = PromptTemplate.from_template(
+ """
+ You are a senior architect working with an illustrator using a diagramming software.
+ {personality_context}
+
+ Given a particular request, you need to translate it to a detailed description that the illustrator can use to create a diagram.
+
+ You can use the following diagram types in your instructions:
+ - Flowchart
+ - Sequence Diagram
+ - Gantt Chart (only for time-based queries after 0 AD)
+ - State Diagram
+ - Pie Chart
+
+ Use these primitives to describe what sort of diagram the drawer should create in natural language, not special syntax. We must recreate the diagram every time, so include all relevant prior information in your description.
+
+ - Describe the layout, components, and connections.
+ - Use simple, concise language.
+
+ Today's Date: {current_date}
+ User's Location: {location}
+
+ User's Notes:
+ {references}
+
+ Online References:
+ {online_results}
+
+ Conversation Log:
+ {chat_history}
+
+ Query: {query}
+
+ Enhanced Description:
+ """.strip()
+ )
+
+ mermaid_js_diagram_generation_prompt = PromptTemplate.from_template(
+ """
+ You are a designer with the ability to describe diagrams to compose in professional, fine detail. You dive into the details and make labels, connections, and shapes to represent complex systems.
+ {personality_context}
+
+ ----Goals----
+ You need to create a declarative description of the diagram and relevant components, using the Mermaid.js syntax.
+
+ You can choose from the following diagram types:
+ - Flowchart
+ - Sequence Diagram
+ - State Diagram
+ - Gantt Chart
+ - Pie Chart
+
+ ----Examples----
+ ---
+ title: Node
+ ---
+
+ flowchart LR
+ id["This is the start"] --> id2["This is the end"]
+
+ sequenceDiagram
+ Alice->>John: Hello John, how are you?
+ John-->>Alice: Great!
+ Alice-)John: See you later!
+
+ stateDiagram-v2
+ [*] --> Still
+ Still --> [*]
+
+ Still --> Moving
+ Moving --> Still
+ Moving --> Crash
+ Crash --> [*]
+
+ gantt
+ title A Gantt Diagram
+ dateFormat YYYY-MM-DD
+ section Section
+ A task :a1, 2014-01-01, 30d
+ Another task :after a1, 20d
+ section Another
+ Task in Another :2014-01-12, 12d
+ another task :24d
+
+ pie title Pets adopted by volunteers
+ "Dogs" : 10
+ "Cats" : 30
+ "Rats" : 60
+
+ flowchart TB
+ subgraph "Group 1"
+ a1["Start Node"] --> a2["End Node"]
+ end
+ subgraph "Group 2"
+ b1["Process 1"] --> b2["Process 2"]
+ end
+ subgraph "Group 3"
+ c1["Input"] --> c2["Output"]
+ end
+ a["Group 1"] --> b["Group 2"]
+ c["Group 3"] --> d["Group 2"]
+
+ ----Process----
+ Create your diagram with great composition and intuitiveness from the provided context and user prompt below.
+ - You may use subgraphs to group elements together. Each subgraph must have a title.
+ - **You must wrap ALL entity and node labels in double quotes**, example: "My Node Label"
+ - **All nodes MUST use the id["label"] format**. For example: node1["My Node Label"]
+ - Custom styles are not permitted. Default styles only.
+ - JUST provide the diagram, no additional text or context. Say nothing else in your response except the diagram.
+ - Keep diagrams simple - maximum 15 nodes
+ - Every node inside a subgraph MUST use square bracket notation: id["label"]
+
+ output: {query}
+
+ """.strip()
+ )
+
  failed_diagram_generation = PromptTemplate.from_template(
  """
  You attempted to programmatically generate a diagram but failed due to a system issue. You are normally able to generate diagrams, but you encountered a system issue this time.
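
As a quick orientation, the new mermaid_js_diagram_generation_prompt above only exposes {personality_context} and {query} placeholders, so it can be rendered standalone. A minimal sketch, assuming khoj is installed and imported as below (the sample query is illustrative):

from khoj.processor.conversation import prompts

rendered = prompts.mermaid_js_diagram_generation_prompt.format(
    personality_context="",  # empty when no agent personality is configured
    query="Show how a user query flows through khoj to a rendered Mermaid.js diagram",
)
print(rendered[:80])  # starts with the designer role instruction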
khoj/processor/conversation/utils.py CHANGED
@@ -266,7 +266,7 @@ def save_to_conversation_log(
  raw_query_files: List[FileAttachment] = [],
  generated_images: List[str] = [],
  raw_generated_files: List[FileAttachment] = [],
- generated_excalidraw_diagram: str = None,
+ generated_mermaidjs_diagram: str = None,
  train_of_thought: List[Any] = [],
  tracer: Dict[str, Any] = {},
  ):
@@ -290,8 +290,8 @@ def save_to_conversation_log(
  "queryFiles": [file.model_dump(mode="json") for file in raw_generated_files],
  }

- if generated_excalidraw_diagram:
- khoj_message_metadata["excalidrawDiagram"] = generated_excalidraw_diagram
+ if generated_mermaidjs_diagram:
+ khoj_message_metadata["mermaidjsDiagram"] = generated_mermaidjs_diagram

  updated_conversation = message_to_log(
  user_message=q,
@@ -441,7 +441,7 @@ def generate_chatml_messages_with_context(
  "query": chat.get("intent", {}).get("inferred-queries", [user_message])[0],
  }

- if not is_none_or_empty(chat.get("excalidrawDiagram")) and role == "assistant":
+ if not is_none_or_empty(chat.get("mermaidjsDiagram")) and role == "assistant":
  generated_assets["diagram"] = {
  "query": chat.get("intent", {}).get("inferred-queries", [user_message])[0],
  }
@@ -593,6 +593,11 @@ def clean_json(response: str):
  return response.strip().replace("\n", "").removeprefix("```json").removesuffix("```")


+ def clean_mermaidjs(response: str):
+ """Remove any markdown mermaidjs codeblock and newline formatting if present. Useful for non schema enforceable models"""
+ return response.strip().removeprefix("```mermaid").removesuffix("```")
+
+
  def clean_code_python(code: str):
  """Remove any markdown codeblock and newline formatting if present. Useful for non schema enforceable models"""
  return code.strip().removeprefix("```python").removesuffix("```")
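
The new clean_mermaidjs helper mirrors clean_json and clean_code_python: it only strips an optional ```mermaid code fence around the model output. A small standalone check of that behavior, copying the function body from the hunk above:

def clean_mermaidjs(response: str):
    """Remove any markdown mermaidjs codeblock and newline formatting if present."""
    return response.strip().removeprefix("```mermaid").removesuffix("```")

raw = '```mermaid\nflowchart LR\n    a["Query"] --> b["Diagram"]\n```'
print(clean_mermaidjs(raw).strip())
# flowchart LR
#     a["Query"] --> b["Diagram"]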
khoj/processor/image/generate.py CHANGED
@@ -111,7 +111,7 @@ async def text_to_image(
  image_url = upload_image(webp_image_bytes, user.uuid)

  if not image_url:
- image = base64.b64encode(webp_image_bytes).decode("utf-8")
+ image = f"data:image/webp;base64,{base64.b64encode(webp_image_bytes).decode('utf-8')}"

  yield image_url or image, status_code, image_prompt

@@ -119,25 +119,27 @@ async def text_to_image(
  def generate_image_with_openai(
  improved_image_prompt: str, text_to_image_config: TextToImageModelConfig, text2image_model: str
  ):
- "Generate image using OpenAI API"
+ "Generate image using OpenAI (compatible) API"

- # Get the API key from the user's configuration
+ # Get the API config from the user's configuration
+ api_key = None
  if text_to_image_config.api_key:
  api_key = text_to_image_config.api_key
+ openai_client = openai.OpenAI(api_key=api_key)
  elif text_to_image_config.ai_model_api:
  api_key = text_to_image_config.ai_model_api.api_key
+ api_base_url = text_to_image_config.ai_model_api.api_base_url
+ openai_client = openai.OpenAI(api_key=api_key, base_url=api_base_url)
  elif state.openai_client:
- api_key = state.openai_client.api_key
- auth_header = {"Authorization": f"Bearer {api_key}"} if api_key else {}
+ openai_client = state.openai_client

  # Generate image using OpenAI API
  OPENAI_IMAGE_GEN_STYLE = "vivid"
- response = state.openai_client.images.generate(
+ response = openai_client.images.generate(
  prompt=improved_image_prompt,
  model=text2image_model,
  style=OPENAI_IMAGE_GEN_STYLE,
  response_format="b64_json",
- extra_headers=auth_header,
  )

  # Extract the base64 image from the response
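
The net effect of this hunk is that the image client is now built per text-to-image configuration instead of always reusing the server-wide client. A condensed sketch of that selection order; the helper name pick_openai_client is illustrative and not part of khoj:

import openai

def pick_openai_client(text_to_image_config, fallback_client=None):
    # Prefer a bare API key, then a full AI model API config (key + base URL),
    # then fall back to the server-wide OpenAI client.
    if text_to_image_config.api_key:
        return openai.OpenAI(api_key=text_to_image_config.api_key)
    if text_to_image_config.ai_model_api:
        return openai.OpenAI(
            api_key=text_to_image_config.ai_model_api.api_key,
            base_url=text_to_image_config.ai_model_api.api_base_url,
        )
    return fallback_client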
khoj/routers/api.py CHANGED
@@ -9,6 +9,7 @@ import uuid
  from typing import Any, Callable, List, Optional, Set, Union

  import cron_descriptor
+ import openai
  import pytz
  from apscheduler.job import Job
  from apscheduler.triggers.cron import CronTrigger
@@ -264,12 +265,21 @@ async def transcribe(
  if not speech_to_text_config:
  # If the user has not configured a speech to text model, return an unsupported on server error
  status_code = 501
- elif state.openai_client and speech_to_text_config.model_type == SpeechToTextModelOptions.ModelType.OPENAI:
- speech2text_model = speech_to_text_config.model_name
- user_message = await transcribe_audio(audio_file, speech2text_model, client=state.openai_client)
  elif speech_to_text_config.model_type == SpeechToTextModelOptions.ModelType.OFFLINE:
  speech2text_model = speech_to_text_config.model_name
  user_message = await transcribe_audio_offline(audio_filename, speech2text_model)
+ elif speech_to_text_config.model_type == SpeechToTextModelOptions.ModelType.OPENAI:
+ speech2text_model = speech_to_text_config.model_name
+ if speech_to_text_config.ai_model_api:
+ api_key = speech_to_text_config.ai_model_api.api_key
+ api_base_url = speech_to_text_config.ai_model_api.api_base_url
+ openai_client = openai.OpenAI(api_key=api_key, base_url=api_base_url)
+ elif state.openai_client:
+ openai_client = state.openai_client
+ if openai_client:
+ user_message = await transcribe_audio(audio_file, speech2text_model, client=openai_client)
+ else:
+ status_code = 501
  finally:
  # Close and Delete the temporary audio file
  audio_file.close()
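
With the added branch, OpenAI-style speech-to-text can also target any OpenAI-compatible endpoint configured via ai_model_api. For reference, a hedged sketch of the kind of transcription call such a client supports in the openai v1 SDK (endpoint, key, and file name are placeholders; khoj's transcribe_audio wrapper is not shown here):

import openai

client = openai.OpenAI(api_key="placeholder", base_url="https://my-openai-compatible-host/v1")  # hypothetical endpoint
with open("recording.webm", "rb") as audio_file:
    transcript = client.audio.transcriptions.create(model="whisper-1", file=audio_file)
print(transcript.text)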
khoj/routers/api_chat.py CHANGED
@@ -51,7 +51,7 @@ from khoj.routers.helpers import (
  construct_automation_created_message,
  create_automation,
  gather_raw_query_files,
- generate_excalidraw_diagram,
+ generate_mermaidjs_diagram,
  generate_summary_from_files,
  get_conversation_command,
  is_query_empty,
@@ -781,7 +781,7 @@ async def chat(

  generated_images: List[str] = []
  generated_files: List[FileAttachment] = []
- generated_excalidraw_diagram: str = None
+ generated_mermaidjs_diagram: str = None
  program_execution_context: List[str] = []

  if conversation_commands == [ConversationCommand.Default]:
@@ -1161,7 +1161,7 @@ async def chat(
  inferred_queries = []
  diagram_description = ""

- async for result in generate_excalidraw_diagram(
+ async for result in generate_mermaidjs_diagram(
  q=defiltered_query,
  conversation_history=meta_log,
  location_data=location,
@@ -1177,12 +1177,12 @@
  if isinstance(result, dict) and ChatEvent.STATUS in result:
  yield result[ChatEvent.STATUS]
  else:
- better_diagram_description_prompt, excalidraw_diagram_description = result
- if better_diagram_description_prompt and excalidraw_diagram_description:
+ better_diagram_description_prompt, mermaidjs_diagram_description = result
+ if better_diagram_description_prompt and mermaidjs_diagram_description:
  inferred_queries.append(better_diagram_description_prompt)
- diagram_description = excalidraw_diagram_description
+ diagram_description = mermaidjs_diagram_description

- generated_excalidraw_diagram = diagram_description
+ generated_mermaidjs_diagram = diagram_description

  generated_asset_results["diagrams"] = {
  "query": better_diagram_description_prompt,
@@ -1191,7 +1191,7 @@
  async for result in send_event(
  ChatEvent.GENERATED_ASSETS,
  {
- "excalidrawDiagram": excalidraw_diagram_description,
+ "mermaidjsDiagram": mermaidjs_diagram_description,
  },
  ):
  yield result
@@ -1231,7 +1231,7 @@ async def chat(
  raw_query_files,
  generated_images,
  generated_files,
- generated_excalidraw_diagram,
+ generated_mermaidjs_diagram,
  program_execution_context,
  generated_asset_results,
  tracer,
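
The chat route consumes generate_mermaidjs_diagram exactly as it consumed the Excalidraw generator: status dicts are streamed through, and the final yield is a (prompt, diagram) tuple. A minimal consumption sketch following the pattern in the hunks above:

from khoj.processor.conversation.utils import ChatEvent

async def collect_diagram(generator):
    async for result in generator:
        if isinstance(result, dict) and ChatEvent.STATUS in result:
            print("status:", result[ChatEvent.STATUS])  # progress updates
        else:
            better_prompt, mermaidjs_diagram = result   # final tuple
            return better_prompt, mermaidjs_diagram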
khoj/routers/helpers.py CHANGED
@@ -97,6 +97,7 @@ from khoj.processor.conversation.utils import (
  ChatEvent,
  ThreadedGenerator,
  clean_json,
+ clean_mermaidjs,
  construct_chat_history,
  generate_chatml_messages_with_context,
  save_to_conversation_log,
@@ -823,7 +824,7 @@ async def generate_better_diagram_description(
  elif online_results[result].get("webpages"):
  simplified_online_results[result] = online_results[result]["webpages"]

- improve_diagram_description_prompt = prompts.improve_diagram_description_prompt.format(
+ improve_diagram_description_prompt = prompts.improve_excalidraw_diagram_description_prompt.format(
  query=q,
  chat_history=chat_history,
  location=location,
@@ -887,6 +888,133 @@ async def generate_excalidraw_diagram_from_description(
  return response


+ async def generate_mermaidjs_diagram(
+ q: str,
+ conversation_history: Dict[str, Any],
+ location_data: LocationData,
+ note_references: List[Dict[str, Any]],
+ online_results: Optional[dict] = None,
+ query_images: List[str] = None,
+ user: KhojUser = None,
+ agent: Agent = None,
+ send_status_func: Optional[Callable] = None,
+ query_files: str = None,
+ tracer: dict = {},
+ ):
+ if send_status_func:
+ async for event in send_status_func("**Enhancing the Diagramming Prompt**"):
+ yield {ChatEvent.STATUS: event}
+
+ better_diagram_description_prompt = await generate_better_mermaidjs_diagram_description(
+ q=q,
+ conversation_history=conversation_history,
+ location_data=location_data,
+ note_references=note_references,
+ online_results=online_results,
+ query_images=query_images,
+ user=user,
+ agent=agent,
+ query_files=query_files,
+ tracer=tracer,
+ )
+
+ if send_status_func:
+ async for event in send_status_func(f"**Diagram to Create:**:\n{better_diagram_description_prompt}"):
+ yield {ChatEvent.STATUS: event}
+
+ mermaidjs_diagram_description = await generate_mermaidjs_diagram_from_description(
+ q=better_diagram_description_prompt,
+ user=user,
+ agent=agent,
+ tracer=tracer,
+ )
+
+ inferred_queries = f"Instruction: {better_diagram_description_prompt}"
+
+ yield inferred_queries, mermaidjs_diagram_description
+
+
+ async def generate_better_mermaidjs_diagram_description(
+ q: str,
+ conversation_history: Dict[str, Any],
+ location_data: LocationData,
+ note_references: List[Dict[str, Any]],
+ online_results: Optional[dict] = None,
+ query_images: List[str] = None,
+ user: KhojUser = None,
+ agent: Agent = None,
+ query_files: str = None,
+ tracer: dict = {},
+ ) -> str:
+ """
+ Generate a diagram description from the given query and context
+ """
+
+ today_date = datetime.now(tz=timezone.utc).strftime("%Y-%m-%d, %A")
+ personality_context = (
+ prompts.personality_context.format(personality=agent.personality) if agent and agent.personality else ""
+ )
+
+ location = f"{location_data}" if location_data else "Unknown"
+
+ user_references = "\n\n".join([f"# {item['compiled']}" for item in note_references])
+
+ chat_history = construct_chat_history(conversation_history)
+
+ simplified_online_results = {}
+
+ if online_results:
+ for result in online_results:
+ if online_results[result].get("answerBox"):
+ simplified_online_results[result] = online_results[result]["answerBox"]
+ elif online_results[result].get("webpages"):
+ simplified_online_results[result] = online_results[result]["webpages"]
+
+ improve_diagram_description_prompt = prompts.improve_mermaid_js_diagram_description_prompt.format(
+ query=q,
+ chat_history=chat_history,
+ location=location,
+ current_date=today_date,
+ references=user_references,
+ online_results=simplified_online_results,
+ personality_context=personality_context,
+ )
+
+ with timer("Chat actor: Generate better Mermaid.js diagram description", logger):
+ response = await send_message_to_model_wrapper(
+ improve_diagram_description_prompt,
+ query_images=query_images,
+ user=user,
+ query_files=query_files,
+ tracer=tracer,
+ )
+ response = response.strip()
+ if response.startswith(('"', "'")) and response.endswith(('"', "'")):
+ response = response[1:-1]
+
+ return response
+
+
+ async def generate_mermaidjs_diagram_from_description(
+ q: str,
+ user: KhojUser = None,
+ agent: Agent = None,
+ tracer: dict = {},
+ ) -> str:
+ personality_context = (
+ prompts.personality_context.format(personality=agent.personality) if agent and agent.personality else ""
+ )
+
+ mermaidjs_diagram_generation = prompts.mermaid_js_diagram_generation_prompt.format(
+ personality_context=personality_context,
+ query=q,
+ )
+
+ with timer("Chat actor: Generate Mermaid.js diagram", logger):
+ raw_response = await send_message_to_model_wrapper(query=mermaidjs_diagram_generation, user=user, tracer=tracer)
+ return clean_mermaidjs(raw_response.strip())
+
+
  async def generate_better_image_prompt(
  q: str,
  conversation_history: str,
@@ -1224,7 +1352,7 @@ def generate_chat_response(
  raw_query_files: List[FileAttachment] = None,
  generated_images: List[str] = None,
  raw_generated_files: List[FileAttachment] = [],
- generated_excalidraw_diagram: str = None,
+ generated_mermaidjs_diagram: str = None,
  program_execution_context: List[str] = [],
  generated_asset_results: Dict[str, Dict] = {},
  tracer: dict = {},
@@ -1252,7 +1380,7 @@
  raw_query_files=raw_query_files,
  generated_images=generated_images,
  raw_generated_files=raw_generated_files,
- generated_excalidraw_diagram=generated_excalidraw_diagram,
+ generated_mermaidjs_diagram=generated_mermaidjs_diagram,
  tracer=tracer,
  )

@@ -1967,7 +2095,7 @@ class MessageProcessor:
  self.raw_response = ""
  self.generated_images = []
  self.generated_files = []
- self.generated_excalidraw_diagram = []
+ self.generated_mermaidjs_diagram = []

  def convert_message_chunk_to_json(self, raw_chunk: str) -> Dict[str, Any]:
  if raw_chunk.startswith("{") and raw_chunk.endswith("}"):
@@ -2014,8 +2142,8 @@
  self.generated_images = chunk_data[key]
  elif key == "files":
  self.generated_files = chunk_data[key]
- elif key == "excalidrawDiagram":
- self.generated_excalidraw_diagram = chunk_data[key]
+ elif key == "mermaidjsDiagram":
+ self.generated_mermaidjs_diagram = chunk_data[key]

  def handle_json_response(self, json_data: Dict[str, str]) -> str | Dict[str, str]:
  if "image" in json_data or "details" in json_data:
@@ -2052,7 +2180,7 @@ async def read_chat_stream(response_iterator: AsyncGenerator[str, None]) -> Dict
  "usage": processor.usage,
  "images": processor.generated_images,
  "files": processor.generated_files,
- "excalidrawDiagram": processor.generated_excalidraw_diagram,
+ "mermaidjsDiagram": processor.generated_mermaidjs_diagram,
  }

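
On the consuming side, a fully read chat stream now surfaces the diagram under a "mermaidjsDiagram" key instead of "excalidrawDiagram". A small sketch of reading it out, assuming a response_iterator obtained elsewhere:

from khoj.routers.helpers import read_chat_stream

async def print_diagram(response_iterator):
    result = await read_chat_stream(response_iterator)
    diagram = result.get("mermaidjsDiagram")
    if diagram:
        print(diagram)  # raw Mermaid.js source, ready to render client-side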
khoj/utils/initialization.py CHANGED
@@ -43,14 +43,14 @@ def initialization(interactive: bool = True):
  "🗣️ Configure chat models available to your server. You can always update these at /server/admin using your admin account"
  )

- openai_api_base = os.getenv("OPENAI_API_BASE")
- provider = "Ollama" if openai_api_base and openai_api_base.endswith(":11434/v1/") else "OpenAI"
- openai_api_key = os.getenv("OPENAI_API_KEY", "placeholder" if openai_api_base else None)
+ openai_base_url = os.getenv("OPENAI_BASE_URL")
+ provider = "Ollama" if openai_base_url and openai_base_url.endswith(":11434/v1/") else "OpenAI"
+ openai_api_key = os.getenv("OPENAI_API_KEY", "placeholder" if openai_base_url else None)
  default_chat_models = default_openai_chat_models
- if openai_api_base:
+ if openai_base_url:
  # Get available chat models from OpenAI compatible API
  try:
- openai_client = openai.OpenAI(api_key=openai_api_key, base_url=openai_api_base)
+ openai_client = openai.OpenAI(api_key=openai_api_key, base_url=openai_base_url)
  default_chat_models = [model.id for model in openai_client.models.list()]
  # Put the available default OpenAI models at the top
  valid_default_models = [model for model in default_openai_chat_models if model in default_chat_models]
@@ -66,7 +66,7 @@ def initialization(interactive: bool = True):
  ChatModel.ModelType.OPENAI,
  default_chat_models,
  default_api_key=openai_api_key,
- api_base_url=openai_api_base,
+ api_base_url=openai_base_url,
  vision_enabled=True,
  is_offline=False,
  interactive=interactive,
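
The initialization change swaps the legacy OPENAI_API_BASE variable for OPENAI_BASE_URL, the name the openai SDK itself reads. A quick sketch of the environment the updated code now expects, using a local Ollama endpoint as an example (values mirror the hunk above):

import os

os.environ["OPENAI_BASE_URL"] = "http://localhost:11434/v1/"  # e.g. a local Ollama server
os.environ.setdefault("OPENAI_API_KEY", "placeholder")        # key is optional for Ollama-style endpoints

openai_base_url = os.getenv("OPENAI_BASE_URL")
provider = "Ollama" if openai_base_url and openai_base_url.endswith(":11434/v1/") else "OpenAI"
print(provider)  # -> Ollama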
{khoj-1.33.3.dev35.dist-info → khoj-1.33.3.dev51.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: khoj
- Version: 1.33.3.dev35
+ Version: 1.33.3.dev51
  Summary: Your Second Brain
  Project-URL: Homepage, https://khoj.dev
  Project-URL: Documentation, https://docs.khoj.dev