khoj 2.0.0b12.dev5__py3-none-any.whl → 2.0.0b13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (187)
  1. khoj/app/README.md +1 -1
  2. khoj/app/urls.py +1 -0
  3. khoj/configure.py +21 -54
  4. khoj/database/adapters/__init__.py +6 -15
  5. khoj/database/management/commands/delete_orphaned_fileobjects.py +0 -1
  6. khoj/database/migrations/0064_remove_conversation_temp_id_alter_conversation_id.py +1 -1
  7. khoj/database/migrations/0075_migrate_generated_assets_and_validate.py +1 -1
  8. khoj/database/migrations/0092_alter_chatmodel_model_type_alter_chatmodel_name_and_more.py +36 -0
  9. khoj/database/migrations/0093_remove_localorgconfig_user_and_more.py +36 -0
  10. khoj/database/models/__init__.py +10 -40
  11. khoj/database/tests.py +0 -2
  12. khoj/interface/compiled/404/index.html +2 -2
  13. khoj/interface/compiled/_next/static/chunks/{9245.a04e92d034540234.js → 1225.ecac11e7421504c4.js} +3 -3
  14. khoj/interface/compiled/_next/static/chunks/1320.ae930ad00affe685.js +5 -0
  15. khoj/interface/compiled/_next/static/chunks/{1327-1a9107b9a2a04a98.js → 1327-e254819a9172cfa7.js} +1 -1
  16. khoj/interface/compiled/_next/static/chunks/1626.15a8acc0d6639ec6.js +1 -0
  17. khoj/interface/compiled/_next/static/chunks/{3489.c523fe96a2eee74f.js → 1940.d082758bd04e08ae.js} +1 -1
  18. khoj/interface/compiled/_next/static/chunks/{2327-ea623ca2d22f78e9.js → 2327-438aaec1657c5ada.js} +1 -1
  19. khoj/interface/compiled/_next/static/chunks/2475.57a0d0fd93d07af0.js +93 -0
  20. khoj/interface/compiled/_next/static/chunks/2481.5ce6524ba0a73f90.js +55 -0
  21. khoj/interface/compiled/_next/static/chunks/297.4c4c823ff6e3255b.js +174 -0
  22. khoj/interface/compiled/_next/static/chunks/{5639-09e2009a2adedf8b.js → 3260-82d2521fab032ff1.js} +68 -23
  23. khoj/interface/compiled/_next/static/chunks/3353.1c6d553216a1acae.js +1 -0
  24. khoj/interface/compiled/_next/static/chunks/3855.f7b8131f78af046e.js +1 -0
  25. khoj/interface/compiled/_next/static/chunks/3973.dc54a39586ab48be.js +1 -0
  26. khoj/interface/compiled/_next/static/chunks/4241.c1cd170f7f37ac59.js +24 -0
  27. khoj/interface/compiled/_next/static/chunks/{4327.8d2a1b8f1ea78208.js → 4327.f3704dc398c67113.js} +19 -19
  28. khoj/interface/compiled/_next/static/chunks/4505.f09454a346269c3f.js +117 -0
  29. khoj/interface/compiled/_next/static/chunks/4801.96a152d49742b644.js +1 -0
  30. khoj/interface/compiled/_next/static/chunks/5427-a95ec748e52abb75.js +1 -0
  31. khoj/interface/compiled/_next/static/chunks/549.2bd27f59a91a9668.js +148 -0
  32. khoj/interface/compiled/_next/static/chunks/5765.71b1e1207b76b03f.js +1 -0
  33. khoj/interface/compiled/_next/static/chunks/584.d7ce3505f169b706.js +1 -0
  34. khoj/interface/compiled/_next/static/chunks/6240.34f7c1fa692edd61.js +24 -0
  35. khoj/interface/compiled/_next/static/chunks/6d3fe5a5-f9f3c16e0bc0cdf9.js +10 -0
  36. khoj/interface/compiled/_next/static/chunks/{7127-0f4a2a77d97fb5fa.js → 7127-97b83757db125ba6.js} +1 -1
  37. khoj/interface/compiled/_next/static/chunks/7200-93ab0072359b8028.js +1 -0
  38. khoj/interface/compiled/_next/static/chunks/{2612.bcf5a623b3da209e.js → 7553.f5ad54b1f6e92c49.js} +2 -2
  39. khoj/interface/compiled/_next/static/chunks/7626-1b630f1654172341.js +1 -0
  40. khoj/interface/compiled/_next/static/chunks/764.dadd316e8e16d191.js +63 -0
  41. khoj/interface/compiled/_next/static/chunks/78.08169ab541abab4f.js +43 -0
  42. khoj/interface/compiled/_next/static/chunks/784.e03acf460df213d1.js +1 -0
  43. khoj/interface/compiled/_next/static/chunks/{9537-d9ab442ce15d1e20.js → 8072-e1440cb482a0940e.js} +1 -1
  44. khoj/interface/compiled/_next/static/chunks/{3265.924139c4146ee344.js → 8086.8d39887215807fcd.js} +1 -1
  45. khoj/interface/compiled/_next/static/chunks/8168.f074ab8c7c16d82d.js +59 -0
  46. khoj/interface/compiled/_next/static/chunks/{8694.2bd9c2f65d8c5847.js → 8223.1705878fa7a09292.js} +1 -1
  47. khoj/interface/compiled/_next/static/chunks/8483.94f6c9e2bee86f50.js +215 -0
  48. khoj/interface/compiled/_next/static/chunks/{8888.ebe0e552b59e7fed.js → 8810.fc0e479de78c7c61.js} +1 -1
  49. khoj/interface/compiled/_next/static/chunks/8828.bc74dc4ce94e78f6.js +1 -0
  50. khoj/interface/compiled/_next/static/chunks/{7303.d0612f812a967a08.js → 8909.14ac3f43d0070cf1.js} +5 -5
  51. khoj/interface/compiled/_next/static/chunks/90542734.b1a1629065ba199b.js +1 -0
  52. khoj/interface/compiled/_next/static/chunks/9167.098534184f03fe92.js +56 -0
  53. khoj/interface/compiled/_next/static/chunks/{4980.63500d68b3bb1222.js → 9537.e934ce37bf314509.js} +5 -5
  54. khoj/interface/compiled/_next/static/chunks/9574.3fe8e26e95bf1c34.js +1 -0
  55. khoj/interface/compiled/_next/static/chunks/9599.ec50b5296c27dae9.js +1 -0
  56. khoj/interface/compiled/_next/static/chunks/9643.b34248df52ffc77c.js +262 -0
  57. khoj/interface/compiled/_next/static/chunks/9747.2fd9065b1435abb1.js +1 -0
  58. khoj/interface/compiled/_next/static/chunks/9922.98f2b2a9959b4ebe.js +1 -0
  59. khoj/interface/compiled/_next/static/chunks/app/agents/layout-e49165209d2e406c.js +1 -0
  60. khoj/interface/compiled/_next/static/chunks/app/agents/page-e291b49977f43880.js +1 -0
  61. khoj/interface/compiled/_next/static/chunks/app/automations/page-198b26df6e09bbb0.js +1 -0
  62. khoj/interface/compiled/_next/static/chunks/app/chat/{page-8e1c4f2af3c9429e.js → page-dfcc1e8e2ad62873.js} +1 -1
  63. khoj/interface/compiled/_next/static/chunks/app/{page-2b3056cba8aa96ce.js → page-1567cac7b79a7c59.js} +1 -1
  64. khoj/interface/compiled/_next/static/chunks/app/settings/{page-8be3b35178abf2ec.js → page-6081362437c82470.js} +1 -1
  65. khoj/interface/compiled/_next/static/chunks/app/share/chat/layout-6fb51c5c80f8ec67.js +1 -0
  66. khoj/interface/compiled/_next/static/chunks/app/share/chat/{page-4a4b0c0f4749c2b2.js → page-e0dcb1762f8c8f88.js} +1 -1
  67. khoj/interface/compiled/_next/static/chunks/webpack-5393aad3d824e0cb.js +1 -0
  68. khoj/interface/compiled/agents/index.html +2 -2
  69. khoj/interface/compiled/agents/index.txt +3 -3
  70. khoj/interface/compiled/automations/index.html +2 -2
  71. khoj/interface/compiled/automations/index.txt +3 -3
  72. khoj/interface/compiled/chat/index.html +2 -2
  73. khoj/interface/compiled/chat/index.txt +3 -3
  74. khoj/interface/compiled/index.html +2 -2
  75. khoj/interface/compiled/index.txt +3 -3
  76. khoj/interface/compiled/search/index.html +2 -2
  77. khoj/interface/compiled/search/index.txt +3 -3
  78. khoj/interface/compiled/settings/index.html +2 -2
  79. khoj/interface/compiled/settings/index.txt +3 -3
  80. khoj/interface/compiled/share/chat/index.html +2 -2
  81. khoj/interface/compiled/share/chat/index.txt +3 -3
  82. khoj/main.py +7 -9
  83. khoj/manage.py +1 -0
  84. khoj/processor/content/github/github_to_entries.py +6 -7
  85. khoj/processor/content/images/image_to_entries.py +0 -1
  86. khoj/processor/content/markdown/markdown_to_entries.py +2 -3
  87. khoj/processor/content/notion/notion_to_entries.py +5 -6
  88. khoj/processor/content/org_mode/org_to_entries.py +4 -5
  89. khoj/processor/content/org_mode/orgnode.py +4 -4
  90. khoj/processor/content/plaintext/plaintext_to_entries.py +1 -2
  91. khoj/processor/content/text_to_entries.py +1 -3
  92. khoj/processor/conversation/google/utils.py +3 -3
  93. khoj/processor/conversation/openai/gpt.py +65 -28
  94. khoj/processor/conversation/openai/utils.py +359 -28
  95. khoj/processor/conversation/prompts.py +16 -41
  96. khoj/processor/conversation/utils.py +29 -39
  97. khoj/processor/embeddings.py +0 -2
  98. khoj/processor/image/generate.py +3 -3
  99. khoj/processor/operator/__init__.py +2 -3
  100. khoj/processor/operator/grounding_agent.py +15 -2
  101. khoj/processor/operator/grounding_agent_uitars.py +34 -23
  102. khoj/processor/operator/operator_agent_anthropic.py +29 -4
  103. khoj/processor/operator/operator_agent_base.py +1 -1
  104. khoj/processor/operator/operator_agent_binary.py +4 -4
  105. khoj/processor/operator/operator_agent_openai.py +21 -6
  106. khoj/processor/operator/operator_environment_browser.py +1 -1
  107. khoj/processor/operator/operator_environment_computer.py +1 -1
  108. khoj/processor/speech/text_to_speech.py +0 -1
  109. khoj/processor/tools/online_search.py +1 -1
  110. khoj/processor/tools/run_code.py +1 -1
  111. khoj/routers/api.py +2 -15
  112. khoj/routers/api_agents.py +1 -2
  113. khoj/routers/api_automation.py +1 -1
  114. khoj/routers/api_chat.py +10 -16
  115. khoj/routers/api_content.py +3 -111
  116. khoj/routers/api_model.py +0 -1
  117. khoj/routers/api_subscription.py +1 -1
  118. khoj/routers/email.py +4 -4
  119. khoj/routers/helpers.py +44 -103
  120. khoj/routers/research.py +8 -8
  121. khoj/search_filter/base_filter.py +2 -4
  122. khoj/search_type/text_search.py +1 -2
  123. khoj/utils/cli.py +5 -53
  124. khoj/utils/config.py +0 -65
  125. khoj/utils/constants.py +6 -7
  126. khoj/utils/helpers.py +10 -18
  127. khoj/utils/initialization.py +7 -48
  128. khoj/utils/models.py +2 -4
  129. khoj/utils/rawconfig.py +1 -69
  130. khoj/utils/state.py +2 -8
  131. khoj/utils/yaml.py +0 -39
  132. {khoj-2.0.0b12.dev5.dist-info → khoj-2.0.0b13.dist-info}/METADATA +3 -3
  133. {khoj-2.0.0b12.dev5.dist-info → khoj-2.0.0b13.dist-info}/RECORD +139 -148
  134. khoj/interface/compiled/_next/static/chunks/1191.b547ec13349b4aed.js +0 -1
  135. khoj/interface/compiled/_next/static/chunks/1588.f0558a0bdffc4761.js +0 -117
  136. khoj/interface/compiled/_next/static/chunks/1918.925cb4a35518d258.js +0 -43
  137. khoj/interface/compiled/_next/static/chunks/2849.dc00ae5ba7219cfc.js +0 -1
  138. khoj/interface/compiled/_next/static/chunks/303.fe76de943e930fbd.js +0 -1
  139. khoj/interface/compiled/_next/static/chunks/4533.586e74b45a2bde25.js +0 -55
  140. khoj/interface/compiled/_next/static/chunks/4551.82ce1476b5516bc2.js +0 -5
  141. khoj/interface/compiled/_next/static/chunks/4748.0edd37cba3ea2809.js +0 -59
  142. khoj/interface/compiled/_next/static/chunks/5210.cd35a1c1ec594a20.js +0 -93
  143. khoj/interface/compiled/_next/static/chunks/5329.f8b3c5b3d16159cd.js +0 -1
  144. khoj/interface/compiled/_next/static/chunks/5427-13d6ffd380fdfab7.js +0 -1
  145. khoj/interface/compiled/_next/static/chunks/558-c14e76cff03f6a60.js +0 -1
  146. khoj/interface/compiled/_next/static/chunks/5830.8876eccb82da9b7d.js +0 -262
  147. khoj/interface/compiled/_next/static/chunks/6230.88a71d8145347b3f.js +0 -1
  148. khoj/interface/compiled/_next/static/chunks/7161.77e0530a40ad5ca8.js +0 -1
  149. khoj/interface/compiled/_next/static/chunks/7200-ac3b2e37ff30e126.js +0 -1
  150. khoj/interface/compiled/_next/static/chunks/7505.c31027a3695bdebb.js +0 -148
  151. khoj/interface/compiled/_next/static/chunks/7760.35649cc21d9585bd.js +0 -56
  152. khoj/interface/compiled/_next/static/chunks/83.48e2db193a940052.js +0 -1
  153. khoj/interface/compiled/_next/static/chunks/8427.844694e06133fb51.js +0 -1
  154. khoj/interface/compiled/_next/static/chunks/8665.4db7e6b2e8933497.js +0 -174
  155. khoj/interface/compiled/_next/static/chunks/872.caf84cc1a39ae59f.js +0 -1
  156. khoj/interface/compiled/_next/static/chunks/8890.6e8a59e4de6978bc.js +0 -215
  157. khoj/interface/compiled/_next/static/chunks/8950.5f2272e0ac923f9e.js +0 -1
  158. khoj/interface/compiled/_next/static/chunks/90542734.2c21f16f18b22411.js +0 -1
  159. khoj/interface/compiled/_next/static/chunks/9202.c703864fcedc8d1f.js +0 -63
  160. khoj/interface/compiled/_next/static/chunks/9320.6aca4885d541aa44.js +0 -24
  161. khoj/interface/compiled/_next/static/chunks/9535.f78cd92d03331e55.js +0 -1
  162. khoj/interface/compiled/_next/static/chunks/9968.b111fc002796da81.js +0 -1
  163. khoj/interface/compiled/_next/static/chunks/app/agents/layout-e00fb81dca656a10.js +0 -1
  164. khoj/interface/compiled/_next/static/chunks/app/agents/page-9a4610474cd59a71.js +0 -1
  165. khoj/interface/compiled/_next/static/chunks/app/automations/page-f7bb9d777b7745d4.js +0 -1
  166. khoj/interface/compiled/_next/static/chunks/app/share/chat/layout-e8e5db7830bf3f47.js +0 -1
  167. khoj/interface/compiled/_next/static/chunks/f3e3247b-1758d4651e4457c2.js +0 -10
  168. khoj/interface/compiled/_next/static/chunks/webpack-338a5000c912cc94.js +0 -1
  169. khoj/migrations/__init__.py +0 -0
  170. khoj/migrations/migrate_offline_chat_default_model.py +0 -69
  171. khoj/migrations/migrate_offline_chat_default_model_2.py +0 -71
  172. khoj/migrations/migrate_offline_chat_schema.py +0 -83
  173. khoj/migrations/migrate_offline_model.py +0 -29
  174. khoj/migrations/migrate_processor_config_openai.py +0 -67
  175. khoj/migrations/migrate_server_pg.py +0 -132
  176. khoj/migrations/migrate_version.py +0 -17
  177. khoj/processor/conversation/offline/__init__.py +0 -0
  178. khoj/processor/conversation/offline/chat_model.py +0 -224
  179. khoj/processor/conversation/offline/utils.py +0 -80
  180. khoj/processor/conversation/offline/whisper.py +0 -15
  181. khoj/utils/fs_syncer.py +0 -252
  182. /khoj/interface/compiled/_next/static/{7GoMcE8WpP9fbfYZXv4Nv → RYbQvo3AvgOR0bEVVfxF4}/_buildManifest.js +0 -0
  183. /khoj/interface/compiled/_next/static/{7GoMcE8WpP9fbfYZXv4Nv → RYbQvo3AvgOR0bEVVfxF4}/_ssgManifest.js +0 -0
  184. /khoj/interface/compiled/_next/static/chunks/app/search/{page-4885df3cd175c957.js → page-3639e50ec3e9acfd.js} +0 -0
  185. {khoj-2.0.0b12.dev5.dist-info → khoj-2.0.0b13.dist-info}/WHEEL +0 -0
  186. {khoj-2.0.0b12.dev5.dist-info → khoj-2.0.0b13.dist-info}/entry_points.txt +0 -0
  187. {khoj-2.0.0b12.dev5.dist-info → khoj-2.0.0b13.dist-info}/licenses/LICENSE +0 -0
@@ -1 +0,0 @@
- !function(){"use strict";var e,t,n,r,c,o,f,a,u,i={},d={};function b(e){var t=d[e];if(void 0!==t)return t.exports;var n=d[e]={exports:{}},r=!0;try{i[e].call(n.exports,n,n.exports,b),r=!1}finally{r&&delete d[e]}return n.exports}b.m=i,e=[],b.O=function(t,n,r,c){if(n){c=c||0;for(var o=e.length;o>0&&e[o-1][2]>c;o--)e[o]=e[o-1];e[o]=[n,r,c];return}for(var f=1/0,o=0;o<e.length;o++){for(var n=e[o][0],r=e[o][1],c=e[o][2],a=!0,u=0;u<n.length;u++)f>=c&&Object.keys(b.O).every(function(e){return b.O[e](n[u])})?n.splice(u--,1):(a=!1,c<f&&(f=c));if(a){e.splice(o--,1);var i=r();void 0!==i&&(t=i)}}return t},b.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return b.d(t,{a:t}),t},n=Object.getPrototypeOf?function(e){return Object.getPrototypeOf(e)}:function(e){return e.__proto__},b.t=function(e,r){if(1&r&&(e=this(e)),8&r||"object"==typeof e&&e&&(4&r&&e.__esModule||16&r&&"function"==typeof e.then))return e;var c=Object.create(null);b.r(c);var o={};t=t||[null,n({}),n([]),n(n)];for(var f=2&r&&e;"object"==typeof f&&!~t.indexOf(f);f=n(f))Object.getOwnPropertyNames(f).forEach(function(t){o[t]=function(){return e[t]}});return o.default=function(){return e},b.d(c,o),c},b.d=function(e,t){for(var n in t)b.o(t,n)&&!b.o(e,n)&&Object.defineProperty(e,n,{enumerable:!0,get:t[n]})},b.f={},b.e=function(e){return Promise.all(Object.keys(b.f).reduce(function(t,n){return b.f[n](e,t),t},[]))},b.u=function(e){return 4609===e?"static/chunks/4609-33aa487dff03a9fd.js":"static/chunks/"+(({1264:"90542734",6555:"964ecbae",7293:"94ca1967"})[e]||e)+"."+({83:"48e2db193a940052",303:"fe76de943e930fbd",872:"caf84cc1a39ae59f",1191:"b547ec13349b4aed",1264:"2c21f16f18b22411",1588:"f0558a0bdffc4761",1918:"925cb4a35518d258",2242:"9a07e19f1a3a8b16",2612:"bcf5a623b3da209e",2849:"dc00ae5ba7219cfc",3265:"924139c4146ee344",3489:"c523fe96a2eee74f",4327:"8d2a1b8f1ea78208",4533:"586e74b45a2bde25",4551:"82ce1476b5516bc2",4610:"196691887afb7fea",4748:"0edd37cba3ea2809",4980:"63500d68b3bb1222",5210:"cd35a1c1ec594a20",5329:"f8b3c5b3d16159cd",5830:"8876eccb82da9b7d",6230:"88a71d8145347b3f",6434:"e6cd986d690f2cef",6555:"d5be7c49c320d695",7161:"77e0530a40ad5ca8",7293:"1b3402358e0e1255",7303:"d0612f812a967a08",7505:"c31027a3695bdebb",7760:"35649cc21d9585bd",8427:"844694e06133fb51",8665:"4db7e6b2e8933497",8694:"2bd9c2f65d8c5847",8888:"ebe0e552b59e7fed",8890:"6e8a59e4de6978bc",8950:"5f2272e0ac923f9e",9202:"c703864fcedc8d1f",9245:"a04e92d034540234",9320:"6aca4885d541aa44",9535:"f78cd92d03331e55",9968:"b111fc002796da81"})[e]+".js"},b.miniCssF=function(e){},b.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||Function("return this")()}catch(e){if("object"==typeof window)return window}}(),b.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},r={},c="_N_E:",b.l=function(e,t,n,o){if(r[e]){r[e].push(t);return}if(void 0!==n)for(var f,a,u=document.getElementsByTagName("script"),i=0;i<u.length;i++){var d=u[i];if(d.getAttribute("src")==e||d.getAttribute("data-webpack")==c+n){f=d;break}}f||(a=!0,(f=document.createElement("script")).charset="utf-8",f.timeout=120,b.nc&&f.setAttribute("nonce",b.nc),f.setAttribute("data-webpack",c+n),f.src=b.tu(e)),r[e]=[t];var l=function(t,n){f.onerror=f.onload=null,clearTimeout(s);var c=r[e];if(delete r[e],f.parentNode&&f.parentNode.removeChild(f),c&&c.forEach(function(e){return e(n)}),t)return t(n)},s=setTimeout(l.bind(null,void 
0,{type:"timeout",target:f}),12e4);f.onerror=l.bind(null,f.onerror),f.onload=l.bind(null,f.onload),a&&document.head.appendChild(f)},b.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},b.tt=function(){return void 0===o&&(o={createScriptURL:function(e){return e}},"undefined"!=typeof trustedTypes&&trustedTypes.createPolicy&&(o=trustedTypes.createPolicy("nextjs#bundler",o))),o},b.tu=function(e){return b.tt().createScriptURL(e)},b.p="/_next/",f={2272:0,3254:0,3587:0,7400:0,6250:0,4842:0,5638:0,2026:0,9346:0,6008:0},b.f.j=function(e,t){var n=b.o(f,e)?f[e]:void 0;if(0!==n){if(n)t.push(n[2]);else if(/^(2026|2272|3254|3587|4842|5638|6008|6250|7400|9346)$/.test(e))f[e]=0;else{var r=new Promise(function(t,r){n=f[e]=[t,r]});t.push(n[2]=r);var c=b.p+b.u(e),o=Error();b.l(c,function(t){if(b.o(f,e)&&(0!==(n=f[e])&&(f[e]=void 0),n)){var r=t&&("load"===t.type?"missing":t.type),c=t&&t.target&&t.target.src;o.message="Loading chunk "+e+" failed.\n("+r+": "+c+")",o.name="ChunkLoadError",o.type=r,o.request=c,n[1](o)}},"chunk-"+e,e)}}},b.O.j=function(e){return 0===f[e]},a=function(e,t){var n,r,c=t[0],o=t[1],a=t[2],u=0;if(c.some(function(e){return 0!==f[e]})){for(n in o)b.o(o,n)&&(b.m[n]=o[n]);if(a)var i=a(b)}for(e&&e(t);u<c.length;u++)r=c[u],b.o(f,r)&&f[r]&&f[r][0](),f[r]=0;return b.O(i)},(u=self.webpackChunk_N_E=self.webpackChunk_N_E||[]).forEach(a.bind(null,0)),u.push=a.bind(null,u.push.bind(u)),b.nc=void 0}();
@@ -1,69 +0,0 @@
- """
- Current format of khoj.yml
- ---
- app:
- ...
- content-type:
- ...
- processor:
- conversation:
- offline-chat:
- enable-offline-chat: false
- chat-model: llama-2-7b-chat.ggmlv3.q4_0.bin
- ...
- search-type:
- ...
-
- New format of khoj.yml
- ---
- app:
- ...
- content-type:
- ...
- processor:
- conversation:
- offline-chat:
- enable-offline-chat: false
- chat-model: mistral-7b-instruct-v0.1.Q4_0.gguf
- ...
- search-type:
- ...
- """
- import logging
-
- from packaging import version
-
- from khoj.utils.yaml import load_config_from_file, save_config_to_file
-
- logger = logging.getLogger(__name__)
-
-
- def migrate_offline_chat_default_model(args):
- schema_version = "0.12.4"
- raw_config = load_config_from_file(args.config_file)
- previous_version = raw_config.get("version")
-
- if "processor" not in raw_config:
- return args
- if raw_config["processor"] is None:
- return args
- if "conversation" not in raw_config["processor"]:
- return args
- if "offline-chat" not in raw_config["processor"]["conversation"]:
- return args
- if "chat-model" not in raw_config["processor"]["conversation"]["offline-chat"]:
- return args
-
- if previous_version is None or version.parse(previous_version) < version.parse("0.12.4"):
- logger.info(
- f"Upgrading config schema to {schema_version} from {previous_version} to change default (offline) chat model to mistral GGUF"
- )
- raw_config["version"] = schema_version
-
- # Update offline chat model to mistral in GGUF format to use latest GPT4All
- offline_chat_model = raw_config["processor"]["conversation"]["offline-chat"]["chat-model"]
- if offline_chat_model.endswith(".bin"):
- raw_config["processor"]["conversation"]["offline-chat"]["chat-model"] = "mistral-7b-instruct-v0.1.Q4_0.gguf"
-
- save_config_to_file(raw_config, args.config_file)
- return args
@@ -1,71 +0,0 @@
- """
- Current format of khoj.yml
- ---
- app:
- ...
- content-type:
- ...
- processor:
- conversation:
- offline-chat:
- enable-offline-chat: false
- chat-model: mistral-7b-instruct-v0.1.Q4_0.gguf
- ...
- search-type:
- ...
-
- New format of khoj.yml
- ---
- app:
- ...
- content-type:
- ...
- processor:
- conversation:
- offline-chat:
- enable-offline-chat: false
- chat-model: NousResearch/Hermes-2-Pro-Mistral-7B-GGUF
- ...
- search-type:
- ...
- """
- import logging
-
- from packaging import version
-
- from khoj.utils.yaml import load_config_from_file, save_config_to_file
-
- logger = logging.getLogger(__name__)
-
-
- def migrate_offline_chat_default_model(args):
- schema_version = "1.7.0"
- raw_config = load_config_from_file(args.config_file)
- previous_version = raw_config.get("version")
-
- if "processor" not in raw_config:
- return args
- if raw_config["processor"] is None:
- return args
- if "conversation" not in raw_config["processor"]:
- return args
- if "offline-chat" not in raw_config["processor"]["conversation"]:
- return args
- if "chat-model" not in raw_config["processor"]["conversation"]["offline-chat"]:
- return args
-
- if previous_version is None or version.parse(previous_version) < version.parse(schema_version):
- logger.info(
- f"Upgrading config schema to {schema_version} from {previous_version} to change default (offline) chat model to mistral GGUF"
- )
- raw_config["version"] = schema_version
-
- # Update offline chat model to use Nous Research's Hermes-2-Pro GGUF in path format suitable for llama-cpp
- offline_chat_model = raw_config["processor"]["conversation"]["offline-chat"]["chat-model"]
- if offline_chat_model == "mistral-7b-instruct-v0.1.Q4_0.gguf":
- raw_config["processor"]["conversation"]["offline-chat"][
- "chat-model"
- ] = "NousResearch/Hermes-2-Pro-Mistral-7B-GGUF"
-
- save_config_to_file(raw_config, args.config_file)
- return args
@@ -1,83 +0,0 @@
- """
- Current format of khoj.yml
- ---
- app:
- ...
- content-type:
- ...
- processor:
- conversation:
- enable-offline-chat: false
- conversation-logfile: ~/.khoj/processor/conversation/conversation_logs.json
- openai:
- ...
- search-type:
- ...
-
- New format of khoj.yml
- ---
- app:
- ...
- content-type:
- ...
- processor:
- conversation:
- offline-chat:
- enable-offline-chat: false
- chat-model: llama-2-7b-chat.ggmlv3.q4_0.bin
- tokenizer: null
- max_prompt_size: null
- conversation-logfile: ~/.khoj/processor/conversation/conversation_logs.json
- openai:
- ...
- search-type:
- ...
- """
- import logging
-
- from packaging import version
-
- from khoj.utils.yaml import load_config_from_file, save_config_to_file
-
- logger = logging.getLogger(__name__)
-
-
- def migrate_offline_chat_schema(args):
- schema_version = "0.12.3"
- raw_config = load_config_from_file(args.config_file)
- previous_version = raw_config.get("version")
-
- if "processor" not in raw_config:
- return args
- if raw_config["processor"] is None:
- return args
- if "conversation" not in raw_config["processor"]:
- return args
-
- if previous_version is None or version.parse(previous_version) < version.parse("0.12.3"):
- logger.info(
- f"Upgrading config schema to {schema_version} from {previous_version} to make (offline) chat more configuration"
- )
- raw_config["version"] = schema_version
-
- # Create max-prompt-size field in conversation processor schema
- raw_config["processor"]["conversation"]["max-prompt-size"] = None
- raw_config["processor"]["conversation"]["tokenizer"] = None
-
- # Create offline chat schema based on existing enable_offline_chat field in khoj config schema
- offline_chat_model = (
- raw_config["processor"]["conversation"]
- .get("offline-chat", {})
- .get("chat-model", "llama-2-7b-chat.ggmlv3.q4_0.bin")
- )
- raw_config["processor"]["conversation"]["offline-chat"] = {
- "enable-offline-chat": raw_config["processor"]["conversation"].get("enable-offline-chat", False),
- "chat-model": offline_chat_model,
- }
-
- # Delete old enable-offline-chat field from conversation processor schema
- if "enable-offline-chat" in raw_config["processor"]["conversation"]:
- del raw_config["processor"]["conversation"]["enable-offline-chat"]
-
- save_config_to_file(raw_config, args.config_file)
- return args
@@ -1,29 +0,0 @@
- import logging
- import os
-
- from packaging import version
-
- from khoj.utils.yaml import load_config_from_file, save_config_to_file
-
- logger = logging.getLogger(__name__)
-
-
- def migrate_offline_model(args):
- schema_version = "0.10.1"
- raw_config = load_config_from_file(args.config_file)
- previous_version = raw_config.get("version")
-
- if previous_version is None or version.parse(previous_version) < version.parse("0.10.1"):
- logger.info(
- f"Migrating offline model used for version {previous_version} to latest version for {args.version_no}"
- )
- raw_config["version"] = schema_version
-
- # If the user has downloaded the offline model, remove it from the cache.
- offline_model_path = os.path.expanduser("~/.cache/gpt4all/llama-2-7b-chat.ggmlv3.q4_K_S.bin")
- if os.path.exists(offline_model_path):
- os.remove(offline_model_path)
-
- save_config_to_file(raw_config, args.config_file)
-
- return args
@@ -1,67 +0,0 @@
- """
- Current format of khoj.yml
- ---
- app:
- should-log-telemetry: true
- content-type:
- ...
- processor:
- conversation:
- chat-model: gpt-3.5-turbo
- conversation-logfile: ~/.khoj/processor/conversation/conversation_logs.json
- model: text-davinci-003
- openai-api-key: sk-secret-key
- search-type:
- ...
-
- New format of khoj.yml
- ---
- app:
- should-log-telemetry: true
- content-type:
- ...
- processor:
- conversation:
- openai:
- chat-model: gpt-3.5-turbo
- openai-api-key: sk-secret-key
- conversation-logfile: ~/.khoj/processor/conversation/conversation_logs.json
- enable-offline-chat: false
- search-type:
- ...
- """
- from khoj.utils.yaml import load_config_from_file, save_config_to_file
-
-
- def migrate_processor_conversation_schema(args):
- schema_version = "0.10.0"
- raw_config = load_config_from_file(args.config_file)
-
- if "processor" not in raw_config:
- return args
- if raw_config["processor"] is None:
- return args
- if "conversation" not in raw_config["processor"]:
- return args
-
- current_openai_api_key = raw_config["processor"]["conversation"].get("openai-api-key", None)
- current_chat_model = raw_config["processor"]["conversation"].get("chat-model", None)
- if current_openai_api_key is None and current_chat_model is None:
- return args
-
- raw_config["version"] = schema_version
-
- # Add enable_offline_chat to khoj config schema
- if "enable-offline-chat" not in raw_config["processor"]["conversation"]:
- raw_config["processor"]["conversation"]["enable-offline-chat"] = False
-
- # Update conversation processor schema
- conversation_logfile = raw_config["processor"]["conversation"].get("conversation-logfile", None)
- raw_config["processor"]["conversation"] = {
- "openai": {"chat-model": current_chat_model, "api-key": current_openai_api_key},
- "conversation-logfile": conversation_logfile,
- "enable-offline-chat": False,
- }
-
- save_config_to_file(raw_config, args.config_file)
- return args
@@ -1,132 +0,0 @@
- """
- The application config currently looks like this:
- app:
- should-log-telemetry: true
- content-type:
- ...
- processor:
- conversation:
- conversation-logfile: ~/.khoj/processor/conversation/conversation_logs.json
- max-prompt-size: null
- offline-chat:
- chat-model: mistral-7b-instruct-v0.1.Q4_0.gguf
- enable-offline-chat: false
- openai:
- api-key: sk-blah
- chat-model: gpt-3.5-turbo
- tokenizer: null
- search-type:
- asymmetric:
- cross-encoder: cross-encoder/ms-marco-MiniLM-L-6-v2
- encoder: sentence-transformers/multi-qa-MiniLM-L6-cos-v1
- encoder-type: null
- model-directory: /Users/si/.khoj/search/asymmetric
- image:
- encoder: sentence-transformers/clip-ViT-B-32
- encoder-type: null
- model-directory: /Users/si/.khoj/search/image
- symmetric:
- cross-encoder: cross-encoder/ms-marco-MiniLM-L-6-v2
- encoder: sentence-transformers/all-MiniLM-L6-v2
- encoder-type: null
- model-directory: ~/.khoj/search/symmetric
- version: 0.14.0
-
-
- The new version will looks like this:
- app:
- should-log-telemetry: true
- processor:
- conversation:
- offline-chat:
- enabled: false
- openai:
- api-key: sk-blah
- chat-model-options:
- - chat-model: gpt-3.5-turbo
- tokenizer: null
- type: openai
- - chat-model: mistral-7b-instruct-v0.1.Q4_0.gguf
- tokenizer: null
- type: offline
- search-type:
- asymmetric:
- cross-encoder: cross-encoder/ms-marco-MiniLM-L-6-v2
- encoder: sentence-transformers/multi-qa-MiniLM-L6-cos-v1
- version: 0.15.0
- """
-
- import logging
-
- from packaging import version
-
- from khoj.database.models import AiModelApi, ChatModel, SearchModelConfig
- from khoj.utils.yaml import load_config_from_file, save_config_to_file
-
- logger = logging.getLogger(__name__)
-
-
- def migrate_server_pg(args):
- schema_version = "0.15.0"
- raw_config = load_config_from_file(args.config_file)
- previous_version = raw_config.get("version")
-
- if previous_version is None or version.parse(previous_version) < version.parse(schema_version):
- logger.info(
- f"Migrating configuration used for version {previous_version} to latest version for server with postgres in {args.version_no}"
- )
- raw_config["version"] = schema_version
-
- if raw_config is None:
- return args
-
- if "search-type" in raw_config and raw_config["search-type"]:
- if "asymmetric" in raw_config["search-type"]:
- # Delete all existing search models
- SearchModelConfig.objects.filter(model_type=SearchModelConfig.ModelType.TEXT).delete()
- # Create new search model from existing Khoj YAML config
- asymmetric_search = raw_config["search-type"]["asymmetric"]
- SearchModelConfig.objects.create(
- name="default",
- model_type=SearchModelConfig.ModelType.TEXT,
- bi_encoder=asymmetric_search.get("encoder"),
- cross_encoder=asymmetric_search.get("cross-encoder"),
- )
-
- if "processor" in raw_config and raw_config["processor"] and "conversation" in raw_config["processor"]:
- processor_conversation = raw_config["processor"]["conversation"]
-
- if "offline-chat" in raw_config["processor"]["conversation"]:
- offline_chat = raw_config["processor"]["conversation"]["offline-chat"]
- ChatModel.objects.create(
- name=offline_chat.get("chat-model"),
- tokenizer=processor_conversation.get("tokenizer"),
- max_prompt_size=processor_conversation.get("max-prompt-size"),
- model_type=ChatModel.ModelType.OFFLINE,
- )
-
- if (
- "openai" in raw_config["processor"]["conversation"]
- and raw_config["processor"]["conversation"]["openai"]
- ):
- openai = raw_config["processor"]["conversation"]["openai"]
-
- if openai.get("api-key") is None:
- logger.error("OpenAI API Key is not set. Will not be migrating OpenAI config.")
- else:
- if openai.get("chat-model") is None:
- openai["chat-model"] = "gpt-3.5-turbo"
-
- openai_model_api = AiModelApi.objects.create(api_key=openai.get("api-key"), name="default")
-
- ChatModel.objects.create(
- name=openai.get("chat-model"),
- tokenizer=processor_conversation.get("tokenizer"),
- max_prompt_size=processor_conversation.get("max-prompt-size"),
- model_type=ChatModel.ModelType.OPENAI,
- ai_model_api=openai_model_api,
- )
-
- save_config_to_file(raw_config, args.config_file)
-
- return args
@@ -1,17 +0,0 @@
- from khoj.utils.yaml import load_config_from_file, save_config_to_file
-
-
- def migrate_config_to_version(args):
- schema_version = "0.9.0"
- raw_config = load_config_from_file(args.config_file)
-
- # Add version to khoj config schema
- if "version" not in raw_config:
- raw_config["version"] = schema_version
- save_config_to_file(raw_config, args.config_file)
-
- # regenerate khoj index on first start of this version
- # this should refresh index and apply index corruption fixes from #325
- args.regenerate = True
-
- return args