langchain 1.0.0-alpha.5 → 1.0.0-alpha.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (351)
  1. package/dist/agents/ReactAgent.cjs +5 -5
  2. package/dist/agents/ReactAgent.cjs.map +1 -1
  3. package/dist/agents/ReactAgent.d.cts +1 -3
  4. package/dist/agents/ReactAgent.d.cts.map +1 -1
  5. package/dist/agents/ReactAgent.d.ts +1 -3
  6. package/dist/agents/ReactAgent.d.ts.map +1 -1
  7. package/dist/agents/ReactAgent.js +6 -6
  8. package/dist/agents/ReactAgent.js.map +1 -1
  9. package/dist/agents/annotation.cjs.map +1 -1
  10. package/dist/agents/annotation.d.cts +4 -6
  11. package/dist/agents/annotation.d.cts.map +1 -1
  12. package/dist/agents/annotation.d.ts +4 -6
  13. package/dist/agents/annotation.d.ts.map +1 -1
  14. package/dist/agents/annotation.js.map +1 -1
  15. package/dist/agents/createAgent.cjs.map +1 -1
  16. package/dist/agents/createAgent.js.map +1 -1
  17. package/dist/agents/index.cjs +2 -2
  18. package/dist/agents/index.cjs.map +1 -1
  19. package/dist/agents/index.d.cts +47 -47
  20. package/dist/agents/index.d.cts.map +1 -1
  21. package/dist/agents/index.d.ts +47 -47
  22. package/dist/agents/index.d.ts.map +1 -1
  23. package/dist/agents/index.js +2 -2
  24. package/dist/agents/index.js.map +1 -1
  25. package/dist/agents/middlewareAgent/ReactAgent.cjs +18 -18
  26. package/dist/agents/middlewareAgent/ReactAgent.cjs.map +1 -1
  27. package/dist/agents/middlewareAgent/ReactAgent.d.cts +8 -9
  28. package/dist/agents/middlewareAgent/ReactAgent.d.cts.map +1 -1
  29. package/dist/agents/middlewareAgent/ReactAgent.d.ts +8 -9
  30. package/dist/agents/middlewareAgent/ReactAgent.d.ts.map +1 -1
  31. package/dist/agents/middlewareAgent/ReactAgent.js +18 -18
  32. package/dist/agents/middlewareAgent/ReactAgent.js.map +1 -1
  33. package/dist/agents/middlewareAgent/annotation.cjs +2 -2
  34. package/dist/agents/middlewareAgent/annotation.cjs.map +1 -1
  35. package/dist/agents/middlewareAgent/annotation.js +2 -2
  36. package/dist/agents/middlewareAgent/annotation.js.map +1 -1
  37. package/dist/agents/middlewareAgent/index.cjs.map +1 -1
  38. package/dist/agents/middlewareAgent/index.js.map +1 -1
  39. package/dist/agents/middlewareAgent/{middlewares → middleware}/hitl.cjs +8 -8
  40. package/dist/agents/middlewareAgent/middleware/hitl.cjs.map +1 -0
  41. package/dist/agents/middlewareAgent/{middlewares → middleware}/hitl.d.cts +3 -3
  42. package/dist/agents/middlewareAgent/middleware/hitl.d.cts.map +1 -0
  43. package/dist/agents/middlewareAgent/{middlewares → middleware}/hitl.d.ts +3 -3
  44. package/dist/agents/middlewareAgent/middleware/hitl.d.ts.map +1 -0
  45. package/dist/agents/middlewareAgent/{middlewares → middleware}/hitl.js +3 -3
  46. package/dist/agents/middlewareAgent/middleware/hitl.js.map +1 -0
  47. package/dist/agents/middlewareAgent/middleware/index.cjs +26 -0
  48. package/dist/agents/middlewareAgent/middleware/index.cjs.map +1 -0
  49. package/dist/agents/middlewareAgent/middleware/index.js +17 -0
  50. package/dist/agents/middlewareAgent/middleware/index.js.map +1 -0
  51. package/dist/agents/middlewareAgent/{middlewares → middleware}/promptCaching.cjs +51 -22
  52. package/dist/agents/middlewareAgent/middleware/promptCaching.cjs.map +1 -0
  53. package/dist/agents/middlewareAgent/{middlewares → middleware}/promptCaching.d.cts +9 -9
  54. package/dist/agents/middlewareAgent/middleware/promptCaching.d.cts.map +1 -0
  55. package/dist/agents/middlewareAgent/{middlewares → middleware}/promptCaching.d.ts +9 -9
  56. package/dist/agents/middlewareAgent/middleware/promptCaching.d.ts.map +1 -0
  57. package/dist/agents/middlewareAgent/{middlewares → middleware}/promptCaching.js +50 -21
  58. package/dist/agents/middlewareAgent/middleware/promptCaching.js.map +1 -0
  59. package/dist/agents/middlewareAgent/{middlewares → middleware}/summarization.cjs +11 -11
  60. package/dist/agents/middlewareAgent/middleware/summarization.cjs.map +1 -0
  61. package/dist/agents/middlewareAgent/{middlewares → middleware}/summarization.d.cts +3 -3
  62. package/dist/agents/middlewareAgent/middleware/summarization.d.cts.map +1 -0
  63. package/dist/agents/middlewareAgent/{middlewares → middleware}/summarization.d.ts +10 -10
  64. package/dist/agents/middlewareAgent/{middlewares/summarization.d.cts.map → middleware/summarization.d.ts.map} +1 -1
  65. package/dist/agents/middlewareAgent/{middlewares → middleware}/summarization.js +4 -4
  66. package/dist/agents/middlewareAgent/middleware/summarization.js.map +1 -0
  67. package/dist/agents/middlewareAgent/middleware.cjs.map +1 -1
  68. package/dist/agents/middlewareAgent/middleware.d.cts +1 -1
  69. package/dist/agents/middlewareAgent/middleware.d.cts.map +1 -1
  70. package/dist/agents/middlewareAgent/middleware.d.ts +1 -1
  71. package/dist/agents/middlewareAgent/middleware.d.ts.map +1 -1
  72. package/dist/agents/middlewareAgent/middleware.js.map +1 -1
  73. package/dist/agents/middlewareAgent/nodes/AfterModalNode.cjs.map +1 -1
  74. package/dist/agents/middlewareAgent/nodes/AfterModalNode.js.map +1 -1
  75. package/dist/agents/middlewareAgent/nodes/AgentNode.cjs +12 -19
  76. package/dist/agents/middlewareAgent/nodes/AgentNode.cjs.map +1 -1
  77. package/dist/agents/middlewareAgent/nodes/AgentNode.js +12 -19
  78. package/dist/agents/middlewareAgent/nodes/AgentNode.js.map +1 -1
  79. package/dist/agents/middlewareAgent/nodes/BeforeModalNode.cjs.map +1 -1
  80. package/dist/agents/middlewareAgent/nodes/BeforeModalNode.js.map +1 -1
  81. package/dist/agents/middlewareAgent/nodes/middleware.cjs.map +1 -1
  82. package/dist/agents/middlewareAgent/nodes/middleware.js.map +1 -1
  83. package/dist/agents/middlewareAgent/nodes/utils.cjs +7 -7
  84. package/dist/agents/middlewareAgent/nodes/utils.cjs.map +1 -1
  85. package/dist/agents/middlewareAgent/nodes/utils.js +3 -3
  86. package/dist/agents/middlewareAgent/nodes/utils.js.map +1 -1
  87. package/dist/agents/middlewareAgent/types.d.cts +10 -23
  88. package/dist/agents/middlewareAgent/types.d.cts.map +1 -1
  89. package/dist/agents/middlewareAgent/types.d.ts +10 -23
  90. package/dist/agents/middlewareAgent/types.d.ts.map +1 -1
  91. package/dist/agents/nodes/AgentNode.cjs +4 -4
  92. package/dist/agents/nodes/AgentNode.cjs.map +1 -1
  93. package/dist/agents/nodes/AgentNode.js +4 -4
  94. package/dist/agents/nodes/AgentNode.js.map +1 -1
  95. package/dist/agents/nodes/ToolNode.cjs +3 -3
  96. package/dist/agents/nodes/ToolNode.cjs.map +1 -1
  97. package/dist/agents/nodes/ToolNode.d.cts +1 -2
  98. package/dist/agents/nodes/ToolNode.d.cts.map +1 -1
  99. package/dist/agents/nodes/ToolNode.d.ts +1 -2
  100. package/dist/agents/nodes/ToolNode.d.ts.map +1 -1
  101. package/dist/agents/nodes/ToolNode.js +4 -4
  102. package/dist/agents/nodes/ToolNode.js.map +1 -1
  103. package/dist/agents/responses.cjs +1 -1
  104. package/dist/agents/responses.cjs.map +1 -1
  105. package/dist/agents/responses.d.cts.map +1 -1
  106. package/dist/agents/responses.d.ts.map +1 -1
  107. package/dist/agents/responses.js +1 -1
  108. package/dist/agents/responses.js.map +1 -1
  109. package/dist/agents/types.d.cts +1 -3
  110. package/dist/agents/types.d.cts.map +1 -1
  111. package/dist/agents/types.d.ts +1 -3
  112. package/dist/agents/types.d.ts.map +1 -1
  113. package/dist/agents/utils.cjs +6 -6
  114. package/dist/agents/utils.cjs.map +1 -1
  115. package/dist/agents/utils.js +7 -7
  116. package/dist/agents/utils.js.map +1 -1
  117. package/dist/agents/withAgentName.cjs.map +1 -1
  118. package/dist/agents/withAgentName.js.map +1 -1
  119. package/dist/chains/api/prompts.cjs.map +1 -1
  120. package/dist/chains/api/prompts.js.map +1 -1
  121. package/dist/chains/constitutional_ai/constitutional_chain.cjs.map +1 -1
  122. package/dist/chains/constitutional_ai/constitutional_chain.js.map +1 -1
  123. package/dist/chains/index.cjs +0 -3
  124. package/dist/chains/index.cjs.map +1 -1
  125. package/dist/chains/index.d.cts +1 -2
  126. package/dist/chains/index.d.ts +1 -2
  127. package/dist/chains/index.js +1 -3
  128. package/dist/chains/index.js.map +1 -1
  129. package/dist/chains/openai_functions/extraction.cjs.map +1 -1
  130. package/dist/chains/openai_functions/extraction.d.cts +1 -3
  131. package/dist/chains/openai_functions/extraction.d.cts.map +1 -1
  132. package/dist/chains/openai_functions/extraction.d.ts +1 -3
  133. package/dist/chains/openai_functions/extraction.d.ts.map +1 -1
  134. package/dist/chains/openai_functions/extraction.js.map +1 -1
  135. package/dist/chains/openai_functions/index.cjs +0 -5
  136. package/dist/chains/openai_functions/index.cjs.map +1 -1
  137. package/dist/chains/openai_functions/index.d.cts +1 -2
  138. package/dist/chains/openai_functions/index.d.ts +1 -2
  139. package/dist/chains/openai_functions/index.js +1 -4
  140. package/dist/chains/openai_functions/index.js.map +1 -1
  141. package/dist/chains/openai_functions/openapi.cjs +4 -4
  142. package/dist/chains/openai_functions/openapi.cjs.map +1 -1
  143. package/dist/chains/openai_functions/openapi.d.cts +1 -1
  144. package/dist/chains/openai_functions/openapi.js +4 -4
  145. package/dist/chains/openai_functions/openapi.js.map +1 -1
  146. package/dist/chains/openai_functions/tagging.cjs.map +1 -1
  147. package/dist/chains/openai_functions/tagging.d.cts +1 -3
  148. package/dist/chains/openai_functions/tagging.d.cts.map +1 -1
  149. package/dist/chains/openai_functions/tagging.d.ts +1 -3
  150. package/dist/chains/openai_functions/tagging.d.ts.map +1 -1
  151. package/dist/chains/openai_functions/tagging.js.map +1 -1
  152. package/dist/chains/query_constructor/index.cjs +4 -4
  153. package/dist/chains/query_constructor/index.cjs.map +1 -1
  154. package/dist/chains/query_constructor/index.d.cts +4 -2
  155. package/dist/chains/query_constructor/index.d.cts.map +1 -1
  156. package/dist/chains/query_constructor/index.d.ts +4 -2
  157. package/dist/chains/query_constructor/index.d.ts.map +1 -1
  158. package/dist/chains/query_constructor/index.js +1 -1
  159. package/dist/chains/query_constructor/index.js.map +1 -1
  160. package/dist/chains/question_answering/load.d.ts +2 -2
  161. package/dist/chains/question_answering/load.d.ts.map +1 -1
  162. package/dist/chains/question_answering/map_reduce_prompts.cjs.map +1 -1
  163. package/dist/chains/question_answering/map_reduce_prompts.js.map +1 -1
  164. package/dist/chains/question_answering/refine_prompts.cjs.map +1 -1
  165. package/dist/chains/question_answering/refine_prompts.js.map +1 -1
  166. package/dist/chains/question_answering/stuff_prompts.cjs.map +1 -1
  167. package/dist/chains/question_answering/stuff_prompts.js.map +1 -1
  168. package/dist/chains/router/multi_prompt.cjs +4 -4
  169. package/dist/chains/router/multi_prompt.cjs.map +1 -1
  170. package/dist/chains/router/multi_prompt.js +1 -1
  171. package/dist/chains/router/multi_prompt.js.map +1 -1
  172. package/dist/chains/router/multi_retrieval_qa.cjs +4 -4
  173. package/dist/chains/router/multi_retrieval_qa.cjs.map +1 -1
  174. package/dist/chains/router/multi_retrieval_qa.js +1 -1
  175. package/dist/chains/router/multi_retrieval_qa.js.map +1 -1
  176. package/dist/chains/sql_db/sql_db_prompt.cjs.map +1 -1
  177. package/dist/chains/sql_db/sql_db_prompt.d.cts.map +1 -1
  178. package/dist/chains/sql_db/sql_db_prompt.d.ts.map +1 -1
  179. package/dist/chains/sql_db/sql_db_prompt.js.map +1 -1
  180. package/dist/chains/summarization/load.d.ts +2 -2
  181. package/dist/chains/summarization/load.d.ts.map +1 -1
  182. package/dist/chains/summarization/stuff_prompts.cjs.map +1 -1
  183. package/dist/chains/summarization/stuff_prompts.js.map +1 -1
  184. package/dist/chat_models/universal.cjs +8 -5
  185. package/dist/chat_models/universal.cjs.map +1 -1
  186. package/dist/chat_models/universal.d.cts +2 -2
  187. package/dist/chat_models/universal.d.cts.map +1 -1
  188. package/dist/chat_models/universal.d.ts +2 -2
  189. package/dist/chat_models/universal.d.ts.map +1 -1
  190. package/dist/chat_models/universal.js +8 -5
  191. package/dist/chat_models/universal.js.map +1 -1
  192. package/dist/document_loaders/fs/directory.cjs.map +1 -1
  193. package/dist/document_loaders/fs/directory.d.cts +0 -1
  194. package/dist/document_loaders/fs/directory.d.cts.map +1 -1
  195. package/dist/document_loaders/fs/directory.d.ts +0 -1
  196. package/dist/document_loaders/fs/directory.d.ts.map +1 -1
  197. package/dist/document_loaders/fs/directory.js.map +1 -1
  198. package/dist/document_loaders/fs/json.cjs +7 -1
  199. package/dist/document_loaders/fs/json.cjs.map +1 -1
  200. package/dist/document_loaders/fs/json.js +7 -1
  201. package/dist/document_loaders/fs/json.js.map +1 -1
  202. package/dist/embeddings/cache_backed.cjs +1 -1
  203. package/dist/embeddings/cache_backed.cjs.map +1 -1
  204. package/dist/embeddings/cache_backed.d.cts +1 -1
  205. package/dist/embeddings/cache_backed.d.ts +1 -1
  206. package/dist/embeddings/cache_backed.js +2 -2
  207. package/dist/embeddings/cache_backed.js.map +1 -1
  208. package/dist/evaluation/agents/trajectory.d.cts.map +1 -1
  209. package/dist/evaluation/comparison/pairwise.d.ts.map +1 -1
  210. package/dist/evaluation/criteria/criteria.d.ts.map +1 -1
  211. package/dist/evaluation/embedding_distance/base.cjs +2 -4
  212. package/dist/evaluation/embedding_distance/base.cjs.map +1 -1
  213. package/dist/evaluation/embedding_distance/base.js +2 -3
  214. package/dist/evaluation/embedding_distance/base.js.map +1 -1
  215. package/dist/evaluation/loader.cjs +7 -12
  216. package/dist/evaluation/loader.cjs.map +1 -1
  217. package/dist/evaluation/loader.d.cts +8 -2
  218. package/dist/evaluation/loader.d.cts.map +1 -1
  219. package/dist/evaluation/loader.d.ts +8 -2
  220. package/dist/evaluation/loader.d.ts.map +1 -1
  221. package/dist/evaluation/loader.js +7 -12
  222. package/dist/evaluation/loader.js.map +1 -1
  223. package/dist/hub/base.cjs.map +1 -1
  224. package/dist/hub/base.js.map +1 -1
  225. package/dist/langchain-core/dist/load/serializable.d.cts.map +1 -1
  226. package/dist/langchain-core/dist/messages/base.d.cts +24 -33
  227. package/dist/langchain-core/dist/messages/base.d.cts.map +1 -1
  228. package/dist/langchain-core/dist/messages/content/index.d.cts +1 -1
  229. package/dist/langchain-core/dist/messages/content/index.d.cts.map +1 -1
  230. package/dist/langchain-core/dist/messages/message.d.cts +598 -0
  231. package/dist/langchain-core/dist/messages/message.d.cts.map +1 -0
  232. package/dist/langchain-core/dist/messages/metadata.d.cts +97 -0
  233. package/dist/langchain-core/dist/messages/metadata.d.cts.map +1 -0
  234. package/dist/langchain-core/dist/messages/utils.d.cts +75 -0
  235. package/dist/langchain-core/dist/messages/utils.d.cts.map +1 -0
  236. package/dist/langchain-core/dist/prompt_values.d.cts.map +1 -1
  237. package/dist/libs/langchain-core/dist/load/serializable.d.ts.map +1 -1
  238. package/dist/libs/langchain-core/dist/messages/base.d.ts +24 -33
  239. package/dist/libs/langchain-core/dist/messages/base.d.ts.map +1 -1
  240. package/dist/libs/langchain-core/dist/messages/content/index.d.ts +1 -1
  241. package/dist/libs/langchain-core/dist/messages/content/index.d.ts.map +1 -1
  242. package/dist/libs/langchain-core/dist/messages/message.d.ts +598 -0
  243. package/dist/libs/langchain-core/dist/messages/message.d.ts.map +1 -0
  244. package/dist/libs/langchain-core/dist/messages/metadata.d.ts +97 -0
  245. package/dist/libs/langchain-core/dist/messages/metadata.d.ts.map +1 -0
  246. package/dist/libs/langchain-core/dist/messages/utils.d.ts +75 -0
  247. package/dist/libs/langchain-core/dist/messages/utils.d.ts.map +1 -0
  248. package/dist/libs/langchain-core/dist/prompt_values.d.ts.map +1 -1
  249. package/dist/libs/langchain-core/dist/utils/types/index.d.ts +2 -0
  250. package/dist/libs/langchain-core/dist/utils/types/index.d.ts.map +1 -1
  251. package/dist/libs/langchain-core/dist/utils/types/zod.d.ts +1 -0
  252. package/dist/load/import_map.cjs +3 -14
  253. package/dist/load/import_map.cjs.map +1 -1
  254. package/dist/load/import_map.js +3 -14
  255. package/dist/load/import_map.js.map +1 -1
  256. package/dist/memory/prompt.cjs.map +1 -1
  257. package/dist/memory/prompt.d.cts.map +1 -1
  258. package/dist/memory/prompt.d.ts.map +1 -1
  259. package/dist/memory/prompt.js.map +1 -1
  260. package/dist/output_parsers/combining.cjs +1 -1
  261. package/dist/output_parsers/combining.cjs.map +1 -1
  262. package/dist/output_parsers/combining.js +1 -1
  263. package/dist/output_parsers/combining.js.map +1 -1
  264. package/dist/output_parsers/expression_type_handlers/array_literal_expression_handler.cjs.map +1 -1
  265. package/dist/output_parsers/expression_type_handlers/array_literal_expression_handler.js.map +1 -1
  266. package/dist/output_parsers/expression_type_handlers/base.cjs +1 -1
  267. package/dist/output_parsers/expression_type_handlers/base.cjs.map +1 -1
  268. package/dist/output_parsers/expression_type_handlers/base.js +1 -1
  269. package/dist/output_parsers/expression_type_handlers/base.js.map +1 -1
  270. package/dist/output_parsers/regex.cjs.map +1 -1
  271. package/dist/output_parsers/regex.js.map +1 -1
  272. package/dist/output_parsers/structured.cjs +4 -4
  273. package/dist/output_parsers/structured.cjs.map +1 -1
  274. package/dist/output_parsers/structured.d.cts +1 -1
  275. package/dist/output_parsers/structured.d.cts.map +1 -1
  276. package/dist/output_parsers/structured.d.ts +1 -1
  277. package/dist/output_parsers/structured.d.ts.map +1 -1
  278. package/dist/output_parsers/structured.js +2 -2
  279. package/dist/output_parsers/structured.js.map +1 -1
  280. package/dist/retrievers/ensemble.cjs.map +1 -1
  281. package/dist/retrievers/ensemble.js.map +1 -1
  282. package/dist/storage/file_system.cjs +1 -1
  283. package/dist/storage/file_system.cjs.map +1 -1
  284. package/dist/storage/file_system.js +1 -1
  285. package/dist/storage/file_system.js.map +1 -1
  286. package/dist/tools/fs.cjs +5 -5
  287. package/dist/tools/fs.cjs.map +1 -1
  288. package/dist/tools/fs.d.cts +1 -1
  289. package/dist/tools/fs.d.cts.map +1 -1
  290. package/dist/tools/fs.d.ts +1 -1
  291. package/dist/tools/fs.d.ts.map +1 -1
  292. package/dist/tools/fs.js +1 -1
  293. package/dist/tools/fs.js.map +1 -1
  294. package/dist/tools/retriever.cjs +2 -2
  295. package/dist/tools/retriever.cjs.map +1 -1
  296. package/dist/tools/retriever.d.cts +1 -1
  297. package/dist/tools/retriever.d.cts.map +1 -1
  298. package/dist/tools/retriever.d.ts +1 -1
  299. package/dist/tools/retriever.d.ts.map +1 -1
  300. package/dist/tools/retriever.js +1 -1
  301. package/dist/tools/retriever.js.map +1 -1
  302. package/dist/tools/sql.cjs +1 -2
  303. package/dist/tools/sql.cjs.map +1 -1
  304. package/dist/tools/sql.d.cts +1 -1
  305. package/dist/tools/sql.d.cts.map +1 -1
  306. package/dist/tools/sql.d.ts +1 -1
  307. package/dist/tools/sql.d.ts.map +1 -1
  308. package/dist/tools/sql.js +1 -2
  309. package/dist/tools/sql.js.map +1 -1
  310. package/dist/types/expression-parser.d.cts +2 -0
  311. package/dist/types/expression-parser.d.cts.map +1 -1
  312. package/dist/types/expression-parser.d.ts +2 -0
  313. package/dist/types/expression-parser.d.ts.map +1 -1
  314. package/dist/util/hub.cjs +1 -1
  315. package/dist/util/hub.js +1 -1
  316. package/dist/util/openapi.cjs +1 -1
  317. package/dist/util/openapi.cjs.map +1 -1
  318. package/dist/util/openapi.js +1 -1
  319. package/dist/util/openapi.js.map +1 -1
  320. package/package.json +15 -21
  321. package/dist/agents/middlewareAgent/middlewares/hitl.cjs.map +0 -1
  322. package/dist/agents/middlewareAgent/middlewares/hitl.d.cts.map +0 -1
  323. package/dist/agents/middlewareAgent/middlewares/hitl.d.ts.map +0 -1
  324. package/dist/agents/middlewareAgent/middlewares/hitl.js.map +0 -1
  325. package/dist/agents/middlewareAgent/middlewares/index.cjs +0 -8
  326. package/dist/agents/middlewareAgent/middlewares/index.js +0 -5
  327. package/dist/agents/middlewareAgent/middlewares/promptCaching.cjs.map +0 -1
  328. package/dist/agents/middlewareAgent/middlewares/promptCaching.d.cts.map +0 -1
  329. package/dist/agents/middlewareAgent/middlewares/promptCaching.d.ts.map +0 -1
  330. package/dist/agents/middlewareAgent/middlewares/promptCaching.js.map +0 -1
  331. package/dist/agents/middlewareAgent/middlewares/summarization.cjs.map +0 -1
  332. package/dist/agents/middlewareAgent/middlewares/summarization.d.ts.map +0 -1
  333. package/dist/agents/middlewareAgent/middlewares/summarization.js.map +0 -1
  334. package/dist/chains/openai_functions/structured_output.cjs +0 -107
  335. package/dist/chains/openai_functions/structured_output.cjs.map +0 -1
  336. package/dist/chains/openai_functions/structured_output.d.cts +0 -38
  337. package/dist/chains/openai_functions/structured_output.d.cts.map +0 -1
  338. package/dist/chains/openai_functions/structured_output.d.ts +0 -38
  339. package/dist/chains/openai_functions/structured_output.d.ts.map +0 -1
  340. package/dist/chains/openai_functions/structured_output.js +0 -105
  341. package/dist/chains/openai_functions/structured_output.js.map +0 -1
  342. package/dist/chains/openai_moderation.cjs +0 -107
  343. package/dist/chains/openai_moderation.cjs.map +0 -1
  344. package/dist/chains/openai_moderation.d.cts +0 -74
  345. package/dist/chains/openai_moderation.d.cts.map +0 -1
  346. package/dist/chains/openai_moderation.d.ts +0 -74
  347. package/dist/chains/openai_moderation.d.ts.map +0 -1
  348. package/dist/chains/openai_moderation.js +0 -106
  349. package/dist/chains/openai_moderation.js.map +0 -1
  350. /package/dist/agents/middlewareAgent/{middlewares → middleware}/index.d.cts +0 -0
  351. /package/dist/agents/middlewareAgent/{middlewares → middleware}/index.d.ts +0 -0
package/dist/chat_models/universal.js.map
@@ -1 +1 @@
- {"version":3,"file":"universal.js","names":["className: string","config","e: unknown","model: string","modelProvider?: string","params: Record<string, any>","modelName: string","fields: ConfigurableModelFields","config?: RunnableConfig","messages: BaseMessage[]","options?: this[\"ParsedCallOptions\"]","runManager?: CallbackManagerForLLMRun","tools: BindToolsInput[]","params?: Record<string, any>","modelParams: Record<string, any>","str: string","prefix: string","mergedConfig: RunnableConfig","remainingConfig: RunnableConfig","input: RunInput","options?: CallOptions","inputs: RunInput[]","options?: Partial<CallOptions> | Partial<CallOptions>[]","batchOptions?: RunnableBatchOptions","generator: AsyncGenerator<RunInput>","options: CallOptions","options?: Partial<CallOptions>","streamOptions?: Omit<LogStreamCallbackHandlerInput, \"autoClose\">","options: Partial<CallOptions> & {\n version: \"v1\" | \"v2\";\n encoding?: \"text/event-stream\" | undefined;\n }","streamOptions?: Omit<EventStreamCallbackHandlerInput, \"autoClose\">","model?: string","fields?: Partial<Record<string, any>> & {\n modelProvider?: string;\n configurableFields?: string[] | \"any\";\n configPrefix?: string;\n }","paramsCopy: Record<string, any>"],"sources":["../../src/chat_models/universal.ts"],"sourcesContent":["/* eslint-disable import/no-extraneous-dependencies */\nimport {\n BaseLanguageModelInput,\n ToolDefinition,\n} from \"@langchain/core/language_models/base\";\nimport {\n BaseChatModel,\n BaseChatModelParams,\n BindToolsInput,\n type BaseChatModelCallOptions,\n} from \"@langchain/core/language_models/chat_models\";\nimport { BaseMessage, type AIMessageChunk } from \"@langchain/core/messages\";\nimport {\n type RunnableBatchOptions,\n RunnableBinding,\n type RunnableConfig,\n type RunnableToolLike,\n ensureConfig,\n} from \"@langchain/core/runnables\";\nimport {\n AsyncGeneratorWithSetup,\n IterableReadableStream,\n} from \"@langchain/core/utils/stream\";\nimport {\n type LogStreamCallbackHandlerInput,\n type RunLogPatch,\n type StreamEvent,\n} from \"@langchain/core/tracers/log_stream\";\nimport { type StructuredToolInterface } from \"@langchain/core/tools\";\nimport { CallbackManagerForLLMRun } from \"@langchain/core/callbacks/manager\";\nimport { ChatResult } from \"@langchain/core/outputs\";\n\n// TODO: remove once `EventStreamCallbackHandlerInput` is exposed in core\ninterface EventStreamCallbackHandlerInput\n extends Omit<LogStreamCallbackHandlerInput, \"_schemaFormat\"> {}\n\nexport interface ConfigurableChatModelCallOptions\n extends BaseChatModelCallOptions {\n tools?: (\n | StructuredToolInterface\n | Record<string, unknown>\n | ToolDefinition\n | RunnableToolLike\n )[];\n}\n\n// Configuration map for model providers\nexport const MODEL_PROVIDER_CONFIG = {\n openai: {\n package: \"@langchain/openai\",\n className: \"ChatOpenAI\",\n },\n anthropic: {\n package: \"@langchain/anthropic\",\n className: \"ChatAnthropic\",\n },\n azure_openai: {\n package: \"@langchain/openai\",\n className: \"AzureChatOpenAI\",\n },\n cohere: {\n package: \"@langchain/cohere\",\n className: \"ChatCohere\",\n },\n \"google-vertexai\": {\n package: \"@langchain/google-vertexai\",\n className: \"ChatVertexAI\",\n },\n \"google-vertexai-web\": {\n package: \"@langchain/google-vertexai-web\",\n className: \"ChatVertexAI\",\n },\n \"google-genai\": {\n package: \"@langchain/google-genai\",\n className: \"ChatGoogleGenerativeAI\",\n },\n ollama: {\n package: \"@langchain/ollama\",\n className: \"ChatOllama\",\n },\n mistralai: {\n 
package: \"@langchain/mistralai\",\n className: \"ChatMistralAI\",\n },\n groq: {\n package: \"@langchain/groq\",\n className: \"ChatGroq\",\n },\n cerebras: {\n package: \"@langchain/cerebras\",\n className: \"ChatCerebras\",\n },\n bedrock: {\n package: \"@langchain/aws\",\n className: \"ChatBedrockConverse\",\n },\n deepseek: {\n package: \"@langchain/deepseek\",\n className: \"ChatDeepSeek\",\n },\n xai: {\n package: \"@langchain/xai\",\n className: \"ChatXAI\",\n },\n fireworks: {\n package: \"@langchain/community/chat_models/fireworks\",\n className: \"ChatFireworks\",\n hasCircularDependency: true,\n },\n together: {\n package: \"@langchain/community/chat_models/togetherai\",\n className: \"ChatTogetherAI\",\n hasCircularDependency: true,\n },\n} as const;\n\nconst SUPPORTED_PROVIDERS = Object.keys(\n MODEL_PROVIDER_CONFIG\n) as (keyof typeof MODEL_PROVIDER_CONFIG)[];\nexport type ChatModelProvider = keyof typeof MODEL_PROVIDER_CONFIG;\ntype ModelProviderConfig = {\n package: string;\n className: string;\n hasCircularDependency?: boolean;\n};\n\n/**\n * Helper function to get a chat model class by its class name\n * @param className The class name (e.g., \"ChatOpenAI\", \"ChatAnthropic\")\n * @returns The imported model class or undefined if not found\n */\nexport async function getChatModelByClassName(className: string) {\n // Find the provider config that matches the class name\n const providerEntry = Object.entries(MODEL_PROVIDER_CONFIG).find(\n ([, config]) => config.className === className\n );\n\n if (!providerEntry) {\n return undefined;\n }\n\n const [, config] = providerEntry;\n try {\n const module = await import(config.package);\n return module[config.className];\n } catch (e: unknown) {\n const err = e as Error;\n if (\n \"code\" in err &&\n err.code?.toString().includes(\"ERR_MODULE_NOT_FOUND\")\n ) {\n const attemptedPackage = err.message\n .split(\"Error: Cannot find package '\")[1]\n .split(\"'\")[0];\n throw new Error(\n `Unable to import ${attemptedPackage}. 
Please install with ` +\n `\\`npm install ${attemptedPackage}\\` or \\`pnpm install ${attemptedPackage}\\``\n );\n }\n throw e;\n }\n}\n\nasync function _initChatModelHelper(\n model: string,\n modelProvider?: string,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n params: Record<string, any> = {}\n): Promise<BaseChatModel> {\n const modelProviderCopy = modelProvider || _inferModelProvider(model);\n if (!modelProviderCopy) {\n throw new Error(\n `Unable to infer model provider for { model: ${model} }, please specify modelProvider directly.`\n );\n }\n\n const config = MODEL_PROVIDER_CONFIG[\n modelProviderCopy as keyof typeof MODEL_PROVIDER_CONFIG\n ] as ModelProviderConfig;\n if (!config) {\n const supported = SUPPORTED_PROVIDERS.join(\", \");\n throw new Error(\n `Unsupported { modelProvider: ${modelProviderCopy} }.\\n\\nSupported model providers are: ${supported}`\n );\n }\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n const { modelProvider: _unused, ...passedParams } = params;\n const ProviderClass = await getChatModelByClassName(config.className);\n return new ProviderClass({ model, ...passedParams });\n}\n\n/**\n * Attempts to infer the model provider based on the given model name.\n *\n * @param {string} modelName - The name of the model to infer the provider for.\n * @returns {string | undefined} The inferred model provider name, or undefined if unable to infer.\n *\n * @example\n * _inferModelProvider(\"gpt-4\"); // returns \"openai\"\n * _inferModelProvider(\"claude-2\"); // returns \"anthropic\"\n * _inferModelProvider(\"unknown-model\"); // returns undefined\n */\nexport function _inferModelProvider(modelName: string): string | undefined {\n if (\n modelName.startsWith(\"gpt-3\") ||\n modelName.startsWith(\"gpt-4\") ||\n modelName.startsWith(\"o1\") ||\n modelName.startsWith(\"o3\") ||\n modelName.startsWith(\"o4\")\n ) {\n return \"openai\";\n } else if (modelName.startsWith(\"claude\")) {\n return \"anthropic\";\n } else if (modelName.startsWith(\"command\")) {\n return \"cohere\";\n } else if (modelName.startsWith(\"accounts/fireworks\")) {\n return \"fireworks\";\n } else if (modelName.startsWith(\"gemini\")) {\n return \"google-vertexai\";\n } else if (modelName.startsWith(\"amazon.\")) {\n return \"bedrock\";\n } else {\n return undefined;\n }\n}\n\ninterface ConfigurableModelFields extends BaseChatModelParams {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n defaultConfig?: Record<string, any>;\n /**\n * @default \"any\"\n */\n configurableFields?: string[] | \"any\";\n /**\n * @default \"\"\n */\n configPrefix?: string;\n /**\n * Methods which should be called after the model is initialized.\n * The key will be the method name, and the value will be the arguments.\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n queuedMethodOperations?: Record<string, any>;\n}\n\n/**\n * Internal class used to create chat models.\n *\n * @internal\n */\nexport class ConfigurableModel<\n RunInput extends BaseLanguageModelInput = BaseLanguageModelInput,\n CallOptions extends ConfigurableChatModelCallOptions = ConfigurableChatModelCallOptions\n> extends BaseChatModel<CallOptions, AIMessageChunk> {\n _llmType(): string {\n return \"chat_model\";\n }\n\n lc_namespace = [\"langchain\", \"chat_models\"];\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _defaultConfig?: Record<string, any> = {};\n\n /**\n * @default \"any\"\n */\n _configurableFields: string[] | \"any\" = \"any\";\n\n /**\n * 
@default \"\"\n */\n _configPrefix: string;\n\n /**\n * Methods which should be called after the model is initialized.\n * The key will be the method name, and the value will be the arguments.\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _queuedMethodOperations: Record<string, any> = {};\n\n constructor(fields: ConfigurableModelFields) {\n super(fields);\n this._defaultConfig = fields.defaultConfig ?? {};\n\n if (fields.configurableFields === \"any\") {\n this._configurableFields = \"any\";\n } else {\n this._configurableFields = fields.configurableFields ?? [\n \"model\",\n \"modelProvider\",\n ];\n }\n\n if (fields.configPrefix) {\n this._configPrefix = fields.configPrefix.endsWith(\"_\")\n ? fields.configPrefix\n : `${fields.configPrefix}_`;\n } else {\n this._configPrefix = \"\";\n }\n\n this._queuedMethodOperations =\n fields.queuedMethodOperations ?? this._queuedMethodOperations;\n }\n\n async _model(config?: RunnableConfig) {\n const params = { ...this._defaultConfig, ...this._modelParams(config) };\n let initializedModel = await _initChatModelHelper(\n params.model,\n params.modelProvider,\n params\n );\n\n // Apply queued method operations\n const queuedMethodOperationsEntries = Object.entries(\n this._queuedMethodOperations\n );\n if (queuedMethodOperationsEntries.length > 0) {\n for (const [method, args] of queuedMethodOperationsEntries) {\n if (\n method in initializedModel &&\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n typeof (initializedModel as any)[method] === \"function\"\n ) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n initializedModel = await (initializedModel as any)[method](...args);\n }\n }\n }\n\n return initializedModel;\n }\n\n async _generate(\n messages: BaseMessage[],\n options?: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult> {\n const model = await this._model(options);\n return model._generate(messages, options ?? {}, runManager);\n }\n\n override bindTools(\n tools: BindToolsInput[],\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n params?: Record<string, any>\n ): ConfigurableModel<RunInput, CallOptions> {\n this._queuedMethodOperations.bindTools = [tools, params];\n return new ConfigurableModel<RunInput, CallOptions>({\n defaultConfig: this._defaultConfig,\n configurableFields: this._configurableFields,\n configPrefix: this._configPrefix,\n queuedMethodOperations: this._queuedMethodOperations,\n });\n }\n\n // Extract the input types from the `BaseModel` class.\n withStructuredOutput: BaseChatModel[\"withStructuredOutput\"] = (\n schema,\n ...args\n ): ReturnType<BaseChatModel[\"withStructuredOutput\"]> => {\n this._queuedMethodOperations.withStructuredOutput = [schema, ...args];\n return new ConfigurableModel<RunInput, CallOptions>({\n defaultConfig: this._defaultConfig,\n configurableFields: this._configurableFields,\n configPrefix: this._configPrefix,\n queuedMethodOperations: this._queuedMethodOperations,\n }) as unknown as ReturnType<BaseChatModel[\"withStructuredOutput\"]>;\n };\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _modelParams(config?: RunnableConfig): Record<string, any> {\n const configurable = config?.configurable ?? 
{};\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let modelParams: Record<string, any> = {};\n\n for (const [key, value] of Object.entries(configurable)) {\n if (key.startsWith(this._configPrefix)) {\n const strippedKey = this._removePrefix(key, this._configPrefix);\n modelParams[strippedKey] = value;\n }\n }\n\n if (this._configurableFields !== \"any\") {\n modelParams = Object.fromEntries(\n Object.entries(modelParams).filter(([key]) =>\n this._configurableFields.includes(key)\n )\n );\n }\n\n return modelParams;\n }\n\n _removePrefix(str: string, prefix: string): string {\n return str.startsWith(prefix) ? str.slice(prefix.length) : str;\n }\n\n /**\n * Bind config to a Runnable, returning a new Runnable.\n * @param {RunnableConfig | undefined} [config] - The config to bind.\n * @returns {RunnableBinding<RunInput, RunOutput, CallOptions>} A new RunnableBinding with the bound config.\n */\n withConfig(\n config?: RunnableConfig\n ): RunnableBinding<RunInput, AIMessageChunk, CallOptions> {\n const mergedConfig: RunnableConfig = { ...(config || {}) };\n const modelParams = this._modelParams(mergedConfig);\n\n const remainingConfig: RunnableConfig = Object.fromEntries(\n Object.entries(mergedConfig).filter(([k]) => k !== \"configurable\")\n );\n\n remainingConfig.configurable = Object.fromEntries(\n Object.entries(mergedConfig.configurable || {}).filter(\n ([k]) =>\n this._configPrefix &&\n !Object.keys(modelParams).includes(\n this._removePrefix(k, this._configPrefix)\n )\n )\n );\n\n const newConfigurableModel = new ConfigurableModel<RunInput, CallOptions>({\n defaultConfig: { ...this._defaultConfig, ...modelParams },\n configurableFields: Array.isArray(this._configurableFields)\n ? [...this._configurableFields]\n : this._configurableFields,\n configPrefix: this._configPrefix,\n queuedMethodOperations: this._queuedMethodOperations,\n });\n\n return new RunnableBinding<RunInput, AIMessageChunk, CallOptions>({\n config: mergedConfig,\n bound: newConfigurableModel,\n });\n }\n\n async invoke(\n input: RunInput,\n options?: CallOptions\n ): Promise<AIMessageChunk> {\n const model = await this._model(options);\n const config = ensureConfig(options);\n return model.invoke(input, config);\n }\n\n async stream(\n input: RunInput,\n options?: CallOptions\n ): Promise<IterableReadableStream<AIMessageChunk>> {\n const model = await this._model(options);\n const wrappedGenerator = new AsyncGeneratorWithSetup({\n generator: await model.stream(input, options),\n config: options,\n });\n await wrappedGenerator.setup;\n return IterableReadableStream.fromAsyncGenerator(wrappedGenerator);\n }\n\n async batch(\n inputs: RunInput[],\n options?: Partial<CallOptions> | Partial<CallOptions>[],\n batchOptions?: RunnableBatchOptions & { returnExceptions?: false }\n ): Promise<AIMessageChunk[]>;\n\n async batch(\n inputs: RunInput[],\n options?: Partial<CallOptions> | Partial<CallOptions>[],\n batchOptions?: RunnableBatchOptions & { returnExceptions: true }\n ): Promise<(AIMessageChunk | Error)[]>;\n\n async batch(\n inputs: RunInput[],\n options?: Partial<CallOptions> | Partial<CallOptions>[],\n batchOptions?: RunnableBatchOptions\n ): Promise<(AIMessageChunk | Error)[]>;\n\n async batch(\n inputs: RunInput[],\n options?: Partial<CallOptions> | Partial<CallOptions>[],\n batchOptions?: RunnableBatchOptions\n ): Promise<(AIMessageChunk | Error)[]> {\n // We can super this since the base runnable implementation of\n // `.batch` will call `.invoke` on each input.\n return super.batch(inputs, 
options, batchOptions);\n }\n\n async *transform(\n generator: AsyncGenerator<RunInput>,\n options: CallOptions\n ): AsyncGenerator<AIMessageChunk> {\n const model = await this._model(options);\n const config = ensureConfig(options);\n\n yield* model.transform(generator, config);\n }\n\n async *streamLog(\n input: RunInput,\n options?: Partial<CallOptions>,\n streamOptions?: Omit<LogStreamCallbackHandlerInput, \"autoClose\">\n ): AsyncGenerator<RunLogPatch> {\n const model = await this._model(options);\n const config = ensureConfig(options);\n\n yield* model.streamLog(input, config, {\n ...streamOptions,\n _schemaFormat: \"original\",\n includeNames: streamOptions?.includeNames,\n includeTypes: streamOptions?.includeTypes,\n includeTags: streamOptions?.includeTags,\n excludeNames: streamOptions?.excludeNames,\n excludeTypes: streamOptions?.excludeTypes,\n excludeTags: streamOptions?.excludeTags,\n });\n }\n\n streamEvents(\n input: RunInput,\n options: Partial<CallOptions> & { version: \"v1\" | \"v2\" },\n streamOptions?: Omit<EventStreamCallbackHandlerInput, \"autoClose\">\n ): IterableReadableStream<StreamEvent>;\n\n streamEvents(\n input: RunInput,\n options: Partial<CallOptions> & {\n version: \"v1\" | \"v2\";\n encoding: \"text/event-stream\";\n },\n streamOptions?: Omit<EventStreamCallbackHandlerInput, \"autoClose\">\n ): IterableReadableStream<Uint8Array>;\n\n streamEvents(\n input: RunInput,\n options: Partial<CallOptions> & {\n version: \"v1\" | \"v2\";\n encoding?: \"text/event-stream\" | undefined;\n },\n streamOptions?: Omit<EventStreamCallbackHandlerInput, \"autoClose\">\n ): IterableReadableStream<StreamEvent | Uint8Array> {\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const outerThis = this;\n async function* wrappedGenerator() {\n const model = await outerThis._model(options);\n const config = ensureConfig(options);\n const eventStream = model.streamEvents(input, config, streamOptions);\n\n for await (const chunk of eventStream) {\n yield chunk;\n }\n }\n return IterableReadableStream.fromAsyncGenerator(wrappedGenerator());\n }\n}\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport interface InitChatModelFields extends Partial<Record<string, any>> {\n modelProvider?: string;\n configurableFields?: string[] | \"any\";\n configPrefix?: string;\n}\n\nexport type ConfigurableFields = \"any\" | string[];\n\nexport async function initChatModel<\n RunInput extends BaseLanguageModelInput = BaseLanguageModelInput,\n CallOptions extends ConfigurableChatModelCallOptions = ConfigurableChatModelCallOptions\n>(\n model: string,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n fields?: Partial<Record<string, any>> & {\n modelProvider?: string;\n configurableFields?: never;\n configPrefix?: string;\n }\n): Promise<ConfigurableModel<RunInput, CallOptions>>;\n\nexport async function initChatModel<\n RunInput extends BaseLanguageModelInput = BaseLanguageModelInput,\n CallOptions extends ConfigurableChatModelCallOptions = ConfigurableChatModelCallOptions\n>(\n model: never,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n options?: Partial<Record<string, any>> & {\n modelProvider?: string;\n configurableFields?: never;\n configPrefix?: string;\n }\n): Promise<ConfigurableModel<RunInput, CallOptions>>;\n\nexport async function initChatModel<\n RunInput extends BaseLanguageModelInput = BaseLanguageModelInput,\n CallOptions extends ConfigurableChatModelCallOptions = ConfigurableChatModelCallOptions\n>(\n model?: 
string,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n options?: Partial<Record<string, any>> & {\n modelProvider?: string;\n configurableFields?: ConfigurableFields;\n configPrefix?: string;\n }\n): Promise<ConfigurableModel<RunInput, CallOptions>>;\n\n// ################################# FOR CONTRIBUTORS #################################\n//\n// If adding support for a new provider, please append the provider\n// name to the supported list in the docstring below.\n//\n// ####################################################################################\n\n/**\n * Initialize a ChatModel from the model name and provider.\n * Must have the integration package corresponding to the model provider installed.\n *\n * @template {extends BaseLanguageModelInput = BaseLanguageModelInput} RunInput - The input type for the model.\n * @template {extends ConfigurableChatModelCallOptions = ConfigurableChatModelCallOptions} CallOptions - Call options for the model.\n *\n * @param {string | ChatModelProvider} [model] - The name of the model, e.g. \"gpt-4\", \"claude-3-opus-20240229\".\n * Can be prefixed with the model provider, e.g. \"openai:gpt-4\", \"anthropic:claude-3-opus-20240229\".\n * @param {Object} [fields] - Additional configuration options.\n * @param {string} [fields.modelProvider] - The model provider. Supported values include:\n * - openai (@langchain/openai)\n * - anthropic (@langchain/anthropic)\n * - azure_openai (@langchain/openai)\n * - google-vertexai (@langchain/google-vertexai)\n * - google-vertexai-web (@langchain/google-vertexai-web)\n * - google-genai (@langchain/google-genai)\n * - bedrock (@langchain/aws)\n * - cohere (@langchain/cohere)\n * - fireworks (@langchain/community/chat_models/fireworks)\n * - together (@langchain/community/chat_models/togetherai)\n * - mistralai (@langchain/mistralai)\n * - groq (@langchain/groq)\n * - ollama (@langchain/ollama)\n * - cerebras (@langchain/cerebras)\n * - deepseek (@langchain/deepseek)\n * - xai (@langchain/xai)\n * @param {string[] | \"any\"} [fields.configurableFields] - Which model parameters are configurable:\n * - undefined: No configurable fields.\n * - \"any\": All fields are configurable. 
(See Security Note in description)\n * - string[]: Specified fields are configurable.\n * @param {string} [fields.configPrefix] - Prefix for configurable fields at runtime.\n * @param {Record<string, any>} [fields.params] - Additional keyword args to pass to the ChatModel constructor.\n * @returns {Promise<ConfigurableModel<RunInput, CallOptions>>} A class which extends BaseChatModel.\n * @throws {Error} If modelProvider cannot be inferred or isn't supported.\n * @throws {Error} If the model provider integration package is not installed.\n *\n * @example Initialize non-configurable models\n * ```typescript\n * import { initChatModel } from \"langchain/chat_models/universal\";\n *\n * const gpt4 = await initChatModel(\"openai:gpt-4\", {\n * temperature: 0.25,\n * });\n * const gpt4Result = await gpt4.invoke(\"what's your name\");\n *\n * const claude = await initChatModel(\"anthropic:claude-3-opus-20240229\", {\n * temperature: 0.25,\n * });\n * const claudeResult = await claude.invoke(\"what's your name\");\n *\n * const gemini = await initChatModel(\"gemini-1.5-pro\", {\n * modelProvider: \"google-vertexai\",\n * temperature: 0.25,\n * });\n * const geminiResult = await gemini.invoke(\"what's your name\");\n * ```\n *\n * @example Create a partially configurable model with no default model\n * ```typescript\n * import { initChatModel } from \"langchain/chat_models/universal\";\n *\n * const configurableModel = await initChatModel(undefined, {\n * temperature: 0,\n * configurableFields: [\"model\", \"apiKey\"],\n * });\n *\n * const gpt4Result = await configurableModel.invoke(\"what's your name\", {\n * configurable: {\n * model: \"gpt-4\",\n * },\n * });\n *\n * const claudeResult = await configurableModel.invoke(\"what's your name\", {\n * configurable: {\n * model: \"claude-3-5-sonnet-20240620\",\n * },\n * });\n * ```\n *\n * @example Create a fully configurable model with a default model and a config prefix\n * ```typescript\n * import { initChatModel } from \"langchain/chat_models/universal\";\n *\n * const configurableModelWithDefault = await initChatModel(\"gpt-4\", {\n * modelProvider: \"openai\",\n * configurableFields: \"any\",\n * configPrefix: \"foo\",\n * temperature: 0,\n * });\n *\n * const openaiResult = await configurableModelWithDefault.invoke(\n * \"what's your name\",\n * {\n * configurable: {\n * foo_apiKey: process.env.OPENAI_API_KEY,\n * },\n * }\n * );\n *\n * const claudeResult = await configurableModelWithDefault.invoke(\n * \"what's your name\",\n * {\n * configurable: {\n * foo_model: \"claude-3-5-sonnet-20240620\",\n * foo_modelProvider: \"anthropic\",\n * foo_temperature: 0.6,\n * foo_apiKey: process.env.ANTHROPIC_API_KEY,\n * },\n * }\n * );\n * ```\n *\n * @example Bind tools to a configurable model:\n * ```typescript\n * import { initChatModel } from \"langchain/chat_models/universal\";\n * import { z } from \"zod\";\n * import { tool } from \"@langchain/core/tools\";\n *\n * const getWeatherTool = tool(\n * (input) => {\n * // Do something with the input\n * return JSON.stringify(input);\n * },\n * {\n * schema: z\n * .object({\n * location: z\n * .string()\n * .describe(\"The city and state, e.g. 
San Francisco, CA\"),\n * })\n * .describe(\"Get the current weather in a given location\"),\n * name: \"GetWeather\",\n * description: \"Get the current weather in a given location\",\n * }\n * );\n *\n * const getPopulationTool = tool(\n * (input) => {\n * // Do something with the input\n * return JSON.stringify(input);\n * },\n * {\n * schema: z\n * .object({\n * location: z\n * .string()\n * .describe(\"The city and state, e.g. San Francisco, CA\"),\n * })\n * .describe(\"Get the current population in a given location\"),\n * name: \"GetPopulation\",\n * description: \"Get the current population in a given location\",\n * }\n * );\n *\n * const configurableModel = await initChatModel(\"gpt-4\", {\n * configurableFields: [\"model\", \"modelProvider\", \"apiKey\"],\n * temperature: 0,\n * });\n *\n * const configurableModelWithTools = configurableModel.bindTools([\n * getWeatherTool,\n * getPopulationTool,\n * ]);\n *\n * const configurableToolResult = await configurableModelWithTools.invoke(\n * \"Which city is hotter today and which is bigger: LA or NY?\",\n * {\n * configurable: {\n * apiKey: process.env.OPENAI_API_KEY,\n * },\n * }\n * );\n *\n * const configurableToolResult2 = await configurableModelWithTools.invoke(\n * \"Which city is hotter today and which is bigger: LA or NY?\",\n * {\n * configurable: {\n * model: \"claude-3-5-sonnet-20240620\",\n * apiKey: process.env.ANTHROPIC_API_KEY,\n * },\n * }\n * );\n * ```\n *\n * @description\n * This function initializes a ChatModel based on the provided model name and provider.\n * It supports various model providers and allows for runtime configuration of model parameters.\n *\n * Security Note: Setting `configurableFields` to \"any\" means fields like apiKey, baseUrl, etc.\n * can be altered at runtime, potentially redirecting model requests to a different service/user.\n * Make sure that if you're accepting untrusted configurations, you enumerate the\n * `configurableFields` explicitly.\n *\n * The function will attempt to infer the model provider from the model name if not specified.\n * Certain model name prefixes are associated with specific providers:\n * - gpt-3... or gpt-4... -> openai\n * - claude... -> anthropic\n * - amazon.... -> bedrock\n * - gemini... -> google-vertexai\n * - command... -> cohere\n * - accounts/fireworks... -> fireworks\n *\n * @since 0.2.11\n * @version 0.2.11\n */\nexport async function initChatModel<\n RunInput extends BaseLanguageModelInput = BaseLanguageModelInput,\n CallOptions extends ConfigurableChatModelCallOptions = ConfigurableChatModelCallOptions\n>(\n model?: string,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n fields?: Partial<Record<string, any>> & {\n modelProvider?: string;\n configurableFields?: string[] | \"any\";\n configPrefix?: string;\n }\n): Promise<ConfigurableModel<RunInput, CallOptions>> {\n // eslint-disable-next-line prefer-const\n let { configurableFields, configPrefix, modelProvider, ...params } = {\n configPrefix: \"\",\n ...(fields ?? {}),\n };\n if (modelProvider === undefined && model?.includes(\":\")) {\n const modelComponents = model.split(\":\", 2);\n if (SUPPORTED_PROVIDERS.includes(modelComponents[0] as ChatModelProvider)) {\n // eslint-disable-next-line no-param-reassign\n [modelProvider, model] = modelComponents;\n }\n }\n let configurableFieldsCopy = Array.isArray(configurableFields)\n ? 
[...configurableFields]\n : configurableFields;\n\n if (!model && configurableFieldsCopy === undefined) {\n configurableFieldsCopy = [\"model\", \"modelProvider\"];\n }\n if (configPrefix && configurableFieldsCopy === undefined) {\n console.warn(\n `{ configPrefix: ${configPrefix} } has been set but no fields are configurable. Set ` +\n `{ configurableFields: [...] } to specify the model params that are ` +\n `configurable.`\n );\n }\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const paramsCopy: Record<string, any> = { ...params };\n\n if (configurableFieldsCopy === undefined) {\n return new ConfigurableModel<RunInput, CallOptions>({\n defaultConfig: {\n ...paramsCopy,\n model,\n modelProvider,\n },\n configPrefix,\n });\n } else {\n if (model) {\n paramsCopy.model = model;\n }\n if (modelProvider) {\n paramsCopy.modelProvider = modelProvider;\n }\n return new ConfigurableModel<RunInput, CallOptions>({\n defaultConfig: paramsCopy,\n configPrefix,\n configurableFields: configurableFieldsCopy,\n });\n }\n}\n"],"mappings":";;;;;;;;;;;;;;AA+CA,MAAa,wBAAwB;CACnC,QAAQ;EACN,SAAS;EACT,WAAW;CACZ;CACD,WAAW;EACT,SAAS;EACT,WAAW;CACZ;CACD,cAAc;EACZ,SAAS;EACT,WAAW;CACZ;CACD,QAAQ;EACN,SAAS;EACT,WAAW;CACZ;CACD,mBAAmB;EACjB,SAAS;EACT,WAAW;CACZ;CACD,uBAAuB;EACrB,SAAS;EACT,WAAW;CACZ;CACD,gBAAgB;EACd,SAAS;EACT,WAAW;CACZ;CACD,QAAQ;EACN,SAAS;EACT,WAAW;CACZ;CACD,WAAW;EACT,SAAS;EACT,WAAW;CACZ;CACD,MAAM;EACJ,SAAS;EACT,WAAW;CACZ;CACD,UAAU;EACR,SAAS;EACT,WAAW;CACZ;CACD,SAAS;EACP,SAAS;EACT,WAAW;CACZ;CACD,UAAU;EACR,SAAS;EACT,WAAW;CACZ;CACD,KAAK;EACH,SAAS;EACT,WAAW;CACZ;CACD,WAAW;EACT,SAAS;EACT,WAAW;EACX,uBAAuB;CACxB;CACD,UAAU;EACR,SAAS;EACT,WAAW;EACX,uBAAuB;CACxB;AACF;AAED,MAAM,sBAAsB,OAAO,KACjC,sBACD;;;;;;AAaD,eAAsB,wBAAwBA,WAAmB;CAE/D,MAAM,gBAAgB,OAAO,QAAQ,sBAAsB,CAAC,KAC1D,CAAC,GAAGC,SAAO,KAAKA,SAAO,cAAc,UACtC;AAED,KAAI,CAAC,cACH,QAAO;CAGT,MAAM,GAAG,OAAO,GAAG;AACnB,KAAI;EACF,MAAM,SAAS,MAAM,OAAO,OAAO;AACnC,SAAO,OAAO,OAAO;CACtB,SAAQC,GAAY;EACnB,MAAM,MAAM;AACZ,MACE,UAAU,OACV,IAAI,MAAM,UAAU,CAAC,SAAS,uBAAuB,EACrD;GACA,MAAM,mBAAmB,IAAI,QAC1B,MAAM,+BAA+B,CAAC,GACtC,MAAM,IAAI,CAAC;AACd,SAAM,IAAI,MACR,CAAC,iBAAiB,EAAE,iBAAiB,oCAAsB,EACxC,iBAAiB,qBAAqB,EAAE,iBAAiB,EAAE,CAAC;EAElF;AACD,QAAM;CACP;AACF;AAED,eAAe,qBACbC,OACAC,eAEAC,SAA8B,CAAE,GACR;CACxB,MAAM,oBAAoB,iBAAiB,oBAAoB,MAAM;AACrE,KAAI,CAAC,kBACH,OAAM,IAAI,MACR,CAAC,4CAA4C,EAAE,MAAM,0CAA0C,CAAC;CAIpG,MAAM,SAAS,sBACb;AAEF,KAAI,CAAC,QAAQ;EACX,MAAM,YAAY,oBAAoB,KAAK,KAAK;AAChD,QAAM,IAAI,MACR,CAAC,6BAA6B,EAAE,kBAAkB,sCAAsC,EAAE,WAAW;CAExG;CAGD,MAAM,EAAE,eAAe,QAAS,GAAG,cAAc,GAAG;CACpD,MAAM,gBAAgB,MAAM,wBAAwB,OAAO,UAAU;AACrE,QAAO,IAAI,cAAc;EAAE;EAAO,GAAG;CAAc;AACpD;;;;;;;;;;;;AAaD,SAAgB,oBAAoBC,WAAuC;AACzE,KACE,UAAU,WAAW,QAAQ,IAC7B,UAAU,WAAW,QAAQ,IAC7B,UAAU,WAAW,KAAK,IAC1B,UAAU,WAAW,KAAK,IAC1B,UAAU,WAAW,KAAK,CAE1B,QAAO;UACE,UAAU,WAAW,SAAS,CACvC,QAAO;UACE,UAAU,WAAW,UAAU,CACxC,QAAO;UACE,UAAU,WAAW,qBAAqB,CACnD,QAAO;UACE,UAAU,WAAW,SAAS,CACvC,QAAO;UACE,UAAU,WAAW,UAAU,CACxC,QAAO;KAEP,QAAO;AAEV;;;;;;AA0BD,IAAa,oBAAb,MAAa,0BAGH,cAA2C;CACnD,WAAmB;AACjB,SAAO;CACR;CAED,eAAe,CAAC,aAAa,aAAc;CAG3C,iBAAuC,CAAE;;;;CAKzC,sBAAwC;;;;CAKxC;;;;;CAOA,0BAA+C,CAAE;CAEjD,YAAYC,QAAiC;EAC3C,MAAM,OAAO;EACb,KAAK,iBAAiB,OAAO,iBAAiB,CAAE;AAEhD,MAAI,OAAO,uBAAuB,OAChC,KAAK,sBAAsB;OAE3B,KAAK,sBAAsB,OAAO,sBAAsB,CACtD,SACA,eACD;AAGH,MAAI,OAAO,cACT,KAAK,gBAAgB,OAAO,aAAa,SAAS,IAAI,GAClD,OAAO,eACP,GAAG,OAAO,aAAa,CAAC,CAAC;OAE7B,KAAK,gBAAgB;EAGvB,KAAK,0BACH,OAAO,0BAA0B,KAAK;CACzC;CAED,MAAM,OAAOC,QAAyB;EACpC,MAAM,SAAS;GAAE,GAAG,KAAK;GAAgB,GAAG,KAAK,aAAa,OAAO;EAAE;EACvE,IAAI,mBAAmB,MAAM,qBAC3B,
OAAO,OACP,OAAO,eACP,OACD;EAGD,MAAM,gCAAgC,OAAO,QAC3C,KAAK,wBACN;AACD,MAAI,8BAA8B,SAAS,GACzC;QAAK,MAAM,CAAC,QAAQ,KAAK,IAAI,8BAC3B,KACE,UAAU,oBAEV,OAAQ,iBAAyB,YAAY,YAG7C,mBAAmB,MAAO,iBAAyB,QAAQ,GAAG,KAAK;EAEtE;AAGH,SAAO;CACR;CAED,MAAM,UACJC,UACAC,SACAC,YACqB;EACrB,MAAM,QAAQ,MAAM,KAAK,OAAO,QAAQ;AACxC,SAAO,MAAM,UAAU,UAAU,WAAW,CAAE,GAAE,WAAW;CAC5D;CAED,AAAS,UACPC,OAEAC,QAC0C;EAC1C,KAAK,wBAAwB,YAAY,CAAC,OAAO,MAAO;AACxD,SAAO,IAAI,kBAAyC;GAClD,eAAe,KAAK;GACpB,oBAAoB,KAAK;GACzB,cAAc,KAAK;GACnB,wBAAwB,KAAK;EAC9B;CACF;CAGD,uBAA8D,CAC5D,QACA,GAAG,SACmD;EACtD,KAAK,wBAAwB,uBAAuB,CAAC,QAAQ,GAAG,IAAK;AACrE,SAAO,IAAI,kBAAyC;GAClD,eAAe,KAAK;GACpB,oBAAoB,KAAK;GACzB,cAAc,KAAK;GACnB,wBAAwB,KAAK;EAC9B;CACF;CAGD,aAAaL,QAA8C;EACzD,MAAM,eAAe,QAAQ,gBAAgB,CAAE;EAE/C,IAAIM,cAAmC,CAAE;AAEzC,OAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,aAAa,CACrD,KAAI,IAAI,WAAW,KAAK,cAAc,EAAE;GACtC,MAAM,cAAc,KAAK,cAAc,KAAK,KAAK,cAAc;GAC/D,YAAY,eAAe;EAC5B;AAGH,MAAI,KAAK,wBAAwB,OAC/B,cAAc,OAAO,YACnB,OAAO,QAAQ,YAAY,CAAC,OAAO,CAAC,CAAC,IAAI,KACvC,KAAK,oBAAoB,SAAS,IAAI,CACvC,CACF;AAGH,SAAO;CACR;CAED,cAAcC,KAAaC,QAAwB;AACjD,SAAO,IAAI,WAAW,OAAO,GAAG,IAAI,MAAM,OAAO,OAAO,GAAG;CAC5D;;;;;;CAOD,WACER,QACwD;EACxD,MAAMS,eAA+B,EAAE,GAAI,UAAU,CAAE,EAAG;EAC1D,MAAM,cAAc,KAAK,aAAa,aAAa;EAEnD,MAAMC,kBAAkC,OAAO,YAC7C,OAAO,QAAQ,aAAa,CAAC,OAAO,CAAC,CAAC,EAAE,KAAK,MAAM,eAAe,CACnE;EAED,gBAAgB,eAAe,OAAO,YACpC,OAAO,QAAQ,aAAa,gBAAgB,CAAE,EAAC,CAAC,OAC9C,CAAC,CAAC,EAAE,KACF,KAAK,iBACL,CAAC,OAAO,KAAK,YAAY,CAAC,SACxB,KAAK,cAAc,GAAG,KAAK,cAAc,CAC1C,CACJ,CACF;EAED,MAAM,uBAAuB,IAAI,kBAAyC;GACxE,eAAe;IAAE,GAAG,KAAK;IAAgB,GAAG;GAAa;GACzD,oBAAoB,MAAM,QAAQ,KAAK,oBAAoB,GACvD,CAAC,GAAG,KAAK,mBAAoB,IAC7B,KAAK;GACT,cAAc,KAAK;GACnB,wBAAwB,KAAK;EAC9B;AAED,SAAO,IAAI,gBAAuD;GAChE,QAAQ;GACR,OAAO;EACR;CACF;CAED,MAAM,OACJC,OACAC,SACyB;EACzB,MAAM,QAAQ,MAAM,KAAK,OAAO,QAAQ;EACxC,MAAM,SAAS,aAAa,QAAQ;AACpC,SAAO,MAAM,OAAO,OAAO,OAAO;CACnC;CAED,MAAM,OACJD,OACAC,SACiD;EACjD,MAAM,QAAQ,MAAM,KAAK,OAAO,QAAQ;EACxC,MAAM,mBAAmB,IAAI,wBAAwB;GACnD,WAAW,MAAM,MAAM,OAAO,OAAO,QAAQ;GAC7C,QAAQ;EACT;EACD,MAAM,iBAAiB;AACvB,SAAO,uBAAuB,mBAAmB,iBAAiB;CACnE;CAoBD,MAAM,MACJC,QACAC,SACAC,cACqC;AAGrC,SAAO,MAAM,MAAM,QAAQ,SAAS,aAAa;CAClD;CAED,OAAO,UACLC,WACAC,SACgC;EAChC,MAAM,QAAQ,MAAM,KAAK,OAAO,QAAQ;EACxC,MAAM,SAAS,aAAa,QAAQ;EAEpC,OAAO,MAAM,UAAU,WAAW,OAAO;CAC1C;CAED,OAAO,UACLN,OACAO,SACAC,eAC6B;EAC7B,MAAM,QAAQ,MAAM,KAAK,OAAO,QAAQ;EACxC,MAAM,SAAS,aAAa,QAAQ;EAEpC,OAAO,MAAM,UAAU,OAAO,QAAQ;GACpC,GAAG;GACH,eAAe;GACf,cAAc,eAAe;GAC7B,cAAc,eAAe;GAC7B,aAAa,eAAe;GAC5B,cAAc,eAAe;GAC7B,cAAc,eAAe;GAC7B,aAAa,eAAe;EAC7B,EAAC;CACH;CAiBD,aACER,OACAS,SAIAC,eACkD;EAElD,MAAM,YAAY;EAClB,gBAAgB,mBAAmB;GACjC,MAAM,QAAQ,MAAM,UAAU,OAAO,QAAQ;GAC7C,MAAM,SAAS,aAAa,QAAQ;GACpC,MAAM,cAAc,MAAM,aAAa,OAAO,QAAQ,cAAc;AAEpE,cAAW,MAAM,SAAS,aACxB,MAAM;EAET;AACD,SAAO,uBAAuB,mBAAmB,kBAAkB,CAAC;CACrE;AACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAuQD,eAAsB,cAIpBC,OAEAC,QAKmD;CAEnD,IAAI,EAAE,oBAAoB,cAAc,cAAe,GAAG,QAAQ,GAAG;EACnE,cAAc;EACd,GAAI,UAAU,CAAE;CACjB;AACD,KAAI,kBAAkB,UAAa,OAAO,SAAS,IAAI,EAAE;EACvD,MAAM,kBAAkB,MAAM,MAAM,KAAK,EAAE;AAC3C,MAAI,oBAAoB,SAAS,gBAAgB,GAAwB,EAEvE,CAAC,eAAe,MAAM,GAAG;CAE5B;CACD,IAAI,yBAAyB,MAAM,QAAQ,mBAAmB,GAC1D,CAAC,GAAG,kBAAmB,IACvB;AAEJ,KAAI,CAAC,SAAS,2BAA2B,QACvC,yBAAyB,CAAC,SAAS,eAAgB;AAErD,KAAI,gBAAgB,2BAA2B,QAC7C,QAAQ,KACN,CAAC,gBAAgB,EAAE,aAAa,oIAAoD,CAEnE,CAClB;CAIH,MAAMC,aAAkC,EAAE,GAAG,OAAQ;AAErD,KAAI,2BAA2B,OAC7B,QAAO,IAAI,kBAAyC;EAClD,eAAe;GACb,GAAG;GACH;GAC
A;EACD;EACD;CACD;MACI;AACL,MAAI,OACF,WAAW,QAAQ;AAErB,MAAI,eACF,WAAW,gBAAgB;AAE7B,SAAO,IAAI,kBAAyC;GAClD,eAAe;GACf;GACA,oBAAoB;EACrB;CACF;AACF"}
+ {"version":3,"file":"universal.js","names":["className: string","config","e: unknown","model: string","modelProvider?: string","params: Record<string, any>","modelName: string","fields: ConfigurableModelFields","config?: RunnableConfig","messages: BaseMessage[]","options?: this[\"ParsedCallOptions\"]","runManager?: CallbackManagerForLLMRun","tools: BindToolsInput[]","params?: Record<string, any>","modelParams: Record<string, any>","str: string","prefix: string","mergedConfig: RunnableConfig","remainingConfig: RunnableConfig","input: RunInput","options?: CallOptions","inputs: RunInput[]","options?: Partial<CallOptions> | Partial<CallOptions>[]","batchOptions?: RunnableBatchOptions","generator: AsyncGenerator<RunInput>","options: CallOptions","options?: Partial<CallOptions>","streamOptions?: Omit<LogStreamCallbackHandlerInput, \"autoClose\">","options: Partial<CallOptions> & {\n version: \"v1\" | \"v2\";\n encoding?: \"text/event-stream\" | undefined;\n }","streamOptions?: Omit<EventStreamCallbackHandlerInput, \"autoClose\">","model?: string","fields?: Partial<Record<string, any>> & {\n modelProvider?: string;\n configurableFields?: string[] | \"any\";\n configPrefix?: string;\n }","paramsCopy: Record<string, any>"],"sources":["../../src/chat_models/universal.ts"],"sourcesContent":["import {\n BaseLanguageModelInput,\n ToolDefinition,\n} from \"@langchain/core/language_models/base\";\nimport {\n BaseChatModel,\n BaseChatModelParams,\n BindToolsInput,\n type BaseChatModelCallOptions,\n} from \"@langchain/core/language_models/chat_models\";\nimport {\n BaseMessage,\n type AIMessageChunk,\n MessageStructure,\n} from \"@langchain/core/messages\";\nimport {\n type RunnableBatchOptions,\n RunnableBinding,\n type RunnableConfig,\n type RunnableToolLike,\n ensureConfig,\n} from \"@langchain/core/runnables\";\nimport {\n AsyncGeneratorWithSetup,\n IterableReadableStream,\n} from \"@langchain/core/utils/stream\";\nimport {\n type LogStreamCallbackHandlerInput,\n type RunLogPatch,\n type StreamEvent,\n} from \"@langchain/core/tracers/log_stream\";\nimport { type StructuredToolInterface } from \"@langchain/core/tools\";\nimport { CallbackManagerForLLMRun } from \"@langchain/core/callbacks/manager\";\nimport { ChatResult } from \"@langchain/core/outputs\";\n\n// TODO: remove once `EventStreamCallbackHandlerInput` is exposed in core\ninterface EventStreamCallbackHandlerInput\n extends Omit<LogStreamCallbackHandlerInput, \"_schemaFormat\"> {}\n\nexport interface ConfigurableChatModelCallOptions\n extends BaseChatModelCallOptions {\n tools?: (\n | StructuredToolInterface\n | Record<string, unknown>\n | ToolDefinition\n | RunnableToolLike\n )[];\n}\n\n// Configuration map for model providers\nexport const MODEL_PROVIDER_CONFIG = {\n openai: {\n package: \"@langchain/openai\",\n className: \"ChatOpenAI\",\n },\n anthropic: {\n package: \"@langchain/anthropic\",\n className: \"ChatAnthropic\",\n },\n azure_openai: {\n package: \"@langchain/openai\",\n className: \"AzureChatOpenAI\",\n },\n cohere: {\n package: \"@langchain/cohere\",\n className: \"ChatCohere\",\n },\n \"google-vertexai\": {\n package: \"@langchain/google-vertexai\",\n className: \"ChatVertexAI\",\n },\n \"google-vertexai-web\": {\n package: \"@langchain/google-vertexai-web\",\n className: \"ChatVertexAI\",\n },\n \"google-genai\": {\n package: \"@langchain/google-genai\",\n className: \"ChatGoogleGenerativeAI\",\n },\n ollama: {\n package: \"@langchain/ollama\",\n className: \"ChatOllama\",\n },\n mistralai: {\n package: 
\"@langchain/mistralai\",\n className: \"ChatMistralAI\",\n },\n groq: {\n package: \"@langchain/groq\",\n className: \"ChatGroq\",\n },\n cerebras: {\n package: \"@langchain/cerebras\",\n className: \"ChatCerebras\",\n },\n bedrock: {\n package: \"@langchain/aws\",\n className: \"ChatBedrockConverse\",\n },\n deepseek: {\n package: \"@langchain/deepseek\",\n className: \"ChatDeepSeek\",\n },\n xai: {\n package: \"@langchain/xai\",\n className: \"ChatXAI\",\n },\n fireworks: {\n package: \"@langchain/community/chat_models/fireworks\",\n className: \"ChatFireworks\",\n hasCircularDependency: true,\n },\n together: {\n package: \"@langchain/community/chat_models/togetherai\",\n className: \"ChatTogetherAI\",\n hasCircularDependency: true,\n },\n} as const;\n\nconst SUPPORTED_PROVIDERS = Object.keys(\n MODEL_PROVIDER_CONFIG\n) as (keyof typeof MODEL_PROVIDER_CONFIG)[];\nexport type ChatModelProvider = keyof typeof MODEL_PROVIDER_CONFIG;\ntype ModelProviderConfig = {\n package: string;\n className: string;\n hasCircularDependency?: boolean;\n};\n\n/**\n * Helper function to get a chat model class by its class name\n * @param className The class name (e.g., \"ChatOpenAI\", \"ChatAnthropic\")\n * @returns The imported model class or undefined if not found\n */\nexport async function getChatModelByClassName(className: string) {\n // Find the provider config that matches the class name\n const providerEntry = Object.entries(MODEL_PROVIDER_CONFIG).find(\n ([, config]) => config.className === className\n );\n\n if (!providerEntry) {\n return undefined;\n }\n\n const [, config] = providerEntry;\n try {\n const module = await import(config.package);\n return module[config.className];\n } catch (e: unknown) {\n const err = e as Error;\n if (\n \"code\" in err &&\n err.code?.toString().includes(\"ERR_MODULE_NOT_FOUND\")\n ) {\n const attemptedPackage = err.message\n .split(\"Error: Cannot find package '\")[1]\n .split(\"'\")[0];\n throw new Error(\n `Unable to import ${attemptedPackage}. 
Please install with ` +\n `\\`npm install ${attemptedPackage}\\` or \\`pnpm install ${attemptedPackage}\\``\n );\n }\n throw e;\n }\n}\n\nasync function _initChatModelHelper(\n model: string,\n modelProvider?: string,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n params: Record<string, any> = {}\n): Promise<BaseChatModel> {\n const modelProviderCopy = modelProvider || _inferModelProvider(model);\n if (!modelProviderCopy) {\n throw new Error(\n `Unable to infer model provider for { model: ${model} }, please specify modelProvider directly.`\n );\n }\n\n const config = MODEL_PROVIDER_CONFIG[\n modelProviderCopy as keyof typeof MODEL_PROVIDER_CONFIG\n ] as ModelProviderConfig;\n if (!config) {\n const supported = SUPPORTED_PROVIDERS.join(\", \");\n throw new Error(\n `Unsupported { modelProvider: ${modelProviderCopy} }.\\n\\nSupported model providers are: ${supported}`\n );\n }\n\n const { modelProvider: _unused, ...passedParams } = params;\n const ProviderClass = await getChatModelByClassName(config.className);\n return new ProviderClass({ model, ...passedParams });\n}\n\n/**\n * Attempts to infer the model provider based on the given model name.\n *\n * @param {string} modelName - The name of the model to infer the provider for.\n * @returns {string | undefined} The inferred model provider name, or undefined if unable to infer.\n *\n * @example\n * _inferModelProvider(\"gpt-4\"); // returns \"openai\"\n * _inferModelProvider(\"claude-2\"); // returns \"anthropic\"\n * _inferModelProvider(\"unknown-model\"); // returns undefined\n */\nexport function _inferModelProvider(modelName: string): string | undefined {\n if (\n modelName.startsWith(\"gpt-3\") ||\n modelName.startsWith(\"gpt-4\") ||\n modelName.startsWith(\"o1\") ||\n modelName.startsWith(\"o3\") ||\n modelName.startsWith(\"o4\")\n ) {\n return \"openai\";\n } else if (modelName.startsWith(\"claude\")) {\n return \"anthropic\";\n } else if (modelName.startsWith(\"command\")) {\n return \"cohere\";\n } else if (modelName.startsWith(\"accounts/fireworks\")) {\n return \"fireworks\";\n } else if (modelName.startsWith(\"gemini\")) {\n return \"google-vertexai\";\n } else if (modelName.startsWith(\"amazon.\")) {\n return \"bedrock\";\n } else if (modelName.startsWith(\"mistral\")) {\n return \"mistralai\";\n } else {\n return undefined;\n }\n}\n\ninterface ConfigurableModelFields extends BaseChatModelParams {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n defaultConfig?: Record<string, any>;\n /**\n * @default \"any\"\n */\n configurableFields?: string[] | \"any\";\n /**\n * @default \"\"\n */\n configPrefix?: string;\n /**\n * Methods which should be called after the model is initialized.\n * The key will be the method name, and the value will be the arguments.\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n queuedMethodOperations?: Record<string, any>;\n}\n\n/**\n * Internal class used to create chat models.\n *\n * @internal\n */\nexport class ConfigurableModel<\n RunInput extends BaseLanguageModelInput = BaseLanguageModelInput,\n CallOptions extends ConfigurableChatModelCallOptions = ConfigurableChatModelCallOptions\n> extends BaseChatModel<CallOptions, AIMessageChunk> {\n _llmType(): string {\n return \"chat_model\";\n }\n\n lc_namespace = [\"langchain\", \"chat_models\"];\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _defaultConfig?: Record<string, any> = {};\n\n /**\n * @default \"any\"\n */\n _configurableFields: string[] | \"any\" = \"any\";\n\n 
/**\n * @default \"\"\n */\n _configPrefix: string;\n\n /**\n * Methods which should be called after the model is initialized.\n * The key will be the method name, and the value will be the arguments.\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _queuedMethodOperations: Record<string, any> = {};\n\n constructor(fields: ConfigurableModelFields) {\n super(fields);\n this._defaultConfig = fields.defaultConfig ?? {};\n\n if (fields.configurableFields === \"any\") {\n this._configurableFields = \"any\";\n } else {\n this._configurableFields = fields.configurableFields ?? [\n \"model\",\n \"modelProvider\",\n ];\n }\n\n if (fields.configPrefix) {\n this._configPrefix = fields.configPrefix.endsWith(\"_\")\n ? fields.configPrefix\n : `${fields.configPrefix}_`;\n } else {\n this._configPrefix = \"\";\n }\n\n this._queuedMethodOperations =\n fields.queuedMethodOperations ?? this._queuedMethodOperations;\n }\n\n async _model(\n config?: RunnableConfig\n ): Promise<\n BaseChatModel<BaseChatModelCallOptions, AIMessageChunk<MessageStructure>>\n > {\n const params = { ...this._defaultConfig, ...this._modelParams(config) };\n let initializedModel = await _initChatModelHelper(\n params.model,\n params.modelProvider,\n params\n );\n\n // Apply queued method operations\n const queuedMethodOperationsEntries = Object.entries(\n this._queuedMethodOperations\n );\n if (queuedMethodOperationsEntries.length > 0) {\n for (const [method, args] of queuedMethodOperationsEntries) {\n if (\n method in initializedModel &&\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n typeof (initializedModel as any)[method] === \"function\"\n ) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n initializedModel = await (initializedModel as any)[method](...args);\n }\n }\n }\n\n return initializedModel;\n }\n\n async _generate(\n messages: BaseMessage[],\n options?: this[\"ParsedCallOptions\"],\n runManager?: CallbackManagerForLLMRun\n ): Promise<ChatResult> {\n const model = await this._model(options);\n return model._generate(messages, options ?? {}, runManager);\n }\n\n override bindTools(\n tools: BindToolsInput[],\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n params?: Record<string, any>\n ): ConfigurableModel<RunInput, CallOptions> {\n const newQueuedOperations = { ...this._queuedMethodOperations };\n newQueuedOperations.bindTools = [tools, params];\n return new ConfigurableModel<RunInput, CallOptions>({\n defaultConfig: this._defaultConfig,\n configurableFields: this._configurableFields,\n configPrefix: this._configPrefix,\n queuedMethodOperations: newQueuedOperations,\n });\n }\n\n // Extract the input types from the `BaseModel` class.\n withStructuredOutput: BaseChatModel[\"withStructuredOutput\"] = (\n schema,\n ...args\n ): ReturnType<BaseChatModel[\"withStructuredOutput\"]> => {\n const newQueuedOperations = { ...this._queuedMethodOperations };\n newQueuedOperations.withStructuredOutput = [schema, ...args];\n return new ConfigurableModel<RunInput, CallOptions>({\n defaultConfig: this._defaultConfig,\n configurableFields: this._configurableFields,\n configPrefix: this._configPrefix,\n queuedMethodOperations: newQueuedOperations,\n }) as unknown as ReturnType<BaseChatModel[\"withStructuredOutput\"]>;\n };\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n _modelParams(config?: RunnableConfig): Record<string, any> {\n const configurable = config?.configurable ?? 
{};\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let modelParams: Record<string, any> = {};\n\n for (const [key, value] of Object.entries(configurable)) {\n if (key.startsWith(this._configPrefix)) {\n const strippedKey = this._removePrefix(key, this._configPrefix);\n modelParams[strippedKey] = value;\n }\n }\n\n if (this._configurableFields !== \"any\") {\n modelParams = Object.fromEntries(\n Object.entries(modelParams).filter(([key]) =>\n this._configurableFields.includes(key)\n )\n );\n }\n\n return modelParams;\n }\n\n _removePrefix(str: string, prefix: string): string {\n return str.startsWith(prefix) ? str.slice(prefix.length) : str;\n }\n\n /**\n * Bind config to a Runnable, returning a new Runnable.\n * @param {RunnableConfig | undefined} [config] - The config to bind.\n * @returns {RunnableBinding<RunInput, RunOutput, CallOptions>} A new RunnableBinding with the bound config.\n */\n withConfig(\n config?: RunnableConfig\n ): RunnableBinding<RunInput, AIMessageChunk, CallOptions> {\n const mergedConfig: RunnableConfig = { ...(config || {}) };\n const modelParams = this._modelParams(mergedConfig);\n\n const remainingConfig: RunnableConfig = Object.fromEntries(\n Object.entries(mergedConfig).filter(([k]) => k !== \"configurable\")\n );\n\n remainingConfig.configurable = Object.fromEntries(\n Object.entries(mergedConfig.configurable || {}).filter(\n ([k]) =>\n this._configPrefix &&\n !Object.keys(modelParams).includes(\n this._removePrefix(k, this._configPrefix)\n )\n )\n );\n\n const newConfigurableModel = new ConfigurableModel<RunInput, CallOptions>({\n defaultConfig: { ...this._defaultConfig, ...modelParams },\n configurableFields: Array.isArray(this._configurableFields)\n ? [...this._configurableFields]\n : this._configurableFields,\n configPrefix: this._configPrefix,\n queuedMethodOperations: this._queuedMethodOperations,\n });\n\n return new RunnableBinding<RunInput, AIMessageChunk, CallOptions>({\n config: mergedConfig,\n bound: newConfigurableModel,\n });\n }\n\n async invoke(\n input: RunInput,\n options?: CallOptions\n ): Promise<AIMessageChunk> {\n const model = await this._model(options);\n const config = ensureConfig(options);\n return model.invoke(input, config);\n }\n\n async stream(\n input: RunInput,\n options?: CallOptions\n ): Promise<IterableReadableStream<AIMessageChunk>> {\n const model = await this._model(options);\n const wrappedGenerator = new AsyncGeneratorWithSetup({\n generator: await model.stream(input, options),\n config: options,\n });\n await wrappedGenerator.setup;\n return IterableReadableStream.fromAsyncGenerator(wrappedGenerator);\n }\n\n async batch(\n inputs: RunInput[],\n options?: Partial<CallOptions> | Partial<CallOptions>[],\n batchOptions?: RunnableBatchOptions & { returnExceptions?: false }\n ): Promise<AIMessageChunk[]>;\n\n async batch(\n inputs: RunInput[],\n options?: Partial<CallOptions> | Partial<CallOptions>[],\n batchOptions?: RunnableBatchOptions & { returnExceptions: true }\n ): Promise<(AIMessageChunk | Error)[]>;\n\n async batch(\n inputs: RunInput[],\n options?: Partial<CallOptions> | Partial<CallOptions>[],\n batchOptions?: RunnableBatchOptions\n ): Promise<(AIMessageChunk | Error)[]>;\n\n async batch(\n inputs: RunInput[],\n options?: Partial<CallOptions> | Partial<CallOptions>[],\n batchOptions?: RunnableBatchOptions\n ): Promise<(AIMessageChunk | Error)[]> {\n // We can super this since the base runnable implementation of\n // `.batch` will call `.invoke` on each input.\n return super.batch(inputs, 
options, batchOptions);\n }\n\n async *transform(\n generator: AsyncGenerator<RunInput>,\n options: CallOptions\n ): AsyncGenerator<AIMessageChunk> {\n const model = await this._model(options);\n const config = ensureConfig(options);\n\n yield* model.transform(generator, config);\n }\n\n async *streamLog(\n input: RunInput,\n options?: Partial<CallOptions>,\n streamOptions?: Omit<LogStreamCallbackHandlerInput, \"autoClose\">\n ): AsyncGenerator<RunLogPatch> {\n const model = await this._model(options);\n const config = ensureConfig(options);\n\n yield* model.streamLog(input, config, {\n ...streamOptions,\n _schemaFormat: \"original\",\n includeNames: streamOptions?.includeNames,\n includeTypes: streamOptions?.includeTypes,\n includeTags: streamOptions?.includeTags,\n excludeNames: streamOptions?.excludeNames,\n excludeTypes: streamOptions?.excludeTypes,\n excludeTags: streamOptions?.excludeTags,\n });\n }\n\n streamEvents(\n input: RunInput,\n options: Partial<CallOptions> & { version: \"v1\" | \"v2\" },\n streamOptions?: Omit<EventStreamCallbackHandlerInput, \"autoClose\">\n ): IterableReadableStream<StreamEvent>;\n\n streamEvents(\n input: RunInput,\n options: Partial<CallOptions> & {\n version: \"v1\" | \"v2\";\n encoding: \"text/event-stream\";\n },\n streamOptions?: Omit<EventStreamCallbackHandlerInput, \"autoClose\">\n ): IterableReadableStream<Uint8Array>;\n\n streamEvents(\n input: RunInput,\n options: Partial<CallOptions> & {\n version: \"v1\" | \"v2\";\n encoding?: \"text/event-stream\" | undefined;\n },\n streamOptions?: Omit<EventStreamCallbackHandlerInput, \"autoClose\">\n ): IterableReadableStream<StreamEvent | Uint8Array> {\n const outerThis = this;\n async function* wrappedGenerator() {\n const model = await outerThis._model(options);\n const config = ensureConfig(options);\n const eventStream = model.streamEvents(input, config, streamOptions);\n\n for await (const chunk of eventStream) {\n yield chunk;\n }\n }\n return IterableReadableStream.fromAsyncGenerator(wrappedGenerator());\n }\n}\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport interface InitChatModelFields extends Partial<Record<string, any>> {\n modelProvider?: string;\n configurableFields?: string[] | \"any\";\n configPrefix?: string;\n}\n\nexport type ConfigurableFields = \"any\" | string[];\n\nexport async function initChatModel<\n RunInput extends BaseLanguageModelInput = BaseLanguageModelInput,\n CallOptions extends ConfigurableChatModelCallOptions = ConfigurableChatModelCallOptions\n>(\n model: string,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n fields?: Partial<Record<string, any>> & {\n modelProvider?: string;\n configurableFields?: never;\n configPrefix?: string;\n }\n): Promise<ConfigurableModel<RunInput, CallOptions>>;\n\nexport async function initChatModel<\n RunInput extends BaseLanguageModelInput = BaseLanguageModelInput,\n CallOptions extends ConfigurableChatModelCallOptions = ConfigurableChatModelCallOptions\n>(\n model: never,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n options?: Partial<Record<string, any>> & {\n modelProvider?: string;\n configurableFields?: never;\n configPrefix?: string;\n }\n): Promise<ConfigurableModel<RunInput, CallOptions>>;\n\nexport async function initChatModel<\n RunInput extends BaseLanguageModelInput = BaseLanguageModelInput,\n CallOptions extends ConfigurableChatModelCallOptions = ConfigurableChatModelCallOptions\n>(\n model?: string,\n // eslint-disable-next-line 
@typescript-eslint/no-explicit-any\n options?: Partial<Record<string, any>> & {\n modelProvider?: string;\n configurableFields?: ConfigurableFields;\n configPrefix?: string;\n }\n): Promise<ConfigurableModel<RunInput, CallOptions>>;\n\n// ################################# FOR CONTRIBUTORS #################################\n//\n// If adding support for a new provider, please append the provider\n// name to the supported list in the docstring below.\n//\n// ####################################################################################\n\n/**\n * Initialize a ChatModel from the model name and provider.\n * Must have the integration package corresponding to the model provider installed.\n *\n * @template {extends BaseLanguageModelInput = BaseLanguageModelInput} RunInput - The input type for the model.\n * @template {extends ConfigurableChatModelCallOptions = ConfigurableChatModelCallOptions} CallOptions - Call options for the model.\n *\n * @param {string | ChatModelProvider} [model] - The name of the model, e.g. \"gpt-4\", \"claude-3-opus-20240229\".\n * Can be prefixed with the model provider, e.g. \"openai:gpt-4\", \"anthropic:claude-3-opus-20240229\".\n * @param {Object} [fields] - Additional configuration options.\n * @param {string} [fields.modelProvider] - The model provider. Supported values include:\n * - openai (@langchain/openai)\n * - anthropic (@langchain/anthropic)\n * - azure_openai (@langchain/openai)\n * - google-vertexai (@langchain/google-vertexai)\n * - google-vertexai-web (@langchain/google-vertexai-web)\n * - google-genai (@langchain/google-genai)\n * - bedrock (@langchain/aws)\n * - cohere (@langchain/cohere)\n * - fireworks (@langchain/community/chat_models/fireworks)\n * - together (@langchain/community/chat_models/togetherai)\n * - mistralai (@langchain/mistralai)\n * - groq (@langchain/groq)\n * - ollama (@langchain/ollama)\n * - cerebras (@langchain/cerebras)\n * - deepseek (@langchain/deepseek)\n * - xai (@langchain/xai)\n * @param {string[] | \"any\"} [fields.configurableFields] - Which model parameters are configurable:\n * - undefined: No configurable fields.\n * - \"any\": All fields are configurable. 
(See Security Note in description)\n * - string[]: Specified fields are configurable.\n * @param {string} [fields.configPrefix] - Prefix for configurable fields at runtime.\n * @param {Record<string, any>} [fields.params] - Additional keyword args to pass to the ChatModel constructor.\n * @returns {Promise<ConfigurableModel<RunInput, CallOptions>>} A class which extends BaseChatModel.\n * @throws {Error} If modelProvider cannot be inferred or isn't supported.\n * @throws {Error} If the model provider integration package is not installed.\n *\n * @example Initialize non-configurable models\n * ```typescript\n * import { initChatModel } from \"langchain/chat_models/universal\";\n *\n * const gpt4 = await initChatModel(\"openai:gpt-4\", {\n * temperature: 0.25,\n * });\n * const gpt4Result = await gpt4.invoke(\"what's your name\");\n *\n * const claude = await initChatModel(\"anthropic:claude-3-opus-20240229\", {\n * temperature: 0.25,\n * });\n * const claudeResult = await claude.invoke(\"what's your name\");\n *\n * const gemini = await initChatModel(\"gemini-1.5-pro\", {\n * modelProvider: \"google-vertexai\",\n * temperature: 0.25,\n * });\n * const geminiResult = await gemini.invoke(\"what's your name\");\n * ```\n *\n * @example Create a partially configurable model with no default model\n * ```typescript\n * import { initChatModel } from \"langchain/chat_models/universal\";\n *\n * const configurableModel = await initChatModel(undefined, {\n * temperature: 0,\n * configurableFields: [\"model\", \"apiKey\"],\n * });\n *\n * const gpt4Result = await configurableModel.invoke(\"what's your name\", {\n * configurable: {\n * model: \"gpt-4\",\n * },\n * });\n *\n * const claudeResult = await configurableModel.invoke(\"what's your name\", {\n * configurable: {\n * model: \"claude-3-5-sonnet-20240620\",\n * },\n * });\n * ```\n *\n * @example Create a fully configurable model with a default model and a config prefix\n * ```typescript\n * import { initChatModel } from \"langchain/chat_models/universal\";\n *\n * const configurableModelWithDefault = await initChatModel(\"gpt-4\", {\n * modelProvider: \"openai\",\n * configurableFields: \"any\",\n * configPrefix: \"foo\",\n * temperature: 0,\n * });\n *\n * const openaiResult = await configurableModelWithDefault.invoke(\n * \"what's your name\",\n * {\n * configurable: {\n * foo_apiKey: process.env.OPENAI_API_KEY,\n * },\n * }\n * );\n *\n * const claudeResult = await configurableModelWithDefault.invoke(\n * \"what's your name\",\n * {\n * configurable: {\n * foo_model: \"claude-3-5-sonnet-20240620\",\n * foo_modelProvider: \"anthropic\",\n * foo_temperature: 0.6,\n * foo_apiKey: process.env.ANTHROPIC_API_KEY,\n * },\n * }\n * );\n * ```\n *\n * @example Bind tools to a configurable model:\n * ```typescript\n * import { initChatModel } from \"langchain/chat_models/universal\";\n * import { z } from \"zod/v3\";\n * import { tool } from \"@langchain/core/tools\";\n *\n * const getWeatherTool = tool(\n * (input) => {\n * // Do something with the input\n * return JSON.stringify(input);\n * },\n * {\n * schema: z\n * .object({\n * location: z\n * .string()\n * .describe(\"The city and state, e.g. 
San Francisco, CA\"),\n * })\n * .describe(\"Get the current weather in a given location\"),\n * name: \"GetWeather\",\n * description: \"Get the current weather in a given location\",\n * }\n * );\n *\n * const getPopulationTool = tool(\n * (input) => {\n * // Do something with the input\n * return JSON.stringify(input);\n * },\n * {\n * schema: z\n * .object({\n * location: z\n * .string()\n * .describe(\"The city and state, e.g. San Francisco, CA\"),\n * })\n * .describe(\"Get the current population in a given location\"),\n * name: \"GetPopulation\",\n * description: \"Get the current population in a given location\",\n * }\n * );\n *\n * const configurableModel = await initChatModel(\"gpt-4\", {\n * configurableFields: [\"model\", \"modelProvider\", \"apiKey\"],\n * temperature: 0,\n * });\n *\n * const configurableModelWithTools = configurableModel.bindTools([\n * getWeatherTool,\n * getPopulationTool,\n * ]);\n *\n * const configurableToolResult = await configurableModelWithTools.invoke(\n * \"Which city is hotter today and which is bigger: LA or NY?\",\n * {\n * configurable: {\n * apiKey: process.env.OPENAI_API_KEY,\n * },\n * }\n * );\n *\n * const configurableToolResult2 = await configurableModelWithTools.invoke(\n * \"Which city is hotter today and which is bigger: LA or NY?\",\n * {\n * configurable: {\n * model: \"claude-3-5-sonnet-20240620\",\n * apiKey: process.env.ANTHROPIC_API_KEY,\n * },\n * }\n * );\n * ```\n *\n * @description\n * This function initializes a ChatModel based on the provided model name and provider.\n * It supports various model providers and allows for runtime configuration of model parameters.\n *\n * Security Note: Setting `configurableFields` to \"any\" means fields like apiKey, baseUrl, etc.\n * can be altered at runtime, potentially redirecting model requests to a different service/user.\n * Make sure that if you're accepting untrusted configurations, you enumerate the\n * `configurableFields` explicitly.\n *\n * The function will attempt to infer the model provider from the model name if not specified.\n * Certain model name prefixes are associated with specific providers:\n * - gpt-3... or gpt-4... -> openai\n * - claude... -> anthropic\n * - amazon.... -> bedrock\n * - gemini... -> google-vertexai\n * - command... -> cohere\n * - accounts/fireworks... -> fireworks\n *\n * @since 0.2.11\n * @version 0.2.11\n */\nexport async function initChatModel<\n RunInput extends BaseLanguageModelInput = BaseLanguageModelInput,\n CallOptions extends ConfigurableChatModelCallOptions = ConfigurableChatModelCallOptions\n>(\n model?: string,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n fields?: Partial<Record<string, any>> & {\n modelProvider?: string;\n configurableFields?: string[] | \"any\";\n configPrefix?: string;\n }\n): Promise<ConfigurableModel<RunInput, CallOptions>> {\n // eslint-disable-next-line prefer-const\n let { configurableFields, configPrefix, modelProvider, ...params } = {\n configPrefix: \"\",\n ...(fields ?? {}),\n };\n if (modelProvider === undefined && model?.includes(\":\")) {\n const modelComponents = model.split(\":\", 2);\n if (SUPPORTED_PROVIDERS.includes(modelComponents[0] as ChatModelProvider)) {\n // eslint-disable-next-line no-param-reassign\n [modelProvider, model] = modelComponents;\n }\n }\n let configurableFieldsCopy = Array.isArray(configurableFields)\n ? 
[...configurableFields]\n : configurableFields;\n\n if (!model && configurableFieldsCopy === undefined) {\n configurableFieldsCopy = [\"model\", \"modelProvider\"];\n }\n if (configPrefix && configurableFieldsCopy === undefined) {\n console.warn(\n `{ configPrefix: ${configPrefix} } has been set but no fields are configurable. Set ` +\n `{ configurableFields: [...] } to specify the model params that are ` +\n `configurable.`\n );\n }\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const paramsCopy: Record<string, any> = { ...params };\n\n if (configurableFieldsCopy === undefined) {\n return new ConfigurableModel<RunInput, CallOptions>({\n defaultConfig: {\n ...paramsCopy,\n model,\n modelProvider,\n },\n configPrefix,\n });\n } else {\n if (model) {\n paramsCopy.model = model;\n }\n if (modelProvider) {\n paramsCopy.modelProvider = modelProvider;\n }\n return new ConfigurableModel<RunInput, CallOptions>({\n defaultConfig: paramsCopy,\n configPrefix,\n configurableFields: configurableFieldsCopy,\n });\n }\n}\n"],"mappings":";;;;;;;;;;;;;;AAkDA,MAAa,wBAAwB;CACnC,QAAQ;EACN,SAAS;EACT,WAAW;CACZ;CACD,WAAW;EACT,SAAS;EACT,WAAW;CACZ;CACD,cAAc;EACZ,SAAS;EACT,WAAW;CACZ;CACD,QAAQ;EACN,SAAS;EACT,WAAW;CACZ;CACD,mBAAmB;EACjB,SAAS;EACT,WAAW;CACZ;CACD,uBAAuB;EACrB,SAAS;EACT,WAAW;CACZ;CACD,gBAAgB;EACd,SAAS;EACT,WAAW;CACZ;CACD,QAAQ;EACN,SAAS;EACT,WAAW;CACZ;CACD,WAAW;EACT,SAAS;EACT,WAAW;CACZ;CACD,MAAM;EACJ,SAAS;EACT,WAAW;CACZ;CACD,UAAU;EACR,SAAS;EACT,WAAW;CACZ;CACD,SAAS;EACP,SAAS;EACT,WAAW;CACZ;CACD,UAAU;EACR,SAAS;EACT,WAAW;CACZ;CACD,KAAK;EACH,SAAS;EACT,WAAW;CACZ;CACD,WAAW;EACT,SAAS;EACT,WAAW;EACX,uBAAuB;CACxB;CACD,UAAU;EACR,SAAS;EACT,WAAW;EACX,uBAAuB;CACxB;AACF;AAED,MAAM,sBAAsB,OAAO,KACjC,sBACD;;;;;;AAaD,eAAsB,wBAAwBA,WAAmB;CAE/D,MAAM,gBAAgB,OAAO,QAAQ,sBAAsB,CAAC,KAC1D,CAAC,GAAGC,SAAO,KAAKA,SAAO,cAAc,UACtC;AAED,KAAI,CAAC,cACH,QAAO;CAGT,MAAM,GAAG,OAAO,GAAG;AACnB,KAAI;EACF,MAAM,SAAS,MAAM,OAAO,OAAO;AACnC,SAAO,OAAO,OAAO;CACtB,SAAQC,GAAY;EACnB,MAAM,MAAM;AACZ,MACE,UAAU,OACV,IAAI,MAAM,UAAU,CAAC,SAAS,uBAAuB,EACrD;GACA,MAAM,mBAAmB,IAAI,QAC1B,MAAM,+BAA+B,CAAC,GACtC,MAAM,IAAI,CAAC;AACd,SAAM,IAAI,MACR,CAAC,iBAAiB,EAAE,iBAAiB,oCAAsB,EACxC,iBAAiB,qBAAqB,EAAE,iBAAiB,EAAE,CAAC;EAElF;AACD,QAAM;CACP;AACF;AAED,eAAe,qBACbC,OACAC,eAEAC,SAA8B,CAAE,GACR;CACxB,MAAM,oBAAoB,iBAAiB,oBAAoB,MAAM;AACrE,KAAI,CAAC,kBACH,OAAM,IAAI,MACR,CAAC,4CAA4C,EAAE,MAAM,0CAA0C,CAAC;CAIpG,MAAM,SAAS,sBACb;AAEF,KAAI,CAAC,QAAQ;EACX,MAAM,YAAY,oBAAoB,KAAK,KAAK;AAChD,QAAM,IAAI,MACR,CAAC,6BAA6B,EAAE,kBAAkB,sCAAsC,EAAE,WAAW;CAExG;CAED,MAAM,EAAE,eAAe,QAAS,GAAG,cAAc,GAAG;CACpD,MAAM,gBAAgB,MAAM,wBAAwB,OAAO,UAAU;AACrE,QAAO,IAAI,cAAc;EAAE;EAAO,GAAG;CAAc;AACpD;;;;;;;;;;;;AAaD,SAAgB,oBAAoBC,WAAuC;AACzE,KACE,UAAU,WAAW,QAAQ,IAC7B,UAAU,WAAW,QAAQ,IAC7B,UAAU,WAAW,KAAK,IAC1B,UAAU,WAAW,KAAK,IAC1B,UAAU,WAAW,KAAK,CAE1B,QAAO;UACE,UAAU,WAAW,SAAS,CACvC,QAAO;UACE,UAAU,WAAW,UAAU,CACxC,QAAO;UACE,UAAU,WAAW,qBAAqB,CACnD,QAAO;UACE,UAAU,WAAW,SAAS,CACvC,QAAO;UACE,UAAU,WAAW,UAAU,CACxC,QAAO;UACE,UAAU,WAAW,UAAU,CACxC,QAAO;KAEP,QAAO;AAEV;;;;;;AA0BD,IAAa,oBAAb,MAAa,0BAGH,cAA2C;CACnD,WAAmB;AACjB,SAAO;CACR;CAED,eAAe,CAAC,aAAa,aAAc;CAG3C,iBAAuC,CAAE;;;;CAKzC,sBAAwC;;;;CAKxC;;;;;CAOA,0BAA+C,CAAE;CAEjD,YAAYC,QAAiC;EAC3C,MAAM,OAAO;EACb,KAAK,iBAAiB,OAAO,iBAAiB,CAAE;AAEhD,MAAI,OAAO,uBAAuB,OAChC,KAAK,sBAAsB;OAE3B,KAAK,sBAAsB,OAAO,sBAAsB,CACtD,SACA,eACD;AAGH,MAAI,OAAO,cACT,KAAK,gBAAgB,OAAO,aAAa,SAAS,IAAI,GAClD,OAAO,eACP,GAAG,OAAO,aAAa,CAAC,CAAC;OAE7B,KAAK,gBAAgB;EAGvB,KAAK,0BACH,OAAO,0BAA0B,KAAK;CACzC;CAED,MAAM,OACJC,QAGA;EACA,MAAM,SAAS;GAAE,GAAG,KAAK;GAAgB,GAAG,KAAK,aAAa,OAAO;EAAE;E
ACvE,IAAI,mBAAmB,MAAM,qBAC3B,OAAO,OACP,OAAO,eACP,OACD;EAGD,MAAM,gCAAgC,OAAO,QAC3C,KAAK,wBACN;AACD,MAAI,8BAA8B,SAAS,GACzC;QAAK,MAAM,CAAC,QAAQ,KAAK,IAAI,8BAC3B,KACE,UAAU,oBAEV,OAAQ,iBAAyB,YAAY,YAG7C,mBAAmB,MAAO,iBAAyB,QAAQ,GAAG,KAAK;EAEtE;AAGH,SAAO;CACR;CAED,MAAM,UACJC,UACAC,SACAC,YACqB;EACrB,MAAM,QAAQ,MAAM,KAAK,OAAO,QAAQ;AACxC,SAAO,MAAM,UAAU,UAAU,WAAW,CAAE,GAAE,WAAW;CAC5D;CAED,AAAS,UACPC,OAEAC,QAC0C;EAC1C,MAAM,sBAAsB,EAAE,GAAG,KAAK,wBAAyB;EAC/D,oBAAoB,YAAY,CAAC,OAAO,MAAO;AAC/C,SAAO,IAAI,kBAAyC;GAClD,eAAe,KAAK;GACpB,oBAAoB,KAAK;GACzB,cAAc,KAAK;GACnB,wBAAwB;EACzB;CACF;CAGD,uBAA8D,CAC5D,QACA,GAAG,SACmD;EACtD,MAAM,sBAAsB,EAAE,GAAG,KAAK,wBAAyB;EAC/D,oBAAoB,uBAAuB,CAAC,QAAQ,GAAG,IAAK;AAC5D,SAAO,IAAI,kBAAyC;GAClD,eAAe,KAAK;GACpB,oBAAoB,KAAK;GACzB,cAAc,KAAK;GACnB,wBAAwB;EACzB;CACF;CAGD,aAAaL,QAA8C;EACzD,MAAM,eAAe,QAAQ,gBAAgB,CAAE;EAE/C,IAAIM,cAAmC,CAAE;AAEzC,OAAK,MAAM,CAAC,KAAK,MAAM,IAAI,OAAO,QAAQ,aAAa,CACrD,KAAI,IAAI,WAAW,KAAK,cAAc,EAAE;GACtC,MAAM,cAAc,KAAK,cAAc,KAAK,KAAK,cAAc;GAC/D,YAAY,eAAe;EAC5B;AAGH,MAAI,KAAK,wBAAwB,OAC/B,cAAc,OAAO,YACnB,OAAO,QAAQ,YAAY,CAAC,OAAO,CAAC,CAAC,IAAI,KACvC,KAAK,oBAAoB,SAAS,IAAI,CACvC,CACF;AAGH,SAAO;CACR;CAED,cAAcC,KAAaC,QAAwB;AACjD,SAAO,IAAI,WAAW,OAAO,GAAG,IAAI,MAAM,OAAO,OAAO,GAAG;CAC5D;;;;;;CAOD,WACER,QACwD;EACxD,MAAMS,eAA+B,EAAE,GAAI,UAAU,CAAE,EAAG;EAC1D,MAAM,cAAc,KAAK,aAAa,aAAa;EAEnD,MAAMC,kBAAkC,OAAO,YAC7C,OAAO,QAAQ,aAAa,CAAC,OAAO,CAAC,CAAC,EAAE,KAAK,MAAM,eAAe,CACnE;EAED,gBAAgB,eAAe,OAAO,YACpC,OAAO,QAAQ,aAAa,gBAAgB,CAAE,EAAC,CAAC,OAC9C,CAAC,CAAC,EAAE,KACF,KAAK,iBACL,CAAC,OAAO,KAAK,YAAY,CAAC,SACxB,KAAK,cAAc,GAAG,KAAK,cAAc,CAC1C,CACJ,CACF;EAED,MAAM,uBAAuB,IAAI,kBAAyC;GACxE,eAAe;IAAE,GAAG,KAAK;IAAgB,GAAG;GAAa;GACzD,oBAAoB,MAAM,QAAQ,KAAK,oBAAoB,GACvD,CAAC,GAAG,KAAK,mBAAoB,IAC7B,KAAK;GACT,cAAc,KAAK;GACnB,wBAAwB,KAAK;EAC9B;AAED,SAAO,IAAI,gBAAuD;GAChE,QAAQ;GACR,OAAO;EACR;CACF;CAED,MAAM,OACJC,OACAC,SACyB;EACzB,MAAM,QAAQ,MAAM,KAAK,OAAO,QAAQ;EACxC,MAAM,SAAS,aAAa,QAAQ;AACpC,SAAO,MAAM,OAAO,OAAO,OAAO;CACnC;CAED,MAAM,OACJD,OACAC,SACiD;EACjD,MAAM,QAAQ,MAAM,KAAK,OAAO,QAAQ;EACxC,MAAM,mBAAmB,IAAI,wBAAwB;GACnD,WAAW,MAAM,MAAM,OAAO,OAAO,QAAQ;GAC7C,QAAQ;EACT;EACD,MAAM,iBAAiB;AACvB,SAAO,uBAAuB,mBAAmB,iBAAiB;CACnE;CAoBD,MAAM,MACJC,QACAC,SACAC,cACqC;AAGrC,SAAO,MAAM,MAAM,QAAQ,SAAS,aAAa;CAClD;CAED,OAAO,UACLC,WACAC,SACgC;EAChC,MAAM,QAAQ,MAAM,KAAK,OAAO,QAAQ;EACxC,MAAM,SAAS,aAAa,QAAQ;EAEpC,OAAO,MAAM,UAAU,WAAW,OAAO;CAC1C;CAED,OAAO,UACLN,OACAO,SACAC,eAC6B;EAC7B,MAAM,QAAQ,MAAM,KAAK,OAAO,QAAQ;EACxC,MAAM,SAAS,aAAa,QAAQ;EAEpC,OAAO,MAAM,UAAU,OAAO,QAAQ;GACpC,GAAG;GACH,eAAe;GACf,cAAc,eAAe;GAC7B,cAAc,eAAe;GAC7B,aAAa,eAAe;GAC5B,cAAc,eAAe;GAC7B,cAAc,eAAe;GAC7B,aAAa,eAAe;EAC7B,EAAC;CACH;CAiBD,aACER,OACAS,SAIAC,eACkD;EAClD,MAAM,YAAY;EAClB,gBAAgB,mBAAmB;GACjC,MAAM,QAAQ,MAAM,UAAU,OAAO,QAAQ;GAC7C,MAAM,SAAS,aAAa,QAAQ;GACpC,MAAM,cAAc,MAAM,aAAa,OAAO,QAAQ,cAAc;AAEpE,cAAW,MAAM,SAAS,aACxB,MAAM;EAET;AACD,SAAO,uBAAuB,mBAAmB,kBAAkB,CAAC;CACrE;AACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAuQD,eAAsB,cAIpBC,OAEAC,QAKmD;CAEnD,IAAI,EAAE,oBAAoB,cAAc,cAAe,GAAG,QAAQ,GAAG;EACnE,cAAc;EACd,GAAI,UAAU,CAAE;CACjB;AACD,KAAI,kBAAkB,UAAa,OAAO,SAAS,IAAI,EAAE;EACvD,MAAM,kBAAkB,MAAM,MAAM,KAAK,EAAE;AAC3C,MAAI,oBAAoB,SAAS,gBAAgB,GAAwB,EAEvE,CAAC,eAAe,MAAM,GAAG;CAE5B;CACD,IAAI,yBAAyB,MAAM,QAAQ,mBAAmB,GAC1D,CAAC,GAAG,kBAAmB,IACvB;AAEJ,KAAI,CAAC,SAAS,2BAA2B,QACvC,yBAAyB,CAAC,SAAS,eAAgB;AAErD,KAAI,gBAAgB,2BAA2B,QAC7C,QAAQ,KACN,CAAC,gBAAgB,EAAE,aAAa,oIAAoD,CAEnE,CAClB;CAIH,MAAMC,aA
AkC,EAAE,GAAG,OAAQ;AAErD,KAAI,2BAA2B,OAC7B,QAAO,IAAI,kBAAyC;EAClD,eAAe;GACb,GAAG;GACH;GACA;EACD;EACD;CACD;MACI;AACL,MAAI,OACF,WAAW,QAAQ;AAErB,MAAI,eACF,WAAW,gBAAgB;AAE7B,SAAO,IAAI,kBAAyC;GAClD,eAAe;GACf;GACA,oBAAoB;EACrB;CACF;AACF"}
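The updated `universal.js` map embeds a `bindTools`/`withStructuredOutput` implementation that copies `_queuedMethodOperations` into a fresh object before recording the queued call, rather than writing into the shared map on the receiver. A minimal sketch of that copy-on-bind pattern follows; the class and field names are illustrative, not the package's API.

```typescript
// Sketch only: demonstrates the copy-before-queue pattern, not langchain's actual classes.
type QueuedOperations = Record<string, unknown[]>;

class ConfigurableLike {
  constructor(private readonly queuedOperations: QueuedOperations = {}) {}

  bindTools(tools: unknown[], params?: Record<string, unknown>): ConfigurableLike {
    // Copy the existing queue, then append this call; the original instance stays untouched.
    const newQueuedOperations: QueuedOperations = {
      ...this.queuedOperations,
      bindTools: [tools, params],
    };
    return new ConfigurableLike(newQueuedOperations);
  }
}
```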
@@ -1 +1 @@
- {"version":3,"file":"directory.cjs","names":["BaseDocumentLoader","directoryPath: string","loaders: LoadersMapping","recursive: boolean","unknown: UnknownHandling","documents: Document[]"],"sources":["../../../src/document_loaders/fs/directory.ts"],"sourcesContent":["import type { extname as ExtnameT, resolve as ResolveT } from \"node:path\";\nimport type { readdir as ReaddirT } from \"node:fs/promises\";\nimport { Document } from \"@langchain/core/documents\";\nimport { getEnv } from \"@langchain/core/utils/env\";\nimport { BaseDocumentLoader } from \"@langchain/core/document_loaders/base\";\n\n// TypeScript enums are not tree-shakeable, so doing this instead\n// See https://bargsten.org/jsts/enums/\nexport const UnknownHandling = {\n Ignore: \"ignore\",\n Warn: \"warn\",\n Error: \"error\",\n} as const;\n// eslint-disable-next-line @typescript-eslint/no-redeclare\n/**\n * An enumeration of possible handling strategies for unknown file types.\n */\nexport type UnknownHandling =\n (typeof UnknownHandling)[keyof typeof UnknownHandling];\n\n/**\n * A mapping of file extensions to loader functions. Each loader function\n * takes a file path as a parameter and returns a `BaseDocumentLoader`\n * instance.\n */\nexport interface LoadersMapping {\n [extension: string]: (filePath: string) => BaseDocumentLoader;\n}\n\n/**\n * A document loader that loads documents from a directory. It extends the\n * `BaseDocumentLoader` class and implements the `load()` method.\n * @example\n * ```typescript\n *\n * const directoryLoader = new DirectoryLoader(\n * \"src/document_loaders/example_data/\",\n * {\n * \".pdf\": (path: string) => new PDFLoader(path),\n * },\n * );\n *\n * const docs = await directoryLoader.load();\n * console.log({ docs });\n *\n * ```\n */\nexport class DirectoryLoader extends BaseDocumentLoader {\n constructor(\n public directoryPath: string,\n public loaders: LoadersMapping,\n public recursive: boolean = true,\n public unknown: UnknownHandling = UnknownHandling.Warn\n ) {\n super();\n\n if (Object.keys(loaders).length === 0) {\n throw new Error(\"Must provide at least one loader\");\n }\n for (const extension in loaders) {\n if (Object.hasOwn(loaders, extension)) {\n if (extension[0] !== \".\") {\n throw new Error(`Extension must start with a dot: ${extension}`);\n }\n }\n }\n }\n\n /**\n * Loads the documents from the directory. If a file is a directory and\n * `recursive` is `true`, it recursively loads documents from the\n * subdirectory. If a file is a file, it checks if there is a\n * corresponding loader function for the file extension in the `loaders`\n * mapping. If there is, it loads the documents. If there is no\n * corresponding loader function and `unknown` is set to `Warn`, it logs a\n * warning message. 
If `unknown` is set to `Error`, it throws an error.\n * @returns A promise that resolves to an array of loaded documents.\n */\n public async load(): Promise<Document[]> {\n const { readdir, extname, resolve } = await DirectoryLoader.imports();\n const files = await readdir(this.directoryPath, { withFileTypes: true });\n\n const documents: Document[] = [];\n\n for (const file of files) {\n const fullPath = resolve(this.directoryPath, file.name);\n if (file.isDirectory()) {\n if (this.recursive) {\n const loader = new DirectoryLoader(\n fullPath,\n this.loaders,\n this.recursive,\n this.unknown\n );\n documents.push(...(await loader.load()));\n }\n } else {\n // I'm aware some things won't be files,\n // but they will be caught by the \"unknown\" handling below.\n const loaderFactory = this.loaders[extname(file.name)];\n if (loaderFactory) {\n const loader = loaderFactory(fullPath);\n documents.push(...(await loader.load()));\n } else {\n switch (this.unknown) {\n case UnknownHandling.Ignore:\n break;\n case UnknownHandling.Warn:\n console.warn(`Unknown file type: ${file.name}`);\n break;\n case UnknownHandling.Error:\n throw new Error(`Unknown file type: ${file.name}`);\n default:\n throw new Error(`Unknown unknown handling: ${this.unknown}`);\n }\n }\n }\n }\n\n return documents;\n }\n\n /**\n * Imports the necessary functions from the `node:path` and\n * `node:fs/promises` modules. It is used to dynamically import the\n * functions when needed. If the import fails, it throws an error\n * indicating that the modules failed to load.\n * @returns A promise that resolves to an object containing the imported functions.\n */\n static async imports(): Promise<{\n readdir: typeof ReaddirT;\n extname: typeof ExtnameT;\n resolve: typeof ResolveT;\n }> {\n try {\n const { extname, resolve } = await import(\"node:path\");\n const { readdir } = await import(\"node:fs/promises\");\n return { readdir, extname, resolve };\n } catch (e) {\n console.error(e);\n throw new Error(\n `Failed to load fs/promises. DirectoryLoader available only on environment 'node'. It appears you are running environment '${getEnv()}'. 
See https://<link to docs> for alternatives.`\n );\n }\n }\n}\n"],"mappings":";;;;;;;;;;AAQA,MAAa,kBAAkB;CAC7B,QAAQ;CACR,MAAM;CACN,OAAO;AACR;;;;;;;;;;;;;;;;;;;AAmCD,IAAa,kBAAb,MAAa,wBAAwBA,0DAAmB;CACtD,YACSC,eACAC,SACAC,YAAqB,MACrBC,UAA2B,gBAAgB,MAClD;EACA,OAAO;EALA;EACA;EACA;EACA;AAIP,MAAI,OAAO,KAAK,QAAQ,CAAC,WAAW,EAClC,OAAM,IAAI,MAAM;AAElB,OAAK,MAAM,aAAa,QACtB,KAAI,OAAO,OAAO,SAAS,UAAU,EACnC;OAAI,UAAU,OAAO,IACnB,OAAM,IAAI,MAAM,CAAC,iCAAiC,EAAE,WAAW;EAChE;CAGN;;;;;;;;;;;CAYD,MAAa,OAA4B;EACvC,MAAM,EAAE,SAAS,SAAS,SAAS,GAAG,MAAM,gBAAgB,SAAS;EACrE,MAAM,QAAQ,MAAM,QAAQ,KAAK,eAAe,EAAE,eAAe,KAAM,EAAC;EAExE,MAAMC,YAAwB,CAAE;AAEhC,OAAK,MAAM,QAAQ,OAAO;GACxB,MAAM,WAAW,QAAQ,KAAK,eAAe,KAAK,KAAK;AACvD,OAAI,KAAK,aAAa,EACpB;QAAI,KAAK,WAAW;KAClB,MAAM,SAAS,IAAI,gBACjB,UACA,KAAK,SACL,KAAK,WACL,KAAK;KAEP,UAAU,KAAK,GAAI,MAAM,OAAO,MAAM,CAAE;IACzC;UACI;IAGL,MAAM,gBAAgB,KAAK,QAAQ,QAAQ,KAAK,KAAK;AACrD,QAAI,eAAe;KACjB,MAAM,SAAS,cAAc,SAAS;KACtC,UAAU,KAAK,GAAI,MAAM,OAAO,MAAM,CAAE;IACzC,MACC,SAAQ,KAAK,SAAb;KACE,KAAK,gBAAgB,OACnB;KACF,KAAK,gBAAgB;MACnB,QAAQ,KAAK,CAAC,mBAAmB,EAAE,KAAK,MAAM,CAAC;AAC/C;KACF,KAAK,gBAAgB,MACnB,OAAM,IAAI,MAAM,CAAC,mBAAmB,EAAE,KAAK,MAAM;KACnD,QACE,OAAM,IAAI,MAAM,CAAC,0BAA0B,EAAE,KAAK,SAAS;IAC9D;GAEJ;EACF;AAED,SAAO;CACR;;;;;;;;CASD,aAAa,UAIV;AACD,MAAI;GACF,MAAM,EAAE,SAAS,SAAS,GAAG,MAAM,OAAO;GAC1C,MAAM,EAAE,SAAS,GAAG,MAAM,OAAO;AACjC,UAAO;IAAE;IAAS;IAAS;GAAS;EACrC,SAAQ,GAAG;GACV,QAAQ,MAAM,EAAE;AAChB,SAAM,IAAI,MACR,CAAC,0HAA0H,0CAAU,CAAC,+CAA+C,CAAC;EAEzL;CACF;AACF"}
+ {"version":3,"file":"directory.cjs","names":["BaseDocumentLoader","directoryPath: string","loaders: LoadersMapping","recursive: boolean","unknown: UnknownHandling","documents: Document[]"],"sources":["../../../src/document_loaders/fs/directory.ts"],"sourcesContent":["import type { extname as ExtnameT, resolve as ResolveT } from \"node:path\";\nimport type { readdir as ReaddirT } from \"node:fs/promises\";\nimport { Document } from \"@langchain/core/documents\";\nimport { getEnv } from \"@langchain/core/utils/env\";\nimport { BaseDocumentLoader } from \"@langchain/core/document_loaders/base\";\n\n// TypeScript enums are not tree-shakeable, so doing this instead\n// See https://bargsten.org/jsts/enums/\nexport const UnknownHandling = {\n Ignore: \"ignore\",\n Warn: \"warn\",\n Error: \"error\",\n} as const;\n/**\n * An enumeration of possible handling strategies for unknown file types.\n */\nexport type UnknownHandling =\n (typeof UnknownHandling)[keyof typeof UnknownHandling];\n\n/**\n * A mapping of file extensions to loader functions. Each loader function\n * takes a file path as a parameter and returns a `BaseDocumentLoader`\n * instance.\n */\nexport interface LoadersMapping {\n [extension: string]: (filePath: string) => BaseDocumentLoader;\n}\n\n/**\n * A document loader that loads documents from a directory. It extends the\n * `BaseDocumentLoader` class and implements the `load()` method.\n * @example\n * ```typescript\n *\n * const directoryLoader = new DirectoryLoader(\n * \"src/document_loaders/example_data/\",\n * {\n * \".pdf\": (path: string) => new PDFLoader(path),\n * },\n * );\n *\n * const docs = await directoryLoader.load();\n * console.log({ docs });\n *\n * ```\n */\nexport class DirectoryLoader extends BaseDocumentLoader {\n constructor(\n public directoryPath: string,\n public loaders: LoadersMapping,\n public recursive: boolean = true,\n public unknown: UnknownHandling = UnknownHandling.Warn\n ) {\n super();\n\n if (Object.keys(loaders).length === 0) {\n throw new Error(\"Must provide at least one loader\");\n }\n for (const extension in loaders) {\n if (Object.hasOwn(loaders, extension)) {\n if (extension[0] !== \".\") {\n throw new Error(`Extension must start with a dot: ${extension}`);\n }\n }\n }\n }\n\n /**\n * Loads the documents from the directory. If a file is a directory and\n * `recursive` is `true`, it recursively loads documents from the\n * subdirectory. If a file is a file, it checks if there is a\n * corresponding loader function for the file extension in the `loaders`\n * mapping. If there is, it loads the documents. If there is no\n * corresponding loader function and `unknown` is set to `Warn`, it logs a\n * warning message. 
If `unknown` is set to `Error`, it throws an error.\n * @returns A promise that resolves to an array of loaded documents.\n */\n public async load(): Promise<Document[]> {\n const { readdir, extname, resolve } = await DirectoryLoader.imports();\n const files = await readdir(this.directoryPath, { withFileTypes: true });\n\n const documents: Document[] = [];\n\n for (const file of files) {\n const fullPath = resolve(this.directoryPath, file.name);\n if (file.isDirectory()) {\n if (this.recursive) {\n const loader = new DirectoryLoader(\n fullPath,\n this.loaders,\n this.recursive,\n this.unknown\n );\n documents.push(...(await loader.load()));\n }\n } else {\n // I'm aware some things won't be files,\n // but they will be caught by the \"unknown\" handling below.\n const loaderFactory = this.loaders[extname(file.name)];\n if (loaderFactory) {\n const loader = loaderFactory(fullPath);\n documents.push(...(await loader.load()));\n } else {\n switch (this.unknown) {\n case UnknownHandling.Ignore:\n break;\n case UnknownHandling.Warn:\n console.warn(`Unknown file type: ${file.name}`);\n break;\n case UnknownHandling.Error:\n throw new Error(`Unknown file type: ${file.name}`);\n default:\n throw new Error(`Unknown unknown handling: ${this.unknown}`);\n }\n }\n }\n }\n\n return documents;\n }\n\n /**\n * Imports the necessary functions from the `node:path` and\n * `node:fs/promises` modules. It is used to dynamically import the\n * functions when needed. If the import fails, it throws an error\n * indicating that the modules failed to load.\n * @returns A promise that resolves to an object containing the imported functions.\n */\n static async imports(): Promise<{\n readdir: typeof ReaddirT;\n extname: typeof ExtnameT;\n resolve: typeof ResolveT;\n }> {\n try {\n const { extname, resolve } = await import(\"node:path\");\n const { readdir } = await import(\"node:fs/promises\");\n return { readdir, extname, resolve };\n } catch (e) {\n console.error(e);\n throw new Error(\n `Failed to load fs/promises. DirectoryLoader available only on environment 'node'. It appears you are running environment '${getEnv()}'. 
See https://<link to docs> for alternatives.`\n );\n }\n }\n}\n"],"mappings":";;;;;;;;;;AAQA,MAAa,kBAAkB;CAC7B,QAAQ;CACR,MAAM;CACN,OAAO;AACR;;;;;;;;;;;;;;;;;;;AAkCD,IAAa,kBAAb,MAAa,wBAAwBA,0DAAmB;CACtD,YACSC,eACAC,SACAC,YAAqB,MACrBC,UAA2B,gBAAgB,MAClD;EACA,OAAO;EALA;EACA;EACA;EACA;AAIP,MAAI,OAAO,KAAK,QAAQ,CAAC,WAAW,EAClC,OAAM,IAAI,MAAM;AAElB,OAAK,MAAM,aAAa,QACtB,KAAI,OAAO,OAAO,SAAS,UAAU,EACnC;OAAI,UAAU,OAAO,IACnB,OAAM,IAAI,MAAM,CAAC,iCAAiC,EAAE,WAAW;EAChE;CAGN;;;;;;;;;;;CAYD,MAAa,OAA4B;EACvC,MAAM,EAAE,SAAS,SAAS,SAAS,GAAG,MAAM,gBAAgB,SAAS;EACrE,MAAM,QAAQ,MAAM,QAAQ,KAAK,eAAe,EAAE,eAAe,KAAM,EAAC;EAExE,MAAMC,YAAwB,CAAE;AAEhC,OAAK,MAAM,QAAQ,OAAO;GACxB,MAAM,WAAW,QAAQ,KAAK,eAAe,KAAK,KAAK;AACvD,OAAI,KAAK,aAAa,EACpB;QAAI,KAAK,WAAW;KAClB,MAAM,SAAS,IAAI,gBACjB,UACA,KAAK,SACL,KAAK,WACL,KAAK;KAEP,UAAU,KAAK,GAAI,MAAM,OAAO,MAAM,CAAE;IACzC;UACI;IAGL,MAAM,gBAAgB,KAAK,QAAQ,QAAQ,KAAK,KAAK;AACrD,QAAI,eAAe;KACjB,MAAM,SAAS,cAAc,SAAS;KACtC,UAAU,KAAK,GAAI,MAAM,OAAO,MAAM,CAAE;IACzC,MACC,SAAQ,KAAK,SAAb;KACE,KAAK,gBAAgB,OACnB;KACF,KAAK,gBAAgB;MACnB,QAAQ,KAAK,CAAC,mBAAmB,EAAE,KAAK,MAAM,CAAC;AAC/C;KACF,KAAK,gBAAgB,MACnB,OAAM,IAAI,MAAM,CAAC,mBAAmB,EAAE,KAAK,MAAM;KACnD,QACE,OAAM,IAAI,MAAM,CAAC,0BAA0B,EAAE,KAAK,SAAS;IAC9D;GAEJ;EACF;AAED,SAAO;CACR;;;;;;;;CASD,aAAa,UAIV;AACD,MAAI;GACF,MAAM,EAAE,SAAS,SAAS,GAAG,MAAM,OAAO;GAC1C,MAAM,EAAE,SAAS,GAAG,MAAM,OAAO;AACjC,UAAO;IAAE;IAAS;IAAS;GAAS;EACrC,SAAQ,GAAG;GACV,QAAQ,MAAM,EAAE;AAChB,SAAM,IAAI,MACR,CAAC,0HAA0H,0CAAU,CAAC,+CAA+C,CAAC;EAEzL;CACF;AACF"}
@@ -11,7 +11,6 @@ declare const UnknownHandling: {
  readonly Warn: "warn";
  readonly Error: "error";
  };
- // eslint-disable-next-line @typescript-eslint/no-redeclare
  /**
  * An enumeration of possible handling strategies for unknown file types.
  */
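The `UnknownHandling` declaration shown in these hunks uses a readonly `const` object plus a same-named type derived from its values, the tree-shakeable alternative to a TypeScript enum noted in the source comments. A minimal sketch of that pattern, with an illustrative name rather than the package export:

```typescript
// Sketch of the const-object "enum" pattern (illustrative name, not the package export).
export const HandlingMode = {
  Ignore: "ignore",
  Warn: "warn",
  Error: "error",
} as const;

// The type shares the constant's name, so call sites can use it like an enum.
export type HandlingMode = (typeof HandlingMode)[keyof typeof HandlingMode];

// Values are plain string literals at runtime, so nothing extra survives bundling.
const mode: HandlingMode = HandlingMode.Warn;
```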
@@ -1 +1 @@
- {"version":3,"file":"directory.d.cts","names":["extname","ExtnameT","resolve","ResolveT","readdir","ReaddirT","Document","BaseDocumentLoader","UnknownHandling","LoadersMapping","DirectoryLoader","Promise"],"sources":["../../../src/document_loaders/fs/directory.d.ts"],"sourcesContent":["import type { extname as ExtnameT, resolve as ResolveT } from \"node:path\";\nimport type { readdir as ReaddirT } from \"node:fs/promises\";\nimport { Document } from \"@langchain/core/documents\";\nimport { BaseDocumentLoader } from \"@langchain/core/document_loaders/base\";\n// TypeScript enums are not tree-shakeable, so doing this instead\n// See https://bargsten.org/jsts/enums/\nexport declare const UnknownHandling: {\n readonly Ignore: \"ignore\";\n readonly Warn: \"warn\";\n readonly Error: \"error\";\n};\n// eslint-disable-next-line @typescript-eslint/no-redeclare\n/**\n * An enumeration of possible handling strategies for unknown file types.\n */\nexport type UnknownHandling = (typeof UnknownHandling)[keyof typeof UnknownHandling];\n/**\n * A mapping of file extensions to loader functions. Each loader function\n * takes a file path as a parameter and returns a `BaseDocumentLoader`\n * instance.\n */\nexport interface LoadersMapping {\n [extension: string]: (filePath: string) => BaseDocumentLoader;\n}\n/**\n * A document loader that loads documents from a directory. It extends the\n * `BaseDocumentLoader` class and implements the `load()` method.\n * @example\n * ```typescript\n *\n * const directoryLoader = new DirectoryLoader(\n * \"src/document_loaders/example_data/\",\n * {\n * \".pdf\": (path: string) => new PDFLoader(path),\n * },\n * );\n *\n * const docs = await directoryLoader.load();\n * console.log({ docs });\n *\n * ```\n */\nexport declare class DirectoryLoader extends BaseDocumentLoader {\n directoryPath: string;\n loaders: LoadersMapping;\n recursive: boolean;\n unknown: UnknownHandling;\n constructor(directoryPath: string, loaders: LoadersMapping, recursive?: boolean, unknown?: UnknownHandling);\n /**\n * Loads the documents from the directory. If a file is a directory and\n * `recursive` is `true`, it recursively loads documents from the\n * subdirectory. If a file is a file, it checks if there is a\n * corresponding loader function for the file extension in the `loaders`\n * mapping. If there is, it loads the documents. If there is no\n * corresponding loader function and `unknown` is set to `Warn`, it logs a\n * warning message. If `unknown` is set to `Error`, it throws an error.\n * @returns A promise that resolves to an array of loaded documents.\n */\n load(): Promise<Document[]>;\n /**\n * Imports the necessary functions from the `node:path` and\n * `node:fs/promises` modules. It is used to dynamically import the\n * functions when needed. 
If the import fails, it throws an error\n * indicating that the modules failed to load.\n * @returns A promise that resolves to an object containing the imported functions.\n */\n static imports(): Promise<{\n readdir: typeof ReaddirT;\n extname: typeof ExtnameT;\n resolve: typeof ResolveT;\n }>;\n}\n"],"mappings":";;;;;;;;AAMqBQ,cAAAA,eAIpB,EAAA;EAKWA,SAAAA,MAAAA,EAAAA,QAAe;EAAA,SAAA,IAAA,EAAA,MAAA;EAAA,SAAWA,KAAAA,EAAAA,OAAAA;CAAe;AAA8B;AAMnF;AAqBA;;AAEaC,KA7BDD,eAAAA,GA6BCC,CAAAA,OA7ByBD,eA6BzBC,CAAAA,CAAAA,MAAAA,OA7BuDD,eA6BvDC,CAAAA;;;;;;AAuBWJ,UA9CPI,cAAAA,CA8COJ;EAAQ,CAAA,SACRJ,EAAAA,MAAAA,CAAAA,EAAAA,CAAAA,QAAAA,EAAAA,MAAAA,EAAAA,GA9CuBM,kBA8CvBN;;;;AA1BuC;;;;;;;;;;;;;;;;cAA1CS,eAAAA,SAAwBH,kBAAAA;;WAEhCE;;WAEAD;8CACmCC,+CAA+CD;;;;;;;;;;;UAWnFG,QAAQL;;;;;;;;oBAQEK;oBACEN;oBACAJ;oBACAE"}
+ {"version":3,"file":"directory.d.cts","names":["extname","ExtnameT","resolve","ResolveT","readdir","ReaddirT","Document","BaseDocumentLoader","UnknownHandling","LoadersMapping","DirectoryLoader","Promise"],"sources":["../../../src/document_loaders/fs/directory.d.ts"],"sourcesContent":["import type { extname as ExtnameT, resolve as ResolveT } from \"node:path\";\nimport type { readdir as ReaddirT } from \"node:fs/promises\";\nimport { Document } from \"@langchain/core/documents\";\nimport { BaseDocumentLoader } from \"@langchain/core/document_loaders/base\";\n// TypeScript enums are not tree-shakeable, so doing this instead\n// See https://bargsten.org/jsts/enums/\nexport declare const UnknownHandling: {\n readonly Ignore: \"ignore\";\n readonly Warn: \"warn\";\n readonly Error: \"error\";\n};\n/**\n * An enumeration of possible handling strategies for unknown file types.\n */\nexport type UnknownHandling = (typeof UnknownHandling)[keyof typeof UnknownHandling];\n/**\n * A mapping of file extensions to loader functions. Each loader function\n * takes a file path as a parameter and returns a `BaseDocumentLoader`\n * instance.\n */\nexport interface LoadersMapping {\n [extension: string]: (filePath: string) => BaseDocumentLoader;\n}\n/**\n * A document loader that loads documents from a directory. It extends the\n * `BaseDocumentLoader` class and implements the `load()` method.\n * @example\n * ```typescript\n *\n * const directoryLoader = new DirectoryLoader(\n * \"src/document_loaders/example_data/\",\n * {\n * \".pdf\": (path: string) => new PDFLoader(path),\n * },\n * );\n *\n * const docs = await directoryLoader.load();\n * console.log({ docs });\n *\n * ```\n */\nexport declare class DirectoryLoader extends BaseDocumentLoader {\n directoryPath: string;\n loaders: LoadersMapping;\n recursive: boolean;\n unknown: UnknownHandling;\n constructor(directoryPath: string, loaders: LoadersMapping, recursive?: boolean, unknown?: UnknownHandling);\n /**\n * Loads the documents from the directory. If a file is a directory and\n * `recursive` is `true`, it recursively loads documents from the\n * subdirectory. If a file is a file, it checks if there is a\n * corresponding loader function for the file extension in the `loaders`\n * mapping. If there is, it loads the documents. If there is no\n * corresponding loader function and `unknown` is set to `Warn`, it logs a\n * warning message. If `unknown` is set to `Error`, it throws an error.\n * @returns A promise that resolves to an array of loaded documents.\n */\n load(): Promise<Document[]>;\n /**\n * Imports the necessary functions from the `node:path` and\n * `node:fs/promises` modules. It is used to dynamically import the\n * functions when needed. If the import fails, it throws an error\n * indicating that the modules failed to load.\n * @returns A promise that resolves to an object containing the imported functions.\n */\n static imports(): Promise<{\n readdir: typeof ReaddirT;\n extname: typeof ExtnameT;\n resolve: typeof ResolveT;\n }>;\n}\n"],"mappings":";;;;;;;;AAMqBQ,cAAAA,eAIpB,EAAA;EAIWA,SAAAA,MAAAA,EAAAA,QAAe;EAAA,SAAA,IAAA,EAAA,MAAA;EAAA,SAAWA,KAAAA,EAAAA,OAAAA;CAAe;AAA8B;AAMnF;AAqBA;AAAoC,KA3BxBA,eAAAA,GA2BwB,CAAA,OA3BEA,eA2BF,CAAA,CAAA,MAAA,OA3BgCA,eA2BhC,CAAA;;;;;;AAgBxBG,UArCKF,cAAAA,CAqCLE;EAAO,CAAA,SASKN,EAAAA,MAAAA,CAAAA,EAAAA,CAAAA,QAAAA,EAAAA,MAAAA,EAAAA,GA7CuBE,kBA6CvBF;;;;;AAzBuC;;;;;;;;;;;;;;;cAA1CK,eAAAA,SAAwBH,kBAAAA;;WAEhCE;;WAEAD;8CACmCC,+CAA+CD;;;;;;;;;;;UAWnFG,QAAQL;;;;;;;;oBAQEK;oBACEN;oBACAJ;oBACAE"}
@@ -11,7 +11,6 @@ declare const UnknownHandling: {
  readonly Warn: "warn";
  readonly Error: "error";
  };
- // eslint-disable-next-line @typescript-eslint/no-redeclare
  /**
  * An enumeration of possible handling strategies for unknown file types.
  */
@@ -1 +1 @@
- {"version":3,"file":"directory.d.ts","names":["extname","ExtnameT","resolve","ResolveT","readdir","ReaddirT","Document","BaseDocumentLoader","UnknownHandling","LoadersMapping","DirectoryLoader","Promise"],"sources":["../../../src/document_loaders/fs/directory.d.ts"],"sourcesContent":["import type { extname as ExtnameT, resolve as ResolveT } from \"node:path\";\nimport type { readdir as ReaddirT } from \"node:fs/promises\";\nimport { Document } from \"@langchain/core/documents\";\nimport { BaseDocumentLoader } from \"@langchain/core/document_loaders/base\";\n// TypeScript enums are not tree-shakeable, so doing this instead\n// See https://bargsten.org/jsts/enums/\nexport declare const UnknownHandling: {\n readonly Ignore: \"ignore\";\n readonly Warn: \"warn\";\n readonly Error: \"error\";\n};\n// eslint-disable-next-line @typescript-eslint/no-redeclare\n/**\n * An enumeration of possible handling strategies for unknown file types.\n */\nexport type UnknownHandling = (typeof UnknownHandling)[keyof typeof UnknownHandling];\n/**\n * A mapping of file extensions to loader functions. Each loader function\n * takes a file path as a parameter and returns a `BaseDocumentLoader`\n * instance.\n */\nexport interface LoadersMapping {\n [extension: string]: (filePath: string) => BaseDocumentLoader;\n}\n/**\n * A document loader that loads documents from a directory. It extends the\n * `BaseDocumentLoader` class and implements the `load()` method.\n * @example\n * ```typescript\n *\n * const directoryLoader = new DirectoryLoader(\n * \"src/document_loaders/example_data/\",\n * {\n * \".pdf\": (path: string) => new PDFLoader(path),\n * },\n * );\n *\n * const docs = await directoryLoader.load();\n * console.log({ docs });\n *\n * ```\n */\nexport declare class DirectoryLoader extends BaseDocumentLoader {\n directoryPath: string;\n loaders: LoadersMapping;\n recursive: boolean;\n unknown: UnknownHandling;\n constructor(directoryPath: string, loaders: LoadersMapping, recursive?: boolean, unknown?: UnknownHandling);\n /**\n * Loads the documents from the directory. If a file is a directory and\n * `recursive` is `true`, it recursively loads documents from the\n * subdirectory. If a file is a file, it checks if there is a\n * corresponding loader function for the file extension in the `loaders`\n * mapping. If there is, it loads the documents. If there is no\n * corresponding loader function and `unknown` is set to `Warn`, it logs a\n * warning message. If `unknown` is set to `Error`, it throws an error.\n * @returns A promise that resolves to an array of loaded documents.\n */\n load(): Promise<Document[]>;\n /**\n * Imports the necessary functions from the `node:path` and\n * `node:fs/promises` modules. It is used to dynamically import the\n * functions when needed. 
If the import fails, it throws an error\n * indicating that the modules failed to load.\n * @returns A promise that resolves to an object containing the imported functions.\n */\n static imports(): Promise<{\n readdir: typeof ReaddirT;\n extname: typeof ExtnameT;\n resolve: typeof ResolveT;\n }>;\n}\n"],"mappings":";;;;;;;;AAMqBQ,cAAAA,eAIpB,EAAA;EAKWA,SAAAA,MAAAA,EAAAA,QAAe;EAAA,SAAA,IAAA,EAAA,MAAA;EAAA,SAAWA,KAAAA,EAAAA,OAAAA;CAAe;AAA8B;AAMnF;AAqBA;;AAEaC,KA7BDD,eAAAA,GA6BCC,CAAAA,OA7ByBD,eA6BzBC,CAAAA,CAAAA,MAAAA,OA7BuDD,eA6BvDC,CAAAA;;;;;;AAuBWJ,UA9CPI,cAAAA,CA8COJ;EAAQ,CAAA,SACRJ,EAAAA,MAAAA,CAAAA,EAAAA,CAAAA,QAAAA,EAAAA,MAAAA,EAAAA,GA9CuBM,kBA8CvBN;;;;AA1BuC;;;;;;;;;;;;;;;;cAA1CS,eAAAA,SAAwBH,kBAAAA;;WAEhCE;;WAEAD;8CACmCC,+CAA+CD;;;;;;;;;;;UAWnFG,QAAQL;;;;;;;;oBAQEK;oBACEN;oBACAJ;oBACAE"}
+ {"version":3,"file":"directory.d.ts","names":["extname","ExtnameT","resolve","ResolveT","readdir","ReaddirT","Document","BaseDocumentLoader","UnknownHandling","LoadersMapping","DirectoryLoader","Promise"],"sources":["../../../src/document_loaders/fs/directory.d.ts"],"sourcesContent":["import type { extname as ExtnameT, resolve as ResolveT } from \"node:path\";\nimport type { readdir as ReaddirT } from \"node:fs/promises\";\nimport { Document } from \"@langchain/core/documents\";\nimport { BaseDocumentLoader } from \"@langchain/core/document_loaders/base\";\n// TypeScript enums are not tree-shakeable, so doing this instead\n// See https://bargsten.org/jsts/enums/\nexport declare const UnknownHandling: {\n readonly Ignore: \"ignore\";\n readonly Warn: \"warn\";\n readonly Error: \"error\";\n};\n/**\n * An enumeration of possible handling strategies for unknown file types.\n */\nexport type UnknownHandling = (typeof UnknownHandling)[keyof typeof UnknownHandling];\n/**\n * A mapping of file extensions to loader functions. Each loader function\n * takes a file path as a parameter and returns a `BaseDocumentLoader`\n * instance.\n */\nexport interface LoadersMapping {\n [extension: string]: (filePath: string) => BaseDocumentLoader;\n}\n/**\n * A document loader that loads documents from a directory. It extends the\n * `BaseDocumentLoader` class and implements the `load()` method.\n * @example\n * ```typescript\n *\n * const directoryLoader = new DirectoryLoader(\n * \"src/document_loaders/example_data/\",\n * {\n * \".pdf\": (path: string) => new PDFLoader(path),\n * },\n * );\n *\n * const docs = await directoryLoader.load();\n * console.log({ docs });\n *\n * ```\n */\nexport declare class DirectoryLoader extends BaseDocumentLoader {\n directoryPath: string;\n loaders: LoadersMapping;\n recursive: boolean;\n unknown: UnknownHandling;\n constructor(directoryPath: string, loaders: LoadersMapping, recursive?: boolean, unknown?: UnknownHandling);\n /**\n * Loads the documents from the directory. If a file is a directory and\n * `recursive` is `true`, it recursively loads documents from the\n * subdirectory. If a file is a file, it checks if there is a\n * corresponding loader function for the file extension in the `loaders`\n * mapping. If there is, it loads the documents. If there is no\n * corresponding loader function and `unknown` is set to `Warn`, it logs a\n * warning message. If `unknown` is set to `Error`, it throws an error.\n * @returns A promise that resolves to an array of loaded documents.\n */\n load(): Promise<Document[]>;\n /**\n * Imports the necessary functions from the `node:path` and\n * `node:fs/promises` modules. It is used to dynamically import the\n * functions when needed. If the import fails, it throws an error\n * indicating that the modules failed to load.\n * @returns A promise that resolves to an object containing the imported functions.\n */\n static imports(): Promise<{\n readdir: typeof ReaddirT;\n extname: typeof ExtnameT;\n resolve: typeof ResolveT;\n }>;\n}\n"],"mappings":";;;;;;;;AAMqBQ,cAAAA,eAIpB,EAAA;EAIWA,SAAAA,MAAAA,EAAAA,QAAe;EAAA,SAAA,IAAA,EAAA,MAAA;EAAA,SAAWA,KAAAA,EAAAA,OAAAA;CAAe;AAA8B;AAMnF;AAqBA;AAAoC,KA3BxBA,eAAAA,GA2BwB,CAAA,OA3BEA,eA2BF,CAAA,CAAA,MAAA,OA3BgCA,eA2BhC,CAAA;;;;;;AAgBxBG,UArCKF,cAAAA,CAqCLE;EAAO,CAAA,SASKN,EAAAA,MAAAA,CAAAA,EAAAA,CAAAA,QAAAA,EAAAA,MAAAA,EAAAA,GA7CuBE,kBA6CvBF;;;;;AAzBuC;;;;;;;;;;;;;;;cAA1CK,eAAAA,SAAwBH,kBAAAA;;WAEhCE;;WAEAD;8CACmCC,+CAA+CD;;;;;;;;;;;UAWnFG,QAAQL;;;;;;;;oBAQEK;oBACEN;oBACAJ;oBACAE"}
@@ -1 +1 @@
- {"version":3,"file":"directory.js","names":["directoryPath: string","loaders: LoadersMapping","recursive: boolean","unknown: UnknownHandling","documents: Document[]"],"sources":["../../../src/document_loaders/fs/directory.ts"],"sourcesContent":["import type { extname as ExtnameT, resolve as ResolveT } from \"node:path\";\nimport type { readdir as ReaddirT } from \"node:fs/promises\";\nimport { Document } from \"@langchain/core/documents\";\nimport { getEnv } from \"@langchain/core/utils/env\";\nimport { BaseDocumentLoader } from \"@langchain/core/document_loaders/base\";\n\n// TypeScript enums are not tree-shakeable, so doing this instead\n// See https://bargsten.org/jsts/enums/\nexport const UnknownHandling = {\n Ignore: \"ignore\",\n Warn: \"warn\",\n Error: \"error\",\n} as const;\n// eslint-disable-next-line @typescript-eslint/no-redeclare\n/**\n * An enumeration of possible handling strategies for unknown file types.\n */\nexport type UnknownHandling =\n (typeof UnknownHandling)[keyof typeof UnknownHandling];\n\n/**\n * A mapping of file extensions to loader functions. Each loader function\n * takes a file path as a parameter and returns a `BaseDocumentLoader`\n * instance.\n */\nexport interface LoadersMapping {\n [extension: string]: (filePath: string) => BaseDocumentLoader;\n}\n\n/**\n * A document loader that loads documents from a directory. It extends the\n * `BaseDocumentLoader` class and implements the `load()` method.\n * @example\n * ```typescript\n *\n * const directoryLoader = new DirectoryLoader(\n * \"src/document_loaders/example_data/\",\n * {\n * \".pdf\": (path: string) => new PDFLoader(path),\n * },\n * );\n *\n * const docs = await directoryLoader.load();\n * console.log({ docs });\n *\n * ```\n */\nexport class DirectoryLoader extends BaseDocumentLoader {\n constructor(\n public directoryPath: string,\n public loaders: LoadersMapping,\n public recursive: boolean = true,\n public unknown: UnknownHandling = UnknownHandling.Warn\n ) {\n super();\n\n if (Object.keys(loaders).length === 0) {\n throw new Error(\"Must provide at least one loader\");\n }\n for (const extension in loaders) {\n if (Object.hasOwn(loaders, extension)) {\n if (extension[0] !== \".\") {\n throw new Error(`Extension must start with a dot: ${extension}`);\n }\n }\n }\n }\n\n /**\n * Loads the documents from the directory. If a file is a directory and\n * `recursive` is `true`, it recursively loads documents from the\n * subdirectory. If a file is a file, it checks if there is a\n * corresponding loader function for the file extension in the `loaders`\n * mapping. If there is, it loads the documents. If there is no\n * corresponding loader function and `unknown` is set to `Warn`, it logs a\n * warning message. 
If `unknown` is set to `Error`, it throws an error.\n * @returns A promise that resolves to an array of loaded documents.\n */\n public async load(): Promise<Document[]> {\n const { readdir, extname, resolve } = await DirectoryLoader.imports();\n const files = await readdir(this.directoryPath, { withFileTypes: true });\n\n const documents: Document[] = [];\n\n for (const file of files) {\n const fullPath = resolve(this.directoryPath, file.name);\n if (file.isDirectory()) {\n if (this.recursive) {\n const loader = new DirectoryLoader(\n fullPath,\n this.loaders,\n this.recursive,\n this.unknown\n );\n documents.push(...(await loader.load()));\n }\n } else {\n // I'm aware some things won't be files,\n // but they will be caught by the \"unknown\" handling below.\n const loaderFactory = this.loaders[extname(file.name)];\n if (loaderFactory) {\n const loader = loaderFactory(fullPath);\n documents.push(...(await loader.load()));\n } else {\n switch (this.unknown) {\n case UnknownHandling.Ignore:\n break;\n case UnknownHandling.Warn:\n console.warn(`Unknown file type: ${file.name}`);\n break;\n case UnknownHandling.Error:\n throw new Error(`Unknown file type: ${file.name}`);\n default:\n throw new Error(`Unknown unknown handling: ${this.unknown}`);\n }\n }\n }\n }\n\n return documents;\n }\n\n /**\n * Imports the necessary functions from the `node:path` and\n * `node:fs/promises` modules. It is used to dynamically import the\n * functions when needed. If the import fails, it throws an error\n * indicating that the modules failed to load.\n * @returns A promise that resolves to an object containing the imported functions.\n */\n static async imports(): Promise<{\n readdir: typeof ReaddirT;\n extname: typeof ExtnameT;\n resolve: typeof ResolveT;\n }> {\n try {\n const { extname, resolve } = await import(\"node:path\");\n const { readdir } = await import(\"node:fs/promises\");\n return { readdir, extname, resolve };\n } catch (e) {\n console.error(e);\n throw new Error(\n `Failed to load fs/promises. DirectoryLoader available only on environment 'node'. It appears you are running environment '${getEnv()}'. 
See https://<link to docs> for alternatives.`\n );\n }\n }\n}\n"],"mappings":";;;;;;;;;;AAQA,MAAa,kBAAkB;CAC7B,QAAQ;CACR,MAAM;CACN,OAAO;AACR;;;;;;;;;;;;;;;;;;;AAmCD,IAAa,kBAAb,MAAa,wBAAwB,mBAAmB;CACtD,YACSA,eACAC,SACAC,YAAqB,MACrBC,UAA2B,gBAAgB,MAClD;EACA,OAAO;EALA;EACA;EACA;EACA;AAIP,MAAI,OAAO,KAAK,QAAQ,CAAC,WAAW,EAClC,OAAM,IAAI,MAAM;AAElB,OAAK,MAAM,aAAa,QACtB,KAAI,OAAO,OAAO,SAAS,UAAU,EACnC;OAAI,UAAU,OAAO,IACnB,OAAM,IAAI,MAAM,CAAC,iCAAiC,EAAE,WAAW;EAChE;CAGN;;;;;;;;;;;CAYD,MAAa,OAA4B;EACvC,MAAM,EAAE,SAAS,SAAS,SAAS,GAAG,MAAM,gBAAgB,SAAS;EACrE,MAAM,QAAQ,MAAM,QAAQ,KAAK,eAAe,EAAE,eAAe,KAAM,EAAC;EAExE,MAAMC,YAAwB,CAAE;AAEhC,OAAK,MAAM,QAAQ,OAAO;GACxB,MAAM,WAAW,QAAQ,KAAK,eAAe,KAAK,KAAK;AACvD,OAAI,KAAK,aAAa,EACpB;QAAI,KAAK,WAAW;KAClB,MAAM,SAAS,IAAI,gBACjB,UACA,KAAK,SACL,KAAK,WACL,KAAK;KAEP,UAAU,KAAK,GAAI,MAAM,OAAO,MAAM,CAAE;IACzC;UACI;IAGL,MAAM,gBAAgB,KAAK,QAAQ,QAAQ,KAAK,KAAK;AACrD,QAAI,eAAe;KACjB,MAAM,SAAS,cAAc,SAAS;KACtC,UAAU,KAAK,GAAI,MAAM,OAAO,MAAM,CAAE;IACzC,MACC,SAAQ,KAAK,SAAb;KACE,KAAK,gBAAgB,OACnB;KACF,KAAK,gBAAgB;MACnB,QAAQ,KAAK,CAAC,mBAAmB,EAAE,KAAK,MAAM,CAAC;AAC/C;KACF,KAAK,gBAAgB,MACnB,OAAM,IAAI,MAAM,CAAC,mBAAmB,EAAE,KAAK,MAAM;KACnD,QACE,OAAM,IAAI,MAAM,CAAC,0BAA0B,EAAE,KAAK,SAAS;IAC9D;GAEJ;EACF;AAED,SAAO;CACR;;;;;;;;CASD,aAAa,UAIV;AACD,MAAI;GACF,MAAM,EAAE,SAAS,SAAS,GAAG,MAAM,OAAO;GAC1C,MAAM,EAAE,SAAS,GAAG,MAAM,OAAO;AACjC,UAAO;IAAE;IAAS;IAAS;GAAS;EACrC,SAAQ,GAAG;GACV,QAAQ,MAAM,EAAE;AAChB,SAAM,IAAI,MACR,CAAC,0HAA0H,EAAE,QAAQ,CAAC,+CAA+C,CAAC;EAEzL;CACF;AACF"}
+ {"version":3,"file":"directory.js","names":["directoryPath: string","loaders: LoadersMapping","recursive: boolean","unknown: UnknownHandling","documents: Document[]"],"sources":["../../../src/document_loaders/fs/directory.ts"],"sourcesContent":["import type { extname as ExtnameT, resolve as ResolveT } from \"node:path\";\nimport type { readdir as ReaddirT } from \"node:fs/promises\";\nimport { Document } from \"@langchain/core/documents\";\nimport { getEnv } from \"@langchain/core/utils/env\";\nimport { BaseDocumentLoader } from \"@langchain/core/document_loaders/base\";\n\n// TypeScript enums are not tree-shakeable, so doing this instead\n// See https://bargsten.org/jsts/enums/\nexport const UnknownHandling = {\n Ignore: \"ignore\",\n Warn: \"warn\",\n Error: \"error\",\n} as const;\n/**\n * An enumeration of possible handling strategies for unknown file types.\n */\nexport type UnknownHandling =\n (typeof UnknownHandling)[keyof typeof UnknownHandling];\n\n/**\n * A mapping of file extensions to loader functions. Each loader function\n * takes a file path as a parameter and returns a `BaseDocumentLoader`\n * instance.\n */\nexport interface LoadersMapping {\n [extension: string]: (filePath: string) => BaseDocumentLoader;\n}\n\n/**\n * A document loader that loads documents from a directory. It extends the\n * `BaseDocumentLoader` class and implements the `load()` method.\n * @example\n * ```typescript\n *\n * const directoryLoader = new DirectoryLoader(\n * \"src/document_loaders/example_data/\",\n * {\n * \".pdf\": (path: string) => new PDFLoader(path),\n * },\n * );\n *\n * const docs = await directoryLoader.load();\n * console.log({ docs });\n *\n * ```\n */\nexport class DirectoryLoader extends BaseDocumentLoader {\n constructor(\n public directoryPath: string,\n public loaders: LoadersMapping,\n public recursive: boolean = true,\n public unknown: UnknownHandling = UnknownHandling.Warn\n ) {\n super();\n\n if (Object.keys(loaders).length === 0) {\n throw new Error(\"Must provide at least one loader\");\n }\n for (const extension in loaders) {\n if (Object.hasOwn(loaders, extension)) {\n if (extension[0] !== \".\") {\n throw new Error(`Extension must start with a dot: ${extension}`);\n }\n }\n }\n }\n\n /**\n * Loads the documents from the directory. If a file is a directory and\n * `recursive` is `true`, it recursively loads documents from the\n * subdirectory. If a file is a file, it checks if there is a\n * corresponding loader function for the file extension in the `loaders`\n * mapping. If there is, it loads the documents. If there is no\n * corresponding loader function and `unknown` is set to `Warn`, it logs a\n * warning message. 
If `unknown` is set to `Error`, it throws an error.\n * @returns A promise that resolves to an array of loaded documents.\n */\n public async load(): Promise<Document[]> {\n const { readdir, extname, resolve } = await DirectoryLoader.imports();\n const files = await readdir(this.directoryPath, { withFileTypes: true });\n\n const documents: Document[] = [];\n\n for (const file of files) {\n const fullPath = resolve(this.directoryPath, file.name);\n if (file.isDirectory()) {\n if (this.recursive) {\n const loader = new DirectoryLoader(\n fullPath,\n this.loaders,\n this.recursive,\n this.unknown\n );\n documents.push(...(await loader.load()));\n }\n } else {\n // I'm aware some things won't be files,\n // but they will be caught by the \"unknown\" handling below.\n const loaderFactory = this.loaders[extname(file.name)];\n if (loaderFactory) {\n const loader = loaderFactory(fullPath);\n documents.push(...(await loader.load()));\n } else {\n switch (this.unknown) {\n case UnknownHandling.Ignore:\n break;\n case UnknownHandling.Warn:\n console.warn(`Unknown file type: ${file.name}`);\n break;\n case UnknownHandling.Error:\n throw new Error(`Unknown file type: ${file.name}`);\n default:\n throw new Error(`Unknown unknown handling: ${this.unknown}`);\n }\n }\n }\n }\n\n return documents;\n }\n\n /**\n * Imports the necessary functions from the `node:path` and\n * `node:fs/promises` modules. It is used to dynamically import the\n * functions when needed. If the import fails, it throws an error\n * indicating that the modules failed to load.\n * @returns A promise that resolves to an object containing the imported functions.\n */\n static async imports(): Promise<{\n readdir: typeof ReaddirT;\n extname: typeof ExtnameT;\n resolve: typeof ResolveT;\n }> {\n try {\n const { extname, resolve } = await import(\"node:path\");\n const { readdir } = await import(\"node:fs/promises\");\n return { readdir, extname, resolve };\n } catch (e) {\n console.error(e);\n throw new Error(\n `Failed to load fs/promises. DirectoryLoader available only on environment 'node'. It appears you are running environment '${getEnv()}'. 
See https://<link to docs> for alternatives.`\n );\n }\n }\n}\n"],"mappings":";;;;;;;;;;AAQA,MAAa,kBAAkB;CAC7B,QAAQ;CACR,MAAM;CACN,OAAO;AACR;;;;;;;;;;;;;;;;;;;AAkCD,IAAa,kBAAb,MAAa,wBAAwB,mBAAmB;CACtD,YACSA,eACAC,SACAC,YAAqB,MACrBC,UAA2B,gBAAgB,MAClD;EACA,OAAO;EALA;EACA;EACA;EACA;AAIP,MAAI,OAAO,KAAK,QAAQ,CAAC,WAAW,EAClC,OAAM,IAAI,MAAM;AAElB,OAAK,MAAM,aAAa,QACtB,KAAI,OAAO,OAAO,SAAS,UAAU,EACnC;OAAI,UAAU,OAAO,IACnB,OAAM,IAAI,MAAM,CAAC,iCAAiC,EAAE,WAAW;EAChE;CAGN;;;;;;;;;;;CAYD,MAAa,OAA4B;EACvC,MAAM,EAAE,SAAS,SAAS,SAAS,GAAG,MAAM,gBAAgB,SAAS;EACrE,MAAM,QAAQ,MAAM,QAAQ,KAAK,eAAe,EAAE,eAAe,KAAM,EAAC;EAExE,MAAMC,YAAwB,CAAE;AAEhC,OAAK,MAAM,QAAQ,OAAO;GACxB,MAAM,WAAW,QAAQ,KAAK,eAAe,KAAK,KAAK;AACvD,OAAI,KAAK,aAAa,EACpB;QAAI,KAAK,WAAW;KAClB,MAAM,SAAS,IAAI,gBACjB,UACA,KAAK,SACL,KAAK,WACL,KAAK;KAEP,UAAU,KAAK,GAAI,MAAM,OAAO,MAAM,CAAE;IACzC;UACI;IAGL,MAAM,gBAAgB,KAAK,QAAQ,QAAQ,KAAK,KAAK;AACrD,QAAI,eAAe;KACjB,MAAM,SAAS,cAAc,SAAS;KACtC,UAAU,KAAK,GAAI,MAAM,OAAO,MAAM,CAAE;IACzC,MACC,SAAQ,KAAK,SAAb;KACE,KAAK,gBAAgB,OACnB;KACF,KAAK,gBAAgB;MACnB,QAAQ,KAAK,CAAC,mBAAmB,EAAE,KAAK,MAAM,CAAC;AAC/C;KACF,KAAK,gBAAgB,MACnB,OAAM,IAAI,MAAM,CAAC,mBAAmB,EAAE,KAAK,MAAM;KACnD,QACE,OAAM,IAAI,MAAM,CAAC,0BAA0B,EAAE,KAAK,SAAS;IAC9D;GAEJ;EACF;AAED,SAAO;CACR;;;;;;;;CASD,aAAa,UAIV;AACD,MAAI;GACF,MAAM,EAAE,SAAS,SAAS,GAAG,MAAM,OAAO;GAC1C,MAAM,EAAE,SAAS,GAAG,MAAM,OAAO;AACjC,UAAO;IAAE;IAAS;IAAS;GAAS;EACrC,SAAQ,GAAG;GACV,QAAQ,MAAM,EAAE;AAChB,SAAM,IAAI,MACR,CAAC,0HAA0H,EAAE,QAAQ,CAAC,+CAA+C,CAAC;EAEzL;CACF;AACF"}
@@ -106,7 +106,13 @@ var JSONLinesLoader = class extends require_document_loaders_fs_text.TextLoader
  const lines = raw.split("\n");
  const jsons = lines.map((line) => line.trim()).filter(Boolean).map((line) => JSON.parse(line));
  const pointer = jsonpointer.default.compile(this.pointer);
- return jsons.map((json) => pointer.get(json));
+ return jsons.map((json) => {
+   const data = pointer.get(json);
+   if (typeof data === "string") return data;
+   if (!data) return "";
+   if (typeof data === "object") return JSON.stringify(data);
+   return "";
+ });
  }
  };
 
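The change above (mirrored in the ESM build further down) normalizes whatever the compiled JSON pointer resolves to: strings pass through, objects are `JSON.stringify`'d, and nullish values become empty strings, so page content is always a string. A rough usage sketch, assuming the classic `langchain/document_loaders/fs/json` entrypoint; the file path and the `/html` pointer are illustrative.

```typescript
import { JSONLinesLoader } from "langchain/document_loaders/fs/json";

// Each line of the .jsonl file is parsed and the "/html" pointer is applied.
// With this release, object results are stringified and missing/null results
// become "" instead of leaking non-string values into Document.pageContent.
const loader = new JSONLinesLoader("./example_data/messages.jsonl", "/html");
const docs = await loader.load();
console.log(docs.every((d) => typeof d.pageContent === "string")); // true
```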
@@ -1 +1 @@
- {"version":3,"file":"json.cjs","names":["TextLoader","filePathOrBlob: string | Blob","pointers: string | string[]","raw: string","json: any","pointers: jsonpointer[]","extractedString: string[]","entry: object","extractedStrings: string[]","json: object","pointer: string"],"sources":["../../../src/document_loaders/fs/json.ts"],"sourcesContent":["import jsonpointer from \"jsonpointer\";\nimport { TextLoader } from \"./text.js\";\n\n/**\n * Class that extends the `TextLoader` class. It represents a document\n * loader that loads documents from JSON files. It has a constructor that\n * takes a `filePathOrBlob` parameter representing the path to the JSON\n * file or a `Blob` object, and an optional `pointers` parameter that\n * specifies the JSON pointers to extract.\n */\nexport class JSONLoader extends TextLoader {\n public pointers: string[];\n\n constructor(filePathOrBlob: string | Blob, pointers: string | string[] = []) {\n super(filePathOrBlob);\n this.pointers = Array.isArray(pointers) ? pointers : [pointers];\n }\n\n /**\n * Method that takes a `raw` string as a parameter and returns a promise\n * that resolves to an array of strings. It parses the raw JSON string and\n * extracts the values based on the specified JSON pointers. If no JSON\n * pointers are specified, it extracts all the strings from the JSON\n * object.\n * @param raw The raw JSON string to parse.\n * @returns A promise that resolves to an array of strings.\n */\n protected async parse(raw: string): Promise<string[]> {\n const json = JSON.parse(raw.trim());\n // If there is no pointers specified we extract all strings we found\n const extractAllStrings = !(this.pointers.length > 0);\n const compiledPointers = this.pointers.map((pointer) =>\n jsonpointer.compile(pointer)\n );\n\n return this.extractArrayStringsFromObject(\n json,\n compiledPointers,\n extractAllStrings\n );\n }\n\n /**\n * If JSON pointers are specified, return all strings below any of them\n * and exclude all other nodes expect if they match a JSON pointer (to allow to extract strings from different levels)\n *\n * If no JSON pointer is specified then return all string in the object\n */\n private extractArrayStringsFromObject(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n json: any,\n pointers: jsonpointer[],\n extractAllStrings = false,\n keyHasBeenFound = false\n ): string[] {\n if (!json) {\n return [];\n }\n\n if (typeof json === \"string\" && extractAllStrings) {\n return [json];\n }\n\n if (Array.isArray(json) && extractAllStrings) {\n let extractedString: string[] = [];\n for (const element of json) {\n extractedString = extractedString.concat(\n this.extractArrayStringsFromObject(element, pointers, true)\n );\n }\n\n return extractedString;\n }\n\n if (typeof json === \"object\") {\n if (extractAllStrings) {\n return this.extractArrayStringsFromObject(\n Object.values(json),\n pointers,\n true\n );\n }\n\n const targetedEntries = this.getTargetedEntries(json, pointers);\n const thisLevelEntries = Object.values(json) as object[];\n const notTargetedEntries = thisLevelEntries.filter(\n (entry: object) => !targetedEntries.includes(entry)\n );\n\n let extractedStrings: string[] = [];\n // If we found a targeted entry, we extract all strings from it\n if (targetedEntries.length > 0) {\n for (const oneEntry of targetedEntries) {\n extractedStrings = extractedStrings.concat(\n this.extractArrayStringsFromObject(oneEntry, pointers, true, true)\n );\n }\n\n for (const oneEntry of notTargetedEntries) {\n extractedStrings = 
extractedStrings.concat(\n this.extractArrayStringsFromObject(oneEntry, pointers, false, true)\n );\n }\n } else if (extractAllStrings || !keyHasBeenFound) {\n for (const oneEntry of notTargetedEntries) {\n extractedStrings = extractedStrings.concat(\n this.extractArrayStringsFromObject(\n oneEntry,\n pointers,\n extractAllStrings\n )\n );\n }\n }\n\n return extractedStrings;\n }\n\n return [];\n }\n\n /**\n * Method that takes a `json` object and an array of `pointers` as\n * parameters and returns an array of targeted entries. It iterates over\n * the JSON pointers and uses the `jsonpointer.get()` function to get the\n * targeted entries from the JSON object.\n * @param json The JSON object to get targeted entries from.\n * @param pointers The JSON pointers to get targeted entries.\n * @returns An array of targeted entries.\n */\n private getTargetedEntries(json: object, pointers: jsonpointer[]): object[] {\n const targetEntries = [];\n for (const pointer of pointers) {\n const targetedEntry = pointer.get(json);\n if (targetedEntry) {\n targetEntries.push(targetedEntry);\n }\n }\n\n return targetEntries;\n }\n}\n\n/**\n * Class that extends the `TextLoader` class. It represents a document\n * loader that loads documents from JSON Lines files. It has a constructor\n * that takes a `filePathOrBlob` parameter representing the path to the\n * JSON Lines file or a `Blob` object, and a `pointer` parameter that\n * specifies the JSON pointer to extract.\n */\nexport class JSONLinesLoader extends TextLoader {\n constructor(filePathOrBlob: string | Blob, public pointer: string) {\n super(filePathOrBlob);\n }\n\n /**\n * Method that takes a `raw` string as a parameter and returns a promise\n * that resolves to an array of strings. It parses the raw JSON Lines\n * string, splits it into lines, parses each line as JSON, and extracts\n * the values based on the specified JSON pointer.\n * @param raw The raw JSON Lines string to parse.\n * @returns A promise that resolves to an array of strings.\n */\n protected async parse(raw: string): Promise<string[]> {\n const lines = raw.split(\"\\n\");\n const jsons = lines\n .map((line) => line.trim())\n .filter(Boolean)\n .map((line) => JSON.parse(line));\n const pointer = jsonpointer.compile(this.pointer);\n return jsons.map((json) => pointer.get(json));\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AAUA,IAAa,aAAb,cAAgCA,4CAAW;CACzC,AAAO;CAEP,YAAYC,gBAA+BC,WAA8B,CAAE,GAAE;EAC3E,MAAM,eAAe;EACrB,KAAK,WAAW,MAAM,QAAQ,SAAS,GAAG,WAAW,CAAC,QAAS;CAChE;;;;;;;;;;CAWD,MAAgB,MAAMC,KAAgC;EACpD,MAAM,OAAO,KAAK,MAAM,IAAI,MAAM,CAAC;EAEnC,MAAM,oBAAoB,EAAE,KAAK,SAAS,SAAS;EACnD,MAAM,mBAAmB,KAAK,SAAS,IAAI,CAAC,YAC1C,oBAAY,QAAQ,QAAQ,CAC7B;AAED,SAAO,KAAK,8BACV,MACA,kBACA,kBACD;CACF;;;;;;;CAQD,AAAQ,8BAENC,MACAC,UACA,oBAAoB,OACpB,kBAAkB,OACR;AACV,MAAI,CAAC,KACH,QAAO,CAAE;AAGX,MAAI,OAAO,SAAS,YAAY,kBAC9B,QAAO,CAAC,IAAK;AAGf,MAAI,MAAM,QAAQ,KAAK,IAAI,mBAAmB;GAC5C,IAAIC,kBAA4B,CAAE;AAClC,QAAK,MAAM,WAAW,MACpB,kBAAkB,gBAAgB,OAChC,KAAK,8BAA8B,SAAS,UAAU,KAAK,CAC5D;AAGH,UAAO;EACR;AAED,MAAI,OAAO,SAAS,UAAU;AAC5B,OAAI,kBACF,QAAO,KAAK,8BACV,OAAO,OAAO,KAAK,EACnB,UACA,KACD;GAGH,MAAM,kBAAkB,KAAK,mBAAmB,MAAM,SAAS;GAC/D,MAAM,mBAAmB,OAAO,OAAO,KAAK;GAC5C,MAAM,qBAAqB,iBAAiB,OAC1C,CAACC,UAAkB,CAAC,gBAAgB,SAAS,MAAM,CACpD;GAED,IAAIC,mBAA6B,CAAE;AAEnC,OAAI,gBAAgB,SAAS,GAAG;AAC9B,SAAK,MAAM,YAAY,iBACrB,mBAAmB,iBAAiB,OAClC,KAAK,8BAA8B,UAAU,UAAU,MAAM,KAAK,CACnE;AAGH,SAAK,MAAM,YAAY,oBACrB,mBAAmB,iBAAiB,OAClC,KAAK,8BAA8B,UAAU,UAAU,OAAO,KAAK,CACpE;GAEJ,WAAU,qBAAqB,CAAC,gBAC/B,MAAK,MAAM,YAAY,oBACrB,mBAAmB,iBAAiB,OAClC,KAAK,8BACH,UACA,UACA,kBACD,CACF;AAIL,UAAO;EACR;AAED,SAAO,CAAE;CACV;;;;;;;;;;CAWD,AAAQ,mBAAmBC,MAAcJ,UAAmC;EAC1E,MAAM,gBAAgB,CAAE;AACxB,OAAK,MAAM,WAAW,UAAU;GAC9B,MAAM,gBAAgB,QAAQ,IAAI,KAAK;AACvC,OAAI,eACF,cAAc,KAAK,cAAc;EAEpC;AAED,SAAO;CACR;AACF;;;;;;;;AASD,IAAa,kBAAb,cAAqCL,4CAAW;CAC9C,YAAYC,gBAAsCS,SAAiB;EACjE,MAAM,eAAe;EAD2B;CAEjD;;;;;;;;;CAUD,MAAgB,MAAMP,KAAgC;EACpD,MAAM,QAAQ,IAAI,MAAM,KAAK;EAC7B,MAAM,QAAQ,MACX,IAAI,CAAC,SAAS,KAAK,MAAM,CAAC,CAC1B,OAAO,QAAQ,CACf,IAAI,CAAC,SAAS,KAAK,MAAM,KAAK,CAAC;EAClC,MAAM,UAAU,oBAAY,QAAQ,KAAK,QAAQ;AACjD,SAAO,MAAM,IAAI,CAAC,SAAS,QAAQ,IAAI,KAAK,CAAC;CAC9C;AACF"}
+ {"version":3,"file":"json.cjs","names":["TextLoader","filePathOrBlob: string | Blob","pointers: string | string[]","raw: string","json: any","pointers: jsonpointer[]","extractedString: string[]","entry: object","extractedStrings: string[]","json: object","pointer: string"],"sources":["../../../src/document_loaders/fs/json.ts"],"sourcesContent":["import jsonpointer from \"jsonpointer\";\nimport { TextLoader } from \"./text.js\";\n\n/**\n * Class that extends the `TextLoader` class. It represents a document\n * loader that loads documents from JSON files. It has a constructor that\n * takes a `filePathOrBlob` parameter representing the path to the JSON\n * file or a `Blob` object, and an optional `pointers` parameter that\n * specifies the JSON pointers to extract.\n */\nexport class JSONLoader extends TextLoader {\n public pointers: string[];\n\n constructor(filePathOrBlob: string | Blob, pointers: string | string[] = []) {\n super(filePathOrBlob);\n this.pointers = Array.isArray(pointers) ? pointers : [pointers];\n }\n\n /**\n * Method that takes a `raw` string as a parameter and returns a promise\n * that resolves to an array of strings. It parses the raw JSON string and\n * extracts the values based on the specified JSON pointers. If no JSON\n * pointers are specified, it extracts all the strings from the JSON\n * object.\n * @param raw The raw JSON string to parse.\n * @returns A promise that resolves to an array of strings.\n */\n protected async parse(raw: string): Promise<string[]> {\n const json = JSON.parse(raw.trim());\n // If there is no pointers specified we extract all strings we found\n const extractAllStrings = !(this.pointers.length > 0);\n const compiledPointers = this.pointers.map((pointer) =>\n jsonpointer.compile(pointer)\n );\n\n return this.extractArrayStringsFromObject(\n json,\n compiledPointers,\n extractAllStrings\n );\n }\n\n /**\n * If JSON pointers are specified, return all strings below any of them\n * and exclude all other nodes expect if they match a JSON pointer (to allow to extract strings from different levels)\n *\n * If no JSON pointer is specified then return all string in the object\n */\n private extractArrayStringsFromObject(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n json: any,\n pointers: jsonpointer[],\n extractAllStrings = false,\n keyHasBeenFound = false\n ): string[] {\n if (!json) {\n return [];\n }\n\n if (typeof json === \"string\" && extractAllStrings) {\n return [json];\n }\n\n if (Array.isArray(json) && extractAllStrings) {\n let extractedString: string[] = [];\n for (const element of json) {\n extractedString = extractedString.concat(\n this.extractArrayStringsFromObject(element, pointers, true)\n );\n }\n\n return extractedString;\n }\n\n if (typeof json === \"object\") {\n if (extractAllStrings) {\n return this.extractArrayStringsFromObject(\n Object.values(json),\n pointers,\n true\n );\n }\n\n const targetedEntries = this.getTargetedEntries(json, pointers);\n const thisLevelEntries = Object.values(json) as object[];\n const notTargetedEntries = thisLevelEntries.filter(\n (entry: object) => !targetedEntries.includes(entry)\n );\n\n let extractedStrings: string[] = [];\n // If we found a targeted entry, we extract all strings from it\n if (targetedEntries.length > 0) {\n for (const oneEntry of targetedEntries) {\n extractedStrings = extractedStrings.concat(\n this.extractArrayStringsFromObject(oneEntry, pointers, true, true)\n );\n }\n\n for (const oneEntry of notTargetedEntries) {\n extractedStrings = 
extractedStrings.concat(\n this.extractArrayStringsFromObject(oneEntry, pointers, false, true)\n );\n }\n } else if (extractAllStrings || !keyHasBeenFound) {\n for (const oneEntry of notTargetedEntries) {\n extractedStrings = extractedStrings.concat(\n this.extractArrayStringsFromObject(\n oneEntry,\n pointers,\n extractAllStrings\n )\n );\n }\n }\n\n return extractedStrings;\n }\n\n return [];\n }\n\n /**\n * Method that takes a `json` object and an array of `pointers` as\n * parameters and returns an array of targeted entries. It iterates over\n * the JSON pointers and uses the `jsonpointer.get()` function to get the\n * targeted entries from the JSON object.\n * @param json The JSON object to get targeted entries from.\n * @param pointers The JSON pointers to get targeted entries.\n * @returns An array of targeted entries.\n */\n private getTargetedEntries(json: object, pointers: jsonpointer[]): object[] {\n const targetEntries = [];\n for (const pointer of pointers) {\n const targetedEntry = pointer.get(json);\n if (targetedEntry) {\n targetEntries.push(targetedEntry);\n }\n }\n\n return targetEntries;\n }\n}\n\n/**\n * Class that extends the `TextLoader` class. It represents a document\n * loader that loads documents from JSON Lines files. It has a constructor\n * that takes a `filePathOrBlob` parameter representing the path to the\n * JSON Lines file or a `Blob` object, and a `pointer` parameter that\n * specifies the JSON pointer to extract.\n */\nexport class JSONLinesLoader extends TextLoader {\n constructor(filePathOrBlob: string | Blob, public pointer: string) {\n super(filePathOrBlob);\n }\n\n /**\n * Method that takes a `raw` string as a parameter and returns a promise\n * that resolves to an array of strings. It parses the raw JSON Lines\n * string, splits it into lines, parses each line as JSON, and extracts\n * the values based on the specified JSON pointer.\n * @param raw The raw JSON Lines string to parse.\n * @returns A promise that resolves to an array of strings.\n */\n protected async parse(raw: string): Promise<string[]> {\n const lines = raw.split(\"\\n\");\n const jsons = lines\n .map((line) => line.trim())\n .filter(Boolean)\n .map((line) => JSON.parse(line));\n const pointer = jsonpointer.compile(this.pointer);\n return jsons.map((json) => {\n const data = pointer.get(json);\n if (typeof data === \"string\") {\n return data;\n }\n if (!data) {\n return \"\";\n }\n if (typeof data === \"object\") {\n return JSON.stringify(data);\n }\n return \"\";\n });\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AAUA,IAAa,aAAb,cAAgCA,4CAAW;CACzC,AAAO;CAEP,YAAYC,gBAA+BC,WAA8B,CAAE,GAAE;EAC3E,MAAM,eAAe;EACrB,KAAK,WAAW,MAAM,QAAQ,SAAS,GAAG,WAAW,CAAC,QAAS;CAChE;;;;;;;;;;CAWD,MAAgB,MAAMC,KAAgC;EACpD,MAAM,OAAO,KAAK,MAAM,IAAI,MAAM,CAAC;EAEnC,MAAM,oBAAoB,EAAE,KAAK,SAAS,SAAS;EACnD,MAAM,mBAAmB,KAAK,SAAS,IAAI,CAAC,YAC1C,oBAAY,QAAQ,QAAQ,CAC7B;AAED,SAAO,KAAK,8BACV,MACA,kBACA,kBACD;CACF;;;;;;;CAQD,AAAQ,8BAENC,MACAC,UACA,oBAAoB,OACpB,kBAAkB,OACR;AACV,MAAI,CAAC,KACH,QAAO,CAAE;AAGX,MAAI,OAAO,SAAS,YAAY,kBAC9B,QAAO,CAAC,IAAK;AAGf,MAAI,MAAM,QAAQ,KAAK,IAAI,mBAAmB;GAC5C,IAAIC,kBAA4B,CAAE;AAClC,QAAK,MAAM,WAAW,MACpB,kBAAkB,gBAAgB,OAChC,KAAK,8BAA8B,SAAS,UAAU,KAAK,CAC5D;AAGH,UAAO;EACR;AAED,MAAI,OAAO,SAAS,UAAU;AAC5B,OAAI,kBACF,QAAO,KAAK,8BACV,OAAO,OAAO,KAAK,EACnB,UACA,KACD;GAGH,MAAM,kBAAkB,KAAK,mBAAmB,MAAM,SAAS;GAC/D,MAAM,mBAAmB,OAAO,OAAO,KAAK;GAC5C,MAAM,qBAAqB,iBAAiB,OAC1C,CAACC,UAAkB,CAAC,gBAAgB,SAAS,MAAM,CACpD;GAED,IAAIC,mBAA6B,CAAE;AAEnC,OAAI,gBAAgB,SAAS,GAAG;AAC9B,SAAK,MAAM,YAAY,iBACrB,mBAAmB,iBAAiB,OAClC,KAAK,8BAA8B,UAAU,UAAU,MAAM,KAAK,CACnE;AAGH,SAAK,MAAM,YAAY,oBACrB,mBAAmB,iBAAiB,OAClC,KAAK,8BAA8B,UAAU,UAAU,OAAO,KAAK,CACpE;GAEJ,WAAU,qBAAqB,CAAC,gBAC/B,MAAK,MAAM,YAAY,oBACrB,mBAAmB,iBAAiB,OAClC,KAAK,8BACH,UACA,UACA,kBACD,CACF;AAIL,UAAO;EACR;AAED,SAAO,CAAE;CACV;;;;;;;;;;CAWD,AAAQ,mBAAmBC,MAAcJ,UAAmC;EAC1E,MAAM,gBAAgB,CAAE;AACxB,OAAK,MAAM,WAAW,UAAU;GAC9B,MAAM,gBAAgB,QAAQ,IAAI,KAAK;AACvC,OAAI,eACF,cAAc,KAAK,cAAc;EAEpC;AAED,SAAO;CACR;AACF;;;;;;;;AASD,IAAa,kBAAb,cAAqCL,4CAAW;CAC9C,YAAYC,gBAAsCS,SAAiB;EACjE,MAAM,eAAe;EAD2B;CAEjD;;;;;;;;;CAUD,MAAgB,MAAMP,KAAgC;EACpD,MAAM,QAAQ,IAAI,MAAM,KAAK;EAC7B,MAAM,QAAQ,MACX,IAAI,CAAC,SAAS,KAAK,MAAM,CAAC,CAC1B,OAAO,QAAQ,CACf,IAAI,CAAC,SAAS,KAAK,MAAM,KAAK,CAAC;EAClC,MAAM,UAAU,oBAAY,QAAQ,KAAK,QAAQ;AACjD,SAAO,MAAM,IAAI,CAAC,SAAS;GACzB,MAAM,OAAO,QAAQ,IAAI,KAAK;AAC9B,OAAI,OAAO,SAAS,SAClB,QAAO;AAET,OAAI,CAAC,KACH,QAAO;AAET,OAAI,OAAO,SAAS,SAClB,QAAO,KAAK,UAAU,KAAK;AAE7B,UAAO;EACR,EAAC;CACH;AACF"}
@@ -106,7 +106,13 @@ var JSONLinesLoader = class extends TextLoader {
  const lines = raw.split("\n");
  const jsons = lines.map((line) => line.trim()).filter(Boolean).map((line) => JSON.parse(line));
  const pointer = jsonpointer.compile(this.pointer);
- return jsons.map((json) => pointer.get(json));
+ return jsons.map((json) => {
+   const data = pointer.get(json);
+   if (typeof data === "string") return data;
+   if (!data) return "";
+   if (typeof data === "object") return JSON.stringify(data);
+   return "";
+ });
  }
  };
 
@@ -1 +1 @@
- {"version":3,"file":"json.js","names":["filePathOrBlob: string | Blob","pointers: string | string[]","raw: string","json: any","pointers: jsonpointer[]","extractedString: string[]","entry: object","extractedStrings: string[]","json: object","pointer: string"],"sources":["../../../src/document_loaders/fs/json.ts"],"sourcesContent":["import jsonpointer from \"jsonpointer\";\nimport { TextLoader } from \"./text.js\";\n\n/**\n * Class that extends the `TextLoader` class. It represents a document\n * loader that loads documents from JSON files. It has a constructor that\n * takes a `filePathOrBlob` parameter representing the path to the JSON\n * file or a `Blob` object, and an optional `pointers` parameter that\n * specifies the JSON pointers to extract.\n */\nexport class JSONLoader extends TextLoader {\n public pointers: string[];\n\n constructor(filePathOrBlob: string | Blob, pointers: string | string[] = []) {\n super(filePathOrBlob);\n this.pointers = Array.isArray(pointers) ? pointers : [pointers];\n }\n\n /**\n * Method that takes a `raw` string as a parameter and returns a promise\n * that resolves to an array of strings. It parses the raw JSON string and\n * extracts the values based on the specified JSON pointers. If no JSON\n * pointers are specified, it extracts all the strings from the JSON\n * object.\n * @param raw The raw JSON string to parse.\n * @returns A promise that resolves to an array of strings.\n */\n protected async parse(raw: string): Promise<string[]> {\n const json = JSON.parse(raw.trim());\n // If there is no pointers specified we extract all strings we found\n const extractAllStrings = !(this.pointers.length > 0);\n const compiledPointers = this.pointers.map((pointer) =>\n jsonpointer.compile(pointer)\n );\n\n return this.extractArrayStringsFromObject(\n json,\n compiledPointers,\n extractAllStrings\n );\n }\n\n /**\n * If JSON pointers are specified, return all strings below any of them\n * and exclude all other nodes expect if they match a JSON pointer (to allow to extract strings from different levels)\n *\n * If no JSON pointer is specified then return all string in the object\n */\n private extractArrayStringsFromObject(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n json: any,\n pointers: jsonpointer[],\n extractAllStrings = false,\n keyHasBeenFound = false\n ): string[] {\n if (!json) {\n return [];\n }\n\n if (typeof json === \"string\" && extractAllStrings) {\n return [json];\n }\n\n if (Array.isArray(json) && extractAllStrings) {\n let extractedString: string[] = [];\n for (const element of json) {\n extractedString = extractedString.concat(\n this.extractArrayStringsFromObject(element, pointers, true)\n );\n }\n\n return extractedString;\n }\n\n if (typeof json === \"object\") {\n if (extractAllStrings) {\n return this.extractArrayStringsFromObject(\n Object.values(json),\n pointers,\n true\n );\n }\n\n const targetedEntries = this.getTargetedEntries(json, pointers);\n const thisLevelEntries = Object.values(json) as object[];\n const notTargetedEntries = thisLevelEntries.filter(\n (entry: object) => !targetedEntries.includes(entry)\n );\n\n let extractedStrings: string[] = [];\n // If we found a targeted entry, we extract all strings from it\n if (targetedEntries.length > 0) {\n for (const oneEntry of targetedEntries) {\n extractedStrings = extractedStrings.concat(\n this.extractArrayStringsFromObject(oneEntry, pointers, true, true)\n );\n }\n\n for (const oneEntry of notTargetedEntries) {\n extractedStrings = 
extractedStrings.concat(\n this.extractArrayStringsFromObject(oneEntry, pointers, false, true)\n );\n }\n } else if (extractAllStrings || !keyHasBeenFound) {\n for (const oneEntry of notTargetedEntries) {\n extractedStrings = extractedStrings.concat(\n this.extractArrayStringsFromObject(\n oneEntry,\n pointers,\n extractAllStrings\n )\n );\n }\n }\n\n return extractedStrings;\n }\n\n return [];\n }\n\n /**\n * Method that takes a `json` object and an array of `pointers` as\n * parameters and returns an array of targeted entries. It iterates over\n * the JSON pointers and uses the `jsonpointer.get()` function to get the\n * targeted entries from the JSON object.\n * @param json The JSON object to get targeted entries from.\n * @param pointers The JSON pointers to get targeted entries.\n * @returns An array of targeted entries.\n */\n private getTargetedEntries(json: object, pointers: jsonpointer[]): object[] {\n const targetEntries = [];\n for (const pointer of pointers) {\n const targetedEntry = pointer.get(json);\n if (targetedEntry) {\n targetEntries.push(targetedEntry);\n }\n }\n\n return targetEntries;\n }\n}\n\n/**\n * Class that extends the `TextLoader` class. It represents a document\n * loader that loads documents from JSON Lines files. It has a constructor\n * that takes a `filePathOrBlob` parameter representing the path to the\n * JSON Lines file or a `Blob` object, and a `pointer` parameter that\n * specifies the JSON pointer to extract.\n */\nexport class JSONLinesLoader extends TextLoader {\n constructor(filePathOrBlob: string | Blob, public pointer: string) {\n super(filePathOrBlob);\n }\n\n /**\n * Method that takes a `raw` string as a parameter and returns a promise\n * that resolves to an array of strings. It parses the raw JSON Lines\n * string, splits it into lines, parses each line as JSON, and extracts\n * the values based on the specified JSON pointer.\n * @param raw The raw JSON Lines string to parse.\n * @returns A promise that resolves to an array of strings.\n */\n protected async parse(raw: string): Promise<string[]> {\n const lines = raw.split(\"\\n\");\n const jsons = lines\n .map((line) => line.trim())\n .filter(Boolean)\n .map((line) => JSON.parse(line));\n const pointer = jsonpointer.compile(this.pointer);\n return jsons.map((json) => pointer.get(json));\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AAUA,IAAa,aAAb,cAAgC,WAAW;CACzC,AAAO;CAEP,YAAYA,gBAA+BC,WAA8B,CAAE,GAAE;EAC3E,MAAM,eAAe;EACrB,KAAK,WAAW,MAAM,QAAQ,SAAS,GAAG,WAAW,CAAC,QAAS;CAChE;;;;;;;;;;CAWD,MAAgB,MAAMC,KAAgC;EACpD,MAAM,OAAO,KAAK,MAAM,IAAI,MAAM,CAAC;EAEnC,MAAM,oBAAoB,EAAE,KAAK,SAAS,SAAS;EACnD,MAAM,mBAAmB,KAAK,SAAS,IAAI,CAAC,YAC1C,YAAY,QAAQ,QAAQ,CAC7B;AAED,SAAO,KAAK,8BACV,MACA,kBACA,kBACD;CACF;;;;;;;CAQD,AAAQ,8BAENC,MACAC,UACA,oBAAoB,OACpB,kBAAkB,OACR;AACV,MAAI,CAAC,KACH,QAAO,CAAE;AAGX,MAAI,OAAO,SAAS,YAAY,kBAC9B,QAAO,CAAC,IAAK;AAGf,MAAI,MAAM,QAAQ,KAAK,IAAI,mBAAmB;GAC5C,IAAIC,kBAA4B,CAAE;AAClC,QAAK,MAAM,WAAW,MACpB,kBAAkB,gBAAgB,OAChC,KAAK,8BAA8B,SAAS,UAAU,KAAK,CAC5D;AAGH,UAAO;EACR;AAED,MAAI,OAAO,SAAS,UAAU;AAC5B,OAAI,kBACF,QAAO,KAAK,8BACV,OAAO,OAAO,KAAK,EACnB,UACA,KACD;GAGH,MAAM,kBAAkB,KAAK,mBAAmB,MAAM,SAAS;GAC/D,MAAM,mBAAmB,OAAO,OAAO,KAAK;GAC5C,MAAM,qBAAqB,iBAAiB,OAC1C,CAACC,UAAkB,CAAC,gBAAgB,SAAS,MAAM,CACpD;GAED,IAAIC,mBAA6B,CAAE;AAEnC,OAAI,gBAAgB,SAAS,GAAG;AAC9B,SAAK,MAAM,YAAY,iBACrB,mBAAmB,iBAAiB,OAClC,KAAK,8BAA8B,UAAU,UAAU,MAAM,KAAK,CACnE;AAGH,SAAK,MAAM,YAAY,oBACrB,mBAAmB,iBAAiB,OAClC,KAAK,8BAA8B,UAAU,UAAU,OAAO,KAAK,CACpE;GAEJ,WAAU,qBAAqB,CAAC,gBAC/B,MAAK,MAAM,YAAY,oBACrB,mBAAmB,iBAAiB,OAClC,KAAK,8BACH,UACA,UACA,kBACD,CACF;AAIL,UAAO;EACR;AAED,SAAO,CAAE;CACV;;;;;;;;;;CAWD,AAAQ,mBAAmBC,MAAcJ,UAAmC;EAC1E,MAAM,gBAAgB,CAAE;AACxB,OAAK,MAAM,WAAW,UAAU;GAC9B,MAAM,gBAAgB,QAAQ,IAAI,KAAK;AACvC,OAAI,eACF,cAAc,KAAK,cAAc;EAEpC;AAED,SAAO;CACR;AACF;;;;;;;;AASD,IAAa,kBAAb,cAAqC,WAAW;CAC9C,YAAYJ,gBAAsCS,SAAiB;EACjE,MAAM,eAAe;EAD2B;CAEjD;;;;;;;;;CAUD,MAAgB,MAAMP,KAAgC;EACpD,MAAM,QAAQ,IAAI,MAAM,KAAK;EAC7B,MAAM,QAAQ,MACX,IAAI,CAAC,SAAS,KAAK,MAAM,CAAC,CAC1B,OAAO,QAAQ,CACf,IAAI,CAAC,SAAS,KAAK,MAAM,KAAK,CAAC;EAClC,MAAM,UAAU,YAAY,QAAQ,KAAK,QAAQ;AACjD,SAAO,MAAM,IAAI,CAAC,SAAS,QAAQ,IAAI,KAAK,CAAC;CAC9C;AACF"}
+ {"version":3,"file":"json.js","names":["filePathOrBlob: string | Blob","pointers: string | string[]","raw: string","json: any","pointers: jsonpointer[]","extractedString: string[]","entry: object","extractedStrings: string[]","json: object","pointer: string"],"sources":["../../../src/document_loaders/fs/json.ts"],"sourcesContent":["import jsonpointer from \"jsonpointer\";\nimport { TextLoader } from \"./text.js\";\n\n/**\n * Class that extends the `TextLoader` class. It represents a document\n * loader that loads documents from JSON files. It has a constructor that\n * takes a `filePathOrBlob` parameter representing the path to the JSON\n * file or a `Blob` object, and an optional `pointers` parameter that\n * specifies the JSON pointers to extract.\n */\nexport class JSONLoader extends TextLoader {\n public pointers: string[];\n\n constructor(filePathOrBlob: string | Blob, pointers: string | string[] = []) {\n super(filePathOrBlob);\n this.pointers = Array.isArray(pointers) ? pointers : [pointers];\n }\n\n /**\n * Method that takes a `raw` string as a parameter and returns a promise\n * that resolves to an array of strings. It parses the raw JSON string and\n * extracts the values based on the specified JSON pointers. If no JSON\n * pointers are specified, it extracts all the strings from the JSON\n * object.\n * @param raw The raw JSON string to parse.\n * @returns A promise that resolves to an array of strings.\n */\n protected async parse(raw: string): Promise<string[]> {\n const json = JSON.parse(raw.trim());\n // If there is no pointers specified we extract all strings we found\n const extractAllStrings = !(this.pointers.length > 0);\n const compiledPointers = this.pointers.map((pointer) =>\n jsonpointer.compile(pointer)\n );\n\n return this.extractArrayStringsFromObject(\n json,\n compiledPointers,\n extractAllStrings\n );\n }\n\n /**\n * If JSON pointers are specified, return all strings below any of them\n * and exclude all other nodes expect if they match a JSON pointer (to allow to extract strings from different levels)\n *\n * If no JSON pointer is specified then return all string in the object\n */\n private extractArrayStringsFromObject(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n json: any,\n pointers: jsonpointer[],\n extractAllStrings = false,\n keyHasBeenFound = false\n ): string[] {\n if (!json) {\n return [];\n }\n\n if (typeof json === \"string\" && extractAllStrings) {\n return [json];\n }\n\n if (Array.isArray(json) && extractAllStrings) {\n let extractedString: string[] = [];\n for (const element of json) {\n extractedString = extractedString.concat(\n this.extractArrayStringsFromObject(element, pointers, true)\n );\n }\n\n return extractedString;\n }\n\n if (typeof json === \"object\") {\n if (extractAllStrings) {\n return this.extractArrayStringsFromObject(\n Object.values(json),\n pointers,\n true\n );\n }\n\n const targetedEntries = this.getTargetedEntries(json, pointers);\n const thisLevelEntries = Object.values(json) as object[];\n const notTargetedEntries = thisLevelEntries.filter(\n (entry: object) => !targetedEntries.includes(entry)\n );\n\n let extractedStrings: string[] = [];\n // If we found a targeted entry, we extract all strings from it\n if (targetedEntries.length > 0) {\n for (const oneEntry of targetedEntries) {\n extractedStrings = extractedStrings.concat(\n this.extractArrayStringsFromObject(oneEntry, pointers, true, true)\n );\n }\n\n for (const oneEntry of notTargetedEntries) {\n extractedStrings = 
extractedStrings.concat(\n this.extractArrayStringsFromObject(oneEntry, pointers, false, true)\n );\n }\n } else if (extractAllStrings || !keyHasBeenFound) {\n for (const oneEntry of notTargetedEntries) {\n extractedStrings = extractedStrings.concat(\n this.extractArrayStringsFromObject(\n oneEntry,\n pointers,\n extractAllStrings\n )\n );\n }\n }\n\n return extractedStrings;\n }\n\n return [];\n }\n\n /**\n * Method that takes a `json` object and an array of `pointers` as\n * parameters and returns an array of targeted entries. It iterates over\n * the JSON pointers and uses the `jsonpointer.get()` function to get the\n * targeted entries from the JSON object.\n * @param json The JSON object to get targeted entries from.\n * @param pointers The JSON pointers to get targeted entries.\n * @returns An array of targeted entries.\n */\n private getTargetedEntries(json: object, pointers: jsonpointer[]): object[] {\n const targetEntries = [];\n for (const pointer of pointers) {\n const targetedEntry = pointer.get(json);\n if (targetedEntry) {\n targetEntries.push(targetedEntry);\n }\n }\n\n return targetEntries;\n }\n}\n\n/**\n * Class that extends the `TextLoader` class. It represents a document\n * loader that loads documents from JSON Lines files. It has a constructor\n * that takes a `filePathOrBlob` parameter representing the path to the\n * JSON Lines file or a `Blob` object, and a `pointer` parameter that\n * specifies the JSON pointer to extract.\n */\nexport class JSONLinesLoader extends TextLoader {\n constructor(filePathOrBlob: string | Blob, public pointer: string) {\n super(filePathOrBlob);\n }\n\n /**\n * Method that takes a `raw` string as a parameter and returns a promise\n * that resolves to an array of strings. It parses the raw JSON Lines\n * string, splits it into lines, parses each line as JSON, and extracts\n * the values based on the specified JSON pointer.\n * @param raw The raw JSON Lines string to parse.\n * @returns A promise that resolves to an array of strings.\n */\n protected async parse(raw: string): Promise<string[]> {\n const lines = raw.split(\"\\n\");\n const jsons = lines\n .map((line) => line.trim())\n .filter(Boolean)\n .map((line) => JSON.parse(line));\n const pointer = jsonpointer.compile(this.pointer);\n return jsons.map((json) => {\n const data = pointer.get(json);\n if (typeof data === \"string\") {\n return data;\n }\n if (!data) {\n return \"\";\n }\n if (typeof data === \"object\") {\n return JSON.stringify(data);\n }\n return \"\";\n });\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AAUA,IAAa,aAAb,cAAgC,WAAW;CACzC,AAAO;CAEP,YAAYA,gBAA+BC,WAA8B,CAAE,GAAE;EAC3E,MAAM,eAAe;EACrB,KAAK,WAAW,MAAM,QAAQ,SAAS,GAAG,WAAW,CAAC,QAAS;CAChE;;;;;;;;;;CAWD,MAAgB,MAAMC,KAAgC;EACpD,MAAM,OAAO,KAAK,MAAM,IAAI,MAAM,CAAC;EAEnC,MAAM,oBAAoB,EAAE,KAAK,SAAS,SAAS;EACnD,MAAM,mBAAmB,KAAK,SAAS,IAAI,CAAC,YAC1C,YAAY,QAAQ,QAAQ,CAC7B;AAED,SAAO,KAAK,8BACV,MACA,kBACA,kBACD;CACF;;;;;;;CAQD,AAAQ,8BAENC,MACAC,UACA,oBAAoB,OACpB,kBAAkB,OACR;AACV,MAAI,CAAC,KACH,QAAO,CAAE;AAGX,MAAI,OAAO,SAAS,YAAY,kBAC9B,QAAO,CAAC,IAAK;AAGf,MAAI,MAAM,QAAQ,KAAK,IAAI,mBAAmB;GAC5C,IAAIC,kBAA4B,CAAE;AAClC,QAAK,MAAM,WAAW,MACpB,kBAAkB,gBAAgB,OAChC,KAAK,8BAA8B,SAAS,UAAU,KAAK,CAC5D;AAGH,UAAO;EACR;AAED,MAAI,OAAO,SAAS,UAAU;AAC5B,OAAI,kBACF,QAAO,KAAK,8BACV,OAAO,OAAO,KAAK,EACnB,UACA,KACD;GAGH,MAAM,kBAAkB,KAAK,mBAAmB,MAAM,SAAS;GAC/D,MAAM,mBAAmB,OAAO,OAAO,KAAK;GAC5C,MAAM,qBAAqB,iBAAiB,OAC1C,CAACC,UAAkB,CAAC,gBAAgB,SAAS,MAAM,CACpD;GAED,IAAIC,mBAA6B,CAAE;AAEnC,OAAI,gBAAgB,SAAS,GAAG;AAC9B,SAAK,MAAM,YAAY,iBACrB,mBAAmB,iBAAiB,OAClC,KAAK,8BAA8B,UAAU,UAAU,MAAM,KAAK,CACnE;AAGH,SAAK,MAAM,YAAY,oBACrB,mBAAmB,iBAAiB,OAClC,KAAK,8BAA8B,UAAU,UAAU,OAAO,KAAK,CACpE;GAEJ,WAAU,qBAAqB,CAAC,gBAC/B,MAAK,MAAM,YAAY,oBACrB,mBAAmB,iBAAiB,OAClC,KAAK,8BACH,UACA,UACA,kBACD,CACF;AAIL,UAAO;EACR;AAED,SAAO,CAAE;CACV;;;;;;;;;;CAWD,AAAQ,mBAAmBC,MAAcJ,UAAmC;EAC1E,MAAM,gBAAgB,CAAE;AACxB,OAAK,MAAM,WAAW,UAAU;GAC9B,MAAM,gBAAgB,QAAQ,IAAI,KAAK;AACvC,OAAI,eACF,cAAc,KAAK,cAAc;EAEpC;AAED,SAAO;CACR;AACF;;;;;;;;AASD,IAAa,kBAAb,cAAqC,WAAW;CAC9C,YAAYJ,gBAAsCS,SAAiB;EACjE,MAAM,eAAe;EAD2B;CAEjD;;;;;;;;;CAUD,MAAgB,MAAMP,KAAgC;EACpD,MAAM,QAAQ,IAAI,MAAM,KAAK;EAC7B,MAAM,QAAQ,MACX,IAAI,CAAC,SAAS,KAAK,MAAM,CAAC,CAC1B,OAAO,QAAQ,CACf,IAAI,CAAC,SAAS,KAAK,MAAM,KAAK,CAAC;EAClC,MAAM,UAAU,YAAY,QAAQ,KAAK,QAAQ;AACjD,SAAO,MAAM,IAAI,CAAC,SAAS;GACzB,MAAM,OAAO,QAAQ,IAAI,KAAK;AAC9B,OAAI,OAAO,SAAS,SAClB,QAAO;AAET,OAAI,CAAC,KACH,QAAO;AAET,OAAI,OAAO,SAAS,SAClB,QAAO,KAAK,UAAU,KAAK;AAE7B,UAAO;EACR,EAAC;CACH;AACF"}
@@ -118,7 +118,7 @@ var CacheBackedEmbeddings = class extends __langchain_core_embeddings.Embeddings
  const decoder = new TextDecoder();
  const encoderBackedStore = new require_storage_encoder_backed.EncoderBackedStore({
    store: documentEmbeddingStore,
-   keyEncoder: (key) => (options?.namespace ?? "") + (0, __langchain_core_utils_hash.insecureHash)(key),
+   keyEncoder: (key) => (options?.namespace ?? "") + (0, __langchain_core_utils_hash.sha256)(key),
    valueSerializer: (value) => encoder.encode(JSON.stringify(value)),
    valueDeserializer: (serializedValue) => JSON.parse(decoder.decode(serializedValue))
  });
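The key encoder above now hashes cache keys with `sha256` instead of `insecureHash`, so store keys become `namespace + sha256(documentText)` and embeddings cached under the old hash will simply miss and be recomputed. A hedged sketch of `fromBytesStore` with an in-memory byte store; the `@langchain/openai` model and the import paths are assumptions, not taken from this diff.

```typescript
import { CacheBackedEmbeddings } from "langchain/embeddings/cache_backed";
import { InMemoryStore } from "@langchain/core/stores";
import { OpenAIEmbeddings } from "@langchain/openai";

// Keys are now `${namespace}${sha256(text)}`; entries written by older
// versions (insecureHash-based keys) are not found and get re-populated.
const underlyingEmbeddings = new OpenAIEmbeddings();
const cached = CacheBackedEmbeddings.fromBytesStore(
  underlyingEmbeddings,
  new InMemoryStore<Uint8Array>(),
  { namespace: underlyingEmbeddings.modelName }
);

await cached.embedDocuments(["hello world"]); // miss: embeds and stores
await cached.embedDocuments(["hello world"]); // hit: served from the cache
```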
@@ -1 +1 @@
- {"version":3,"file":"cache_backed.cjs","names":["Embeddings","fields: CacheBackedEmbeddingsFields","document: string","documents: string[]","keyValuePairs: [string, number[]][]","underlyingEmbeddings: EmbeddingsInterface","documentEmbeddingStore: BaseStore<string, Uint8Array>","options?: {\n namespace?: string;\n }","EncoderBackedStore"],"sources":["../../src/embeddings/cache_backed.ts"],"sourcesContent":["import { insecureHash } from \"@langchain/core/utils/hash\";\nimport {\n type EmbeddingsInterface,\n Embeddings,\n} from \"@langchain/core/embeddings\";\nimport { BaseStore } from \"@langchain/core/stores\";\n\nimport { AsyncCallerParams } from \"@langchain/core/utils/async_caller\";\nimport { EncoderBackedStore } from \"../storage/encoder_backed.js\";\n\n/**\n * Interface for the fields required to initialize an instance of the\n * CacheBackedEmbeddings class.\n */\nexport interface CacheBackedEmbeddingsFields extends AsyncCallerParams {\n underlyingEmbeddings: EmbeddingsInterface;\n documentEmbeddingStore: BaseStore<string, number[]>;\n}\n\n/**\n * Interface for caching results from embedding models.\n *\n * The interface allows works with any store that implements\n * the abstract store interface accepting keys of type str and values of list of\n * floats.\n *\n * If need be, the interface can be extended to accept other implementations\n * of the value serializer and deserializer, as well as the key encoder.\n * @example\n * ```typescript\n * const underlyingEmbeddings = new OpenAIEmbeddings();\n *\n * const cacheBackedEmbeddings = CacheBackedEmbeddings.fromBytesStore(\n * underlyingEmbeddings,\n * new ConvexKVStore({ ctx }),\n * {\n * namespace: underlyingEmbeddings.modelName,\n * },\n * );\n *\n * const loader = new TextLoader(\"./state_of_the_union.txt\");\n * const rawDocuments = await loader.load();\n * const splitter = new RecursiveCharacterTextSplitter({\n * chunkSize: 1000,\n * chunkOverlap: 0,\n * });\n * const documents = await splitter.splitDocuments(rawDocuments);\n *\n * let time = Date.now();\n * const vectorstore = await ConvexVectorStore.fromDocuments(\n * documents,\n * cacheBackedEmbeddings,\n * { ctx },\n * );\n * console.log(`Initial creation time: ${Date.now() - time}ms`);\n *\n * time = Date.now();\n * const vectorstore2 = await ConvexVectorStore.fromDocuments(\n * documents,\n * cacheBackedEmbeddings,\n * { ctx },\n * );\n * console.log(`Cached creation time: ${Date.now() - time}ms`);\n *\n * ```\n */\nexport class CacheBackedEmbeddings extends Embeddings {\n protected underlyingEmbeddings: EmbeddingsInterface;\n\n protected documentEmbeddingStore: BaseStore<string, number[]>;\n\n constructor(fields: CacheBackedEmbeddingsFields) {\n super(fields);\n this.underlyingEmbeddings = fields.underlyingEmbeddings;\n this.documentEmbeddingStore = fields.documentEmbeddingStore;\n }\n\n /**\n * Embed query text.\n *\n * This method does not support caching at the moment.\n *\n * Support for caching queries is easy to implement, but might make\n * sense to hold off to see the most common patterns.\n *\n * If the cache has an eviction policy, we may need to be a bit more careful\n * about sharing the cache between documents and queries. 
Generally,\n * one is OK evicting query caches, but document caches should be kept.\n *\n * @param document The text to embed.\n * @returns The embedding for the given text.\n */\n async embedQuery(document: string): Promise<number[]> {\n return this.underlyingEmbeddings.embedQuery(document);\n }\n\n /**\n * Embed a list of texts.\n *\n * The method first checks the cache for the embeddings.\n * If the embeddings are not found, the method uses the underlying embedder\n * to embed the documents and stores the results in the cache.\n *\n * @param documents\n * @returns A list of embeddings for the given texts.\n */\n async embedDocuments(documents: string[]): Promise<number[][]> {\n const vectors = await this.documentEmbeddingStore.mget(documents);\n const missingIndicies = [];\n const missingDocuments = [];\n for (let i = 0; i < vectors.length; i += 1) {\n if (vectors[i] === undefined) {\n missingIndicies.push(i);\n missingDocuments.push(documents[i]);\n }\n }\n if (missingDocuments.length) {\n const missingVectors = await this.underlyingEmbeddings.embedDocuments(\n missingDocuments\n );\n const keyValuePairs: [string, number[]][] = missingDocuments.map(\n (document, i) => [document, missingVectors[i]]\n );\n await this.documentEmbeddingStore.mset(keyValuePairs);\n for (let i = 0; i < missingIndicies.length; i += 1) {\n vectors[missingIndicies[i]] = missingVectors[i];\n }\n }\n return vectors as number[][];\n }\n\n /**\n * Create a new CacheBackedEmbeddings instance from another embeddings instance\n * and a storage instance.\n * @param underlyingEmbeddings Embeddings used to populate the cache for new documents.\n * @param documentEmbeddingStore Stores raw document embedding values. Keys are hashes of the document content.\n * @param options.namespace Optional namespace for store keys.\n * @returns A new CacheBackedEmbeddings instance.\n */\n static fromBytesStore(\n underlyingEmbeddings: EmbeddingsInterface,\n documentEmbeddingStore: BaseStore<string, Uint8Array>,\n options?: {\n namespace?: string;\n }\n ) {\n const encoder = new TextEncoder();\n const decoder = new TextDecoder();\n const encoderBackedStore = new EncoderBackedStore<\n string,\n number[],\n Uint8Array\n >({\n store: documentEmbeddingStore,\n keyEncoder: (key) => (options?.namespace ?? 
\"\") + insecureHash(key),\n valueSerializer: (value) => encoder.encode(JSON.stringify(value)),\n valueDeserializer: (serializedValue) =>\n JSON.parse(decoder.decode(serializedValue)),\n });\n return new this({\n underlyingEmbeddings,\n documentEmbeddingStore: encoderBackedStore,\n });\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkEA,IAAa,wBAAb,cAA2CA,uCAAW;CACpD,AAAU;CAEV,AAAU;CAEV,YAAYC,QAAqC;EAC/C,MAAM,OAAO;EACb,KAAK,uBAAuB,OAAO;EACnC,KAAK,yBAAyB,OAAO;CACtC;;;;;;;;;;;;;;;;CAiBD,MAAM,WAAWC,UAAqC;AACpD,SAAO,KAAK,qBAAqB,WAAW,SAAS;CACtD;;;;;;;;;;;CAYD,MAAM,eAAeC,WAA0C;EAC7D,MAAM,UAAU,MAAM,KAAK,uBAAuB,KAAK,UAAU;EACjE,MAAM,kBAAkB,CAAE;EAC1B,MAAM,mBAAmB,CAAE;AAC3B,OAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK,EACvC,KAAI,QAAQ,OAAO,QAAW;GAC5B,gBAAgB,KAAK,EAAE;GACvB,iBAAiB,KAAK,UAAU,GAAG;EACpC;AAEH,MAAI,iBAAiB,QAAQ;GAC3B,MAAM,iBAAiB,MAAM,KAAK,qBAAqB,eACrD,iBACD;GACD,MAAMC,gBAAsC,iBAAiB,IAC3D,CAAC,UAAU,MAAM,CAAC,UAAU,eAAe,EAAG,EAC/C;GACD,MAAM,KAAK,uBAAuB,KAAK,cAAc;AACrD,QAAK,IAAI,IAAI,GAAG,IAAI,gBAAgB,QAAQ,KAAK,GAC/C,QAAQ,gBAAgB,MAAM,eAAe;EAEhD;AACD,SAAO;CACR;;;;;;;;;CAUD,OAAO,eACLC,sBACAC,wBACAC,SAGA;EACA,MAAM,UAAU,IAAI;EACpB,MAAM,UAAU,IAAI;EACpB,MAAM,qBAAqB,IAAIC,kDAI7B;GACA,OAAO;GACP,YAAY,CAAC,SAAS,SAAS,aAAa,oDAAmB,IAAI;GACnE,iBAAiB,CAAC,UAAU,QAAQ,OAAO,KAAK,UAAU,MAAM,CAAC;GACjE,mBAAmB,CAAC,oBAClB,KAAK,MAAM,QAAQ,OAAO,gBAAgB,CAAC;EAC9C;AACD,SAAO,IAAI,KAAK;GACd;GACA,wBAAwB;EACzB;CACF;AACF"}
+ {"version":3,"file":"cache_backed.cjs","names":["Embeddings","fields: CacheBackedEmbeddingsFields","document: string","documents: string[]","keyValuePairs: [string, number[]][]","underlyingEmbeddings: EmbeddingsInterface","documentEmbeddingStore: BaseStore<string, Uint8Array>","options?: {\n namespace?: string;\n }","EncoderBackedStore"],"sources":["../../src/embeddings/cache_backed.ts"],"sourcesContent":["import { sha256 } from \"@langchain/core/utils/hash\";\nimport {\n type EmbeddingsInterface,\n Embeddings,\n} from \"@langchain/core/embeddings\";\nimport { BaseStore } from \"@langchain/core/stores\";\n\nimport { AsyncCallerParams } from \"@langchain/core/utils/async_caller\";\nimport { EncoderBackedStore } from \"../storage/encoder_backed.js\";\n\n/**\n * Interface for the fields required to initialize an instance of the\n * CacheBackedEmbeddings class.\n */\nexport interface CacheBackedEmbeddingsFields extends AsyncCallerParams {\n underlyingEmbeddings: EmbeddingsInterface;\n documentEmbeddingStore: BaseStore<string, number[]>;\n}\n\n/**\n * Interface for caching results from embedding models.\n *\n * The interface allows works with any store that implements\n * the abstract store interface accepting keys of type str and values of list of\n * floats.\n *\n * If need be, the interface can be extended to accept other implementations\n * of the value serializer and deserializer, as well as the key encoder.\n * @example\n * ```typescript\n * const underlyingEmbeddings = new OpenAIEmbeddings();\n *\n * const cacheBackedEmbeddings = CacheBackedEmbeddings.fromBytesStore(\n * underlyingEmbeddings,\n * new ConvexKVStore({ ctx }),\n * {\n * namespace: underlyingEmbeddings.modelName,\n * },\n * );\n *\n * const loader = new TextLoader(\"./state_of_the_union.txt\");\n * const rawDocuments = await loader.load();\n * const splitter = new RecursiveCharacterTextSplitter({\n * chunkSize: 1000,\n * chunkOverlap: 0,\n * });\n * const documents = await splitter.splitDocuments(rawDocuments);\n *\n * let time = Date.now();\n * const vectorstore = await ConvexVectorStore.fromDocuments(\n * documents,\n * cacheBackedEmbeddings,\n * { ctx },\n * );\n * console.log(`Initial creation time: ${Date.now() - time}ms`);\n *\n * time = Date.now();\n * const vectorstore2 = await ConvexVectorStore.fromDocuments(\n * documents,\n * cacheBackedEmbeddings,\n * { ctx },\n * );\n * console.log(`Cached creation time: ${Date.now() - time}ms`);\n *\n * ```\n */\nexport class CacheBackedEmbeddings extends Embeddings {\n protected underlyingEmbeddings: EmbeddingsInterface;\n\n protected documentEmbeddingStore: BaseStore<string, number[]>;\n\n constructor(fields: CacheBackedEmbeddingsFields) {\n super(fields);\n this.underlyingEmbeddings = fields.underlyingEmbeddings;\n this.documentEmbeddingStore = fields.documentEmbeddingStore;\n }\n\n /**\n * Embed query text.\n *\n * This method does not support caching at the moment.\n *\n * Support for caching queries is easy to implement, but might make\n * sense to hold off to see the most common patterns.\n *\n * If the cache has an eviction policy, we may need to be a bit more careful\n * about sharing the cache between documents and queries. 
Generally,\n * one is OK evicting query caches, but document caches should be kept.\n *\n * @param document The text to embed.\n * @returns The embedding for the given text.\n */\n async embedQuery(document: string): Promise<number[]> {\n return this.underlyingEmbeddings.embedQuery(document);\n }\n\n /**\n * Embed a list of texts.\n *\n * The method first checks the cache for the embeddings.\n * If the embeddings are not found, the method uses the underlying embedder\n * to embed the documents and stores the results in the cache.\n *\n * @param documents\n * @returns A list of embeddings for the given texts.\n */\n async embedDocuments(documents: string[]): Promise<number[][]> {\n const vectors = await this.documentEmbeddingStore.mget(documents);\n const missingIndicies = [];\n const missingDocuments = [];\n for (let i = 0; i < vectors.length; i += 1) {\n if (vectors[i] === undefined) {\n missingIndicies.push(i);\n missingDocuments.push(documents[i]);\n }\n }\n if (missingDocuments.length) {\n const missingVectors = await this.underlyingEmbeddings.embedDocuments(\n missingDocuments\n );\n const keyValuePairs: [string, number[]][] = missingDocuments.map(\n (document, i) => [document, missingVectors[i]]\n );\n await this.documentEmbeddingStore.mset(keyValuePairs);\n for (let i = 0; i < missingIndicies.length; i += 1) {\n vectors[missingIndicies[i]] = missingVectors[i];\n }\n }\n return vectors as number[][];\n }\n\n /**\n * Create a new CacheBackedEmbeddings instance from another embeddings instance\n * and a storage instance.\n * @param underlyingEmbeddings Embeddings used to populate the cache for new documents.\n * @param documentEmbeddingStore Stores raw document embedding values. Keys are hashes of the document content.\n * @param options.namespace Optional namespace for store keys.\n * @returns A new CacheBackedEmbeddings instance.\n */\n static fromBytesStore(\n underlyingEmbeddings: EmbeddingsInterface,\n documentEmbeddingStore: BaseStore<string, Uint8Array>,\n options?: {\n namespace?: string;\n }\n ) {\n const encoder = new TextEncoder();\n const decoder = new TextDecoder();\n const encoderBackedStore = new EncoderBackedStore<\n string,\n number[],\n Uint8Array\n >({\n store: documentEmbeddingStore,\n keyEncoder: (key) => (options?.namespace ?? 
\"\") + sha256(key),\n valueSerializer: (value) => encoder.encode(JSON.stringify(value)),\n valueDeserializer: (serializedValue) =>\n JSON.parse(decoder.decode(serializedValue)),\n });\n return new this({\n underlyingEmbeddings,\n documentEmbeddingStore: encoderBackedStore,\n });\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkEA,IAAa,wBAAb,cAA2CA,uCAAW;CACpD,AAAU;CAEV,AAAU;CAEV,YAAYC,QAAqC;EAC/C,MAAM,OAAO;EACb,KAAK,uBAAuB,OAAO;EACnC,KAAK,yBAAyB,OAAO;CACtC;;;;;;;;;;;;;;;;CAiBD,MAAM,WAAWC,UAAqC;AACpD,SAAO,KAAK,qBAAqB,WAAW,SAAS;CACtD;;;;;;;;;;;CAYD,MAAM,eAAeC,WAA0C;EAC7D,MAAM,UAAU,MAAM,KAAK,uBAAuB,KAAK,UAAU;EACjE,MAAM,kBAAkB,CAAE;EAC1B,MAAM,mBAAmB,CAAE;AAC3B,OAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK,EACvC,KAAI,QAAQ,OAAO,QAAW;GAC5B,gBAAgB,KAAK,EAAE;GACvB,iBAAiB,KAAK,UAAU,GAAG;EACpC;AAEH,MAAI,iBAAiB,QAAQ;GAC3B,MAAM,iBAAiB,MAAM,KAAK,qBAAqB,eACrD,iBACD;GACD,MAAMC,gBAAsC,iBAAiB,IAC3D,CAAC,UAAU,MAAM,CAAC,UAAU,eAAe,EAAG,EAC/C;GACD,MAAM,KAAK,uBAAuB,KAAK,cAAc;AACrD,QAAK,IAAI,IAAI,GAAG,IAAI,gBAAgB,QAAQ,KAAK,GAC/C,QAAQ,gBAAgB,MAAM,eAAe;EAEhD;AACD,SAAO;CACR;;;;;;;;;CAUD,OAAO,eACLC,sBACAC,wBACAC,SAGA;EACA,MAAM,UAAU,IAAI;EACpB,MAAM,UAAU,IAAI;EACpB,MAAM,qBAAqB,IAAIC,kDAI7B;GACA,OAAO;GACP,YAAY,CAAC,SAAS,SAAS,aAAa,8CAAa,IAAI;GAC7D,iBAAiB,CAAC,UAAU,QAAQ,OAAO,KAAK,UAAU,MAAM,CAAC;GACjE,mBAAmB,CAAC,oBAClB,KAAK,MAAM,QAAQ,OAAO,gBAAgB,CAAC;EAC9C;AACD,SAAO,IAAI,KAAK;GACd;GACA,wBAAwB;EACzB;CACF;AACF"}
@@ -1,6 +1,6 @@
- import { AsyncCallerParams } from "@langchain/core/utils/async_caller";
  import { Embeddings, EmbeddingsInterface } from "@langchain/core/embeddings";
  import { BaseStore } from "@langchain/core/stores";
+ import { AsyncCallerParams } from "@langchain/core/utils/async_caller";

  //#region src/embeddings/cache_backed.d.ts

@@ -1,6 +1,6 @@
- import { AsyncCallerParams } from "@langchain/core/utils/async_caller";
  import { Embeddings, EmbeddingsInterface } from "@langchain/core/embeddings";
  import { BaseStore } from "@langchain/core/stores";
+ import { AsyncCallerParams } from "@langchain/core/utils/async_caller";

  //#region src/embeddings/cache_backed.d.ts

@@ -1,6 +1,6 @@
  import { __export } from "../_virtual/rolldown_runtime.js";
  import { EncoderBackedStore } from "../storage/encoder_backed.js";
- import { insecureHash } from "@langchain/core/utils/hash";
+ import { sha256 } from "@langchain/core/utils/hash";
  import { Embeddings } from "@langchain/core/embeddings";

  //#region src/embeddings/cache_backed.ts
@@ -118,7 +118,7 @@ var CacheBackedEmbeddings = class extends Embeddings {
  const decoder = new TextDecoder();
  const encoderBackedStore = new EncoderBackedStore({
  store: documentEmbeddingStore,
- keyEncoder: (key) => (options?.namespace ?? "") + insecureHash(key),
+ keyEncoder: (key) => (options?.namespace ?? "") + sha256(key),
  valueSerializer: (value) => encoder.encode(JSON.stringify(value)),
  valueDeserializer: (serializedValue) => JSON.parse(decoder.decode(serializedValue))
  });
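The functional change in this hunk is the cache key derivation: `fromBytesStore` now hashes document text with `sha256` instead of `insecureHash`, so keys written by 1.0.0-alpha.5 will not be found by 1.0.0-alpha.6 and affected embeddings are recomputed and re-cached on first use. A rough sketch of that effect, assuming both helpers remain exported from `@langchain/core/utils/hash` and, as the call sites above imply, take a string and return a string:

```typescript
// Sketch of the key derivation change (assumed helper signatures: string in, string out).
import { insecureHash, sha256 } from "@langchain/core/utils/hash";

const namespace = "text-embedding-3-small"; // hypothetical namespace value
const document = "The quick brown fox";

const keyAlpha5 = namespace + insecureHash(document); // key written by alpha.5
const keyAlpha6 = namespace + sha256(document);       // key looked up by alpha.6

// The keys differ, so entries cached under the old scheme are simply never hit;
// they remain in the byte store, but the vectors are re-embedded under the new keys.
console.log(keyAlpha5 !== keyAlpha6); // true
```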
@@ -1 +1 @@
- {"version":3,"file":"cache_backed.js","names":["fields: CacheBackedEmbeddingsFields","document: string","documents: string[]","keyValuePairs: [string, number[]][]","underlyingEmbeddings: EmbeddingsInterface","documentEmbeddingStore: BaseStore<string, Uint8Array>","options?: {\n namespace?: string;\n }"],"sources":["../../src/embeddings/cache_backed.ts"],"sourcesContent":["import { insecureHash } from \"@langchain/core/utils/hash\";\nimport {\n type EmbeddingsInterface,\n Embeddings,\n} from \"@langchain/core/embeddings\";\nimport { BaseStore } from \"@langchain/core/stores\";\n\nimport { AsyncCallerParams } from \"@langchain/core/utils/async_caller\";\nimport { EncoderBackedStore } from \"../storage/encoder_backed.js\";\n\n/**\n * Interface for the fields required to initialize an instance of the\n * CacheBackedEmbeddings class.\n */\nexport interface CacheBackedEmbeddingsFields extends AsyncCallerParams {\n underlyingEmbeddings: EmbeddingsInterface;\n documentEmbeddingStore: BaseStore<string, number[]>;\n}\n\n/**\n * Interface for caching results from embedding models.\n *\n * The interface allows works with any store that implements\n * the abstract store interface accepting keys of type str and values of list of\n * floats.\n *\n * If need be, the interface can be extended to accept other implementations\n * of the value serializer and deserializer, as well as the key encoder.\n * @example\n * ```typescript\n * const underlyingEmbeddings = new OpenAIEmbeddings();\n *\n * const cacheBackedEmbeddings = CacheBackedEmbeddings.fromBytesStore(\n * underlyingEmbeddings,\n * new ConvexKVStore({ ctx }),\n * {\n * namespace: underlyingEmbeddings.modelName,\n * },\n * );\n *\n * const loader = new TextLoader(\"./state_of_the_union.txt\");\n * const rawDocuments = await loader.load();\n * const splitter = new RecursiveCharacterTextSplitter({\n * chunkSize: 1000,\n * chunkOverlap: 0,\n * });\n * const documents = await splitter.splitDocuments(rawDocuments);\n *\n * let time = Date.now();\n * const vectorstore = await ConvexVectorStore.fromDocuments(\n * documents,\n * cacheBackedEmbeddings,\n * { ctx },\n * );\n * console.log(`Initial creation time: ${Date.now() - time}ms`);\n *\n * time = Date.now();\n * const vectorstore2 = await ConvexVectorStore.fromDocuments(\n * documents,\n * cacheBackedEmbeddings,\n * { ctx },\n * );\n * console.log(`Cached creation time: ${Date.now() - time}ms`);\n *\n * ```\n */\nexport class CacheBackedEmbeddings extends Embeddings {\n protected underlyingEmbeddings: EmbeddingsInterface;\n\n protected documentEmbeddingStore: BaseStore<string, number[]>;\n\n constructor(fields: CacheBackedEmbeddingsFields) {\n super(fields);\n this.underlyingEmbeddings = fields.underlyingEmbeddings;\n this.documentEmbeddingStore = fields.documentEmbeddingStore;\n }\n\n /**\n * Embed query text.\n *\n * This method does not support caching at the moment.\n *\n * Support for caching queries is easy to implement, but might make\n * sense to hold off to see the most common patterns.\n *\n * If the cache has an eviction policy, we may need to be a bit more careful\n * about sharing the cache between documents and queries. 
Generally,\n * one is OK evicting query caches, but document caches should be kept.\n *\n * @param document The text to embed.\n * @returns The embedding for the given text.\n */\n async embedQuery(document: string): Promise<number[]> {\n return this.underlyingEmbeddings.embedQuery(document);\n }\n\n /**\n * Embed a list of texts.\n *\n * The method first checks the cache for the embeddings.\n * If the embeddings are not found, the method uses the underlying embedder\n * to embed the documents and stores the results in the cache.\n *\n * @param documents\n * @returns A list of embeddings for the given texts.\n */\n async embedDocuments(documents: string[]): Promise<number[][]> {\n const vectors = await this.documentEmbeddingStore.mget(documents);\n const missingIndicies = [];\n const missingDocuments = [];\n for (let i = 0; i < vectors.length; i += 1) {\n if (vectors[i] === undefined) {\n missingIndicies.push(i);\n missingDocuments.push(documents[i]);\n }\n }\n if (missingDocuments.length) {\n const missingVectors = await this.underlyingEmbeddings.embedDocuments(\n missingDocuments\n );\n const keyValuePairs: [string, number[]][] = missingDocuments.map(\n (document, i) => [document, missingVectors[i]]\n );\n await this.documentEmbeddingStore.mset(keyValuePairs);\n for (let i = 0; i < missingIndicies.length; i += 1) {\n vectors[missingIndicies[i]] = missingVectors[i];\n }\n }\n return vectors as number[][];\n }\n\n /**\n * Create a new CacheBackedEmbeddings instance from another embeddings instance\n * and a storage instance.\n * @param underlyingEmbeddings Embeddings used to populate the cache for new documents.\n * @param documentEmbeddingStore Stores raw document embedding values. Keys are hashes of the document content.\n * @param options.namespace Optional namespace for store keys.\n * @returns A new CacheBackedEmbeddings instance.\n */\n static fromBytesStore(\n underlyingEmbeddings: EmbeddingsInterface,\n documentEmbeddingStore: BaseStore<string, Uint8Array>,\n options?: {\n namespace?: string;\n }\n ) {\n const encoder = new TextEncoder();\n const decoder = new TextDecoder();\n const encoderBackedStore = new EncoderBackedStore<\n string,\n number[],\n Uint8Array\n >({\n store: documentEmbeddingStore,\n keyEncoder: (key) => (options?.namespace ?? 
\"\") + insecureHash(key),\n valueSerializer: (value) => encoder.encode(JSON.stringify(value)),\n valueDeserializer: (serializedValue) =>\n JSON.parse(decoder.decode(serializedValue)),\n });\n return new this({\n underlyingEmbeddings,\n documentEmbeddingStore: encoderBackedStore,\n });\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkEA,IAAa,wBAAb,cAA2C,WAAW;CACpD,AAAU;CAEV,AAAU;CAEV,YAAYA,QAAqC;EAC/C,MAAM,OAAO;EACb,KAAK,uBAAuB,OAAO;EACnC,KAAK,yBAAyB,OAAO;CACtC;;;;;;;;;;;;;;;;CAiBD,MAAM,WAAWC,UAAqC;AACpD,SAAO,KAAK,qBAAqB,WAAW,SAAS;CACtD;;;;;;;;;;;CAYD,MAAM,eAAeC,WAA0C;EAC7D,MAAM,UAAU,MAAM,KAAK,uBAAuB,KAAK,UAAU;EACjE,MAAM,kBAAkB,CAAE;EAC1B,MAAM,mBAAmB,CAAE;AAC3B,OAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK,EACvC,KAAI,QAAQ,OAAO,QAAW;GAC5B,gBAAgB,KAAK,EAAE;GACvB,iBAAiB,KAAK,UAAU,GAAG;EACpC;AAEH,MAAI,iBAAiB,QAAQ;GAC3B,MAAM,iBAAiB,MAAM,KAAK,qBAAqB,eACrD,iBACD;GACD,MAAMC,gBAAsC,iBAAiB,IAC3D,CAAC,UAAU,MAAM,CAAC,UAAU,eAAe,EAAG,EAC/C;GACD,MAAM,KAAK,uBAAuB,KAAK,cAAc;AACrD,QAAK,IAAI,IAAI,GAAG,IAAI,gBAAgB,QAAQ,KAAK,GAC/C,QAAQ,gBAAgB,MAAM,eAAe;EAEhD;AACD,SAAO;CACR;;;;;;;;;CAUD,OAAO,eACLC,sBACAC,wBACAC,SAGA;EACA,MAAM,UAAU,IAAI;EACpB,MAAM,UAAU,IAAI;EACpB,MAAM,qBAAqB,IAAI,mBAI7B;GACA,OAAO;GACP,YAAY,CAAC,SAAS,SAAS,aAAa,MAAM,aAAa,IAAI;GACnE,iBAAiB,CAAC,UAAU,QAAQ,OAAO,KAAK,UAAU,MAAM,CAAC;GACjE,mBAAmB,CAAC,oBAClB,KAAK,MAAM,QAAQ,OAAO,gBAAgB,CAAC;EAC9C;AACD,SAAO,IAAI,KAAK;GACd;GACA,wBAAwB;EACzB;CACF;AACF"}
+ {"version":3,"file":"cache_backed.js","names":["fields: CacheBackedEmbeddingsFields","document: string","documents: string[]","keyValuePairs: [string, number[]][]","underlyingEmbeddings: EmbeddingsInterface","documentEmbeddingStore: BaseStore<string, Uint8Array>","options?: {\n namespace?: string;\n }"],"sources":["../../src/embeddings/cache_backed.ts"],"sourcesContent":["import { sha256 } from \"@langchain/core/utils/hash\";\nimport {\n type EmbeddingsInterface,\n Embeddings,\n} from \"@langchain/core/embeddings\";\nimport { BaseStore } from \"@langchain/core/stores\";\n\nimport { AsyncCallerParams } from \"@langchain/core/utils/async_caller\";\nimport { EncoderBackedStore } from \"../storage/encoder_backed.js\";\n\n/**\n * Interface for the fields required to initialize an instance of the\n * CacheBackedEmbeddings class.\n */\nexport interface CacheBackedEmbeddingsFields extends AsyncCallerParams {\n underlyingEmbeddings: EmbeddingsInterface;\n documentEmbeddingStore: BaseStore<string, number[]>;\n}\n\n/**\n * Interface for caching results from embedding models.\n *\n * The interface allows works with any store that implements\n * the abstract store interface accepting keys of type str and values of list of\n * floats.\n *\n * If need be, the interface can be extended to accept other implementations\n * of the value serializer and deserializer, as well as the key encoder.\n * @example\n * ```typescript\n * const underlyingEmbeddings = new OpenAIEmbeddings();\n *\n * const cacheBackedEmbeddings = CacheBackedEmbeddings.fromBytesStore(\n * underlyingEmbeddings,\n * new ConvexKVStore({ ctx }),\n * {\n * namespace: underlyingEmbeddings.modelName,\n * },\n * );\n *\n * const loader = new TextLoader(\"./state_of_the_union.txt\");\n * const rawDocuments = await loader.load();\n * const splitter = new RecursiveCharacterTextSplitter({\n * chunkSize: 1000,\n * chunkOverlap: 0,\n * });\n * const documents = await splitter.splitDocuments(rawDocuments);\n *\n * let time = Date.now();\n * const vectorstore = await ConvexVectorStore.fromDocuments(\n * documents,\n * cacheBackedEmbeddings,\n * { ctx },\n * );\n * console.log(`Initial creation time: ${Date.now() - time}ms`);\n *\n * time = Date.now();\n * const vectorstore2 = await ConvexVectorStore.fromDocuments(\n * documents,\n * cacheBackedEmbeddings,\n * { ctx },\n * );\n * console.log(`Cached creation time: ${Date.now() - time}ms`);\n *\n * ```\n */\nexport class CacheBackedEmbeddings extends Embeddings {\n protected underlyingEmbeddings: EmbeddingsInterface;\n\n protected documentEmbeddingStore: BaseStore<string, number[]>;\n\n constructor(fields: CacheBackedEmbeddingsFields) {\n super(fields);\n this.underlyingEmbeddings = fields.underlyingEmbeddings;\n this.documentEmbeddingStore = fields.documentEmbeddingStore;\n }\n\n /**\n * Embed query text.\n *\n * This method does not support caching at the moment.\n *\n * Support for caching queries is easy to implement, but might make\n * sense to hold off to see the most common patterns.\n *\n * If the cache has an eviction policy, we may need to be a bit more careful\n * about sharing the cache between documents and queries. 
Generally,\n * one is OK evicting query caches, but document caches should be kept.\n *\n * @param document The text to embed.\n * @returns The embedding for the given text.\n */\n async embedQuery(document: string): Promise<number[]> {\n return this.underlyingEmbeddings.embedQuery(document);\n }\n\n /**\n * Embed a list of texts.\n *\n * The method first checks the cache for the embeddings.\n * If the embeddings are not found, the method uses the underlying embedder\n * to embed the documents and stores the results in the cache.\n *\n * @param documents\n * @returns A list of embeddings for the given texts.\n */\n async embedDocuments(documents: string[]): Promise<number[][]> {\n const vectors = await this.documentEmbeddingStore.mget(documents);\n const missingIndicies = [];\n const missingDocuments = [];\n for (let i = 0; i < vectors.length; i += 1) {\n if (vectors[i] === undefined) {\n missingIndicies.push(i);\n missingDocuments.push(documents[i]);\n }\n }\n if (missingDocuments.length) {\n const missingVectors = await this.underlyingEmbeddings.embedDocuments(\n missingDocuments\n );\n const keyValuePairs: [string, number[]][] = missingDocuments.map(\n (document, i) => [document, missingVectors[i]]\n );\n await this.documentEmbeddingStore.mset(keyValuePairs);\n for (let i = 0; i < missingIndicies.length; i += 1) {\n vectors[missingIndicies[i]] = missingVectors[i];\n }\n }\n return vectors as number[][];\n }\n\n /**\n * Create a new CacheBackedEmbeddings instance from another embeddings instance\n * and a storage instance.\n * @param underlyingEmbeddings Embeddings used to populate the cache for new documents.\n * @param documentEmbeddingStore Stores raw document embedding values. Keys are hashes of the document content.\n * @param options.namespace Optional namespace for store keys.\n * @returns A new CacheBackedEmbeddings instance.\n */\n static fromBytesStore(\n underlyingEmbeddings: EmbeddingsInterface,\n documentEmbeddingStore: BaseStore<string, Uint8Array>,\n options?: {\n namespace?: string;\n }\n ) {\n const encoder = new TextEncoder();\n const decoder = new TextDecoder();\n const encoderBackedStore = new EncoderBackedStore<\n string,\n number[],\n Uint8Array\n >({\n store: documentEmbeddingStore,\n keyEncoder: (key) => (options?.namespace ?? 
\"\") + sha256(key),\n valueSerializer: (value) => encoder.encode(JSON.stringify(value)),\n valueDeserializer: (serializedValue) =>\n JSON.parse(decoder.decode(serializedValue)),\n });\n return new this({\n underlyingEmbeddings,\n documentEmbeddingStore: encoderBackedStore,\n });\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkEA,IAAa,wBAAb,cAA2C,WAAW;CACpD,AAAU;CAEV,AAAU;CAEV,YAAYA,QAAqC;EAC/C,MAAM,OAAO;EACb,KAAK,uBAAuB,OAAO;EACnC,KAAK,yBAAyB,OAAO;CACtC;;;;;;;;;;;;;;;;CAiBD,MAAM,WAAWC,UAAqC;AACpD,SAAO,KAAK,qBAAqB,WAAW,SAAS;CACtD;;;;;;;;;;;CAYD,MAAM,eAAeC,WAA0C;EAC7D,MAAM,UAAU,MAAM,KAAK,uBAAuB,KAAK,UAAU;EACjE,MAAM,kBAAkB,CAAE;EAC1B,MAAM,mBAAmB,CAAE;AAC3B,OAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK,EACvC,KAAI,QAAQ,OAAO,QAAW;GAC5B,gBAAgB,KAAK,EAAE;GACvB,iBAAiB,KAAK,UAAU,GAAG;EACpC;AAEH,MAAI,iBAAiB,QAAQ;GAC3B,MAAM,iBAAiB,MAAM,KAAK,qBAAqB,eACrD,iBACD;GACD,MAAMC,gBAAsC,iBAAiB,IAC3D,CAAC,UAAU,MAAM,CAAC,UAAU,eAAe,EAAG,EAC/C;GACD,MAAM,KAAK,uBAAuB,KAAK,cAAc;AACrD,QAAK,IAAI,IAAI,GAAG,IAAI,gBAAgB,QAAQ,KAAK,GAC/C,QAAQ,gBAAgB,MAAM,eAAe;EAEhD;AACD,SAAO;CACR;;;;;;;;;CAUD,OAAO,eACLC,sBACAC,wBACAC,SAGA;EACA,MAAM,UAAU,IAAI;EACpB,MAAM,UAAU,IAAI;EACpB,MAAM,qBAAqB,IAAI,mBAI7B;GACA,OAAO;GACP,YAAY,CAAC,SAAS,SAAS,aAAa,MAAM,OAAO,IAAI;GAC7D,iBAAiB,CAAC,UAAU,QAAQ,OAAO,KAAK,UAAU,MAAM,CAAC;GACjE,mBAAmB,CAAC,oBAClB,KAAK,MAAM,QAAQ,OAAO,gBAAgB,CAAC;EAC9C;AACD,SAAO,IAAI,KAAK;GACd;GACA,wBAAwB;EACzB;CACF;AACF"}