langchain 0.3.26__py3-none-any.whl → 0.4.0.dev0__py3-none-any.whl

This diff compares two publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (591)
  1. langchain/__init__.py +110 -96
  2. langchain/_api/__init__.py +2 -2
  3. langchain/_api/deprecation.py +3 -3
  4. langchain/_api/module_import.py +51 -46
  5. langchain/_api/path.py +1 -1
  6. langchain/adapters/openai.py +8 -8
  7. langchain/agents/__init__.py +15 -12
  8. langchain/agents/agent.py +174 -151
  9. langchain/agents/agent_iterator.py +50 -26
  10. langchain/agents/agent_toolkits/__init__.py +7 -6
  11. langchain/agents/agent_toolkits/ainetwork/toolkit.py +1 -1
  12. langchain/agents/agent_toolkits/amadeus/toolkit.py +1 -1
  13. langchain/agents/agent_toolkits/azure_cognitive_services.py +1 -1
  14. langchain/agents/agent_toolkits/clickup/toolkit.py +1 -1
  15. langchain/agents/agent_toolkits/conversational_retrieval/openai_functions.py +6 -4
  16. langchain/agents/agent_toolkits/csv/__init__.py +4 -2
  17. langchain/agents/agent_toolkits/file_management/__init__.py +1 -1
  18. langchain/agents/agent_toolkits/file_management/toolkit.py +1 -1
  19. langchain/agents/agent_toolkits/github/toolkit.py +9 -9
  20. langchain/agents/agent_toolkits/gitlab/toolkit.py +1 -1
  21. langchain/agents/agent_toolkits/json/base.py +1 -1
  22. langchain/agents/agent_toolkits/multion/toolkit.py +1 -1
  23. langchain/agents/agent_toolkits/office365/toolkit.py +1 -1
  24. langchain/agents/agent_toolkits/openapi/base.py +1 -1
  25. langchain/agents/agent_toolkits/openapi/planner.py +2 -2
  26. langchain/agents/agent_toolkits/openapi/planner_prompt.py +10 -10
  27. langchain/agents/agent_toolkits/openapi/prompt.py +1 -1
  28. langchain/agents/agent_toolkits/openapi/toolkit.py +1 -1
  29. langchain/agents/agent_toolkits/pandas/__init__.py +4 -2
  30. langchain/agents/agent_toolkits/playwright/__init__.py +1 -1
  31. langchain/agents/agent_toolkits/playwright/toolkit.py +1 -1
  32. langchain/agents/agent_toolkits/powerbi/base.py +1 -1
  33. langchain/agents/agent_toolkits/powerbi/chat_base.py +1 -1
  34. langchain/agents/agent_toolkits/powerbi/prompt.py +2 -2
  35. langchain/agents/agent_toolkits/powerbi/toolkit.py +1 -1
  36. langchain/agents/agent_toolkits/python/__init__.py +4 -2
  37. langchain/agents/agent_toolkits/spark/__init__.py +4 -2
  38. langchain/agents/agent_toolkits/spark_sql/base.py +1 -1
  39. langchain/agents/agent_toolkits/spark_sql/toolkit.py +1 -1
  40. langchain/agents/agent_toolkits/sql/prompt.py +1 -1
  41. langchain/agents/agent_toolkits/sql/toolkit.py +1 -1
  42. langchain/agents/agent_toolkits/vectorstore/base.py +4 -2
  43. langchain/agents/agent_toolkits/vectorstore/prompt.py +2 -4
  44. langchain/agents/agent_toolkits/vectorstore/toolkit.py +12 -11
  45. langchain/agents/agent_toolkits/xorbits/__init__.py +4 -2
  46. langchain/agents/agent_toolkits/zapier/toolkit.py +1 -1
  47. langchain/agents/agent_types.py +6 -6
  48. langchain/agents/chat/base.py +8 -12
  49. langchain/agents/chat/output_parser.py +9 -6
  50. langchain/agents/chat/prompt.py +3 -4
  51. langchain/agents/conversational/base.py +11 -5
  52. langchain/agents/conversational/output_parser.py +4 -2
  53. langchain/agents/conversational/prompt.py +2 -3
  54. langchain/agents/conversational_chat/base.py +9 -5
  55. langchain/agents/conversational_chat/output_parser.py +9 -11
  56. langchain/agents/conversational_chat/prompt.py +5 -6
  57. langchain/agents/format_scratchpad/__init__.py +3 -3
  58. langchain/agents/format_scratchpad/log_to_messages.py +1 -1
  59. langchain/agents/format_scratchpad/openai_functions.py +8 -6
  60. langchain/agents/format_scratchpad/tools.py +5 -3
  61. langchain/agents/format_scratchpad/xml.py +33 -2
  62. langchain/agents/initialize.py +17 -9
  63. langchain/agents/json_chat/base.py +19 -18
  64. langchain/agents/json_chat/prompt.py +2 -3
  65. langchain/agents/load_tools.py +2 -1
  66. langchain/agents/loading.py +28 -18
  67. langchain/agents/mrkl/base.py +11 -4
  68. langchain/agents/mrkl/output_parser.py +17 -13
  69. langchain/agents/mrkl/prompt.py +1 -2
  70. langchain/agents/openai_assistant/base.py +81 -71
  71. langchain/agents/openai_functions_agent/agent_token_buffer_memory.py +2 -0
  72. langchain/agents/openai_functions_agent/base.py +47 -37
  73. langchain/agents/openai_functions_multi_agent/base.py +40 -27
  74. langchain/agents/openai_tools/base.py +9 -8
  75. langchain/agents/output_parsers/__init__.py +3 -3
  76. langchain/agents/output_parsers/json.py +8 -6
  77. langchain/agents/output_parsers/openai_functions.py +24 -9
  78. langchain/agents/output_parsers/openai_tools.py +16 -4
  79. langchain/agents/output_parsers/react_json_single_input.py +13 -5
  80. langchain/agents/output_parsers/react_single_input.py +18 -11
  81. langchain/agents/output_parsers/self_ask.py +5 -2
  82. langchain/agents/output_parsers/tools.py +32 -13
  83. langchain/agents/output_parsers/xml.py +102 -28
  84. langchain/agents/react/agent.py +5 -4
  85. langchain/agents/react/base.py +26 -17
  86. langchain/agents/react/output_parser.py +7 -6
  87. langchain/agents/react/textworld_prompt.py +0 -1
  88. langchain/agents/react/wiki_prompt.py +14 -15
  89. langchain/agents/schema.py +5 -2
  90. langchain/agents/self_ask_with_search/base.py +23 -15
  91. langchain/agents/self_ask_with_search/prompt.py +0 -1
  92. langchain/agents/structured_chat/base.py +19 -11
  93. langchain/agents/structured_chat/output_parser.py +29 -18
  94. langchain/agents/structured_chat/prompt.py +3 -4
  95. langchain/agents/tool_calling_agent/base.py +8 -6
  96. langchain/agents/tools.py +5 -2
  97. langchain/agents/utils.py +2 -3
  98. langchain/agents/xml/base.py +12 -6
  99. langchain/agents/xml/prompt.py +1 -2
  100. langchain/cache.py +12 -12
  101. langchain/callbacks/__init__.py +11 -11
  102. langchain/callbacks/aim_callback.py +2 -2
  103. langchain/callbacks/argilla_callback.py +1 -1
  104. langchain/callbacks/arize_callback.py +1 -1
  105. langchain/callbacks/arthur_callback.py +1 -1
  106. langchain/callbacks/base.py +7 -7
  107. langchain/callbacks/clearml_callback.py +1 -1
  108. langchain/callbacks/comet_ml_callback.py +1 -1
  109. langchain/callbacks/confident_callback.py +1 -1
  110. langchain/callbacks/context_callback.py +1 -1
  111. langchain/callbacks/flyte_callback.py +1 -1
  112. langchain/callbacks/human.py +2 -2
  113. langchain/callbacks/infino_callback.py +1 -1
  114. langchain/callbacks/labelstudio_callback.py +1 -1
  115. langchain/callbacks/llmonitor_callback.py +1 -1
  116. langchain/callbacks/manager.py +5 -5
  117. langchain/callbacks/mlflow_callback.py +2 -2
  118. langchain/callbacks/openai_info.py +1 -1
  119. langchain/callbacks/promptlayer_callback.py +1 -1
  120. langchain/callbacks/sagemaker_callback.py +1 -1
  121. langchain/callbacks/streaming_aiter.py +17 -3
  122. langchain/callbacks/streaming_aiter_final_only.py +16 -5
  123. langchain/callbacks/streaming_stdout_final_only.py +10 -3
  124. langchain/callbacks/streamlit/__init__.py +3 -2
  125. langchain/callbacks/streamlit/mutable_expander.py +1 -1
  126. langchain/callbacks/streamlit/streamlit_callback_handler.py +3 -3
  127. langchain/callbacks/tracers/__init__.py +1 -1
  128. langchain/callbacks/tracers/comet.py +1 -1
  129. langchain/callbacks/tracers/evaluation.py +1 -1
  130. langchain/callbacks/tracers/log_stream.py +1 -1
  131. langchain/callbacks/tracers/logging.py +12 -1
  132. langchain/callbacks/tracers/stdout.py +1 -1
  133. langchain/callbacks/trubrics_callback.py +1 -1
  134. langchain/callbacks/utils.py +4 -4
  135. langchain/callbacks/wandb_callback.py +1 -1
  136. langchain/callbacks/whylabs_callback.py +1 -1
  137. langchain/chains/api/base.py +41 -23
  138. langchain/chains/api/news_docs.py +1 -2
  139. langchain/chains/api/open_meteo_docs.py +1 -2
  140. langchain/chains/api/openapi/requests_chain.py +1 -1
  141. langchain/chains/api/openapi/response_chain.py +1 -1
  142. langchain/chains/api/podcast_docs.py +1 -2
  143. langchain/chains/api/prompt.py +1 -2
  144. langchain/chains/api/tmdb_docs.py +1 -2
  145. langchain/chains/base.py +96 -56
  146. langchain/chains/chat_vector_db/prompts.py +2 -3
  147. langchain/chains/combine_documents/__init__.py +1 -1
  148. langchain/chains/combine_documents/base.py +30 -11
  149. langchain/chains/combine_documents/map_reduce.py +41 -30
  150. langchain/chains/combine_documents/map_rerank.py +39 -24
  151. langchain/chains/combine_documents/reduce.py +48 -26
  152. langchain/chains/combine_documents/refine.py +27 -17
  153. langchain/chains/combine_documents/stuff.py +24 -13
  154. langchain/chains/constitutional_ai/base.py +11 -4
  155. langchain/chains/constitutional_ai/principles.py +22 -25
  156. langchain/chains/constitutional_ai/prompts.py +25 -28
  157. langchain/chains/conversation/base.py +9 -4
  158. langchain/chains/conversation/memory.py +5 -5
  159. langchain/chains/conversation/prompt.py +5 -5
  160. langchain/chains/conversational_retrieval/base.py +108 -79
  161. langchain/chains/conversational_retrieval/prompts.py +2 -3
  162. langchain/chains/elasticsearch_database/base.py +10 -10
  163. langchain/chains/elasticsearch_database/prompts.py +2 -3
  164. langchain/chains/ernie_functions/__init__.py +2 -2
  165. langchain/chains/example_generator.py +3 -1
  166. langchain/chains/flare/base.py +28 -12
  167. langchain/chains/flare/prompts.py +2 -0
  168. langchain/chains/graph_qa/cypher.py +2 -2
  169. langchain/chains/graph_qa/falkordb.py +1 -1
  170. langchain/chains/graph_qa/gremlin.py +1 -1
  171. langchain/chains/graph_qa/neptune_sparql.py +1 -1
  172. langchain/chains/graph_qa/prompts.py +2 -2
  173. langchain/chains/history_aware_retriever.py +2 -1
  174. langchain/chains/hyde/base.py +6 -5
  175. langchain/chains/hyde/prompts.py +5 -6
  176. langchain/chains/llm.py +82 -61
  177. langchain/chains/llm_bash/__init__.py +3 -2
  178. langchain/chains/llm_checker/base.py +19 -6
  179. langchain/chains/llm_checker/prompt.py +3 -4
  180. langchain/chains/llm_math/base.py +25 -10
  181. langchain/chains/llm_math/prompt.py +1 -2
  182. langchain/chains/llm_summarization_checker/base.py +22 -7
  183. langchain/chains/llm_symbolic_math/__init__.py +3 -2
  184. langchain/chains/loading.py +155 -97
  185. langchain/chains/mapreduce.py +4 -3
  186. langchain/chains/moderation.py +11 -9
  187. langchain/chains/natbot/base.py +11 -9
  188. langchain/chains/natbot/crawler.py +102 -76
  189. langchain/chains/natbot/prompt.py +2 -3
  190. langchain/chains/openai_functions/__init__.py +7 -7
  191. langchain/chains/openai_functions/base.py +15 -10
  192. langchain/chains/openai_functions/citation_fuzzy_match.py +21 -11
  193. langchain/chains/openai_functions/extraction.py +19 -19
  194. langchain/chains/openai_functions/openapi.py +39 -35
  195. langchain/chains/openai_functions/qa_with_structure.py +22 -15
  196. langchain/chains/openai_functions/tagging.py +4 -4
  197. langchain/chains/openai_tools/extraction.py +7 -8
  198. langchain/chains/qa_generation/base.py +8 -3
  199. langchain/chains/qa_generation/prompt.py +5 -5
  200. langchain/chains/qa_with_sources/base.py +17 -6
  201. langchain/chains/qa_with_sources/loading.py +16 -8
  202. langchain/chains/qa_with_sources/map_reduce_prompt.py +8 -9
  203. langchain/chains/qa_with_sources/refine_prompts.py +0 -1
  204. langchain/chains/qa_with_sources/retrieval.py +15 -6
  205. langchain/chains/qa_with_sources/stuff_prompt.py +6 -7
  206. langchain/chains/qa_with_sources/vector_db.py +21 -8
  207. langchain/chains/query_constructor/base.py +37 -34
  208. langchain/chains/query_constructor/ir.py +4 -4
  209. langchain/chains/query_constructor/parser.py +101 -34
  210. langchain/chains/query_constructor/prompt.py +5 -6
  211. langchain/chains/question_answering/chain.py +21 -10
  212. langchain/chains/question_answering/map_reduce_prompt.py +14 -14
  213. langchain/chains/question_answering/map_rerank_prompt.py +3 -3
  214. langchain/chains/question_answering/refine_prompts.py +2 -5
  215. langchain/chains/question_answering/stuff_prompt.py +5 -5
  216. langchain/chains/retrieval.py +1 -3
  217. langchain/chains/retrieval_qa/base.py +38 -27
  218. langchain/chains/retrieval_qa/prompt.py +1 -2
  219. langchain/chains/router/__init__.py +3 -3
  220. langchain/chains/router/base.py +38 -22
  221. langchain/chains/router/embedding_router.py +15 -8
  222. langchain/chains/router/llm_router.py +23 -20
  223. langchain/chains/router/multi_prompt.py +5 -2
  224. langchain/chains/router/multi_retrieval_qa.py +28 -5
  225. langchain/chains/sequential.py +30 -18
  226. langchain/chains/sql_database/prompt.py +14 -16
  227. langchain/chains/sql_database/query.py +7 -5
  228. langchain/chains/structured_output/__init__.py +1 -1
  229. langchain/chains/structured_output/base.py +77 -67
  230. langchain/chains/summarize/chain.py +11 -5
  231. langchain/chains/summarize/map_reduce_prompt.py +0 -1
  232. langchain/chains/summarize/stuff_prompt.py +0 -1
  233. langchain/chains/transform.py +9 -6
  234. langchain/chat_loaders/facebook_messenger.py +1 -1
  235. langchain/chat_loaders/langsmith.py +1 -1
  236. langchain/chat_loaders/utils.py +3 -3
  237. langchain/chat_models/__init__.py +20 -19
  238. langchain/chat_models/anthropic.py +1 -1
  239. langchain/chat_models/azureml_endpoint.py +1 -1
  240. langchain/chat_models/baidu_qianfan_endpoint.py +1 -1
  241. langchain/chat_models/base.py +213 -139
  242. langchain/chat_models/bedrock.py +1 -1
  243. langchain/chat_models/fake.py +1 -1
  244. langchain/chat_models/meta.py +1 -1
  245. langchain/chat_models/pai_eas_endpoint.py +1 -1
  246. langchain/chat_models/promptlayer_openai.py +1 -1
  247. langchain/chat_models/volcengine_maas.py +1 -1
  248. langchain/docstore/base.py +1 -1
  249. langchain/document_loaders/__init__.py +9 -9
  250. langchain/document_loaders/airbyte.py +3 -3
  251. langchain/document_loaders/assemblyai.py +1 -1
  252. langchain/document_loaders/azure_blob_storage_container.py +1 -1
  253. langchain/document_loaders/azure_blob_storage_file.py +1 -1
  254. langchain/document_loaders/baiducloud_bos_file.py +1 -1
  255. langchain/document_loaders/base.py +1 -1
  256. langchain/document_loaders/blob_loaders/__init__.py +1 -1
  257. langchain/document_loaders/blob_loaders/schema.py +1 -4
  258. langchain/document_loaders/blockchain.py +1 -1
  259. langchain/document_loaders/chatgpt.py +1 -1
  260. langchain/document_loaders/college_confidential.py +1 -1
  261. langchain/document_loaders/confluence.py +1 -1
  262. langchain/document_loaders/email.py +1 -1
  263. langchain/document_loaders/facebook_chat.py +1 -1
  264. langchain/document_loaders/markdown.py +1 -1
  265. langchain/document_loaders/notebook.py +1 -1
  266. langchain/document_loaders/org_mode.py +1 -1
  267. langchain/document_loaders/parsers/__init__.py +1 -1
  268. langchain/document_loaders/parsers/docai.py +1 -1
  269. langchain/document_loaders/parsers/generic.py +1 -1
  270. langchain/document_loaders/parsers/html/__init__.py +1 -1
  271. langchain/document_loaders/parsers/html/bs4.py +1 -1
  272. langchain/document_loaders/parsers/language/cobol.py +1 -1
  273. langchain/document_loaders/parsers/language/python.py +1 -1
  274. langchain/document_loaders/parsers/msword.py +1 -1
  275. langchain/document_loaders/parsers/pdf.py +5 -5
  276. langchain/document_loaders/parsers/registry.py +1 -1
  277. langchain/document_loaders/pdf.py +8 -8
  278. langchain/document_loaders/powerpoint.py +1 -1
  279. langchain/document_loaders/pyspark_dataframe.py +1 -1
  280. langchain/document_loaders/telegram.py +2 -2
  281. langchain/document_loaders/tencent_cos_directory.py +1 -1
  282. langchain/document_loaders/unstructured.py +5 -5
  283. langchain/document_loaders/url_playwright.py +1 -1
  284. langchain/document_loaders/whatsapp_chat.py +1 -1
  285. langchain/document_loaders/youtube.py +2 -2
  286. langchain/document_transformers/__init__.py +3 -3
  287. langchain/document_transformers/beautiful_soup_transformer.py +1 -1
  288. langchain/document_transformers/doctran_text_extract.py +1 -1
  289. langchain/document_transformers/doctran_text_qa.py +1 -1
  290. langchain/document_transformers/doctran_text_translate.py +1 -1
  291. langchain/document_transformers/embeddings_redundant_filter.py +3 -3
  292. langchain/document_transformers/google_translate.py +1 -1
  293. langchain/document_transformers/html2text.py +1 -1
  294. langchain/document_transformers/nuclia_text_transform.py +1 -1
  295. langchain/embeddings/__init__.py +5 -5
  296. langchain/embeddings/base.py +35 -24
  297. langchain/embeddings/cache.py +37 -32
  298. langchain/embeddings/fake.py +1 -1
  299. langchain/embeddings/huggingface.py +2 -2
  300. langchain/evaluation/__init__.py +22 -22
  301. langchain/evaluation/agents/trajectory_eval_chain.py +26 -25
  302. langchain/evaluation/agents/trajectory_eval_prompt.py +6 -9
  303. langchain/evaluation/comparison/__init__.py +1 -1
  304. langchain/evaluation/comparison/eval_chain.py +21 -13
  305. langchain/evaluation/comparison/prompt.py +1 -2
  306. langchain/evaluation/criteria/__init__.py +1 -1
  307. langchain/evaluation/criteria/eval_chain.py +23 -11
  308. langchain/evaluation/criteria/prompt.py +2 -3
  309. langchain/evaluation/embedding_distance/base.py +34 -20
  310. langchain/evaluation/exact_match/base.py +14 -1
  311. langchain/evaluation/loading.py +16 -11
  312. langchain/evaluation/parsing/base.py +20 -4
  313. langchain/evaluation/parsing/json_distance.py +24 -10
  314. langchain/evaluation/parsing/json_schema.py +13 -12
  315. langchain/evaluation/qa/__init__.py +1 -1
  316. langchain/evaluation/qa/eval_chain.py +20 -5
  317. langchain/evaluation/qa/eval_prompt.py +7 -8
  318. langchain/evaluation/qa/generate_chain.py +4 -1
  319. langchain/evaluation/qa/generate_prompt.py +2 -4
  320. langchain/evaluation/regex_match/base.py +9 -1
  321. langchain/evaluation/schema.py +38 -30
  322. langchain/evaluation/scoring/__init__.py +1 -1
  323. langchain/evaluation/scoring/eval_chain.py +23 -15
  324. langchain/evaluation/scoring/prompt.py +0 -1
  325. langchain/evaluation/string_distance/base.py +20 -9
  326. langchain/globals.py +12 -11
  327. langchain/graphs/__init__.py +6 -6
  328. langchain/graphs/graph_document.py +1 -1
  329. langchain/graphs/networkx_graph.py +2 -2
  330. langchain/hub.py +9 -11
  331. langchain/indexes/__init__.py +3 -3
  332. langchain/indexes/_sql_record_manager.py +63 -46
  333. langchain/indexes/prompts/entity_extraction.py +1 -2
  334. langchain/indexes/prompts/entity_summarization.py +1 -2
  335. langchain/indexes/prompts/knowledge_triplet_extraction.py +1 -3
  336. langchain/indexes/vectorstore.py +35 -19
  337. langchain/llms/__init__.py +13 -13
  338. langchain/llms/ai21.py +1 -1
  339. langchain/llms/azureml_endpoint.py +4 -4
  340. langchain/llms/base.py +15 -7
  341. langchain/llms/bedrock.py +1 -1
  342. langchain/llms/cloudflare_workersai.py +1 -1
  343. langchain/llms/gradient_ai.py +1 -1
  344. langchain/llms/loading.py +1 -1
  345. langchain/llms/openai.py +1 -1
  346. langchain/llms/sagemaker_endpoint.py +1 -1
  347. langchain/load/dump.py +1 -1
  348. langchain/load/load.py +1 -1
  349. langchain/load/serializable.py +3 -3
  350. langchain/memory/__init__.py +3 -3
  351. langchain/memory/buffer.py +14 -7
  352. langchain/memory/buffer_window.py +2 -0
  353. langchain/memory/chat_memory.py +14 -8
  354. langchain/memory/chat_message_histories/__init__.py +1 -1
  355. langchain/memory/chat_message_histories/astradb.py +1 -1
  356. langchain/memory/chat_message_histories/cassandra.py +1 -1
  357. langchain/memory/chat_message_histories/cosmos_db.py +1 -1
  358. langchain/memory/chat_message_histories/dynamodb.py +1 -1
  359. langchain/memory/chat_message_histories/elasticsearch.py +1 -1
  360. langchain/memory/chat_message_histories/file.py +1 -1
  361. langchain/memory/chat_message_histories/firestore.py +1 -1
  362. langchain/memory/chat_message_histories/momento.py +1 -1
  363. langchain/memory/chat_message_histories/mongodb.py +1 -1
  364. langchain/memory/chat_message_histories/neo4j.py +1 -1
  365. langchain/memory/chat_message_histories/postgres.py +1 -1
  366. langchain/memory/chat_message_histories/redis.py +1 -1
  367. langchain/memory/chat_message_histories/rocksetdb.py +1 -1
  368. langchain/memory/chat_message_histories/singlestoredb.py +1 -1
  369. langchain/memory/chat_message_histories/streamlit.py +1 -1
  370. langchain/memory/chat_message_histories/upstash_redis.py +1 -1
  371. langchain/memory/chat_message_histories/xata.py +1 -1
  372. langchain/memory/chat_message_histories/zep.py +1 -1
  373. langchain/memory/combined.py +14 -13
  374. langchain/memory/entity.py +131 -61
  375. langchain/memory/prompt.py +10 -11
  376. langchain/memory/readonly.py +0 -2
  377. langchain/memory/simple.py +4 -3
  378. langchain/memory/summary.py +43 -11
  379. langchain/memory/summary_buffer.py +20 -8
  380. langchain/memory/token_buffer.py +2 -0
  381. langchain/memory/utils.py +3 -2
  382. langchain/memory/vectorstore.py +12 -5
  383. langchain/memory/vectorstore_token_buffer_memory.py +5 -5
  384. langchain/model_laboratory.py +12 -11
  385. langchain/output_parsers/__init__.py +4 -4
  386. langchain/output_parsers/boolean.py +7 -4
  387. langchain/output_parsers/combining.py +14 -7
  388. langchain/output_parsers/datetime.py +32 -31
  389. langchain/output_parsers/enum.py +10 -4
  390. langchain/output_parsers/fix.py +60 -53
  391. langchain/output_parsers/format_instructions.py +6 -8
  392. langchain/output_parsers/json.py +2 -2
  393. langchain/output_parsers/list.py +2 -2
  394. langchain/output_parsers/loading.py +9 -9
  395. langchain/output_parsers/openai_functions.py +3 -3
  396. langchain/output_parsers/openai_tools.py +1 -1
  397. langchain/output_parsers/pandas_dataframe.py +59 -48
  398. langchain/output_parsers/prompts.py +1 -2
  399. langchain/output_parsers/rail_parser.py +1 -1
  400. langchain/output_parsers/regex.py +9 -8
  401. langchain/output_parsers/regex_dict.py +7 -10
  402. langchain/output_parsers/retry.py +99 -80
  403. langchain/output_parsers/structured.py +21 -6
  404. langchain/output_parsers/yaml.py +19 -11
  405. langchain/prompts/__init__.py +5 -3
  406. langchain/prompts/base.py +5 -5
  407. langchain/prompts/chat.py +8 -8
  408. langchain/prompts/example_selector/__init__.py +3 -1
  409. langchain/prompts/example_selector/semantic_similarity.py +2 -2
  410. langchain/prompts/few_shot.py +1 -1
  411. langchain/prompts/loading.py +3 -3
  412. langchain/prompts/prompt.py +1 -1
  413. langchain/pydantic_v1/__init__.py +1 -1
  414. langchain/retrievers/__init__.py +5 -5
  415. langchain/retrievers/bedrock.py +2 -2
  416. langchain/retrievers/bm25.py +1 -1
  417. langchain/retrievers/contextual_compression.py +14 -8
  418. langchain/retrievers/docarray.py +1 -1
  419. langchain/retrievers/document_compressors/__init__.py +5 -4
  420. langchain/retrievers/document_compressors/base.py +12 -6
  421. langchain/retrievers/document_compressors/chain_extract.py +5 -3
  422. langchain/retrievers/document_compressors/chain_extract_prompt.py +2 -3
  423. langchain/retrievers/document_compressors/chain_filter.py +9 -9
  424. langchain/retrievers/document_compressors/chain_filter_prompt.py +1 -2
  425. langchain/retrievers/document_compressors/cohere_rerank.py +17 -15
  426. langchain/retrievers/document_compressors/cross_encoder_rerank.py +2 -0
  427. langchain/retrievers/document_compressors/embeddings_filter.py +24 -17
  428. langchain/retrievers/document_compressors/flashrank_rerank.py +1 -1
  429. langchain/retrievers/document_compressors/listwise_rerank.py +8 -5
  430. langchain/retrievers/ensemble.py +30 -27
  431. langchain/retrievers/google_cloud_documentai_warehouse.py +1 -1
  432. langchain/retrievers/google_vertex_ai_search.py +2 -2
  433. langchain/retrievers/kendra.py +10 -10
  434. langchain/retrievers/llama_index.py +1 -1
  435. langchain/retrievers/merger_retriever.py +11 -11
  436. langchain/retrievers/milvus.py +1 -1
  437. langchain/retrievers/multi_query.py +35 -27
  438. langchain/retrievers/multi_vector.py +24 -9
  439. langchain/retrievers/parent_document_retriever.py +33 -9
  440. langchain/retrievers/re_phraser.py +6 -5
  441. langchain/retrievers/self_query/base.py +157 -127
  442. langchain/retrievers/time_weighted_retriever.py +21 -7
  443. langchain/retrievers/zilliz.py +1 -1
  444. langchain/runnables/hub.py +12 -0
  445. langchain/runnables/openai_functions.py +12 -2
  446. langchain/schema/__init__.py +23 -23
  447. langchain/schema/cache.py +1 -1
  448. langchain/schema/callbacks/base.py +7 -7
  449. langchain/schema/callbacks/manager.py +19 -19
  450. langchain/schema/callbacks/tracers/base.py +1 -1
  451. langchain/schema/callbacks/tracers/evaluation.py +1 -1
  452. langchain/schema/callbacks/tracers/langchain.py +1 -1
  453. langchain/schema/callbacks/tracers/langchain_v1.py +1 -1
  454. langchain/schema/callbacks/tracers/log_stream.py +1 -1
  455. langchain/schema/callbacks/tracers/schemas.py +8 -8
  456. langchain/schema/callbacks/tracers/stdout.py +3 -3
  457. langchain/schema/document.py +1 -1
  458. langchain/schema/language_model.py +2 -2
  459. langchain/schema/messages.py +12 -12
  460. langchain/schema/output.py +3 -3
  461. langchain/schema/output_parser.py +3 -3
  462. langchain/schema/runnable/__init__.py +3 -3
  463. langchain/schema/runnable/base.py +9 -9
  464. langchain/schema/runnable/config.py +5 -5
  465. langchain/schema/runnable/configurable.py +1 -1
  466. langchain/schema/runnable/history.py +1 -1
  467. langchain/schema/runnable/passthrough.py +1 -1
  468. langchain/schema/runnable/utils.py +16 -16
  469. langchain/schema/vectorstore.py +1 -1
  470. langchain/smith/__init__.py +2 -1
  471. langchain/smith/evaluation/__init__.py +2 -2
  472. langchain/smith/evaluation/config.py +9 -23
  473. langchain/smith/evaluation/name_generation.py +3 -3
  474. langchain/smith/evaluation/progress.py +22 -4
  475. langchain/smith/evaluation/runner_utils.py +416 -247
  476. langchain/smith/evaluation/string_run_evaluator.py +102 -68
  477. langchain/storage/__init__.py +2 -2
  478. langchain/storage/_lc_store.py +4 -2
  479. langchain/storage/encoder_backed.py +7 -2
  480. langchain/storage/file_system.py +19 -16
  481. langchain/storage/in_memory.py +1 -1
  482. langchain/storage/upstash_redis.py +1 -1
  483. langchain/text_splitter.py +15 -15
  484. langchain/tools/__init__.py +28 -26
  485. langchain/tools/ainetwork/app.py +1 -1
  486. langchain/tools/ainetwork/base.py +1 -1
  487. langchain/tools/ainetwork/owner.py +1 -1
  488. langchain/tools/ainetwork/rule.py +1 -1
  489. langchain/tools/ainetwork/transfer.py +1 -1
  490. langchain/tools/ainetwork/value.py +1 -1
  491. langchain/tools/amadeus/closest_airport.py +1 -1
  492. langchain/tools/amadeus/flight_search.py +1 -1
  493. langchain/tools/azure_cognitive_services/__init__.py +1 -1
  494. langchain/tools/base.py +4 -4
  495. langchain/tools/bearly/tool.py +1 -1
  496. langchain/tools/bing_search/__init__.py +1 -1
  497. langchain/tools/bing_search/tool.py +1 -1
  498. langchain/tools/dataforseo_api_search/__init__.py +1 -1
  499. langchain/tools/dataforseo_api_search/tool.py +1 -1
  500. langchain/tools/ddg_search/tool.py +1 -1
  501. langchain/tools/e2b_data_analysis/tool.py +2 -2
  502. langchain/tools/edenai/__init__.py +1 -1
  503. langchain/tools/file_management/__init__.py +1 -1
  504. langchain/tools/file_management/copy.py +1 -1
  505. langchain/tools/file_management/delete.py +1 -1
  506. langchain/tools/gmail/__init__.py +2 -2
  507. langchain/tools/gmail/get_message.py +1 -1
  508. langchain/tools/gmail/search.py +1 -1
  509. langchain/tools/gmail/send_message.py +1 -1
  510. langchain/tools/google_finance/__init__.py +1 -1
  511. langchain/tools/google_finance/tool.py +1 -1
  512. langchain/tools/google_scholar/__init__.py +1 -1
  513. langchain/tools/google_scholar/tool.py +1 -1
  514. langchain/tools/google_search/__init__.py +1 -1
  515. langchain/tools/google_search/tool.py +1 -1
  516. langchain/tools/google_serper/__init__.py +1 -1
  517. langchain/tools/google_serper/tool.py +1 -1
  518. langchain/tools/google_trends/__init__.py +1 -1
  519. langchain/tools/google_trends/tool.py +1 -1
  520. langchain/tools/jira/tool.py +20 -1
  521. langchain/tools/json/tool.py +25 -3
  522. langchain/tools/memorize/tool.py +1 -1
  523. langchain/tools/multion/__init__.py +1 -1
  524. langchain/tools/multion/update_session.py +1 -1
  525. langchain/tools/office365/__init__.py +2 -2
  526. langchain/tools/office365/events_search.py +1 -1
  527. langchain/tools/office365/messages_search.py +1 -1
  528. langchain/tools/office365/send_event.py +1 -1
  529. langchain/tools/office365/send_message.py +1 -1
  530. langchain/tools/openapi/utils/api_models.py +6 -6
  531. langchain/tools/playwright/__init__.py +5 -5
  532. langchain/tools/playwright/click.py +1 -1
  533. langchain/tools/playwright/extract_hyperlinks.py +1 -1
  534. langchain/tools/playwright/get_elements.py +1 -1
  535. langchain/tools/playwright/navigate.py +1 -1
  536. langchain/tools/plugin.py +2 -2
  537. langchain/tools/powerbi/tool.py +1 -1
  538. langchain/tools/python/__init__.py +3 -2
  539. langchain/tools/reddit_search/tool.py +1 -1
  540. langchain/tools/render.py +2 -2
  541. langchain/tools/requests/tool.py +2 -2
  542. langchain/tools/searchapi/tool.py +1 -1
  543. langchain/tools/searx_search/tool.py +1 -1
  544. langchain/tools/slack/get_message.py +1 -1
  545. langchain/tools/spark_sql/tool.py +1 -1
  546. langchain/tools/sql_database/tool.py +1 -1
  547. langchain/tools/tavily_search/__init__.py +1 -1
  548. langchain/tools/tavily_search/tool.py +1 -1
  549. langchain/tools/zapier/__init__.py +1 -1
  550. langchain/tools/zapier/tool.py +24 -2
  551. langchain/utilities/__init__.py +4 -4
  552. langchain/utilities/arcee.py +4 -4
  553. langchain/utilities/clickup.py +4 -4
  554. langchain/utilities/dalle_image_generator.py +1 -1
  555. langchain/utilities/dataforseo_api_search.py +1 -1
  556. langchain/utilities/opaqueprompts.py +1 -1
  557. langchain/utilities/reddit_search.py +1 -1
  558. langchain/utilities/sql_database.py +1 -1
  559. langchain/utilities/tavily_search.py +1 -1
  560. langchain/utilities/vertexai.py +2 -2
  561. langchain/utils/__init__.py +1 -1
  562. langchain/utils/aiter.py +1 -1
  563. langchain/utils/html.py +3 -3
  564. langchain/utils/input.py +1 -1
  565. langchain/utils/iter.py +1 -1
  566. langchain/utils/json_schema.py +1 -3
  567. langchain/utils/strings.py +1 -1
  568. langchain/utils/utils.py +6 -6
  569. langchain/vectorstores/__init__.py +5 -5
  570. langchain/vectorstores/alibabacloud_opensearch.py +1 -1
  571. langchain/vectorstores/azure_cosmos_db.py +1 -1
  572. langchain/vectorstores/clickhouse.py +1 -1
  573. langchain/vectorstores/elastic_vector_search.py +1 -1
  574. langchain/vectorstores/elasticsearch.py +2 -2
  575. langchain/vectorstores/myscale.py +1 -1
  576. langchain/vectorstores/neo4j_vector.py +1 -1
  577. langchain/vectorstores/pgembedding.py +1 -1
  578. langchain/vectorstores/qdrant.py +1 -1
  579. langchain/vectorstores/redis/__init__.py +1 -1
  580. langchain/vectorstores/redis/base.py +1 -1
  581. langchain/vectorstores/redis/filters.py +4 -4
  582. langchain/vectorstores/redis/schema.py +6 -6
  583. langchain/vectorstores/sklearn.py +2 -2
  584. langchain/vectorstores/starrocks.py +1 -1
  585. langchain/vectorstores/utils.py +1 -1
  586. {langchain-0.3.26.dist-info → langchain-0.4.0.dev0.dist-info}/METADATA +4 -14
  587. {langchain-0.3.26.dist-info → langchain-0.4.0.dev0.dist-info}/RECORD +590 -591
  588. {langchain-0.3.26.dist-info → langchain-0.4.0.dev0.dist-info}/WHEEL +1 -1
  589. langchain/smith/evaluation/utils.py +0 -0
  590. {langchain-0.3.26.dist-info → langchain-0.4.0.dev0.dist-info}/entry_points.txt +0 -0
  591. {langchain-0.3.26.dist-info → langchain-0.4.0.dev0.dist-info}/licenses/LICENSE +0 -0
langchain/chains/qa_generation/prompt.py

@@ -1,5 +1,3 @@
- # flake8: noqa
- from langchain.chains.prompt_selector import ConditionalPromptSelector, is_chat_model
  from langchain_core.prompts.chat import (
  ChatPromptTemplate,
  HumanMessagePromptTemplate,
@@ -7,6 +5,8 @@ from langchain_core.prompts.chat import (
  )
  from langchain_core.prompts.prompt import PromptTemplate

+ from langchain.chains.prompt_selector import ConditionalPromptSelector, is_chat_model
+
  templ1 = """You are a smart assistant designed to help high school teachers come up with reading comprehension questions.
  Given a piece of text, you must come up with a question and answer pair that can be used to test a student's reading comprehension abilities.
  When coming up with this question/answer pair, you must respond in the following format:
@@ -18,10 +18,10 @@ When coming up with this question/answer pair, you must respond in the following
  ```

  Everything between the ``` must be valid json.
- """
+ """ # noqa: E501
  templ2 = """Please come up with a question/answer pair, in the specified JSON format, for the following text:
  ----------------
- {text}"""
+ {text}""" # noqa: E501
  CHAT_PROMPT = ChatPromptTemplate.from_messages(
  [
  SystemMessagePromptTemplate.from_template(templ1),
@@ -42,7 +42,7 @@ Everything between the ``` must be valid json.

  Please come up with a question/answer pair, in the specified JSON format, for the following text:
  ----------------
- {text}"""
+ {text}""" # noqa: E501
  PROMPT = PromptTemplate.from_template(templ)

  PROMPT_SELECTOR = ConditionalPromptSelector(
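
For context on the PROMPT_SELECTOR defined above, here is a minimal usage sketch of langchain's ConditionalPromptSelector, assuming its API is unchanged in 0.4.0.dev0. The toy prompts and the build_prompt helper are illustrative, not code from this package.

from langchain.chains.prompt_selector import ConditionalPromptSelector, is_chat_model
from langchain_core.prompts import ChatPromptTemplate, PromptTemplate

# Illustrative prompts; the real module builds these from templ1/templ2 above.
PROMPT = PromptTemplate.from_template("Answer the question: {question}")
CHAT_PROMPT = ChatPromptTemplate.from_messages(
    [("human", "Answer the question: {question}")]
)

PROMPT_SELECTOR = ConditionalPromptSelector(
    default_prompt=PROMPT,
    conditionals=[(is_chat_model, CHAT_PROMPT)],
)

def build_prompt(llm):
    # get_prompt returns CHAT_PROMPT when is_chat_model(llm) is true, else PROMPT.
    return PROMPT_SELECTOR.get_prompt(llm)
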
langchain/chains/qa_with_sources/base.py

@@ -16,6 +16,7 @@ from langchain_core.documents import Document
  from langchain_core.language_models import BaseLanguageModel
  from langchain_core.prompts import BasePromptTemplate
  from pydantic import ConfigDict, model_validator
+ from typing_extensions import override

  from langchain.chains import ReduceDocumentsChain
  from langchain.chains.base import Chain
@@ -70,7 +71,7 @@ class BaseQAWithSourcesChain(Chain, ABC):
  document_variable_name="summaries",
  )
  reduce_documents_chain = ReduceDocumentsChain(
- combine_documents_chain=combine_results_chain
+ combine_documents_chain=combine_results_chain,
  )
  combine_documents_chain = MapReduceDocumentsChain(
  llm_chain=llm_question_chain,
@@ -93,7 +94,9 @@ class BaseQAWithSourcesChain(Chain, ABC):
  """Load chain from chain type."""
  _chain_kwargs = chain_type_kwargs or {}
  combine_documents_chain = load_qa_with_sources_chain(
- llm, chain_type=chain_type, **_chain_kwargs
+ llm,
+ chain_type=chain_type,
+ **_chain_kwargs,
  )
  return cls(combine_documents_chain=combine_documents_chain, **kwargs)

@@ -118,7 +121,7 @@ class BaseQAWithSourcesChain(Chain, ABC):
  """
  _output_keys = [self.answer_key, self.sources_answer_key]
  if self.return_source_documents:
- _output_keys = _output_keys + ["source_documents"]
+ _output_keys = [*_output_keys, "source_documents"]
  return _output_keys

  @model_validator(mode="before")
@@ -133,7 +136,9 @@ class BaseQAWithSourcesChain(Chain, ABC):
  """Split sources from answer."""
  if re.search(r"SOURCES?:", answer, re.IGNORECASE):
  answer, sources = re.split(
- r"SOURCES?:|QUESTION:\s", answer, flags=re.IGNORECASE
+ r"SOURCES?:|QUESTION:\s",
+ answer,
+ flags=re.IGNORECASE,
  )[:2]
  sources = re.split(r"\n", sources)[0].strip()
  else:
@@ -164,7 +169,9 @@ class BaseQAWithSourcesChain(Chain, ABC):
  docs = self._get_docs(inputs) # type: ignore[call-arg]

  answer = self.combine_documents_chain.run(
- input_documents=docs, callbacks=_run_manager.get_child(), **inputs
+ input_documents=docs,
+ callbacks=_run_manager.get_child(),
+ **inputs,
  )
  answer, sources = self._split_sources(answer)
  result: dict[str, Any] = {
@@ -198,7 +205,9 @@ class BaseQAWithSourcesChain(Chain, ABC):
  else:
  docs = await self._aget_docs(inputs) # type: ignore[call-arg]
  answer = await self.combine_documents_chain.arun(
- input_documents=docs, callbacks=_run_manager.get_child(), **inputs
+ input_documents=docs,
+ callbacks=_run_manager.get_child(),
+ **inputs,
  )
  answer, sources = self._split_sources(answer)
  result: dict[str, Any] = {
@@ -232,6 +241,7 @@ class QAWithSourcesChain(BaseQAWithSourcesChain):
  """
  return [self.input_docs_key, self.question_key]

+ @override
  def _get_docs(
  self,
  inputs: dict[str, Any],
@@ -241,6 +251,7 @@ class QAWithSourcesChain(BaseQAWithSourcesChain):
  """Get docs to run questioning over."""
  return inputs.pop(self.input_docs_key)

+ @override
  async def _aget_docs(
  self,
  inputs: dict[str, Any],
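
The @override lines added above come from typing_extensions: the decorator marks a method as intentionally overriding a base-class method so a type checker can flag signature drift. A minimal standalone sketch of the pattern (class names are stand-ins, not the package's classes):

from typing_extensions import override

class BaseDocsProvider:
    def _get_docs(self, inputs: dict) -> list:
        raise NotImplementedError

class DictDocsProvider(BaseDocsProvider):
    @override  # a type checker errors if _get_docs no longer matches the base class
    def _get_docs(self, inputs: dict) -> list:
        return inputs.get("docs", [])
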
langchain/chains/qa_with_sources/loading.py

@@ -30,13 +30,16 @@ class LoadingCallable(Protocol):
  """Interface for loading the combine documents chain."""

  def __call__(
- self, llm: BaseLanguageModel, **kwargs: Any
+ self,
+ llm: BaseLanguageModel,
+ **kwargs: Any,
  ) -> BaseCombineDocumentsChain:
  """Callable to load the combine documents chain."""


  def _load_map_rerank_chain(
  llm: BaseLanguageModel,
+ *,
  prompt: BasePromptTemplate = MAP_RERANK_PROMPT,
  verbose: bool = False,
  document_variable_name: str = "context",
@@ -56,6 +59,7 @@ def _load_map_rerank_chain(

  def _load_stuff_chain(
  llm: BaseLanguageModel,
+ *,
  prompt: BasePromptTemplate = stuff_prompt.PROMPT,
  document_prompt: BasePromptTemplate = stuff_prompt.EXAMPLE_PROMPT,
  document_variable_name: str = "summaries",
@@ -74,6 +78,7 @@ def _load_stuff_chain(

  def _load_map_reduce_chain(
  llm: BaseLanguageModel,
+ *,
  question_prompt: BasePromptTemplate = map_reduce_prompt.QUESTION_PROMPT,
  combine_prompt: BasePromptTemplate = map_reduce_prompt.COMBINE_PROMPT,
  document_prompt: BasePromptTemplate = map_reduce_prompt.EXAMPLE_PROMPT,
@@ -98,10 +103,11 @@ def _load_map_reduce_chain(
  if collapse_prompt is None:
  collapse_chain = None
  if collapse_llm is not None:
- raise ValueError(
+ msg = (
  "collapse_llm provided, but collapse_prompt was not: please "
  "provide one or stop providing collapse_llm."
  )
+ raise ValueError(msg)
  else:
  _collapse_llm = collapse_llm or llm
  collapse_chain = StuffDocumentsChain(
@@ -130,6 +136,7 @@ def _load_map_reduce_chain(

  def _load_refine_chain(
  llm: BaseLanguageModel,
+ *,
  question_prompt: BasePromptTemplate = refine_prompts.DEFAULT_TEXT_QA_PROMPT,
  refine_prompt: BasePromptTemplate = refine_prompts.DEFAULT_REFINE_PROMPT,
  document_prompt: BasePromptTemplate = refine_prompts.EXAMPLE_PROMPT,
@@ -162,16 +169,16 @@ def _load_refine_chain(
  "https://python.langchain.com/docs/how_to/qa_sources/"
  "\nSee also the following migration guides for replacements "
  "based on `chain_type`:\n"
- "stuff: https://python.langchain.com/docs/versions/migrating_chains/stuff_docs_chain\n" # noqa: E501
- "map_reduce: https://python.langchain.com/docs/versions/migrating_chains/map_reduce_chain\n" # noqa: E501
- "refine: https://python.langchain.com/docs/versions/migrating_chains/refine_chain\n" # noqa: E501
- "map_rerank: https://python.langchain.com/docs/versions/migrating_chains/map_rerank_docs_chain\n" # noqa: E501
+ "stuff: https://python.langchain.com/docs/versions/migrating_chains/stuff_docs_chain\n"
+ "map_reduce: https://python.langchain.com/docs/versions/migrating_chains/map_reduce_chain\n"
+ "refine: https://python.langchain.com/docs/versions/migrating_chains/refine_chain\n"
+ "map_rerank: https://python.langchain.com/docs/versions/migrating_chains/map_rerank_docs_chain\n"
  ),
  )
  def load_qa_with_sources_chain(
  llm: BaseLanguageModel,
  chain_type: str = "stuff",
- verbose: Optional[bool] = None,
+ verbose: Optional[bool] = None, # noqa: FBT001
  **kwargs: Any,
  ) -> BaseCombineDocumentsChain:
  """Load a question answering with sources chain.
@@ -193,9 +200,10 @@ def load_qa_with_sources_chain(
  "map_rerank": _load_map_rerank_chain,
  }
  if chain_type not in loader_mapping:
- raise ValueError(
+ msg = (
  f"Got unsupported chain type: {chain_type}. "
  f"Should be one of {loader_mapping.keys()}"
  )
+ raise ValueError(msg)
  _func: LoadingCallable = loader_mapping[chain_type]
  return _func(llm, verbose=verbose, **kwargs)
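
Two patterns recur in the loader changes above: the prompt/verbose parameters become keyword-only (the bare *,), and error messages are assigned to a variable before raising (the flake8-errmsg style enforced by ruff as EM101/EM102). A self-contained sketch of both, using hypothetical names rather than the package's private loaders:

from typing import Any

def _load_example_chain(llm: Any, *, prompt: str = "default", verbose: bool = False) -> str:
    # After the change, prompt/verbose can only be passed by keyword;
    # _load_example_chain(llm, "my prompt") now raises TypeError.
    return f"{llm}:{prompt}:{verbose}"

def load_example_chain(llm: Any, chain_type: str = "stuff", **kwargs: Any) -> str:
    loader_mapping = {"stuff": _load_example_chain}
    if chain_type not in loader_mapping:
        # Assign the message first, then raise, instead of raise ValueError("...").
        msg = f"Got unsupported chain type: {chain_type}. Should be one of {list(loader_mapping)}"
        raise ValueError(msg)
    return loader_mapping[chain_type](llm, **kwargs)
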
langchain/chains/qa_with_sources/map_reduce_prompt.py

@@ -1,16 +1,15 @@
- # flake8: noqa
  from langchain_core.prompts import PromptTemplate

- question_prompt_template = """Use the following portion of a long document to see if any of the text is relevant to answer the question.
+ question_prompt_template = """Use the following portion of a long document to see if any of the text is relevant to answer the question.
  Return any relevant text verbatim.
  {context}
  Question: {question}
- Relevant text, if any:"""
+ Relevant text, if any:""" # noqa: E501
  QUESTION_PROMPT = PromptTemplate(
  template=question_prompt_template, input_variables=["context", "question"]
  )

- combine_prompt_template = """Given the following extracted parts of a long document and a question, create a final answer with references ("SOURCES").
+ combine_prompt_template = """Given the following extracted parts of a long document and a question, create a final answer with references ("SOURCES").
  If you don't know the answer, just say that you don't know. Don't try to make up an answer.
  ALWAYS return a "SOURCES" part in your answer.

@@ -28,13 +27,13 @@ SOURCES: 28-pl

  QUESTION: What did the president say about Michael Jackson?
  =========
- Content: Madam Speaker, Madam Vice President, our First Lady and Second Gentleman. Members of Congress and the Cabinet. Justices of the Supreme Court. My fellow Americans. \n\nLast year COVID-19 kept us apart. This year we are finally together again. \n\nTonight, we meet as Democrats Republicans and Independents. But most importantly as Americans. \n\nWith a duty to one another to the American people to the Constitution. \n\nAnd with an unwavering resolve that freedom will always triumph over tyranny. \n\nSix days ago, Russias Vladimir Putin sought to shake the foundations of the free world thinking he could make it bend to his menacing ways. But he badly miscalculated. \n\nHe thought he could roll into Ukraine and the world would roll over. Instead he met a wall of strength he never imagined. \n\nHe met the Ukrainian people. \n\nFrom President Zelenskyy to every Ukrainian, their fearlessness, their courage, their determination, inspires the world. \n\nGroups of citizens blocking tanks with their bodies. Everyone from students to retirees teachers turned soldiers defending their homeland.
+ Content: Madam Speaker, Madam Vice President, our First Lady and Second Gentleman. Members of Congress and the Cabinet. Justices of the Supreme Court. My fellow Americans. \n\nLast year COVID-19 kept us apart. This year we are finally together again. \n\nTonight, we meet as Democrats Republicans and Independents. But most importantly as Americans. \n\nWith a duty to one another to the American people to the Constitution. \n\nAnd with an unwavering resolve that freedom will always triumph over tyranny. \n\nSix days ago, Russia's Vladimir Putin sought to shake the foundations of the free world thinking he could make it bend to his menacing ways. But he badly miscalculated. \n\nHe thought he could roll into Ukraine and the world would roll over. Instead he met a wall of strength he never imagined. \n\nHe met the Ukrainian people. \n\nFrom President Zelenskyy to every Ukrainian, their fearlessness, their courage, their determination, inspires the world. \n\nGroups of citizens blocking tanks with their bodies. Everyone from students to retirees teachers turned soldiers defending their homeland.
  Source: 0-pl
- Content: And we wont stop. \n\nWe have lost so much to COVID-19. Time with one another. And worst of all, so much loss of life. \n\nLets use this moment to reset. Lets stop looking at COVID-19 as a partisan dividing line and see it for what it is: A God-awful disease. \n\nLets stop seeing each other as enemies, and start seeing each other for who we really are: Fellow Americans. \n\nWe cant change how divided weve been. But we can change how we move forward—on COVID-19 and other issues we must face together. \n\nI recently visited the New York City Police Department days after the funerals of Officer Wilbert Mora and his partner, Officer Jason Rivera. \n\nThey were responding to a 9-1-1 call when a man shot and killed them with a stolen gun. \n\nOfficer Mora was 27 years old. \n\nOfficer Rivera was 22. \n\nBoth Dominican Americans whod grown up on the same streets they later chose to patrol as police officers. \n\nI spoke with their families and told them that we are forever in debt for their sacrifice, and we will carry on their mission to restore the trust and safety every community deserves.
+ Content: And we won't stop. \n\nWe have lost so much to COVID-19. Time with one another. And worst of all, so much loss of life. \n\nLet's use this moment to reset. Let's stop looking at COVID-19 as a partisan dividing line and see it for what it is: A God-awful disease. \n\nLet's stop seeing each other as enemies, and start seeing each other for who we really are: Fellow Americans. \n\nWe can't change how divided we've been. But we can change how we move forward—on COVID-19 and other issues we must face together. \n\nI recently visited the New York City Police Department days after the funerals of Officer Wilbert Mora and his partner, Officer Jason Rivera. \n\nThey were responding to a 9-1-1 call when a man shot and killed them with a stolen gun. \n\nOfficer Mora was 27 years old. \n\nOfficer Rivera was 22. \n\nBoth Dominican Americans who'd grown up on the same streets they later chose to patrol as police officers. \n\nI spoke with their families and told them that we are forever in debt for their sacrifice, and we will carry on their mission to restore the trust and safety every community deserves.
  Source: 24-pl
- Content: And a proud Ukrainian people, who have known 30 years of independence, have repeatedly shown that they will not tolerate anyone who tries to take their country backwards. \n\nTo all Americans, I will be honest with you, as Ive always promised. A Russian dictator, invading a foreign country, has costs around the world. \n\nAnd Im taking robust action to make sure the pain of our sanctions is targeted at Russias economy. And I will use every tool at our disposal to protect American businesses and consumers. \n\nTonight, I can announce that the United States has worked with 30 other countries to release 60 Million barrels of oil from reserves around the world. \n\nAmerica will lead that effort, releasing 30 Million barrels from our own Strategic Petroleum Reserve. And we stand ready to do more if necessary, unified with our allies. \n\nThese steps will help blunt gas prices here at home. And I know the news about whats happening can seem alarming. \n\nBut I want you to know that we are going to be okay.
+ Content: And a proud Ukrainian people, who have known 30 years of independence, have repeatedly shown that they will not tolerate anyone who tries to take their country backwards. \n\nTo all Americans, I will be honest with you, as I've always promised. A Russian dictator, invading a foreign country, has costs around the world. \n\nAnd I'm taking robust action to make sure the pain of our sanctions is targeted at Russia's economy. And I will use every tool at our disposal to protect American businesses and consumers. \n\nTonight, I can announce that the United States has worked with 30 other countries to release 60 Million barrels of oil from reserves around the world. \n\nAmerica will lead that effort, releasing 30 Million barrels from our own Strategic Petroleum Reserve. And we stand ready to do more if necessary, unified with our allies. \n\nThese steps will help blunt gas prices here at home. And I know the news about what's happening can seem alarming. \n\nBut I want you to know that we are going to be okay.
  Source: 5-pl
- Content: More support for patients and families. \n\nTo get there, I call on Congress to fund ARPA-H, the Advanced Research Projects Agency for Health. \n\nIts based on DARPA—the Defense Department project that led to the Internet, GPS, and so much more. \n\nARPA-H will have a singular purpose—to drive breakthroughs in cancer, Alzheimers, diabetes, and more. \n\nA unity agenda for the nation. \n\nWe can do this. \n\nMy fellow Americans—tonight , we have gathered in a sacred space—the citadel of our democracy. \n\nIn this Capitol, generation after generation, Americans have debated great questions amid great strife, and have done great things. \n\nWe have fought for freedom, expanded liberty, defeated totalitarianism and terror. \n\nAnd built the strongest, freest, and most prosperous nation the world has ever known. \n\nNow is the hour. \n\nOur moment of responsibility. \n\nOur test of resolve and conscience, of history itself. \n\nIt is in this moment that our character is formed. Our purpose is found. Our future is forged. \n\nWell I know this nation.
+ Content: More support for patients and families. \n\nTo get there, I call on Congress to fund ARPA-H, the Advanced Research Projects Agency for Health. \n\nIt's based on DARPA—the Defense Department project that led to the Internet, GPS, and so much more. \n\nARPA-H will have a singular purpose—to drive breakthroughs in cancer, Alzheimer's, diabetes, and more. \n\nA unity agenda for the nation. \n\nWe can do this. \n\nMy fellow Americans—tonight , we have gathered in a sacred space—the citadel of our democracy. \n\nIn this Capitol, generation after generation, Americans have debated great questions amid great strife, and have done great things. \n\nWe have fought for freedom, expanded liberty, defeated totalitarianism and terror. \n\nAnd built the strongest, freest, and most prosperous nation the world has ever known. \n\nNow is the hour. \n\nOur moment of responsibility. \n\nOur test of resolve and conscience, of history itself. \n\nIt is in this moment that our character is formed. Our purpose is found. Our future is forged. \n\nWell I know this nation.
  Source: 34-pl
  =========
  FINAL ANSWER: The president did not mention Michael Jackson.
@@ -44,7 +43,7 @@ QUESTION: {question}
  =========
  {summaries}
  =========
- FINAL ANSWER:"""
+ FINAL ANSWER:""" # noqa: E501
  COMBINE_PROMPT = PromptTemplate(
  template=combine_prompt_template, input_variables=["summaries", "question"]
  )
langchain/chains/qa_with_sources/refine_prompts.py

@@ -1,4 +1,3 @@
- # flake8: noqa
  from langchain_core.prompts import PromptTemplate

  DEFAULT_REFINE_PROMPT_TMPL = (
langchain/chains/qa_with_sources/retrieval.py

@@ -29,10 +29,11 @@ class RetrievalQAWithSourcesChain(BaseQAWithSourcesChain):
  num_docs = len(docs)

  if self.reduce_k_below_max_tokens and isinstance(
- self.combine_documents_chain, StuffDocumentsChain
+ self.combine_documents_chain,
+ StuffDocumentsChain,
  ):
  tokens = [
- self.combine_documents_chain.llm_chain._get_num_tokens(doc.page_content)
+ self.combine_documents_chain.llm_chain._get_num_tokens(doc.page_content) # noqa: SLF001
  for doc in docs
  ]
  token_count = sum(tokens[:num_docs])
@@ -43,20 +44,28 @@ class RetrievalQAWithSourcesChain(BaseQAWithSourcesChain):
  return docs[:num_docs]

  def _get_docs(
- self, inputs: dict[str, Any], *, run_manager: CallbackManagerForChainRun
+ self,
+ inputs: dict[str, Any],
+ *,
+ run_manager: CallbackManagerForChainRun,
  ) -> list[Document]:
  question = inputs[self.question_key]
  docs = self.retriever.invoke(
- question, config={"callbacks": run_manager.get_child()}
+ question,
+ config={"callbacks": run_manager.get_child()},
  )
  return self._reduce_tokens_below_limit(docs)

  async def _aget_docs(
- self, inputs: dict[str, Any], *, run_manager: AsyncCallbackManagerForChainRun
+ self,
+ inputs: dict[str, Any],
+ *,
+ run_manager: AsyncCallbackManagerForChainRun,
  ) -> list[Document]:
  question = inputs[self.question_key]
  docs = await self.retriever.ainvoke(
- question, config={"callbacks": run_manager.get_child()}
+ question,
+ config={"callbacks": run_manager.get_child()},
  )
  return self._reduce_tokens_below_limit(docs)
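
The # noqa: SLF001 added above suppresses ruff's private-member-access rule, which fires because the chain calls _get_num_tokens on another object (llm_chain). A tiny illustrative sketch of the same situation, with stand-in names rather than the package's classes:

class _TokenCounter:
    def _get_num_tokens(self, text: str) -> int:
        return len(text.split())

def total_tokens(counter: _TokenCounter, docs: list[str]) -> int:
    # Accessing a leading-underscore method from outside the class trips SLF001,
    # hence the inline suppression in retrieval.py.
    return sum(counter._get_num_tokens(d) for d in docs)  # noqa: SLF001
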
langchain/chains/qa_with_sources/stuff_prompt.py

@@ -1,7 +1,6 @@
- # flake8: noqa
  from langchain_core.prompts import PromptTemplate

- template = """Given the following extracted parts of a long document and a question, create a final answer with references ("SOURCES").
+ template = """Given the following extracted parts of a long document and a question, create a final answer with references ("SOURCES").
  If you don't know the answer, just say that you don't know. Don't try to make up an answer.
  ALWAYS return a "SOURCES" part in your answer.

@@ -19,13 +18,13 @@ SOURCES: 28-pl

  QUESTION: What did the president say about Michael Jackson?
  =========
- Content: Madam Speaker, Madam Vice President, our First Lady and Second Gentleman. Members of Congress and the Cabinet. Justices of the Supreme Court. My fellow Americans. \n\nLast year COVID-19 kept us apart. This year we are finally together again. \n\nTonight, we meet as Democrats Republicans and Independents. But most importantly as Americans. \n\nWith a duty to one another to the American people to the Constitution. \n\nAnd with an unwavering resolve that freedom will always triumph over tyranny. \n\nSix days ago, Russias Vladimir Putin sought to shake the foundations of the free world thinking he could make it bend to his menacing ways. But he badly miscalculated. \n\nHe thought he could roll into Ukraine and the world would roll over. Instead he met a wall of strength he never imagined. \n\nHe met the Ukrainian people. \n\nFrom President Zelenskyy to every Ukrainian, their fearlessness, their courage, their determination, inspires the world. \n\nGroups of citizens blocking tanks with their bodies. Everyone from students to retirees teachers turned soldiers defending their homeland.
+ Content: Madam Speaker, Madam Vice President, our First Lady and Second Gentleman. Members of Congress and the Cabinet. Justices of the Supreme Court. My fellow Americans. \n\nLast year COVID-19 kept us apart. This year we are finally together again. \n\nTonight, we meet as Democrats Republicans and Independents. But most importantly as Americans. \n\nWith a duty to one another to the American people to the Constitution. \n\nAnd with an unwavering resolve that freedom will always triumph over tyranny. \n\nSix days ago, Russia's Vladimir Putin sought to shake the foundations of the free world thinking he could make it bend to his menacing ways. But he badly miscalculated. \n\nHe thought he could roll into Ukraine and the world would roll over. Instead he met a wall of strength he never imagined. \n\nHe met the Ukrainian people. \n\nFrom President Zelenskyy to every Ukrainian, their fearlessness, their courage, their determination, inspires the world. \n\nGroups of citizens blocking tanks with their bodies. Everyone from students to retirees teachers turned soldiers defending their homeland.
  Source: 0-pl
- Content: And we wont stop. \n\nWe have lost so much to COVID-19. Time with one another. And worst of all, so much loss of life. \n\nLets use this moment to reset. Lets stop looking at COVID-19 as a partisan dividing line and see it for what it is: A God-awful disease. \n\nLets stop seeing each other as enemies, and start seeing each other for who we really are: Fellow Americans. \n\nWe cant change how divided weve been. But we can change how we move forward—on COVID-19 and other issues we must face together. \n\nI recently visited the New York City Police Department days after the funerals of Officer Wilbert Mora and his partner, Officer Jason Rivera. \n\nThey were responding to a 9-1-1 call when a man shot and killed them with a stolen gun. \n\nOfficer Mora was 27 years old. \n\nOfficer Rivera was 22. \n\nBoth Dominican Americans whod grown up on the same streets they later chose to patrol as police officers. \n\nI spoke with their families and told them that we are forever in debt for their sacrifice, and we will carry on their mission to restore the trust and safety every community deserves.
+ Content: And we won't stop. \n\nWe have lost so much to COVID-19. Time with one another. And worst of all, so much loss of life. \n\nLet's use this moment to reset. Let's stop looking at COVID-19 as a partisan dividing line and see it for what it is: A God-awful disease. \n\nLet's stop seeing each other as enemies, and start seeing each other for who we really are: Fellow Americans. \n\nWe can't change how divided we've been. But we can change how we move forward—on COVID-19 and other issues we must face together. \n\nI recently visited the New York City Police Department days after the funerals of Officer Wilbert Mora and his partner, Officer Jason Rivera. \n\nThey were responding to a 9-1-1 call when a man shot and killed them with a stolen gun. \n\nOfficer Mora was 27 years old. \n\nOfficer Rivera was 22. \n\nBoth Dominican Americans who'd grown up on the same streets they later chose to patrol as police officers. \n\nI spoke with their families and told them that we are forever in debt for their sacrifice, and we will carry on their mission to restore the trust and safety every community deserves.
  Source: 24-pl
- Content: And a proud Ukrainian people, who have known 30 years of independence, have repeatedly shown that they will not tolerate anyone who tries to take their country backwards. \n\nTo all Americans, I will be honest with you, as Ive always promised. A Russian dictator, invading a foreign country, has costs around the world. \n\nAnd Im taking robust action to make sure the pain of our sanctions is targeted at Russias economy. And I will use every tool at our disposal to protect American businesses and consumers. \n\nTonight, I can announce that the United States has worked with 30 other countries to release 60 Million barrels of oil from reserves around the world. \n\nAmerica will lead that effort, releasing 30 Million barrels from our own Strategic Petroleum Reserve. And we stand ready to do more if necessary, unified with our allies. \n\nThese steps will help blunt gas prices here at home. And I know the news about whats happening can seem alarming. \n\nBut I want you to know that we are going to be okay.
+ Content: And a proud Ukrainian people, who have known 30 years of independence, have repeatedly shown that they will not tolerate anyone who tries to take their country backwards. \n\nTo all Americans, I will be honest with you, as I've always promised. A Russian dictator, invading a foreign country, has costs around the world. \n\nAnd I'm taking robust action to make sure the pain of our sanctions is targeted at Russia's economy. And I will use every tool at our disposal to protect American businesses and consumers. \n\nTonight, I can announce that the United States has worked with 30 other countries to release 60 Million barrels of oil from reserves around the world. \n\nAmerica will lead that effort, releasing 30 Million barrels from our own Strategic Petroleum Reserve. And we stand ready to do more if necessary, unified with our allies. \n\nThese steps will help blunt gas prices here at home. And I know the news about what's happening can seem alarming. \n\nBut I want you to know that we are going to be okay.
  Source: 5-pl
- Content: More support for patients and families. \n\nTo get there, I call on Congress to fund ARPA-H, the Advanced Research Projects Agency for Health. \n\nIts based on DARPA—the Defense Department project that led to the Internet, GPS, and so much more. \n\nARPA-H will have a singular purpose—to drive breakthroughs in cancer, Alzheimers, diabetes, and more. \n\nA unity agenda for the nation. \n\nWe can do this. \n\nMy fellow Americans—tonight , we have gathered in a sacred space—the citadel of our democracy. \n\nIn this Capitol, generation after generation, Americans have debated great questions amid great strife, and have done great things. \n\nWe have fought for freedom, expanded liberty, defeated totalitarianism and terror. \n\nAnd built the strongest, freest, and most prosperous nation the world has ever known. \n\nNow is the hour. \n\nOur moment of responsibility. \n\nOur test of resolve and conscience, of history itself. \n\nIt is in this moment that our character is formed. Our purpose is found. Our future is forged. \n\nWell I know this nation.
+ Content: More support for patients and families. \n\nTo get there, I call on Congress to fund ARPA-H, the Advanced Research Projects Agency for Health. \n\nIt's based on DARPA—the Defense Department project that led to the Internet, GPS, and so much more. \n\nARPA-H will have a singular purpose—to drive breakthroughs in cancer, Alzheimer's, diabetes, and more. \n\nA unity agenda for the nation. \n\nWe can do this. \n\nMy fellow Americans—tonight , we have gathered in a sacred space—the citadel of our democracy. \n\nIn this Capitol, generation after generation, Americans have debated great questions amid great strife, and have done great things. \n\nWe have fought for freedom, expanded liberty, defeated totalitarianism and terror. \n\nAnd built the strongest, freest, and most prosperous nation the world has ever known. \n\nNow is the hour. \n\nOur moment of responsibility. \n\nOur test of resolve and conscience, of history itself. \n\nIt is in this moment that our character is formed. Our purpose is found. Our future is forged. \n\nWell I know this nation.
  Source: 34-pl
  =========
  FINAL ANSWER: The president did not mention Michael Jackson.
@@ -35,7 +34,7 @@ QUESTION: {question}
  =========
  {summaries}
  =========
- FINAL ANSWER:"""
+ FINAL ANSWER:""" # noqa: E501
  PROMPT = PromptTemplate(template=template, input_variables=["summaries", "question"])
 
  EXAMPLE_PROMPT = PromptTemplate(
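A hedged aside on the object defined above: `PromptTemplate(template=..., input_variables=["summaries", "question"])` renders by substituting those two variables, roughly as in this minimal sketch (the template text below is an illustrative stand-in, not the module's full prompt):

from langchain_core.prompts import PromptTemplate

sketch_template = """Answer the question and ALWAYS return a "SOURCES" part.
=========
{summaries}
=========
QUESTION: {question}
FINAL ANSWER:"""
SKETCH_PROMPT = PromptTemplate(
    template=sketch_template,
    input_variables=["summaries", "question"],
)
print(SKETCH_PROMPT.format(summaries="Content: ...\nSource: 0-pl", question="..."))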
@@ -10,6 +10,7 @@ from langchain_core.callbacks import (
  from langchain_core.documents import Document
  from langchain_core.vectorstores import VectorStore
  from pydantic import Field, model_validator
+ from typing_extensions import override
 
  from langchain.chains.combine_documents.stuff import StuffDocumentsChain
  from langchain.chains.qa_with_sources.base import BaseQAWithSourcesChain
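The newly imported `override` decorator (backported from Python 3.12's `typing.override` via `typing_extensions`) is purely a static-typing marker: a type checker flags the decorated method if the base class has no method of that name, and nothing changes at runtime. A minimal sketch:

from typing_extensions import override

class Base:
    def _get_docs(self) -> list:
        return []

class Child(Base):
    @override  # a type checker errors here if Base stops defining _get_docs
    def _get_docs(self) -> list:
        return ["doc"]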
@@ -34,10 +35,11 @@ class VectorDBQAWithSourcesChain(BaseQAWithSourcesChain):
  num_docs = len(docs)
 
  if self.reduce_k_below_max_tokens and isinstance(
- self.combine_documents_chain, StuffDocumentsChain
+ self.combine_documents_chain,
+ StuffDocumentsChain,
  ):
  tokens = [
- self.combine_documents_chain.llm_chain._get_num_tokens(doc.page_content)
+ self.combine_documents_chain.llm_chain._get_num_tokens(doc.page_content) # noqa: SLF001
  for doc in docs
  ]
  token_count = sum(tokens[:num_docs])
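This hunk only re-wraps arguments and adds `# noqa: SLF001` for the private `_get_num_tokens` access; the token-budget logic itself is unchanged. For orientation, a self-contained sketch of that idea, where `count_tokens` and `max_tokens_limit` are illustrative stand-ins rather than the chain's real attributes:

def reduce_below_limit(docs: list[str], count_tokens, max_tokens_limit: int) -> list[str]:
    """Drop trailing documents until the running token total fits the budget."""
    num_docs = len(docs)
    tokens = [count_tokens(d) for d in docs]
    token_count = sum(tokens[:num_docs])
    while token_count > max_tokens_limit and num_docs > 0:
        num_docs -= 1
        token_count -= tokens[num_docs]
    return docs[:num_docs]

print(reduce_below_limit(["a b c", "d e", "f"], lambda d: len(d.split()), 4))  # ['a b c']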
@@ -47,26 +49,37 @@ class VectorDBQAWithSourcesChain(BaseQAWithSourcesChain):
 
  return docs[:num_docs]
 
+ @override
  def _get_docs(
- self, inputs: dict[str, Any], *, run_manager: CallbackManagerForChainRun
+ self,
+ inputs: dict[str, Any],
+ *,
+ run_manager: CallbackManagerForChainRun,
  ) -> list[Document]:
  question = inputs[self.question_key]
  docs = self.vectorstore.similarity_search(
- question, k=self.k, **self.search_kwargs
+ question,
+ k=self.k,
+ **self.search_kwargs,
  )
  return self._reduce_tokens_below_limit(docs)
 
  async def _aget_docs(
- self, inputs: dict[str, Any], *, run_manager: AsyncCallbackManagerForChainRun
+ self,
+ inputs: dict[str, Any],
+ *,
+ run_manager: AsyncCallbackManagerForChainRun,
  ) -> list[Document]:
- raise NotImplementedError("VectorDBQAWithSourcesChain does not support async")
+ msg = "VectorDBQAWithSourcesChain does not support async"
+ raise NotImplementedError(msg)
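Binding the message to `msg` before raising follows the Ruff convention (the `EM`/`TRY003` family; the exact motivating rule is an assumption) of keeping long literals out of the `raise` expression; behaviour is identical. A sketch with an illustrative function name:

def aget_docs_unsupported() -> None:
    # Same runtime effect as raising with an inline string literal.
    msg = "VectorDBQAWithSourcesChain does not support async"
    raise NotImplementedError(msg)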
 
  @model_validator(mode="before")
  @classmethod
- def raise_deprecation(cls, values: dict) -> Any:
+ def _raise_deprecation(cls, values: dict) -> Any:
  warnings.warn(
  "`VectorDBQAWithSourcesChain` is deprecated - "
- "please use `from langchain.chains import RetrievalQAWithSourcesChain`"
+ "please use `from langchain.chains import RetrievalQAWithSourcesChain`",
+ stacklevel=5,
  )
  return values
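The added `stacklevel=5` aims the deprecation message at the user's calling frame rather than at this `warnings.warn` line; the specific depth presumably accounts for the pydantic validator layers in between. A simplified illustration of how `stacklevel` shifts attribution:

import warnings

def deprecated_api() -> None:
    # With stacklevel=2 the warning is reported at the caller of deprecated_api(),
    # not at this line; wrappers that sit deeper need a correspondingly larger value.
    warnings.warn("deprecated_api is deprecated", DeprecationWarning, stacklevel=2)

deprecated_api()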
 
@@ -22,6 +22,7 @@ from langchain_core.structured_query import (
  Operator,
  StructuredQuery,
  )
+ from typing_extensions import override
 
  from langchain.chains.llm import LLMChain
  from langchain.chains.query_constructor.parser import get_parser
@@ -46,6 +47,7 @@ class StructuredQueryOutputParser(BaseOutputParser[StructuredQuery]):
  ast_parse: Callable
  """Callable that parses dict into internal representation of query language."""
 
+ @override
  def parse(self, text: str) -> StructuredQuery:
  try:
  expected_keys = ["query", "filter"]
@@ -60,12 +62,11 @@ class StructuredQueryOutputParser(BaseOutputParser[StructuredQuery]):
  if not parsed.get("limit"):
  parsed.pop("limit", None)
  return StructuredQuery(
- **{k: v for k, v in parsed.items() if k in allowed_keys}
+ **{k: v for k, v in parsed.items() if k in allowed_keys},
  )
  except Exception as e:
- raise OutputParserException(
- f"Parsing text\n{text}\n raised following error:\n{e}"
- )
+ msg = f"Parsing text\n{text}\n raised following error:\n{e}"
+ raise OutputParserException(msg) from e
 
  @classmethod
  def from_components(
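Re-raising with `from e` keeps the original parsing failure attached as `__cause__`, so the traceback shows both errors instead of silently replacing one with the other; it pairs with the message-variable pattern used elsewhere in this changeset. A generic sketch (ValueError stands in for OutputParserException to keep it self-contained):

import json

def parse_or_wrap(text: str) -> dict:
    try:
        return json.loads(text)
    except Exception as e:
        msg = f"Parsing text\n{text}\n raised following error:\n{e}"
        # `from e` chains the original exception rather than discarding it.
        raise ValueError(msg) from e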
@@ -73,7 +74,7 @@ class StructuredQueryOutputParser(BaseOutputParser[StructuredQuery]):
  allowed_comparators: Optional[Sequence[Comparator]] = None,
  allowed_operators: Optional[Sequence[Operator]] = None,
  allowed_attributes: Optional[Sequence[str]] = None,
- fix_invalid: bool = False,
+ fix_invalid: bool = False, # noqa: FBT001,FBT002
  ) -> StructuredQueryOutputParser:
  """
  Create a structured query output parser from components.
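The `FBT001,FBT002` suppressions acknowledge a positional boolean parameter kept for backwards compatibility; the lint would otherwise push toward a keyword-only flag, roughly as below (an illustrative signature, not the library's API):

def build_parser_sketch(*, fix_invalid: bool = False) -> None:
    # Keyword-only booleans avoid ambiguous call sites such as f(True, False).
    print(f"fix_invalid={fix_invalid}")

build_parser_sketch(fix_invalid=True)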
@@ -89,14 +90,16 @@ class StructuredQueryOutputParser(BaseOutputParser[StructuredQuery]):
  if fix_invalid:
 
  def ast_parse(raw_filter: str) -> Optional[FilterDirective]:
- filter = cast(Optional[FilterDirective], get_parser().parse(raw_filter))
- fixed = fix_filter_directive(
- filter,
+ filter_directive = cast(
+ "Optional[FilterDirective]",
+ get_parser().parse(raw_filter),
+ )
+ return fix_filter_directive(
+ filter_directive,
  allowed_comparators=allowed_comparators,
  allowed_operators=allowed_operators,
  allowed_attributes=allowed_attributes,
  )
- return fixed
 
  else:
  ast_parse = get_parser(
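Quoting the target type in `cast()` defers its evaluation: at runtime `cast` simply returns its second argument, and the string form means the annotation does not have to be resolvable when the line executes (likely driven by a flake8-type-checking style rule; the exact rule is an assumption). A small sketch:

from typing import Optional, cast

def parse_int(raw: str) -> Optional[int]:
    # "Optional[int]" is never evaluated; cast() just returns the value unchanged.
    return cast("Optional[int]", int(raw) if raw.isdigit() else None)

print(parse_int("42"), parse_int("n/a"))  # 42 None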
@@ -108,7 +111,7 @@ class StructuredQueryOutputParser(BaseOutputParser[StructuredQuery]):
 
 
  def fix_filter_directive(
- filter: Optional[FilterDirective],
+ filter: Optional[FilterDirective], # noqa: A002
  *,
  allowed_comparators: Optional[Sequence[Comparator]] = None,
  allowed_operators: Optional[Sequence[Operator]] = None,
@@ -130,18 +133,18 @@ def fix_filter_directive(
  ) or not filter:
  return filter
 
- elif isinstance(filter, Comparison):
+ if isinstance(filter, Comparison):
  if allowed_comparators and filter.comparator not in allowed_comparators:
  return None
  if allowed_attributes and filter.attribute not in allowed_attributes:
  return None
  return filter
- elif isinstance(filter, Operation):
+ if isinstance(filter, Operation):
  if allowed_operators and filter.operator not in allowed_operators:
  return None
  args = [
  cast(
- FilterDirective,
+ "FilterDirective",
  fix_filter_directive(
  arg,
  allowed_comparators=allowed_comparators,
@@ -154,15 +157,13 @@ def fix_filter_directive(
  ]
  if not args:
  return None
- elif len(args) == 1 and filter.operator in (Operator.AND, Operator.OR):
+ if len(args) == 1 and filter.operator in (Operator.AND, Operator.OR):
  return args[0]
- else:
- return Operation(
- operator=filter.operator,
- arguments=args,
- )
- else:
- return filter
+ return Operation(
+ operator=filter.operator,
+ arguments=args,
+ )
+ return filter
 
 
  def _format_attribute_info(info: Sequence[Union[AttributeInfo, dict]]) -> str:
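Since every branch of `fix_filter_directive` ends in `return`, the `elif`/`else` keywords were redundant; flattening them into sequential `if` blocks (a Ruff `RET`-style cleanup, exact rules assumed) leaves behaviour unchanged while reducing nesting. The same shape in miniature:

def classify(n: int) -> str:
    if n < 0:
        return "negative"
    if n == 0:  # was `elif`; only reachable when the branch above did not return
        return "zero"
    return "positive"  # was `else: return ...`

print(classify(-3), classify(0), classify(7))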
@@ -237,7 +238,9 @@ def get_query_constructor_prompt(
  examples = construct_examples(examples)
  example_prompt = USER_SPECIFIED_EXAMPLE_PROMPT
  prefix = PREFIX_WITH_DATA_SOURCE.format(
- schema=schema, content=document_contents, attributes=attribute_str
+ schema=schema,
+ content=document_contents,
+ attributes=attribute_str,
  )
  suffix = SUFFIX_WITHOUT_DATA_SOURCE.format(i=len(examples) + 1)
  else:
@@ -247,7 +250,9 @@ def get_query_constructor_prompt(
  example_prompt = EXAMPLE_PROMPT
  prefix = DEFAULT_PREFIX.format(schema=schema)
  suffix = DEFAULT_SUFFIX.format(
- i=len(examples) + 1, content=document_contents, attributes=attribute_str
+ i=len(examples) + 1,
+ content=document_contents,
+ attributes=attribute_str,
  )
  return FewShotPromptTemplate(
  examples=list(examples),
@@ -271,7 +276,7 @@ def load_query_constructor_chain(
  examples: Optional[list] = None,
  allowed_comparators: Sequence[Comparator] = tuple(Comparator),
  allowed_operators: Sequence[Operator] = tuple(Operator),
- enable_limit: bool = False,
+ enable_limit: bool = False, # noqa: FBT001,FBT002
  schema_prompt: Optional[BasePromptTemplate] = None,
  **kwargs: Any,
  ) -> LLMChain:
@@ -302,11 +307,10 @@ def load_query_constructor_chain(
  enable_limit=enable_limit,
  schema_prompt=schema_prompt,
  )
- allowed_attributes = []
- for ainfo in attribute_info:
- allowed_attributes.append(
- ainfo.name if isinstance(ainfo, AttributeInfo) else ainfo["name"]
- )
+ allowed_attributes = [
+ ainfo.name if isinstance(ainfo, AttributeInfo) else ainfo["name"]
+ for ainfo in attribute_info
+ ]
  output_parser = StructuredQueryOutputParser.from_components(
  allowed_comparators=allowed_comparators,
  allowed_operators=allowed_operators,
@@ -361,11 +365,10 @@ def load_query_constructor_runnable(
  schema_prompt=schema_prompt,
  **kwargs,
  )
- allowed_attributes = []
- for ainfo in attribute_info:
- allowed_attributes.append(
- ainfo.name if isinstance(ainfo, AttributeInfo) else ainfo["name"]
- )
+ allowed_attributes = [
+ ainfo.name if isinstance(ainfo, AttributeInfo) else ainfo["name"]
+ for ainfo in attribute_info
+ ]
  output_parser = StructuredQueryOutputParser.from_components(
  allowed_comparators=allowed_comparators,
  allowed_operators=allowed_operators,
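Both `load_query_constructor_chain` and `load_query_constructor_runnable` swap the append loop for an equivalent list comprehension; the resulting `allowed_attributes` has the same elements in the same order. A stand-alone sketch with a minimal stand-in for `AttributeInfo`:

from dataclasses import dataclass
from typing import Union

@dataclass
class AttributeInfo:  # minimal stand-in for langchain's AttributeInfo model
    name: str

attribute_info: list[Union[AttributeInfo, dict]] = [AttributeInfo(name="genre"), {"name": "year"}]

allowed_attributes = [
    ainfo.name if isinstance(ainfo, AttributeInfo) else ainfo["name"]
    for ainfo in attribute_info
]
print(allowed_attributes)  # ['genre', 'year']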
@@ -12,12 +12,12 @@ from langchain_core.structured_query import (
  )
 
  __all__ = [
- "Visitor",
- "Expr",
- "Operator",
  "Comparator",
- "FilterDirective",
  "Comparison",
+ "Expr",
+ "FilterDirective",
  "Operation",
+ "Operator",
  "StructuredQuery",
+ "Visitor",
  ]
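The `__all__` entries are simply reordered alphabetically (presumably to satisfy a Ruff sorting rule such as `RUF022`; the exact rule is an assumption). A quick check one could run against the new list:

__all__ = [
    "Comparator",
    "Comparison",
    "Expr",
    "FilterDirective",
    "Operation",
    "Operator",
    "StructuredQuery",
    "Visitor",
]
assert __all__ == sorted(__all__)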