langchain-core 0.4.0.dev0__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the changes between publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects differences between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of langchain-core might be problematic. Click here for more details.

Files changed (172)
  1. langchain_core/__init__.py +1 -1
  2. langchain_core/_api/__init__.py +3 -4
  3. langchain_core/_api/beta_decorator.py +45 -70
  4. langchain_core/_api/deprecation.py +80 -80
  5. langchain_core/_api/path.py +22 -8
  6. langchain_core/_import_utils.py +10 -4
  7. langchain_core/agents.py +25 -21
  8. langchain_core/caches.py +53 -63
  9. langchain_core/callbacks/__init__.py +1 -8
  10. langchain_core/callbacks/base.py +341 -348
  11. langchain_core/callbacks/file.py +55 -44
  12. langchain_core/callbacks/manager.py +546 -683
  13. langchain_core/callbacks/stdout.py +29 -30
  14. langchain_core/callbacks/streaming_stdout.py +35 -36
  15. langchain_core/callbacks/usage.py +65 -70
  16. langchain_core/chat_history.py +48 -55
  17. langchain_core/document_loaders/base.py +46 -21
  18. langchain_core/document_loaders/langsmith.py +39 -36
  19. langchain_core/documents/__init__.py +0 -1
  20. langchain_core/documents/base.py +96 -74
  21. langchain_core/documents/compressor.py +12 -9
  22. langchain_core/documents/transformers.py +29 -28
  23. langchain_core/embeddings/fake.py +56 -57
  24. langchain_core/env.py +2 -3
  25. langchain_core/example_selectors/base.py +12 -0
  26. langchain_core/example_selectors/length_based.py +1 -1
  27. langchain_core/example_selectors/semantic_similarity.py +21 -25
  28. langchain_core/exceptions.py +15 -9
  29. langchain_core/globals.py +4 -163
  30. langchain_core/indexing/api.py +132 -125
  31. langchain_core/indexing/base.py +64 -67
  32. langchain_core/indexing/in_memory.py +26 -6
  33. langchain_core/language_models/__init__.py +15 -27
  34. langchain_core/language_models/_utils.py +267 -117
  35. langchain_core/language_models/base.py +92 -177
  36. langchain_core/language_models/chat_models.py +547 -407
  37. langchain_core/language_models/fake.py +11 -11
  38. langchain_core/language_models/fake_chat_models.py +72 -118
  39. langchain_core/language_models/llms.py +168 -242
  40. langchain_core/load/dump.py +8 -11
  41. langchain_core/load/load.py +32 -28
  42. langchain_core/load/mapping.py +2 -4
  43. langchain_core/load/serializable.py +50 -56
  44. langchain_core/messages/__init__.py +36 -51
  45. langchain_core/messages/ai.py +377 -150
  46. langchain_core/messages/base.py +239 -47
  47. langchain_core/messages/block_translators/__init__.py +111 -0
  48. langchain_core/messages/block_translators/anthropic.py +470 -0
  49. langchain_core/messages/block_translators/bedrock.py +94 -0
  50. langchain_core/messages/block_translators/bedrock_converse.py +297 -0
  51. langchain_core/messages/block_translators/google_genai.py +530 -0
  52. langchain_core/messages/block_translators/google_vertexai.py +21 -0
  53. langchain_core/messages/block_translators/groq.py +143 -0
  54. langchain_core/messages/block_translators/langchain_v0.py +301 -0
  55. langchain_core/messages/block_translators/openai.py +1010 -0
  56. langchain_core/messages/chat.py +2 -3
  57. langchain_core/messages/content.py +1423 -0
  58. langchain_core/messages/function.py +7 -7
  59. langchain_core/messages/human.py +44 -38
  60. langchain_core/messages/modifier.py +3 -2
  61. langchain_core/messages/system.py +40 -27
  62. langchain_core/messages/tool.py +160 -58
  63. langchain_core/messages/utils.py +527 -638
  64. langchain_core/output_parsers/__init__.py +1 -14
  65. langchain_core/output_parsers/base.py +68 -104
  66. langchain_core/output_parsers/json.py +13 -17
  67. langchain_core/output_parsers/list.py +11 -33
  68. langchain_core/output_parsers/openai_functions.py +56 -74
  69. langchain_core/output_parsers/openai_tools.py +68 -109
  70. langchain_core/output_parsers/pydantic.py +15 -13
  71. langchain_core/output_parsers/string.py +6 -2
  72. langchain_core/output_parsers/transform.py +17 -60
  73. langchain_core/output_parsers/xml.py +34 -44
  74. langchain_core/outputs/__init__.py +1 -1
  75. langchain_core/outputs/chat_generation.py +26 -11
  76. langchain_core/outputs/chat_result.py +1 -3
  77. langchain_core/outputs/generation.py +17 -6
  78. langchain_core/outputs/llm_result.py +15 -8
  79. langchain_core/prompt_values.py +29 -123
  80. langchain_core/prompts/__init__.py +3 -27
  81. langchain_core/prompts/base.py +48 -63
  82. langchain_core/prompts/chat.py +259 -288
  83. langchain_core/prompts/dict.py +19 -11
  84. langchain_core/prompts/few_shot.py +84 -90
  85. langchain_core/prompts/few_shot_with_templates.py +14 -12
  86. langchain_core/prompts/image.py +19 -14
  87. langchain_core/prompts/loading.py +6 -8
  88. langchain_core/prompts/message.py +7 -8
  89. langchain_core/prompts/prompt.py +42 -43
  90. langchain_core/prompts/string.py +37 -16
  91. langchain_core/prompts/structured.py +43 -46
  92. langchain_core/rate_limiters.py +51 -60
  93. langchain_core/retrievers.py +52 -192
  94. langchain_core/runnables/base.py +1727 -1683
  95. langchain_core/runnables/branch.py +52 -73
  96. langchain_core/runnables/config.py +89 -103
  97. langchain_core/runnables/configurable.py +128 -130
  98. langchain_core/runnables/fallbacks.py +93 -82
  99. langchain_core/runnables/graph.py +127 -127
  100. langchain_core/runnables/graph_ascii.py +63 -41
  101. langchain_core/runnables/graph_mermaid.py +87 -70
  102. langchain_core/runnables/graph_png.py +31 -36
  103. langchain_core/runnables/history.py +145 -161
  104. langchain_core/runnables/passthrough.py +141 -144
  105. langchain_core/runnables/retry.py +84 -68
  106. langchain_core/runnables/router.py +33 -37
  107. langchain_core/runnables/schema.py +79 -72
  108. langchain_core/runnables/utils.py +95 -139
  109. langchain_core/stores.py +85 -131
  110. langchain_core/structured_query.py +11 -15
  111. langchain_core/sys_info.py +31 -32
  112. langchain_core/tools/__init__.py +1 -14
  113. langchain_core/tools/base.py +221 -247
  114. langchain_core/tools/convert.py +144 -161
  115. langchain_core/tools/render.py +10 -10
  116. langchain_core/tools/retriever.py +12 -19
  117. langchain_core/tools/simple.py +52 -29
  118. langchain_core/tools/structured.py +56 -60
  119. langchain_core/tracers/__init__.py +1 -9
  120. langchain_core/tracers/_streaming.py +6 -7
  121. langchain_core/tracers/base.py +103 -112
  122. langchain_core/tracers/context.py +29 -48
  123. langchain_core/tracers/core.py +142 -105
  124. langchain_core/tracers/evaluation.py +30 -34
  125. langchain_core/tracers/event_stream.py +162 -117
  126. langchain_core/tracers/langchain.py +34 -36
  127. langchain_core/tracers/log_stream.py +87 -49
  128. langchain_core/tracers/memory_stream.py +3 -3
  129. langchain_core/tracers/root_listeners.py +18 -34
  130. langchain_core/tracers/run_collector.py +8 -20
  131. langchain_core/tracers/schemas.py +0 -125
  132. langchain_core/tracers/stdout.py +3 -3
  133. langchain_core/utils/__init__.py +1 -4
  134. langchain_core/utils/_merge.py +47 -9
  135. langchain_core/utils/aiter.py +70 -66
  136. langchain_core/utils/env.py +12 -9
  137. langchain_core/utils/function_calling.py +139 -206
  138. langchain_core/utils/html.py +7 -8
  139. langchain_core/utils/input.py +6 -6
  140. langchain_core/utils/interactive_env.py +6 -2
  141. langchain_core/utils/iter.py +48 -45
  142. langchain_core/utils/json.py +14 -4
  143. langchain_core/utils/json_schema.py +159 -43
  144. langchain_core/utils/mustache.py +32 -25
  145. langchain_core/utils/pydantic.py +67 -40
  146. langchain_core/utils/strings.py +5 -5
  147. langchain_core/utils/usage.py +1 -1
  148. langchain_core/utils/utils.py +104 -62
  149. langchain_core/vectorstores/base.py +131 -179
  150. langchain_core/vectorstores/in_memory.py +113 -182
  151. langchain_core/vectorstores/utils.py +23 -17
  152. langchain_core/version.py +1 -1
  153. langchain_core-1.0.0.dist-info/METADATA +68 -0
  154. langchain_core-1.0.0.dist-info/RECORD +172 -0
  155. {langchain_core-0.4.0.dev0.dist-info → langchain_core-1.0.0.dist-info}/WHEEL +1 -1
  156. langchain_core/beta/__init__.py +0 -1
  157. langchain_core/beta/runnables/__init__.py +0 -1
  158. langchain_core/beta/runnables/context.py +0 -448
  159. langchain_core/memory.py +0 -116
  160. langchain_core/messages/content_blocks.py +0 -1435
  161. langchain_core/prompts/pipeline.py +0 -133
  162. langchain_core/pydantic_v1/__init__.py +0 -30
  163. langchain_core/pydantic_v1/dataclasses.py +0 -23
  164. langchain_core/pydantic_v1/main.py +0 -23
  165. langchain_core/tracers/langchain_v1.py +0 -23
  166. langchain_core/utils/loading.py +0 -31
  167. langchain_core/v1/__init__.py +0 -1
  168. langchain_core/v1/chat_models.py +0 -1047
  169. langchain_core/v1/messages.py +0 -755
  170. langchain_core-0.4.0.dev0.dist-info/METADATA +0 -108
  171. langchain_core-0.4.0.dev0.dist-info/RECORD +0 -177
  172. langchain_core-0.4.0.dev0.dist-info/entry_points.txt +0 -4
@@ -4,17 +4,15 @@ from __future__ import annotations
4
4
 
5
5
  import json
6
6
  import uuid
7
+ from collections.abc import Callable
7
8
  from pathlib import Path
8
9
  from typing import (
9
10
  TYPE_CHECKING,
10
11
  Any,
11
- Callable,
12
- Optional,
13
12
  )
14
13
 
15
14
  from typing_extensions import override
16
15
 
17
- from langchain_core._api import deprecated
18
16
  from langchain_core.documents import Document
19
17
  from langchain_core.load import dumpd, load
20
18
  from langchain_core.vectorstores import VectorStore
@@ -25,7 +23,13 @@ if TYPE_CHECKING:
25
23
  from collections.abc import Iterator, Sequence
26
24
 
27
25
  from langchain_core.embeddings import Embeddings
28
- from langchain_core.indexing import UpsertResponse
26
+
27
+ try:
28
+ import numpy as np
29
+
30
+ _HAS_NUMPY = True
31
+ except ImportError:
32
+ _HAS_NUMPY = False
29
33
 
30
34
 
31
35
  class InMemoryVectorStore(VectorStore):
@@ -34,127 +38,124 @@ class InMemoryVectorStore(VectorStore):
34
38
  Uses a dictionary, and computes cosine similarity for search using numpy.
35
39
 
36
40
  Setup:
37
- Install ``langchain-core``.
38
-
39
- .. code-block:: bash
41
+ Install `langchain-core`.
40
42
 
41
- pip install -U langchain-core
43
+ ```bash
44
+ pip install -U langchain-core
45
+ ```
42
46
 
43
47
  Key init args — indexing params:
44
48
  embedding_function: Embeddings
45
49
  Embedding function to use.
46
50
 
47
51
  Instantiate:
48
- .. code-block:: python
52
+ ```python
53
+ from langchain_core.vectorstores import InMemoryVectorStore
54
+ from langchain_openai import OpenAIEmbeddings
49
55
 
50
- from langchain_core.vectorstores import InMemoryVectorStore
51
- from langchain_openai import OpenAIEmbeddings
52
-
53
- vector_store = InMemoryVectorStore(OpenAIEmbeddings())
56
+ vector_store = InMemoryVectorStore(OpenAIEmbeddings())
57
+ ```
54
58
 
55
59
  Add Documents:
56
- .. code-block:: python
57
-
58
- from langchain_core.documents import Document
60
+ ```python
61
+ from langchain_core.documents import Document
59
62
 
60
- document_1 = Document(id="1", page_content="foo", metadata={"baz": "bar"})
61
- document_2 = Document(id="2", page_content="thud", metadata={"bar": "baz"})
62
- document_3 = Document(id="3", page_content="i will be deleted :(")
63
+ document_1 = Document(id="1", page_content="foo", metadata={"baz": "bar"})
64
+ document_2 = Document(id="2", page_content="thud", metadata={"bar": "baz"})
65
+ document_3 = Document(id="3", page_content="i will be deleted :(")
63
66
 
64
- documents = [document_1, document_2, document_3]
65
- vector_store.add_documents(documents=documents)
67
+ documents = [document_1, document_2, document_3]
68
+ vector_store.add_documents(documents=documents)
69
+ ```
66
70
 
67
71
  Inspect documents:
68
- .. code-block:: python
69
-
70
- top_n = 10
71
- for index, (id, doc) in enumerate(vector_store.store.items()):
72
- if index < top_n:
73
- # docs have keys 'id', 'vector', 'text', 'metadata'
74
- print(f"{id}: {doc['text']}")
75
- else:
76
- break
72
+ ```python
73
+ top_n = 10
74
+ for index, (id, doc) in enumerate(vector_store.store.items()):
75
+ if index < top_n:
76
+ # docs have keys 'id', 'vector', 'text', 'metadata'
77
+ print(f"{id}: {doc['text']}")
78
+ else:
79
+ break
80
+ ```
77
81
 
78
82
  Delete Documents:
79
- .. code-block:: python
80
-
81
- vector_store.delete(ids=["3"])
83
+ ```python
84
+ vector_store.delete(ids=["3"])
85
+ ```
82
86
 
83
87
  Search:
84
- .. code-block:: python
85
-
86
- results = vector_store.similarity_search(query="thud",k=1)
87
- for doc in results:
88
- print(f"* {doc.page_content} [{doc.metadata}]")
89
-
90
- .. code-block:: none
88
+ ```python
89
+ results = vector_store.similarity_search(query="thud", k=1)
90
+ for doc in results:
91
+ print(f"* {doc.page_content} [{doc.metadata}]")
92
+ ```
91
93
 
92
- * thud [{'bar': 'baz'}]
94
+ ```txt
95
+ * thud [{'bar': 'baz'}]
96
+ ```
93
97
 
94
98
  Search with filter:
95
- .. code-block:: python
99
+ ```python
100
+ def _filter_function(doc: Document) -> bool:
101
+ return doc.metadata.get("bar") == "baz"
96
102
 
97
- def _filter_function(doc: Document) -> bool:
98
- return doc.metadata.get("bar") == "baz"
99
-
100
- results = vector_store.similarity_search(
101
- query="thud", k=1, filter=_filter_function
102
- )
103
- for doc in results:
104
- print(f"* {doc.page_content} [{doc.metadata}]")
105
103
 
106
- .. code-block:: none
107
-
108
- * thud [{'bar': 'baz'}]
104
+ results = vector_store.similarity_search(
105
+ query="thud", k=1, filter=_filter_function
106
+ )
107
+ for doc in results:
108
+ print(f"* {doc.page_content} [{doc.metadata}]")
109
+ ```
109
110
 
111
+ ```txt
112
+ * thud [{'bar': 'baz'}]
113
+ ```
110
114
 
111
115
  Search with score:
112
- .. code-block:: python
113
-
114
- results = vector_store.similarity_search_with_score(
115
- query="qux", k=1
116
- )
117
- for doc, score in results:
118
- print(f"* [SIM={score:3f}] {doc.page_content} [{doc.metadata}]")
119
-
120
- .. code-block:: none
116
+ ```python
117
+ results = vector_store.similarity_search_with_score(query="qux", k=1)
118
+ for doc, score in results:
119
+ print(f"* [SIM={score:3f}] {doc.page_content} [{doc.metadata}]")
120
+ ```
121
121
 
122
- * [SIM=0.832268] foo [{'baz': 'bar'}]
122
+ ```txt
123
+ * [SIM=0.832268] foo [{'baz': 'bar'}]
124
+ ```
123
125
 
124
126
  Async:
125
- .. code-block:: python
127
+ ```python
128
+ # add documents
129
+ # await vector_store.aadd_documents(documents=documents)
126
130
 
127
- # add documents
128
- # await vector_store.aadd_documents(documents=documents)
131
+ # delete documents
132
+ # await vector_store.adelete(ids=["3"])
129
133
 
130
- # delete documents
131
- # await vector_store.adelete(ids=["3"])
134
+ # search
135
+ # results = vector_store.asimilarity_search(query="thud", k=1)
132
136
 
133
- # search
134
- # results = vector_store.asimilarity_search(query="thud", k=1)
137
+ # search with score
138
+ results = await vector_store.asimilarity_search_with_score(query="qux", k=1)
139
+ for doc, score in results:
140
+ print(f"* [SIM={score:3f}] {doc.page_content} [{doc.metadata}]")
141
+ ```
135
142
 
136
- # search with score
137
- results = await vector_store.asimilarity_search_with_score(query="qux", k=1)
138
- for doc,score in results:
139
- print(f"* [SIM={score:3f}] {doc.page_content} [{doc.metadata}]")
140
-
141
- .. code-block:: none
142
-
143
- * [SIM=0.832268] foo [{'baz': 'bar'}]
143
+ ```txt
144
+ * [SIM=0.832268] foo [{'baz': 'bar'}]
145
+ ```
144
146
 
145
147
  Use as Retriever:
146
- .. code-block:: python
147
-
148
- retriever = vector_store.as_retriever(
149
- search_type="mmr",
150
- search_kwargs={"k": 1, "fetch_k": 2, "lambda_mult": 0.5},
151
- )
152
- retriever.invoke("thud")
153
-
154
- .. code-block:: none
155
-
156
- [Document(id='2', metadata={'bar': 'baz'}, page_content='thud')]
148
+ ```python
149
+ retriever = vector_store.as_retriever(
150
+ search_type="mmr",
151
+ search_kwargs={"k": 1, "fetch_k": 2, "lambda_mult": 0.5},
152
+ )
153
+ retriever.invoke("thud")
154
+ ```
157
155
 
156
+ ```txt
157
+ [Document(id='2', metadata={'bar': 'baz'}, page_content='thud')]
158
+ ```
158
159
  """
159
160
 
160
161
  def __init__(self, embedding: Embeddings) -> None:
@@ -174,23 +175,22 @@ class InMemoryVectorStore(VectorStore):
174
175
  return self.embedding
175
176
 
176
177
  @override
177
- def delete(self, ids: Optional[Sequence[str]] = None, **kwargs: Any) -> None:
178
+ def delete(self, ids: Sequence[str] | None = None, **kwargs: Any) -> None:
178
179
  if ids:
179
180
  for _id in ids:
180
181
  self.store.pop(_id, None)
181
182
 
182
183
  @override
183
- async def adelete(self, ids: Optional[Sequence[str]] = None, **kwargs: Any) -> None:
184
+ async def adelete(self, ids: Sequence[str] | None = None, **kwargs: Any) -> None:
184
185
  self.delete(ids)
185
186
 
186
187
  @override
187
188
  def add_documents(
188
189
  self,
189
190
  documents: list[Document],
190
- ids: Optional[list[str]] = None,
191
+ ids: list[str] | None = None,
191
192
  **kwargs: Any,
192
193
  ) -> list[str]:
193
- """Add documents to the store."""
194
194
  texts = [doc.page_content for doc in documents]
195
195
  vectors = self.embedding.embed_documents(texts)
196
196
 
@@ -201,13 +201,13 @@ class InMemoryVectorStore(VectorStore):
201
201
  )
202
202
  raise ValueError(msg)
203
203
 
204
- id_iterator: Iterator[Optional[str]] = (
204
+ id_iterator: Iterator[str | None] = (
205
205
  iter(ids) if ids else iter(doc.id for doc in documents)
206
206
  )
207
207
 
208
208
  ids_ = []
209
209
 
210
- for doc, vector in zip(documents, vectors):
210
+ for doc, vector in zip(documents, vectors, strict=False):
211
211
  doc_id = next(id_iterator)
212
212
  doc_id_ = doc_id or str(uuid.uuid4())
213
213
  ids_.append(doc_id_)
@@ -222,9 +222,8 @@ class InMemoryVectorStore(VectorStore):
222
222
 
223
223
  @override
224
224
  async def aadd_documents(
225
- self, documents: list[Document], ids: Optional[list[str]] = None, **kwargs: Any
225
+ self, documents: list[Document], ids: list[str] | None = None, **kwargs: Any
226
226
  ) -> list[str]:
227
- """Add documents to the store."""
228
227
  texts = [doc.page_content for doc in documents]
229
228
  vectors = await self.embedding.aembed_documents(texts)
230
229
 
@@ -235,12 +234,12 @@ class InMemoryVectorStore(VectorStore):
235
234
  )
236
235
  raise ValueError(msg)
237
236
 
238
- id_iterator: Iterator[Optional[str]] = (
237
+ id_iterator: Iterator[str | None] = (
239
238
  iter(ids) if ids else iter(doc.id for doc in documents)
240
239
  )
241
240
  ids_: list[str] = []
242
241
 
243
- for doc, vector in zip(documents, vectors):
242
+ for doc, vector in zip(documents, vectors, strict=False):
244
243
  doc_id = next(id_iterator)
245
244
  doc_id_ = doc_id or str(uuid.uuid4())
246
245
  ids_.append(doc_id_)
@@ -277,76 +276,6 @@ class InMemoryVectorStore(VectorStore):
277
276
  )
278
277
  return documents
279
278
 
280
- @deprecated(
281
- alternative="VectorStore.add_documents",
282
- message=(
283
- "This was a beta API that was added in 0.2.11. It'll be removed in 0.3.0."
284
- ),
285
- since="0.2.29",
286
- removal="1.0",
287
- )
288
- def upsert(self, items: Sequence[Document], /, **_kwargs: Any) -> UpsertResponse:
289
- """[DEPRECATED] Upsert documents into the store.
290
-
291
- Args:
292
- items: The documents to upsert.
293
-
294
- Returns:
295
- The upsert response.
296
- """
297
- vectors = self.embedding.embed_documents([item.page_content for item in items])
298
- ids = []
299
- for item, vector in zip(items, vectors):
300
- doc_id = item.id or str(uuid.uuid4())
301
- ids.append(doc_id)
302
- self.store[doc_id] = {
303
- "id": doc_id,
304
- "vector": vector,
305
- "text": item.page_content,
306
- "metadata": item.metadata,
307
- }
308
- return {
309
- "succeeded": ids,
310
- "failed": [],
311
- }
312
-
313
- @deprecated(
314
- alternative="VectorStore.aadd_documents",
315
- message=(
316
- "This was a beta API that was added in 0.2.11. It'll be removed in 0.3.0."
317
- ),
318
- since="0.2.29",
319
- removal="1.0",
320
- )
321
- async def aupsert(
322
- self, items: Sequence[Document], /, **_kwargs: Any
323
- ) -> UpsertResponse:
324
- """[DEPRECATED] Upsert documents into the store.
325
-
326
- Args:
327
- items: The documents to upsert.
328
-
329
- Returns:
330
- The upsert response.
331
- """
332
- vectors = await self.embedding.aembed_documents(
333
- [item.page_content for item in items]
334
- )
335
- ids = []
336
- for item, vector in zip(items, vectors):
337
- doc_id = item.id or str(uuid.uuid4())
338
- ids.append(doc_id)
339
- self.store[doc_id] = {
340
- "id": doc_id,
341
- "vector": vector,
342
- "text": item.page_content,
343
- "metadata": item.metadata,
344
- }
345
- return {
346
- "succeeded": ids,
347
- "failed": [],
348
- }
349
-
350
279
  @override
351
280
  async def aget_by_ids(self, ids: Sequence[str], /) -> list[Document]:
352
281
  """Async get documents by their ids.
@@ -363,7 +292,7 @@ class InMemoryVectorStore(VectorStore):
363
292
  self,
364
293
  embedding: list[float],
365
294
  k: int = 4,
366
- filter: Optional[Callable[[Document], bool]] = None, # noqa: A002
295
+ filter: Callable[[Document], bool] | None = None, # noqa: A002
367
296
  ) -> list[tuple[Document, float, list[float]]]:
368
297
  # get all docs with fixed order in list
369
298
  docs = list(self.store.values())
@@ -372,7 +301,11 @@ class InMemoryVectorStore(VectorStore):
372
301
  docs = [
373
302
  doc
374
303
  for doc in docs
375
- if filter(Document(page_content=doc["text"], metadata=doc["metadata"]))
304
+ if filter(
305
+ Document(
306
+ id=doc["id"], page_content=doc["text"], metadata=doc["metadata"]
307
+ )
308
+ )
376
309
  ]
377
310
 
378
311
  if not docs:
@@ -402,7 +335,7 @@ class InMemoryVectorStore(VectorStore):
402
335
  self,
403
336
  embedding: list[float],
404
337
  k: int = 4,
405
- filter: Optional[Callable[[Document], bool]] = None, # noqa: A002
338
+ filter: Callable[[Document], bool] | None = None, # noqa: A002
406
339
  **_kwargs: Any,
407
340
  ) -> list[tuple[Document, float]]:
408
341
  """Search for the most similar documents to the given embedding.
@@ -490,7 +423,7 @@ class InMemoryVectorStore(VectorStore):
490
423
  fetch_k: int = 20,
491
424
  lambda_mult: float = 0.5,
492
425
  *,
493
- filter: Optional[Callable[[Document], bool]] = None,
426
+ filter: Callable[[Document], bool] | None = None,
494
427
  **kwargs: Any,
495
428
  ) -> list[Document]:
496
429
  prefetch_hits = self._similarity_search_with_score_by_vector(
@@ -499,14 +432,12 @@ class InMemoryVectorStore(VectorStore):
499
432
  filter=filter,
500
433
  )
501
434
 
502
- try:
503
- import numpy as np
504
- except ImportError as e:
435
+ if not _HAS_NUMPY:
505
436
  msg = (
506
437
  "numpy must be installed to use max_marginal_relevance_search "
507
438
  "pip install numpy"
508
439
  )
509
- raise ImportError(msg) from e
440
+ raise ImportError(msg)
510
441
 
511
442
  mmr_chosen_indices = maximal_marginal_relevance(
512
443
  np.array(embedding, dtype=np.float32),
@@ -558,7 +489,7 @@ class InMemoryVectorStore(VectorStore):
558
489
  cls,
559
490
  texts: list[str],
560
491
  embedding: Embeddings,
561
- metadatas: Optional[list[dict]] = None,
492
+ metadatas: list[dict] | None = None,
562
493
  **kwargs: Any,
563
494
  ) -> InMemoryVectorStore:
564
495
  store = cls(
@@ -573,7 +504,7 @@ class InMemoryVectorStore(VectorStore):
573
504
  cls,
574
505
  texts: list[str],
575
506
  embedding: Embeddings,
576
- metadatas: Optional[list[dict]] = None,
507
+ metadatas: list[dict] | None = None,
577
508
  **kwargs: Any,
578
509
  ) -> InMemoryVectorStore:
579
510
  store = cls(
@@ -591,13 +522,13 @@ class InMemoryVectorStore(VectorStore):
591
522
  Args:
592
523
  path: The path to load the vector store from.
593
524
  embedding: The embedding to use.
594
- kwargs: Additional arguments to pass to the constructor.
525
+ **kwargs: Additional arguments to pass to the constructor.
595
526
 
596
527
  Returns:
597
528
  A VectorStore object.
598
529
  """
599
530
  path_: Path = Path(path)
600
- with path_.open("r") as f:
531
+ with path_.open("r", encoding="utf-8") as f:
601
532
  store = load(json.load(f))
602
533
  vectorstore = cls(embedding=embedding, **kwargs)
603
534
  vectorstore.store = store
@@ -611,5 +542,5 @@ class InMemoryVectorStore(VectorStore):
611
542
  """
612
543
  path_: Path = Path(path)
613
544
  path_.parent.mkdir(exist_ok=True, parents=True)
614
- with path_.open("w") as f:
545
+ with path_.open("w", encoding="utf-8") as f:
615
546
  json.dump(dumpd(self.store), f, indent=2)
@@ -1,4 +1,4 @@
1
- """Internal utilities for the in memory implementation of VectorStore.
1
+ """Internal utilities for the in memory implementation of `VectorStore`.
2
2
 
3
3
  These are part of a private API, and users should not use them directly
4
4
  as they can change without notice.
@@ -8,12 +8,24 @@ from __future__ import annotations
8
8
 
9
9
  import logging
10
10
  import warnings
11
- from typing import TYPE_CHECKING, Union
11
+ from typing import TYPE_CHECKING
12
12
 
13
- if TYPE_CHECKING:
13
+ try:
14
14
  import numpy as np
15
15
 
16
- Matrix = Union[list[list[float]], list[np.ndarray], np.ndarray]
16
+ _HAS_NUMPY = True
17
+ except ImportError:
18
+ _HAS_NUMPY = False
19
+
20
+ try:
21
+ import simsimd as simd # type: ignore[import-not-found]
22
+
23
+ _HAS_SIMSIMD = True
24
+ except ImportError:
25
+ _HAS_SIMSIMD = False
26
+
27
+ if TYPE_CHECKING:
28
+ Matrix = list[list[float]] | list[np.ndarray] | np.ndarray
17
29
 
18
30
  logger = logging.getLogger(__name__)
19
31
 
@@ -33,14 +45,12 @@ def _cosine_similarity(x: Matrix, y: Matrix) -> np.ndarray:
33
45
  ValueError: If the number of columns in X and Y are not the same.
34
46
  ImportError: If numpy is not installed.
35
47
  """
36
- try:
37
- import numpy as np
38
- except ImportError as e:
48
+ if not _HAS_NUMPY:
39
49
  msg = (
40
50
  "cosine_similarity requires numpy to be installed. "
41
51
  "Please install numpy with `pip install numpy`."
42
52
  )
43
- raise ImportError(msg) from e
53
+ raise ImportError(msg)
44
54
 
45
55
  if len(x) == 0 or len(y) == 0:
46
56
  return np.array([[]])
@@ -70,9 +80,7 @@ def _cosine_similarity(x: Matrix, y: Matrix) -> np.ndarray:
70
80
  f"and Y has shape {y.shape}."
71
81
  )
72
82
  raise ValueError(msg)
73
- try:
74
- import simsimd as simd # type: ignore[import-not-found]
75
- except ImportError:
83
+ if not _HAS_SIMSIMD:
76
84
  logger.debug(
77
85
  "Unable to import simsimd, defaulting to NumPy implementation. If you want "
78
86
  "to use simsimd please install with `pip install simsimd`."
@@ -104,8 +112,8 @@ def maximal_marginal_relevance(
104
112
  Args:
105
113
  query_embedding: The query embedding.
106
114
  embedding_list: A list of embeddings.
107
- lambda_mult: The lambda parameter for MMR. Default is 0.5.
108
- k: The number of embeddings to return. Default is 4.
115
+ lambda_mult: The lambda parameter for MMR.
116
+ k: The number of embeddings to return.
109
117
 
110
118
  Returns:
111
119
  A list of indices of the embeddings to return.
@@ -113,14 +121,12 @@ def maximal_marginal_relevance(
113
121
  Raises:
114
122
  ImportError: If numpy is not installed.
115
123
  """
116
- try:
117
- import numpy as np
118
- except ImportError as e:
124
+ if not _HAS_NUMPY:
119
125
  msg = (
120
126
  "maximal_marginal_relevance requires numpy to be installed. "
121
127
  "Please install numpy with `pip install numpy`."
122
128
  )
123
- raise ImportError(msg) from e
129
+ raise ImportError(msg)
124
130
 
125
131
  if min(k, len(embedding_list)) <= 0:
126
132
  return []
langchain_core/version.py CHANGED
@@ -1,3 +1,3 @@
1
1
  """langchain-core version information and utilities."""
2
2
 
3
- VERSION = "0.4.0.dev0"
3
+ VERSION = "1.0.0"
@@ -0,0 +1,68 @@
1
+ Metadata-Version: 2.4
2
+ Name: langchain-core
3
+ Version: 1.0.0
4
+ Summary: Building applications with LLMs through composability
5
+ Project-URL: homepage, https://docs.langchain.com/
6
+ Project-URL: repository, https://github.com/langchain-ai/langchain/tree/master/libs/core
7
+ Project-URL: changelog, https://github.com/langchain-ai/langchain/releases?q=%22langchain-core%3D%3D1%22
8
+ Project-URL: twitter, https://x.com/LangChainAI
9
+ Project-URL: slack, https://www.langchain.com/join-community
10
+ Project-URL: reddit, https://www.reddit.com/r/LangChain/
11
+ License: MIT
12
+ Requires-Python: <4.0.0,>=3.10.0
13
+ Requires-Dist: jsonpatch<2.0.0,>=1.33.0
14
+ Requires-Dist: langsmith<1.0.0,>=0.3.45
15
+ Requires-Dist: packaging<26.0.0,>=23.2.0
16
+ Requires-Dist: pydantic<3.0.0,>=2.7.4
17
+ Requires-Dist: pyyaml<7.0.0,>=5.3.0
18
+ Requires-Dist: tenacity!=8.4.0,<10.0.0,>=8.1.0
19
+ Requires-Dist: typing-extensions<5.0.0,>=4.7.0
20
+ Description-Content-Type: text/markdown
21
+
22
+ # 🦜🍎️ LangChain Core
23
+
24
+ [![PyPI - Version](https://img.shields.io/pypi/v/langchain-core?label=%20)](https://pypi.org/project/langchain-core/#history)
25
+ [![PyPI - License](https://img.shields.io/pypi/l/langchain-core)](https://opensource.org/licenses/MIT)
26
+ [![PyPI - Downloads](https://img.shields.io/pepy/dt/langchain-core)](https://pypistats.org/packages/langchain-core)
27
+ [![Twitter](https://img.shields.io/twitter/url/https/twitter.com/langchainai.svg?style=social&label=Follow%20%40LangChainAI)](https://twitter.com/langchainai)
28
+
29
+ Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langchain-ai/langchainjs).
30
+
31
+ To help you ship LangChain apps to production faster, check out [LangSmith](https://smith.langchain.com).
32
+ [LangSmith](https://smith.langchain.com) is a unified developer platform for building, testing, and monitoring LLM applications.
33
+
34
+ ## Quick Install
35
+
36
+ ```bash
37
+ pip install langchain-core
38
+ ```
39
+
40
+ ## 🤔 What is this?
41
+
42
+ LangChain Core contains the base abstractions that power the LangChain ecosystem.
43
+
44
+ These abstractions are designed to be as modular and simple as possible.
45
+
46
+ The benefit of having these abstractions is that any provider can implement the required interface and then easily be used in the rest of the LangChain ecosystem.
47
+
48
+ ## ⛰️ Why build on top of LangChain Core?
49
+
50
+ The LangChain ecosystem is built on top of `langchain-core`. Some of the benefits:
51
+
52
+ - **Modularity**: We've designed Core around abstractions that are independent of each other, and not tied to any specific model provider.
53
+ - **Stability**: We are committed to a stable versioning scheme, and will communicate any breaking changes with advance notice and version bumps.
54
+ - **Battle-tested**: Core components have the largest install base in the LLM ecosystem, and are used in production by many companies.
55
+
56
+ ## 📖 Documentation
57
+
58
+ For full documentation, see the [API reference](https://reference.langchain.com/python/langchain_core/).
59
+
60
+ ## 📕 Releases & Versioning
61
+
62
+ See our [Releases](https://docs.langchain.com/oss/python/release-policy) and [Versioning](https://docs.langchain.com/oss/python/versioning) policies.
63
+
64
+ ## 💁 Contributing
65
+
66
+ As an open-source project in a rapidly developing field, we are extremely open to contributions, whether it be in the form of a new feature, improved infrastructure, or better documentation.
67
+
68
+ For detailed information on how to contribute, see the [Contributing Guide](https://docs.langchain.com/oss/python/contributing/overview).