langchain 0.0.137 → 0.0.139

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. package/chat_models/minimax.cjs +1 -0
  2. package/chat_models/minimax.d.ts +1 -0
  3. package/chat_models/minimax.js +1 -0
  4. package/dist/agents/initialize.cjs +11 -0
  5. package/dist/agents/initialize.d.ts +4 -0
  6. package/dist/agents/initialize.js +11 -0
  7. package/dist/agents/xml/index.cjs +119 -0
  8. package/dist/agents/xml/index.d.ts +51 -0
  9. package/dist/agents/xml/index.js +114 -0
  10. package/dist/agents/xml/prompt.cjs +23 -0
  11. package/dist/agents/xml/prompt.d.ts +1 -0
  12. package/dist/agents/xml/prompt.js +20 -0
  13. package/dist/callbacks/base.d.ts +12 -4
  14. package/dist/callbacks/handlers/run_collector.cjs +50 -0
  15. package/dist/callbacks/handlers/run_collector.d.ts +26 -0
  16. package/dist/callbacks/handlers/run_collector.js +46 -0
  17. package/dist/callbacks/handlers/tracer.cjs +33 -20
  18. package/dist/callbacks/handlers/tracer.d.ts +7 -3
  19. package/dist/callbacks/handlers/tracer.js +33 -20
  20. package/dist/callbacks/handlers/tracer_langchain.cjs +1 -0
  21. package/dist/callbacks/handlers/tracer_langchain.d.ts +2 -1
  22. package/dist/callbacks/handlers/tracer_langchain.js +1 -0
  23. package/dist/callbacks/index.cjs +3 -1
  24. package/dist/callbacks/index.d.ts +1 -0
  25. package/dist/callbacks/index.js +1 -0
  26. package/dist/callbacks/manager.cjs +29 -14
  27. package/dist/callbacks/manager.d.ts +9 -4
  28. package/dist/callbacks/manager.js +29 -14
  29. package/dist/chains/openai_functions/extraction.cjs +2 -2
  30. package/dist/chains/openai_functions/extraction.d.ts +5 -4
  31. package/dist/chains/openai_functions/extraction.js +2 -2
  32. package/dist/chains/openai_functions/openapi.d.ts +2 -1
  33. package/dist/chains/openai_functions/structured_output.d.ts +4 -3
  34. package/dist/chains/openai_functions/tagging.cjs +2 -2
  35. package/dist/chains/openai_functions/tagging.d.ts +5 -4
  36. package/dist/chains/openai_functions/tagging.js +2 -2
  37. package/dist/chat_models/anthropic.cjs +7 -5
  38. package/dist/chat_models/anthropic.d.ts +17 -12
  39. package/dist/chat_models/anthropic.js +4 -2
  40. package/dist/chat_models/minimax.cjs +547 -0
  41. package/dist/chat_models/minimax.d.ts +364 -0
  42. package/dist/chat_models/minimax.js +543 -0
  43. package/dist/chat_models/ollama.cjs +136 -0
  44. package/dist/chat_models/ollama.d.ts +34 -0
  45. package/dist/chat_models/ollama.js +136 -0
  46. package/dist/embeddings/minimax.cjs +152 -0
  47. package/dist/embeddings/minimax.d.ts +104 -0
  48. package/dist/embeddings/minimax.js +148 -0
  49. package/dist/experimental/chat_models/anthropic_functions.cjs +129 -0
  50. package/dist/experimental/chat_models/anthropic_functions.d.ts +20 -0
  51. package/dist/experimental/chat_models/anthropic_functions.js +125 -0
  52. package/dist/llms/ollama.cjs +136 -0
  53. package/dist/llms/ollama.d.ts +34 -0
  54. package/dist/llms/ollama.js +136 -0
  55. package/dist/load/import_constants.cjs +1 -0
  56. package/dist/load/import_constants.js +1 -0
  57. package/dist/load/import_map.cjs +4 -2
  58. package/dist/load/import_map.d.ts +2 -0
  59. package/dist/load/import_map.js +2 -0
  60. package/dist/schema/output_parser.cjs +1 -1
  61. package/dist/schema/output_parser.js +1 -1
  62. package/dist/schema/runnable.cjs +54 -15
  63. package/dist/schema/runnable.d.ts +9 -3
  64. package/dist/schema/runnable.js +55 -16
  65. package/dist/sql_db.cjs +3 -1
  66. package/dist/sql_db.js +3 -1
  67. package/dist/util/ollama.d.ts +34 -0
  68. package/dist/vectorstores/redis.cjs +17 -2
  69. package/dist/vectorstores/redis.d.ts +10 -1
  70. package/dist/vectorstores/redis.js +17 -2
  71. package/dist/vectorstores/zep.cjs +2 -1
  72. package/dist/vectorstores/zep.js +3 -2
  73. package/embeddings/minimax.cjs +1 -0
  74. package/embeddings/minimax.d.ts +1 -0
  75. package/embeddings/minimax.js +1 -0
  76. package/experimental/chat_models/anthropic_functions.cjs +1 -0
  77. package/experimental/chat_models/anthropic_functions.d.ts +1 -0
  78. package/experimental/chat_models/anthropic_functions.js +1 -0
  79. package/package.json +34 -5
@@ -32,6 +32,42 @@ class Ollama extends base_js_1.LLM {
32
32
  writable: true,
33
33
  value: "http://localhost:11434"
34
34
  });
35
+ Object.defineProperty(this, "embeddingOnly", {
36
+ enumerable: true,
37
+ configurable: true,
38
+ writable: true,
39
+ value: void 0
40
+ });
41
+ Object.defineProperty(this, "f16KV", {
42
+ enumerable: true,
43
+ configurable: true,
44
+ writable: true,
45
+ value: void 0
46
+ });
47
+ Object.defineProperty(this, "frequencyPenalty", {
48
+ enumerable: true,
49
+ configurable: true,
50
+ writable: true,
51
+ value: void 0
52
+ });
53
+ Object.defineProperty(this, "logitsAll", {
54
+ enumerable: true,
55
+ configurable: true,
56
+ writable: true,
57
+ value: void 0
58
+ });
59
+ Object.defineProperty(this, "lowVram", {
60
+ enumerable: true,
61
+ configurable: true,
62
+ writable: true,
63
+ value: void 0
64
+ });
65
+ Object.defineProperty(this, "mainGpu", {
66
+ enumerable: true,
67
+ configurable: true,
68
+ writable: true,
69
+ value: void 0
70
+ });
35
71
  Object.defineProperty(this, "mirostat", {
36
72
  enumerable: true,
37
73
  configurable: true,
@@ -50,6 +86,12 @@ class Ollama extends base_js_1.LLM {
50
86
  writable: true,
51
87
  value: void 0
52
88
  });
89
+ Object.defineProperty(this, "numBatch", {
90
+ enumerable: true,
91
+ configurable: true,
92
+ writable: true,
93
+ value: void 0
94
+ });
53
95
  Object.defineProperty(this, "numCtx", {
54
96
  enumerable: true,
55
97
  configurable: true,
@@ -62,12 +104,36 @@ class Ollama extends base_js_1.LLM {
62
104
  writable: true,
63
105
  value: void 0
64
106
  });
107
+ Object.defineProperty(this, "numGqa", {
108
+ enumerable: true,
109
+ configurable: true,
110
+ writable: true,
111
+ value: void 0
112
+ });
113
+ Object.defineProperty(this, "numKeep", {
114
+ enumerable: true,
115
+ configurable: true,
116
+ writable: true,
117
+ value: void 0
118
+ });
65
119
  Object.defineProperty(this, "numThread", {
66
120
  enumerable: true,
67
121
  configurable: true,
68
122
  writable: true,
69
123
  value: void 0
70
124
  });
125
+ Object.defineProperty(this, "penalizeNewline", {
126
+ enumerable: true,
127
+ configurable: true,
128
+ writable: true,
129
+ value: void 0
130
+ });
131
+ Object.defineProperty(this, "presencePenalty", {
132
+ enumerable: true,
133
+ configurable: true,
134
+ writable: true,
135
+ value: void 0
136
+ });
71
137
  Object.defineProperty(this, "repeatLastN", {
72
138
  enumerable: true,
73
139
  configurable: true,
@@ -80,6 +146,18 @@ class Ollama extends base_js_1.LLM {
80
146
  writable: true,
81
147
  value: void 0
82
148
  });
149
+ Object.defineProperty(this, "ropeFrequencyBase", {
150
+ enumerable: true,
151
+ configurable: true,
152
+ writable: true,
153
+ value: void 0
154
+ });
155
+ Object.defineProperty(this, "ropeFrequencyScale", {
156
+ enumerable: true,
157
+ configurable: true,
158
+ writable: true,
159
+ value: void 0
160
+ });
83
161
  Object.defineProperty(this, "temperature", {
84
162
  enumerable: true,
85
163
  configurable: true,
@@ -110,23 +188,64 @@ class Ollama extends base_js_1.LLM {
110
188
  writable: true,
111
189
  value: void 0
112
190
  });
191
+ Object.defineProperty(this, "typicalP", {
192
+ enumerable: true,
193
+ configurable: true,
194
+ writable: true,
195
+ value: void 0
196
+ });
197
+ Object.defineProperty(this, "useMLock", {
198
+ enumerable: true,
199
+ configurable: true,
200
+ writable: true,
201
+ value: void 0
202
+ });
203
+ Object.defineProperty(this, "useMMap", {
204
+ enumerable: true,
205
+ configurable: true,
206
+ writable: true,
207
+ value: void 0
208
+ });
209
+ Object.defineProperty(this, "vocabOnly", {
210
+ enumerable: true,
211
+ configurable: true,
212
+ writable: true,
213
+ value: void 0
214
+ });
113
215
  this.model = fields.model ?? this.model;
114
216
  this.baseUrl = fields.baseUrl?.endsWith("/")
115
217
  ? fields.baseUrl.slice(0, -1)
116
218
  : fields.baseUrl ?? this.baseUrl;
219
+ this.embeddingOnly = fields.embeddingOnly;
220
+ this.f16KV = fields.f16KV;
221
+ this.frequencyPenalty = fields.frequencyPenalty;
222
+ this.logitsAll = fields.logitsAll;
223
+ this.lowVram = fields.lowVram;
224
+ this.mainGpu = fields.mainGpu;
117
225
  this.mirostat = fields.mirostat;
118
226
  this.mirostatEta = fields.mirostatEta;
119
227
  this.mirostatTau = fields.mirostatTau;
228
+ this.numBatch = fields.numBatch;
120
229
  this.numCtx = fields.numCtx;
121
230
  this.numGpu = fields.numGpu;
231
+ this.numGqa = fields.numGqa;
232
+ this.numKeep = fields.numKeep;
122
233
  this.numThread = fields.numThread;
234
+ this.penalizeNewline = fields.penalizeNewline;
235
+ this.presencePenalty = fields.presencePenalty;
123
236
  this.repeatLastN = fields.repeatLastN;
124
237
  this.repeatPenalty = fields.repeatPenalty;
238
+ this.ropeFrequencyBase = fields.ropeFrequencyBase;
239
+ this.ropeFrequencyScale = fields.ropeFrequencyScale;
125
240
  this.temperature = fields.temperature;
126
241
  this.stop = fields.stop;
127
242
  this.tfsZ = fields.tfsZ;
128
243
  this.topK = fields.topK;
129
244
  this.topP = fields.topP;
245
+ this.typicalP = fields.typicalP;
246
+ this.useMLock = fields.useMLock;
247
+ this.useMMap = fields.useMMap;
248
+ this.vocabOnly = fields.vocabOnly;
130
249
  }
131
250
  _llmType() {
132
251
  return "ollama";
@@ -135,19 +254,36 @@ class Ollama extends base_js_1.LLM {
135
254
  return {
136
255
  model: this.model,
137
256
  options: {
257
+ embedding_only: this.embeddingOnly,
258
+ f16_kv: this.f16KV,
259
+ frequency_penalty: this.frequencyPenalty,
260
+ logits_all: this.logitsAll,
261
+ low_vram: this.lowVram,
262
+ main_gpu: this.mainGpu,
138
263
  mirostat: this.mirostat,
139
264
  mirostat_eta: this.mirostatEta,
140
265
  mirostat_tau: this.mirostatTau,
266
+ num_batch: this.numBatch,
141
267
  num_ctx: this.numCtx,
142
268
  num_gpu: this.numGpu,
269
+ num_gqa: this.numGqa,
270
+ num_keep: this.numKeep,
143
271
  num_thread: this.numThread,
272
+ penalize_newline: this.penalizeNewline,
273
+ presence_penalty: this.presencePenalty,
144
274
  repeat_last_n: this.repeatLastN,
145
275
  repeat_penalty: this.repeatPenalty,
276
+ rope_frequency_base: this.ropeFrequencyBase,
277
+ rope_frequency_scale: this.ropeFrequencyScale,
146
278
  temperature: this.temperature,
147
279
  stop: options?.stop ?? this.stop,
148
280
  tfs_z: this.tfsZ,
149
281
  top_k: this.topK,
150
282
  top_p: this.topP,
283
+ typical_p: this.typicalP,
284
+ use_mlock: this.useMLock,
285
+ use_mmap: this.useMMap,
286
+ vocab_only: this.vocabOnly,
151
287
  },
152
288
  };
153
289
  }
@@ -12,37 +12,71 @@ export declare class Ollama extends LLM implements OllamaInput {
12
12
  lc_serializable: boolean;
13
13
  model: string;
14
14
  baseUrl: string;
15
+ embeddingOnly?: boolean;
16
+ f16KV?: boolean;
17
+ frequencyPenalty?: number;
18
+ logitsAll?: boolean;
19
+ lowVram?: boolean;
20
+ mainGpu?: number;
15
21
  mirostat?: number;
16
22
  mirostatEta?: number;
17
23
  mirostatTau?: number;
24
+ numBatch?: number;
18
25
  numCtx?: number;
19
26
  numGpu?: number;
27
+ numGqa?: number;
28
+ numKeep?: number;
20
29
  numThread?: number;
30
+ penalizeNewline?: boolean;
31
+ presencePenalty?: number;
21
32
  repeatLastN?: number;
22
33
  repeatPenalty?: number;
34
+ ropeFrequencyBase?: number;
35
+ ropeFrequencyScale?: number;
23
36
  temperature?: number;
24
37
  stop?: string[];
25
38
  tfsZ?: number;
26
39
  topK?: number;
27
40
  topP?: number;
41
+ typicalP?: number;
42
+ useMLock?: boolean;
43
+ useMMap?: boolean;
44
+ vocabOnly?: boolean;
28
45
  constructor(fields: OllamaInput & BaseLLMParams);
29
46
  _llmType(): string;
30
47
  invocationParams(options?: this["ParsedCallOptions"]): {
31
48
  model: string;
32
49
  options: {
50
+ embedding_only: boolean | undefined;
51
+ f16_kv: boolean | undefined;
52
+ frequency_penalty: number | undefined;
53
+ logits_all: boolean | undefined;
54
+ low_vram: boolean | undefined;
55
+ main_gpu: number | undefined;
33
56
  mirostat: number | undefined;
34
57
  mirostat_eta: number | undefined;
35
58
  mirostat_tau: number | undefined;
59
+ num_batch: number | undefined;
36
60
  num_ctx: number | undefined;
37
61
  num_gpu: number | undefined;
62
+ num_gqa: number | undefined;
63
+ num_keep: number | undefined;
38
64
  num_thread: number | undefined;
65
+ penalize_newline: boolean | undefined;
66
+ presence_penalty: number | undefined;
39
67
  repeat_last_n: number | undefined;
40
68
  repeat_penalty: number | undefined;
69
+ rope_frequency_base: number | undefined;
70
+ rope_frequency_scale: number | undefined;
41
71
  temperature: number | undefined;
42
72
  stop: string[] | undefined;
43
73
  tfs_z: number | undefined;
44
74
  top_k: number | undefined;
45
75
  top_p: number | undefined;
76
+ typical_p: number | undefined;
77
+ use_mlock: boolean | undefined;
78
+ use_mmap: boolean | undefined;
79
+ vocab_only: boolean | undefined;
46
80
  };
47
81
  };
48
82
  _streamResponseChunks(input: string, options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<GenerationChunk>;
@@ -29,6 +29,42 @@ export class Ollama extends LLM {
29
29
  writable: true,
30
30
  value: "http://localhost:11434"
31
31
  });
32
+ Object.defineProperty(this, "embeddingOnly", {
33
+ enumerable: true,
34
+ configurable: true,
35
+ writable: true,
36
+ value: void 0
37
+ });
38
+ Object.defineProperty(this, "f16KV", {
39
+ enumerable: true,
40
+ configurable: true,
41
+ writable: true,
42
+ value: void 0
43
+ });
44
+ Object.defineProperty(this, "frequencyPenalty", {
45
+ enumerable: true,
46
+ configurable: true,
47
+ writable: true,
48
+ value: void 0
49
+ });
50
+ Object.defineProperty(this, "logitsAll", {
51
+ enumerable: true,
52
+ configurable: true,
53
+ writable: true,
54
+ value: void 0
55
+ });
56
+ Object.defineProperty(this, "lowVram", {
57
+ enumerable: true,
58
+ configurable: true,
59
+ writable: true,
60
+ value: void 0
61
+ });
62
+ Object.defineProperty(this, "mainGpu", {
63
+ enumerable: true,
64
+ configurable: true,
65
+ writable: true,
66
+ value: void 0
67
+ });
32
68
  Object.defineProperty(this, "mirostat", {
33
69
  enumerable: true,
34
70
  configurable: true,
@@ -47,6 +83,12 @@ export class Ollama extends LLM {
47
83
  writable: true,
48
84
  value: void 0
49
85
  });
86
+ Object.defineProperty(this, "numBatch", {
87
+ enumerable: true,
88
+ configurable: true,
89
+ writable: true,
90
+ value: void 0
91
+ });
50
92
  Object.defineProperty(this, "numCtx", {
51
93
  enumerable: true,
52
94
  configurable: true,
@@ -59,12 +101,36 @@ export class Ollama extends LLM {
59
101
  writable: true,
60
102
  value: void 0
61
103
  });
104
+ Object.defineProperty(this, "numGqa", {
105
+ enumerable: true,
106
+ configurable: true,
107
+ writable: true,
108
+ value: void 0
109
+ });
110
+ Object.defineProperty(this, "numKeep", {
111
+ enumerable: true,
112
+ configurable: true,
113
+ writable: true,
114
+ value: void 0
115
+ });
62
116
  Object.defineProperty(this, "numThread", {
63
117
  enumerable: true,
64
118
  configurable: true,
65
119
  writable: true,
66
120
  value: void 0
67
121
  });
122
+ Object.defineProperty(this, "penalizeNewline", {
123
+ enumerable: true,
124
+ configurable: true,
125
+ writable: true,
126
+ value: void 0
127
+ });
128
+ Object.defineProperty(this, "presencePenalty", {
129
+ enumerable: true,
130
+ configurable: true,
131
+ writable: true,
132
+ value: void 0
133
+ });
68
134
  Object.defineProperty(this, "repeatLastN", {
69
135
  enumerable: true,
70
136
  configurable: true,
@@ -77,6 +143,18 @@ export class Ollama extends LLM {
77
143
  writable: true,
78
144
  value: void 0
79
145
  });
146
+ Object.defineProperty(this, "ropeFrequencyBase", {
147
+ enumerable: true,
148
+ configurable: true,
149
+ writable: true,
150
+ value: void 0
151
+ });
152
+ Object.defineProperty(this, "ropeFrequencyScale", {
153
+ enumerable: true,
154
+ configurable: true,
155
+ writable: true,
156
+ value: void 0
157
+ });
80
158
  Object.defineProperty(this, "temperature", {
81
159
  enumerable: true,
82
160
  configurable: true,
@@ -107,23 +185,64 @@ export class Ollama extends LLM {
107
185
  writable: true,
108
186
  value: void 0
109
187
  });
188
+ Object.defineProperty(this, "typicalP", {
189
+ enumerable: true,
190
+ configurable: true,
191
+ writable: true,
192
+ value: void 0
193
+ });
194
+ Object.defineProperty(this, "useMLock", {
195
+ enumerable: true,
196
+ configurable: true,
197
+ writable: true,
198
+ value: void 0
199
+ });
200
+ Object.defineProperty(this, "useMMap", {
201
+ enumerable: true,
202
+ configurable: true,
203
+ writable: true,
204
+ value: void 0
205
+ });
206
+ Object.defineProperty(this, "vocabOnly", {
207
+ enumerable: true,
208
+ configurable: true,
209
+ writable: true,
210
+ value: void 0
211
+ });
110
212
  this.model = fields.model ?? this.model;
111
213
  this.baseUrl = fields.baseUrl?.endsWith("/")
112
214
  ? fields.baseUrl.slice(0, -1)
113
215
  : fields.baseUrl ?? this.baseUrl;
216
+ this.embeddingOnly = fields.embeddingOnly;
217
+ this.f16KV = fields.f16KV;
218
+ this.frequencyPenalty = fields.frequencyPenalty;
219
+ this.logitsAll = fields.logitsAll;
220
+ this.lowVram = fields.lowVram;
221
+ this.mainGpu = fields.mainGpu;
114
222
  this.mirostat = fields.mirostat;
115
223
  this.mirostatEta = fields.mirostatEta;
116
224
  this.mirostatTau = fields.mirostatTau;
225
+ this.numBatch = fields.numBatch;
117
226
  this.numCtx = fields.numCtx;
118
227
  this.numGpu = fields.numGpu;
228
+ this.numGqa = fields.numGqa;
229
+ this.numKeep = fields.numKeep;
119
230
  this.numThread = fields.numThread;
231
+ this.penalizeNewline = fields.penalizeNewline;
232
+ this.presencePenalty = fields.presencePenalty;
120
233
  this.repeatLastN = fields.repeatLastN;
121
234
  this.repeatPenalty = fields.repeatPenalty;
235
+ this.ropeFrequencyBase = fields.ropeFrequencyBase;
236
+ this.ropeFrequencyScale = fields.ropeFrequencyScale;
122
237
  this.temperature = fields.temperature;
123
238
  this.stop = fields.stop;
124
239
  this.tfsZ = fields.tfsZ;
125
240
  this.topK = fields.topK;
126
241
  this.topP = fields.topP;
242
+ this.typicalP = fields.typicalP;
243
+ this.useMLock = fields.useMLock;
244
+ this.useMMap = fields.useMMap;
245
+ this.vocabOnly = fields.vocabOnly;
127
246
  }
128
247
  _llmType() {
129
248
  return "ollama";
@@ -132,19 +251,36 @@ export class Ollama extends LLM {
132
251
  return {
133
252
  model: this.model,
134
253
  options: {
254
+ embedding_only: this.embeddingOnly,
255
+ f16_kv: this.f16KV,
256
+ frequency_penalty: this.frequencyPenalty,
257
+ logits_all: this.logitsAll,
258
+ low_vram: this.lowVram,
259
+ main_gpu: this.mainGpu,
135
260
  mirostat: this.mirostat,
136
261
  mirostat_eta: this.mirostatEta,
137
262
  mirostat_tau: this.mirostatTau,
263
+ num_batch: this.numBatch,
138
264
  num_ctx: this.numCtx,
139
265
  num_gpu: this.numGpu,
266
+ num_gqa: this.numGqa,
267
+ num_keep: this.numKeep,
140
268
  num_thread: this.numThread,
269
+ penalize_newline: this.penalizeNewline,
270
+ presence_penalty: this.presencePenalty,
141
271
  repeat_last_n: this.repeatLastN,
142
272
  repeat_penalty: this.repeatPenalty,
273
+ rope_frequency_base: this.ropeFrequencyBase,
274
+ rope_frequency_scale: this.ropeFrequencyScale,
143
275
  temperature: this.temperature,
144
276
  stop: options?.stop ?? this.stop,
145
277
  tfs_z: this.tfsZ,
146
278
  top_k: this.topK,
147
279
  top_p: this.topP,
280
+ typical_p: this.typicalP,
281
+ use_mlock: this.useMLock,
282
+ use_mmap: this.useMMap,
283
+ vocab_only: this.vocabOnly,
148
284
  },
149
285
  };
150
286
  }
@@ -121,4 +121,5 @@ exports.optionalImportEntrypoints = [
121
121
  "langchain/storage/ioredis",
122
122
  "langchain/hub",
123
123
  "langchain/experimental/multimodal_embeddings/googlevertexai",
124
+ "langchain/experimental/chat_models/anthropic_functions",
124
125
  ];
@@ -118,4 +118,5 @@ export const optionalImportEntrypoints = [
118
118
  "langchain/storage/ioredis",
119
119
  "langchain/hub",
120
120
  "langchain/experimental/multimodal_embeddings/googlevertexai",
121
+ "langchain/experimental/chat_models/anthropic_functions",
121
122
  ];
@@ -24,8 +24,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
24
24
  return result;
25
25
  };
26
26
  Object.defineProperty(exports, "__esModule", { value: true });
27
- exports.retrievers__document_compressors__chain_extract = exports.retrievers__time_weighted = exports.retrievers__parent_document = exports.retrievers__multi_vector = exports.retrievers__document_compressors = exports.retrievers__contextual_compression = exports.retrievers__databerry = exports.retrievers__remote = exports.output_parsers = exports.callbacks = exports.schema__storage = exports.schema__runnable = exports.schema__retriever = exports.schema__query_constructor = exports.schema__output_parser = exports.schema = exports.chat_models__ollama = exports.chat_models__baiduwenxin = exports.chat_models__anthropic = exports.chat_models__openai = exports.chat_models__base = exports.document_transformers__openai_functions = exports.document_loaders__web__sort_xyz_blockchain = exports.document_loaders__web__serpapi = exports.document_loaders__base = exports.document = exports.memory = exports.text_splitter = exports.vectorstores__xata = exports.vectorstores__vectara = exports.vectorstores__prisma = exports.vectorstores__memory = exports.vectorstores__base = exports.prompts = exports.llms__ollama = exports.llms__aleph_alpha = exports.llms__ai21 = exports.llms__openai = exports.llms__base = exports.embeddings__openai = exports.embeddings__fake = exports.embeddings__cache_backed = exports.embeddings__base = exports.chains__openai_functions = exports.chains = exports.tools = exports.base_language = exports.agents__toolkits = exports.agents = exports.load__serializable = void 0;
28
- exports.evaluation = exports.experimental__plan_and_execute = exports.experimental__generative_agents = exports.experimental__babyagi = exports.experimental__autogpt = exports.util__math = exports.storage__in_memory = exports.stores__message__in_memory = exports.stores__file__in_memory = exports.stores__doc__in_memory = exports.cache = exports.retrievers__vespa = exports.retrievers__score_threshold = exports.retrievers__hyde = void 0;
27
+ exports.retrievers__parent_document = exports.retrievers__multi_vector = exports.retrievers__document_compressors = exports.retrievers__contextual_compression = exports.retrievers__databerry = exports.retrievers__remote = exports.output_parsers = exports.callbacks = exports.schema__storage = exports.schema__runnable = exports.schema__retriever = exports.schema__query_constructor = exports.schema__output_parser = exports.schema = exports.chat_models__minimax = exports.chat_models__ollama = exports.chat_models__baiduwenxin = exports.chat_models__anthropic = exports.chat_models__openai = exports.chat_models__base = exports.document_transformers__openai_functions = exports.document_loaders__web__sort_xyz_blockchain = exports.document_loaders__web__serpapi = exports.document_loaders__base = exports.document = exports.memory = exports.text_splitter = exports.vectorstores__xata = exports.vectorstores__vectara = exports.vectorstores__prisma = exports.vectorstores__memory = exports.vectorstores__base = exports.prompts = exports.llms__ollama = exports.llms__aleph_alpha = exports.llms__ai21 = exports.llms__openai = exports.llms__base = exports.embeddings__minimax = exports.embeddings__openai = exports.embeddings__fake = exports.embeddings__cache_backed = exports.embeddings__base = exports.chains__openai_functions = exports.chains = exports.tools = exports.base_language = exports.agents__toolkits = exports.agents = exports.load__serializable = void 0;
28
+ exports.evaluation = exports.experimental__plan_and_execute = exports.experimental__generative_agents = exports.experimental__babyagi = exports.experimental__autogpt = exports.util__math = exports.storage__in_memory = exports.stores__message__in_memory = exports.stores__file__in_memory = exports.stores__doc__in_memory = exports.cache = exports.retrievers__vespa = exports.retrievers__score_threshold = exports.retrievers__hyde = exports.retrievers__document_compressors__chain_extract = exports.retrievers__time_weighted = void 0;
29
29
  exports.load__serializable = __importStar(require("../load/serializable.cjs"));
30
30
  exports.agents = __importStar(require("../agents/index.cjs"));
31
31
  exports.agents__toolkits = __importStar(require("../agents/toolkits/index.cjs"));
@@ -37,6 +37,7 @@ exports.embeddings__base = __importStar(require("../embeddings/base.cjs"));
37
37
  exports.embeddings__cache_backed = __importStar(require("../embeddings/cache_backed.cjs"));
38
38
  exports.embeddings__fake = __importStar(require("../embeddings/fake.cjs"));
39
39
  exports.embeddings__openai = __importStar(require("../embeddings/openai.cjs"));
40
+ exports.embeddings__minimax = __importStar(require("../embeddings/minimax.cjs"));
40
41
  exports.llms__base = __importStar(require("../llms/base.cjs"));
41
42
  exports.llms__openai = __importStar(require("../llms/openai.cjs"));
42
43
  exports.llms__ai21 = __importStar(require("../llms/ai21.cjs"));
@@ -60,6 +61,7 @@ exports.chat_models__openai = __importStar(require("../chat_models/openai.cjs"))
60
61
  exports.chat_models__anthropic = __importStar(require("../chat_models/anthropic.cjs"));
61
62
  exports.chat_models__baiduwenxin = __importStar(require("../chat_models/baiduwenxin.cjs"));
62
63
  exports.chat_models__ollama = __importStar(require("../chat_models/ollama.cjs"));
64
+ exports.chat_models__minimax = __importStar(require("../chat_models/minimax.cjs"));
63
65
  exports.schema = __importStar(require("../schema/index.cjs"));
64
66
  exports.schema__output_parser = __importStar(require("../schema/output_parser.cjs"));
65
67
  exports.schema__query_constructor = __importStar(require("../schema/query_constructor.cjs"));
@@ -9,6 +9,7 @@ export * as embeddings__base from "../embeddings/base.js";
9
9
  export * as embeddings__cache_backed from "../embeddings/cache_backed.js";
10
10
  export * as embeddings__fake from "../embeddings/fake.js";
11
11
  export * as embeddings__openai from "../embeddings/openai.js";
12
+ export * as embeddings__minimax from "../embeddings/minimax.js";
12
13
  export * as llms__base from "../llms/base.js";
13
14
  export * as llms__openai from "../llms/openai.js";
14
15
  export * as llms__ai21 from "../llms/ai21.js";
@@ -32,6 +33,7 @@ export * as chat_models__openai from "../chat_models/openai.js";
32
33
  export * as chat_models__anthropic from "../chat_models/anthropic.js";
33
34
  export * as chat_models__baiduwenxin from "../chat_models/baiduwenxin.js";
34
35
  export * as chat_models__ollama from "../chat_models/ollama.js";
36
+ export * as chat_models__minimax from "../chat_models/minimax.js";
35
37
  export * as schema from "../schema/index.js";
36
38
  export * as schema__output_parser from "../schema/output_parser.js";
37
39
  export * as schema__query_constructor from "../schema/query_constructor.js";
@@ -10,6 +10,7 @@ export * as embeddings__base from "../embeddings/base.js";
10
10
  export * as embeddings__cache_backed from "../embeddings/cache_backed.js";
11
11
  export * as embeddings__fake from "../embeddings/fake.js";
12
12
  export * as embeddings__openai from "../embeddings/openai.js";
13
+ export * as embeddings__minimax from "../embeddings/minimax.js";
13
14
  export * as llms__base from "../llms/base.js";
14
15
  export * as llms__openai from "../llms/openai.js";
15
16
  export * as llms__ai21 from "../llms/ai21.js";
@@ -33,6 +34,7 @@ export * as chat_models__openai from "../chat_models/openai.js";
33
34
  export * as chat_models__anthropic from "../chat_models/anthropic.js";
34
35
  export * as chat_models__baiduwenxin from "../chat_models/baiduwenxin.js";
35
36
  export * as chat_models__ollama from "../chat_models/ollama.js";
37
+ export * as chat_models__minimax from "../chat_models/minimax.js";
36
38
  export * as schema from "../schema/index.js";
37
39
  export * as schema__output_parser from "../schema/output_parser.js";
38
40
  export * as schema__query_constructor from "../schema/query_constructor.js";
@@ -79,7 +79,7 @@ class BaseTransformOutputParser extends BaseOutputParser {
79
79
  * @returns An asynchronous generator of parsed output.
80
80
  */
81
81
  async *transform(inputGenerator, options) {
82
- yield* this._streamWithConfig(this._transform(inputGenerator), {
82
+ yield* this._transformStreamWithConfig(inputGenerator, this._transform.bind(this), {
83
83
  ...options,
84
84
  runType: "parser",
85
85
  });
@@ -74,7 +74,7 @@ export class BaseTransformOutputParser extends BaseOutputParser {
74
74
  * @returns An asynchronous generator of parsed output.
75
75
  */
76
76
  async *transform(inputGenerator, options) {
77
- yield* this._streamWithConfig(this._transform(inputGenerator), {
77
+ yield* this._transformStreamWithConfig(inputGenerator, this._transform.bind(this), {
78
78
  ...options,
79
79
  runType: "parser",
80
80
  });