langchain 0.3.8 → 0.3.10

This diff shows the changes between two publicly released package versions as they appear in their public registry, and is provided for informational purposes only.
@@ -75,7 +75,8 @@ class QueryTransformer {
  }
  if (funcName in ir_js_1.Comparators) {
  if (node.args && node.args.length === 2) {
- return new ir_js_1.Comparison(funcName, traverse(node.args[0]), traverse(node.args[1]));
+ const [attribute, value] = node.args;
+ return new ir_js_1.Comparison(funcName, traverse(attribute), traverse(value));
  }
  throw new Error("Comparator must have exactly 2 arguments");
  }
@@ -72,7 +72,8 @@ export class QueryTransformer {
  }
  if (funcName in Comparators) {
  if (node.args && node.args.length === 2) {
- return new Comparison(funcName, traverse(node.args[0]), traverse(node.args[1]));
+ const [attribute, value] = node.args;
+ return new Comparison(funcName, traverse(attribute), traverse(value));
  }
  throw new Error("Comparator must have exactly 2 arguments");
  }
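The two QueryTransformer hunks above (CJS and ESM builds) are a behavior-preserving refactor: the two fixed arguments of a comparator call are destructured into named variables before the Comparison node is built. A minimal, self-contained sketch of the pattern, using simplified types that mirror the diff rather than the real module layout:

type CallNode = { func: string; args: [string, string] };

class Comparison {
  constructor(
    public comparator: string,
    public attribute: string,
    public value: string
  ) {}
}

function toComparison(node: CallNode): Comparison {
  // Same destructuring introduced in 0.3.10: name the two arguments
  // instead of indexing node.args[0] / node.args[1].
  const [attribute, value] = node.args;
  return new Comparison(node.func, attribute, value);
}

// e.g. a parsed eq("genre", "sci-fi") call:
console.log(toComparison({ func: "eq", args: ["genre", "sci-fi"] }));
// Comparison { comparator: 'eq', attribute: 'genre', value: 'sci-fi' }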
@@ -10,6 +10,8 @@ const _SUPPORTED_PROVIDERS = [
  "azure_openai",
  "cohere",
  "google-vertexai",
+ "google-vertexai-web",
+ "google-genai",
  "google-genai",
  "ollama",
  "together",
@@ -48,6 +50,10 @@ params = {}) {
  const { ChatVertexAI } = await import("@langchain/google-vertexai");
  return new ChatVertexAI({ model, ...passedParams });
  }
+ case "google-vertexai-web": {
+ const { ChatVertexAI } = await import("@langchain/google-vertexai-web");
+ return new ChatVertexAI({ model, ...passedParams });
+ }
  case "google-genai": {
  const { ChatGoogleGenerativeAI } = await import("@langchain/google-genai");
  return new ChatGoogleGenerativeAI({ model, ...passedParams });
@@ -64,6 +70,10 @@ params = {}) {
  const { ChatGroq } = await import("@langchain/groq");
  return new ChatGroq({ model, ...passedParams });
  }
+ case "cerebras": {
+ const { ChatCerebras } = await import("@langchain/cerebras");
+ return new ChatCerebras({ model, ...passedParams });
+ }
  case "bedrock": {
  const { ChatBedrockConverse } = await import("@langchain/aws");
  return new ChatBedrockConverse({ model, ...passedParams });
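Taken together, the two hunks above register "google-vertexai-web" and "cerebras" as providers that the model-initialization switch can lazily import. A minimal usage sketch, assuming the public initChatModel entrypoint in "langchain/chat_models/universal" and that the corresponding provider package (@langchain/cerebras here) is installed; the model name "llama3.1-8b" is illustrative only:

import { initChatModel } from "langchain/chat_models/universal";

// "cerebras" is one of the provider strings added in this release;
// @langchain/cerebras must be installed so the dynamic import above resolves.
const model = await initChatModel("llama3.1-8b", {
  modelProvider: "cerebras",
  temperature: 0,
});

const response = await model.invoke("Say hello in one word.");
console.log(response.content);

The "google-vertexai-web" case works the same way, swapping in @langchain/google-vertexai-web for environments without filesystem-based Google credentials.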
@@ -374,6 +384,7 @@ class _ConfigurableModel extends chat_models_1.BaseChatModel {
  * - anthropic (@langchain/anthropic)
  * - azure_openai (@langchain/openai)
  * - google-vertexai (@langchain/google-vertexai)
+ * - google-vertexai-web (@langchain/google-vertexai-web)
  * - google-genai (@langchain/google-genai)
  * - bedrock (@langchain/aws)
  * - cohere (@langchain/cohere)
@@ -382,6 +393,7 @@ class _ConfigurableModel extends chat_models_1.BaseChatModel {
  * - mistralai (@langchain/mistralai)
  * - groq (@langchain/groq)
  * - ollama (@langchain/ollama)
+ * - cerebras (@langchain/cerebras)
  * @param {string[] | "any"} [fields.configurableFields] - Which model parameters are configurable:
  * - undefined: No configurable fields.
  * - "any": All fields are configurable. (See Security Note in description)
@@ -9,7 +9,7 @@ import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
  import { ChatResult } from "@langchain/core/outputs";
  interface EventStreamCallbackHandlerInput extends Omit<LogStreamCallbackHandlerInput, "_schemaFormat"> {
  }
- declare const _SUPPORTED_PROVIDERS: readonly ["openai", "anthropic", "azure_openai", "cohere", "google-vertexai", "google-genai", "ollama", "together", "fireworks", "mistralai", "groq", "bedrock"];
+ declare const _SUPPORTED_PROVIDERS: readonly ["openai", "anthropic", "azure_openai", "cohere", "google-vertexai", "google-vertexai-web", "google-genai", "google-genai", "ollama", "together", "fireworks", "mistralai", "groq", "bedrock"];
  export type ChatModelProvider = (typeof _SUPPORTED_PROVIDERS)[number];
  export interface ConfigurableChatModelCallOptions extends BaseChatModelCallOptions {
  tools?: (StructuredToolInterface | Record<string, unknown> | ToolDefinition | RunnableToolLike)[];
@@ -7,6 +7,8 @@ const _SUPPORTED_PROVIDERS = [
  "azure_openai",
  "cohere",
  "google-vertexai",
+ "google-vertexai-web",
+ "google-genai",
  "google-genai",
  "ollama",
  "together",
@@ -45,6 +47,10 @@ params = {}) {
  const { ChatVertexAI } = await import("@langchain/google-vertexai");
  return new ChatVertexAI({ model, ...passedParams });
  }
+ case "google-vertexai-web": {
+ const { ChatVertexAI } = await import("@langchain/google-vertexai-web");
+ return new ChatVertexAI({ model, ...passedParams });
+ }
  case "google-genai": {
  const { ChatGoogleGenerativeAI } = await import("@langchain/google-genai");
  return new ChatGoogleGenerativeAI({ model, ...passedParams });
@@ -61,6 +67,10 @@ params = {}) {
  const { ChatGroq } = await import("@langchain/groq");
  return new ChatGroq({ model, ...passedParams });
  }
+ case "cerebras": {
+ const { ChatCerebras } = await import("@langchain/cerebras");
+ return new ChatCerebras({ model, ...passedParams });
+ }
  case "bedrock": {
  const { ChatBedrockConverse } = await import("@langchain/aws");
  return new ChatBedrockConverse({ model, ...passedParams });
@@ -370,6 +380,7 @@ class _ConfigurableModel extends BaseChatModel {
  * - anthropic (@langchain/anthropic)
  * - azure_openai (@langchain/openai)
  * - google-vertexai (@langchain/google-vertexai)
+ * - google-vertexai-web (@langchain/google-vertexai-web)
  * - google-genai (@langchain/google-genai)
  * - bedrock (@langchain/aws)
  * - cohere (@langchain/cohere)
@@ -378,6 +389,7 @@ class _ConfigurableModel extends BaseChatModel {
  * - mistralai (@langchain/mistralai)
  * - groq (@langchain/groq)
  * - ollama (@langchain/ollama)
+ * - cerebras (@langchain/cerebras)
  * @param {string[] | "any"} [fields.configurableFields] - Which model parameters are configurable:
  * - undefined: No configurable fields.
  * - "any": All fields are configurable. (See Security Note in description)