langchain 0.0.197-rc.1 → 0.0.198

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. package/dist/chains/openai_moderation.cjs +2 -2
  2. package/dist/chains/openai_moderation.d.ts +1 -1
  3. package/dist/chains/openai_moderation.js +1 -1
  4. package/dist/chat_models/anthropic.cjs +351 -15
  5. package/dist/chat_models/anthropic.d.ts +157 -1
  6. package/dist/chat_models/anthropic.js +348 -1
  7. package/dist/chat_models/cloudflare_workersai.cjs +5 -0
  8. package/dist/chat_models/cloudflare_workersai.d.ts +3 -0
  9. package/dist/chat_models/cloudflare_workersai.js +5 -0
  10. package/dist/chat_models/fireworks.d.ts +1 -1
  11. package/dist/chat_models/iflytek_xinghuo/common.d.ts +1 -1
  12. package/dist/chat_models/llama_cpp.cjs +24 -0
  13. package/dist/chat_models/llama_cpp.d.ts +3 -1
  14. package/dist/chat_models/llama_cpp.js +24 -0
  15. package/dist/chat_models/minimax.d.ts +1 -1
  16. package/dist/chat_models/openai.cjs +698 -4
  17. package/dist/chat_models/openai.d.ts +137 -4
  18. package/dist/chat_models/openai.js +695 -2
  19. package/dist/document_loaders/fs/openai_whisper_audio.cjs +2 -2
  20. package/dist/document_loaders/fs/openai_whisper_audio.d.ts +1 -1
  21. package/dist/document_loaders/fs/openai_whisper_audio.js +1 -1
  22. package/dist/document_loaders/fs/pptx.cjs +39 -0
  23. package/dist/document_loaders/fs/pptx.d.ts +23 -0
  24. package/dist/document_loaders/fs/pptx.js +35 -0
  25. package/dist/embeddings/openai.cjs +240 -2
  26. package/dist/embeddings/openai.d.ts +82 -1
  27. package/dist/embeddings/openai.js +239 -1
  28. package/dist/experimental/openai_assistant/index.cjs +35 -3
  29. package/dist/experimental/openai_assistant/index.d.ts +27 -1
  30. package/dist/experimental/openai_assistant/index.js +33 -1
  31. package/dist/experimental/openai_assistant/schema.d.ts +1 -1
  32. package/dist/experimental/openai_files/index.cjs +2 -2
  33. package/dist/experimental/openai_files/index.d.ts +1 -1
  34. package/dist/experimental/openai_files/index.js +1 -1
  35. package/dist/experimental/tools/pyinterpreter.cjs +248 -0
  36. package/dist/experimental/tools/pyinterpreter.d.ts +18 -0
  37. package/dist/experimental/tools/pyinterpreter.js +244 -0
  38. package/dist/graphs/neo4j_graph.cjs +49 -14
  39. package/dist/graphs/neo4j_graph.d.ts +30 -0
  40. package/dist/graphs/neo4j_graph.js +49 -14
  41. package/dist/llms/fireworks.d.ts +1 -1
  42. package/dist/llms/hf.cjs +13 -2
  43. package/dist/llms/hf.d.ts +5 -0
  44. package/dist/llms/hf.js +13 -2
  45. package/dist/llms/llama_cpp.cjs +17 -3
  46. package/dist/llms/llama_cpp.d.ts +4 -1
  47. package/dist/llms/llama_cpp.js +17 -3
  48. package/dist/llms/openai-chat.cjs +445 -3
  49. package/dist/llms/openai-chat.d.ts +123 -4
  50. package/dist/llms/openai-chat.js +443 -2
  51. package/dist/llms/openai.cjs +530 -6
  52. package/dist/llms/openai.d.ts +123 -4
  53. package/dist/llms/openai.js +525 -2
  54. package/dist/load/import_constants.cjs +3 -0
  55. package/dist/load/import_constants.js +3 -0
  56. package/dist/output_parsers/json.cjs +4 -0
  57. package/dist/output_parsers/json.js +4 -0
  58. package/dist/schema/index.d.ts +1 -1
  59. package/dist/tools/convert_to_openai.cjs +38 -4
  60. package/dist/tools/convert_to_openai.d.ts +11 -1
  61. package/dist/tools/convert_to_openai.js +35 -1
  62. package/dist/types/openai-types.d.ts +133 -1
  63. package/dist/util/env.cjs +9 -70
  64. package/dist/util/env.d.ts +1 -21
  65. package/dist/util/env.js +1 -62
  66. package/dist/util/openai-format-fndef.cjs +81 -0
  67. package/dist/util/openai-format-fndef.d.ts +44 -0
  68. package/dist/util/openai-format-fndef.js +77 -0
  69. package/dist/util/openai.cjs +18 -2
  70. package/dist/util/openai.d.ts +1 -1
  71. package/dist/util/openai.js +17 -1
  72. package/dist/util/openapi.d.ts +2 -2
  73. package/dist/util/prompt-layer.d.ts +1 -1
  74. package/dist/vectorstores/clickhouse.cjs +286 -0
  75. package/dist/vectorstores/clickhouse.d.ts +126 -0
  76. package/dist/vectorstores/clickhouse.js +259 -0
  77. package/dist/vectorstores/pgvector.cjs +142 -18
  78. package/dist/vectorstores/pgvector.d.ts +21 -0
  79. package/dist/vectorstores/pgvector.js +142 -18
  80. package/dist/vectorstores/weaviate.cjs +45 -2
  81. package/dist/vectorstores/weaviate.d.ts +27 -1
  82. package/dist/vectorstores/weaviate.js +45 -2
  83. package/document_loaders/fs/pptx.cjs +1 -0
  84. package/document_loaders/fs/pptx.d.ts +1 -0
  85. package/document_loaders/fs/pptx.js +1 -0
  86. package/experimental/tools/pyinterpreter.cjs +1 -0
  87. package/experimental/tools/pyinterpreter.d.ts +1 -0
  88. package/experimental/tools/pyinterpreter.js +1 -0
  89. package/package.json +41 -9
  90. package/vectorstores/clickhouse.cjs +1 -0
  91. package/vectorstores/clickhouse.d.ts +1 -0
  92. package/vectorstores/clickhouse.js +1 -0
@@ -1,8 +1,141 @@
1
- import { ChatOpenAI, type ChatOpenAICallOptions } from "@langchain/openai";
1
+ import { type ClientOptions, OpenAI as OpenAIClient } from "openai";
2
2
  import { CallbackManagerForLLMRun } from "../callbacks/manager.js";
3
- import { BaseMessage, ChatResult } from "../schema/index.js";
4
- export { type AzureOpenAIInput, type OpenAICallOptions, type OpenAIChatInput, } from "@langchain/openai";
5
- export { type ChatOpenAICallOptions, ChatOpenAI };
3
+ import { BaseMessage, ChatGenerationChunk, ChatResult } from "../schema/index.js";
4
+ import { StructuredTool } from "../tools/base.js";
5
+ import { AzureOpenAIInput, OpenAICallOptions, OpenAIChatInput, OpenAICoreRequestOptions, LegacyOpenAIInput } from "../types/openai-types.js";
6
+ import { BaseChatModel, BaseChatModelParams } from "./base.js";
7
+ import { BaseFunctionCallOptions } from "../base_language/index.js";
8
+ export type { AzureOpenAIInput, OpenAICallOptions, OpenAIChatInput };
9
+ interface TokenUsage {
10
+ completionTokens?: number;
11
+ promptTokens?: number;
12
+ totalTokens?: number;
13
+ }
14
+ interface OpenAILLMOutput {
15
+ tokenUsage: TokenUsage;
16
+ }
17
+ export interface ChatOpenAICallOptions extends OpenAICallOptions, BaseFunctionCallOptions {
18
+ tools?: StructuredTool[] | OpenAIClient.ChatCompletionTool[];
19
+ tool_choice?: OpenAIClient.ChatCompletionToolChoiceOption;
20
+ promptIndex?: number;
21
+ response_format?: {
22
+ type: "json_object";
23
+ };
24
+ seed?: number;
25
+ }
26
+ /**
27
+ * Wrapper around OpenAI large language models that use the Chat endpoint.
28
+ *
29
+ * To use you should have the `openai` package installed, with the
30
+ * `OPENAI_API_KEY` environment variable set.
31
+ *
32
+ * To use with Azure you should have the `openai` package installed, with the
33
+ * `AZURE_OPENAI_API_KEY`,
34
+ * `AZURE_OPENAI_API_INSTANCE_NAME`,
35
+ * `AZURE_OPENAI_API_DEPLOYMENT_NAME`
36
+ * and `AZURE_OPENAI_API_VERSION` environment variable set.
37
+ * `AZURE_OPENAI_BASE_PATH` is optional and will override `AZURE_OPENAI_API_INSTANCE_NAME` if you need to use a custom endpoint.
38
+ *
39
+ * @remarks
40
+ * Any parameters that are valid to be passed to {@link
41
+ * https://platform.openai.com/docs/api-reference/chat/create |
42
+ * `openai.createChatCompletion`} can be passed through {@link modelKwargs}, even
43
+ * if not explicitly available on this class.
44
+ * @example
45
+ * ```typescript
46
+ * // Create a new instance of ChatOpenAI with specific temperature and model name settings
47
+ * const model = new ChatOpenAI({
48
+ * temperature: 0.9,
49
+ * modelName: "ft:gpt-3.5-turbo-0613:{ORG_NAME}::{MODEL_ID}",
50
+ * });
51
+ *
52
+ * // Invoke the model with a message and await the response
53
+ * const message = await model.invoke("Hi there!");
54
+ *
55
+ * // Log the response to the console
56
+ * console.log(message);
57
+ *
58
+ * ```
59
+ */
60
+ export declare class ChatOpenAI<CallOptions extends ChatOpenAICallOptions = ChatOpenAICallOptions> extends BaseChatModel<CallOptions> implements OpenAIChatInput, AzureOpenAIInput {
61
+ static lc_name(): string;
62
+ get callKeys(): string[];
63
+ lc_serializable: boolean;
64
+ get lc_secrets(): {
65
+ [key: string]: string;
66
+ } | undefined;
67
+ get lc_aliases(): Record<string, string>;
68
+ temperature: number;
69
+ topP: number;
70
+ frequencyPenalty: number;
71
+ presencePenalty: number;
72
+ n: number;
73
+ logitBias?: Record<string, number>;
74
+ modelName: string;
75
+ modelKwargs?: OpenAIChatInput["modelKwargs"];
76
+ stop?: string[];
77
+ user?: string;
78
+ timeout?: number;
79
+ streaming: boolean;
80
+ maxTokens?: number;
81
+ openAIApiKey?: string;
82
+ azureOpenAIApiVersion?: string;
83
+ azureOpenAIApiKey?: string;
84
+ azureOpenAIApiInstanceName?: string;
85
+ azureOpenAIApiDeploymentName?: string;
86
+ azureOpenAIBasePath?: string;
87
+ organization?: string;
88
+ private client;
89
+ private clientConfig;
90
+ constructor(fields?: Partial<OpenAIChatInput> & Partial<AzureOpenAIInput> & BaseChatModelParams & {
91
+ configuration?: ClientOptions & LegacyOpenAIInput;
92
+ },
93
+ /** @deprecated */
94
+ configuration?: ClientOptions & LegacyOpenAIInput);
95
+ /**
96
+ * Get the parameters used to invoke the model
97
+ */
98
+ invocationParams(options?: this["ParsedCallOptions"]): Omit<OpenAIClient.Chat.ChatCompletionCreateParams, "messages">;
99
+ /** @ignore */
100
+ _identifyingParams(): Omit<OpenAIClient.Chat.ChatCompletionCreateParams, "messages"> & {
101
+ model_name: string;
102
+ } & ClientOptions;
103
+ _streamResponseChunks(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
104
+ /**
105
+ * Get the identifying parameters for the model
106
+ *
107
+ */
108
+ identifyingParams(): Omit<OpenAIClient.Chat.Completions.ChatCompletionCreateParams, "messages"> & {
109
+ model_name: string;
110
+ } & ClientOptions;
111
+ /** @ignore */
112
+ _generate(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
113
+ /**
114
+ * Estimate the number of tokens a prompt will use.
115
+ * Modified from: https://github.com/hmarr/openai-chat-tokens/blob/main/src/index.ts
116
+ */
117
+ private getEstimatedTokenCountFromPrompt;
118
+ /**
119
+ * Estimate the number of tokens an array of generations have used.
120
+ */
121
+ private getNumTokensFromGenerations;
122
+ getNumTokensFromMessages(messages: BaseMessage[]): Promise<{
123
+ totalCount: number;
124
+ countPerMessage: number[];
125
+ }>;
126
+ /**
127
+ * Calls the OpenAI API with retry logic in case of failures.
128
+ * @param request The request to send to the OpenAI API.
129
+ * @param options Optional configuration for the API call.
130
+ * @returns The response from the OpenAI API.
131
+ */
132
+ completionWithRetry(request: OpenAIClient.Chat.ChatCompletionCreateParamsStreaming, options?: OpenAICoreRequestOptions): Promise<AsyncIterable<OpenAIClient.Chat.Completions.ChatCompletionChunk>>;
133
+ completionWithRetry(request: OpenAIClient.Chat.ChatCompletionCreateParamsNonStreaming, options?: OpenAICoreRequestOptions): Promise<OpenAIClient.Chat.Completions.ChatCompletion>;
134
+ private _getClientOptions;
135
+ _llmType(): string;
136
+ /** @ignore */
137
+ _combineLLMOutput(...llmOutputs: OpenAILLMOutput[]): OpenAILLMOutput;
138
+ }
6
139
  export declare class PromptLayerChatOpenAI extends ChatOpenAI {
7
140
  promptLayerApiKey?: string;
8
141
  plTags?: string[];