langchain 0.1.0 → 0.1.1

This diff shows the changes between publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
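The main user-facing addition in this release is a pair of helpers exported from `langchain/chains/openai_functions`: `createOpenAIFnRunnable` and `createStructuredOutputRunnable`. The sketch below is adapted from the JSDoc example in the new module; the schema, prompt text, and expected output are illustrative only and assume an OpenAI API key is configured in the environment.

```typescript
// Minimal sketch adapted from the JSDoc example added in this release.
// The schema and prompt are illustrative; they are not part of the library API.
import { createStructuredOutputRunnable } from "langchain/chains/openai_functions";
import { ChatOpenAI } from "@langchain/openai";
import { ChatPromptTemplate } from "@langchain/core/prompts";
import { JsonOutputFunctionsParser } from "langchain/output_parsers";

const jsonSchema = {
  title: "Person",
  description: "Identifying information about a person.",
  type: "object",
  properties: {
    name: { type: "string", description: "The person's name" },
    age: { type: "integer", description: "The person's age" },
  },
  required: ["name", "age"],
};

const runnable = createStructuredOutputRunnable({
  outputSchema: jsonSchema,
  llm: new ChatOpenAI(),
  prompt: ChatPromptTemplate.fromMessages([
    ["human", "Human description: {description}"],
  ]),
  outputParser: new JsonOutputFunctionsParser(),
});

const response = await runnable.invoke({
  description: "My name's John Doe and I'm 30 years old.",
});
// e.g. { name: 'John Doe', age: 30 }
```

The full diff follows.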
@@ -0,0 +1,144 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.createStructuredOutputRunnable = exports.createOpenAIFnRunnable = void 0;
+ const zod_to_json_schema_1 = require("zod-to-json-schema");
+ const openai_functions_js_1 = require("../../output_parsers/openai_functions.cjs");
+ /**
+ * Creates a runnable sequence that calls OpenAI functions.
+ * @param config - The parameters required to create the runnable.
+ * @returns A runnable sequence that will pass the given functions to the model when run.
+ *
+ * @example
+ * ```typescript
+ * const openAIFunction = {
+ * name: "get_person_details",
+ * description: "Get details about a person",
+ * parameters: {
+ * title: "Person",
+ * description: "Identifying information about a person.",
+ * type: "object",
+ * properties: {
+ * name: { title: "Name", description: "The person's name", type: "string" },
+ * age: { title: "Age", description: "The person's age", type: "integer" },
+ * fav_food: {
+ * title: "Fav Food",
+ * description: "The person's favorite food",
+ * type: "string",
+ * },
+ * },
+ * required: ["name", "age"],
+ * },
+ * };
+ *
+ * const model = new ChatOpenAI();
+ * const prompt = ChatPromptTemplate.fromMessages([
+ * ["human", "Human description: {description}"],
+ * ]);
+ * const outputParser = new JsonOutputFunctionsParser();
+ *
+ * const runnable = createOpenAIFnRunnable({
+ * functions: [openAIFunction],
+ * llm: model,
+ * prompt,
+ * enforceSingleFunctionUsage: true, // Default is true
+ * outputParser
+ * });
+ * const response = await runnable.invoke({
+ * description:
+ * "My name's John Doe and I'm 30 years old. My favorite kind of food are chocolate chip cookies.",
+ * });
+ *
+ * console.log(response);
+ *
+ * // { name: 'John Doe', age: 30, fav_food: 'chocolate chip cookies' }
+ * ```
+ */
+ function createOpenAIFnRunnable(config) {
+ const { functions, llm, prompt, enforceSingleFunctionUsage = true, outputParser = new openai_functions_js_1.JsonOutputFunctionsParser(), } = config;
+ const llmKwargs = {
+ functions,
+ };
+ if (functions.length === 1 && enforceSingleFunctionUsage) {
+ llmKwargs.function_call = {
+ name: functions[0].name,
+ };
+ }
+ const llmWithKwargs = llm.bind(llmKwargs);
+ return prompt.pipe(llmWithKwargs).pipe(outputParser);
+ }
+ exports.createOpenAIFnRunnable = createOpenAIFnRunnable;
+ function isZodSchema(schema) {
+ return typeof schema.safeParse === "function";
+ }
+ /**
+ * Create a runnable that uses an OpenAI function to get a structured output.
+ * @param config Params required to create the runnable.
+ * @returns A runnable sequence that will pass the given function to the model when run.
+ *
+ * @example
+ * ```typescript
+ * import { createStructuredOutputRunnable } from "langchain/chains/openai_functions";
+ * import { ChatOpenAI } from "@langchain/openai";
+ * import { ChatPromptTemplate } from "@langchain/core/prompts";
+ * import { JsonOutputFunctionsParser } from "langchain/output_parsers";
+ *
+ * const jsonSchema = {
+ * title: "Person",
+ * description: "Identifying information about a person.",
+ * type: "object",
+ * properties: {
+ * name: { title: "Name", description: "The person's name", type: "string" },
+ * age: { title: "Age", description: "The person's age", type: "integer" },
+ * fav_food: {
+ * title: "Fav Food",
+ * description: "The person's favorite food",
+ * type: "string",
+ * },
+ * },
+ * required: ["name", "age"],
+ * };
+ *
+ * const model = new ChatOpenAI();
+ * const prompt = ChatPromptTemplate.fromMessages([
+ * ["human", "Human description: {description}"],
+ * ]);
+ *
+ * const outputParser = new JsonOutputFunctionsParser();
+ *
+ * // Also works with Zod schema
+ * const runnable = createStructuredOutputRunnable({
+ * outputSchema: jsonSchema,
+ * llm: model,
+ * prompt,
+ * outputParser
+ * });
+ *
+ * const response = await runnable.invoke({
+ * description:
+ * "My name's John Doe and I'm 30 years old. My favorite kind of food are chocolate chip cookies.",
+ * });
+ *
+ * console.log(response);
+ *
+ * // { name: 'John Doe', age: 30, fav_food: 'chocolate chip cookies' }
+ * ```
+ */
+ function createStructuredOutputRunnable(config) {
+ const { outputSchema, llm, prompt, outputParser } = config;
+ const jsonSchema = isZodSchema(outputSchema)
+ ? (0, zod_to_json_schema_1.zodToJsonSchema)(outputSchema)
+ : outputSchema;
+ const oaiFunction = {
+ name: "outputFormatter",
+ description: "Output formatter. Should always be used to format your response to the user",
+ parameters: jsonSchema,
+ };
+ return createOpenAIFnRunnable({
+ functions: [oaiFunction],
+ llm,
+ prompt,
+ enforceSingleFunctionUsage: true,
+ outputParser,
+ });
+ }
+ exports.createStructuredOutputRunnable = createStructuredOutputRunnable;
@@ -0,0 +1,153 @@
+ import type { z } from "zod";
+ import { JsonSchema7Type } from "zod-to-json-schema/src/parseDef.js";
+ import type { BaseOutputParser } from "@langchain/core/output_parsers";
+ import type { BasePromptTemplate } from "@langchain/core/prompts";
+ import type { RunnableInterface } from "@langchain/core/runnables";
+ import type { BaseFunctionCallOptions, BaseLanguageModelInput, FunctionDefinition } from "@langchain/core/language_models/base";
+ import type { InputValues } from "@langchain/core/utils/types";
+ import type { BaseMessage } from "@langchain/core/messages";
+ /**
+ * Configuration params for the createOpenAIFnRunnable method.
+ */
+ export type CreateOpenAIFnRunnableConfig<RunInput extends Record<string, any>, RunOutput> = {
+ functions: FunctionDefinition[];
+ /** Language model to use, assumed to support the OpenAI function-calling API. */
+ llm: RunnableInterface<BaseLanguageModelInput, BaseMessage, BaseFunctionCallOptions>;
+ /** BasePromptTemplate to pass to the model. */
+ prompt: BasePromptTemplate<InputValues<Extract<keyof RunInput, string>>>;
+ /**
+ * Only used if a single function is passed in. If `true`, then the model will be
+ * forced to use the given function. If `false`, then the model will be given the
+ * option to use the given function or not.
+ */
+ enforceSingleFunctionUsage?: boolean;
+ /**
+ * BaseLLMOutputParser to use for parsing model outputs.
+ * By default will be inferred from the function types.
+ */
+ outputParser?: BaseOutputParser<RunOutput>;
+ };
+ /**
+ * Creates a runnable sequence that calls OpenAI functions.
+ * @param config - The parameters required to create the runnable.
+ * @returns A runnable sequence that will pass the given functions to the model when run.
+ *
+ * @example
+ * ```typescript
+ * const openAIFunction = {
+ * name: "get_person_details",
+ * description: "Get details about a person",
+ * parameters: {
+ * title: "Person",
+ * description: "Identifying information about a person.",
+ * type: "object",
+ * properties: {
+ * name: { title: "Name", description: "The person's name", type: "string" },
+ * age: { title: "Age", description: "The person's age", type: "integer" },
+ * fav_food: {
+ * title: "Fav Food",
+ * description: "The person's favorite food",
+ * type: "string",
+ * },
+ * },
+ * required: ["name", "age"],
+ * },
+ * };
+ *
+ * const model = new ChatOpenAI();
+ * const prompt = ChatPromptTemplate.fromMessages([
+ * ["human", "Human description: {description}"],
+ * ]);
+ * const outputParser = new JsonOutputFunctionsParser();
+ *
+ * const runnable = createOpenAIFnRunnable({
+ * functions: [openAIFunction],
+ * llm: model,
+ * prompt,
+ * enforceSingleFunctionUsage: true, // Default is true
+ * outputParser
+ * });
+ * const response = await runnable.invoke({
+ * description:
+ * "My name's John Doe and I'm 30 years old. My favorite kind of food are chocolate chip cookies.",
+ * });
+ *
+ * console.log(response);
+ *
+ * // { name: 'John Doe', age: 30, fav_food: 'chocolate chip cookies' }
+ * ```
+ */
+ export declare function createOpenAIFnRunnable<RunInput extends Record<string, any> = Record<string, any>, RunOutput extends Record<string, any> = Record<string, any>>(config: CreateOpenAIFnRunnableConfig<RunInput, RunOutput>): RunnableInterface<RunInput, RunOutput>;
+ /**
+ * Configuration params for the createStructuredOutputRunnable method.
+ */
+ export type CreateStructuredOutputRunnableConfig<RunInput extends Record<string, any>, RunOutput> = {
+ /**
+ * Schema to output. Must be either valid JSONSchema or a Zod schema.
+ */
+ outputSchema: z.AnyZodObject | JsonSchema7Type;
+ /**
+ * Language model to use, assumed to support the OpenAI function-calling API.
+ */
+ llm: RunnableInterface<BaseLanguageModelInput, BaseMessage, BaseFunctionCallOptions>;
+ /** BasePromptTemplate to pass to the model. */
+ prompt: BasePromptTemplate<InputValues<Extract<keyof RunInput, string>>>;
+ /**
+ * BaseLLMOutputParser to use for parsing model outputs.
+ */
+ outputParser?: BaseOutputParser<RunOutput>;
+ };
+ /**
+ * Create a runnable that uses an OpenAI function to get a structured output.
+ * @param config Params required to create the runnable.
+ * @returns A runnable sequence that will pass the given function to the model when run.
+ *
+ * @example
+ * ```typescript
+ * import { createStructuredOutputRunnable } from "langchain/chains/openai_functions";
+ * import { ChatOpenAI } from "@langchain/openai";
+ * import { ChatPromptTemplate } from "@langchain/core/prompts";
+ * import { JsonOutputFunctionsParser } from "langchain/output_parsers";
+ *
+ * const jsonSchema = {
+ * title: "Person",
+ * description: "Identifying information about a person.",
+ * type: "object",
+ * properties: {
+ * name: { title: "Name", description: "The person's name", type: "string" },
+ * age: { title: "Age", description: "The person's age", type: "integer" },
+ * fav_food: {
+ * title: "Fav Food",
+ * description: "The person's favorite food",
+ * type: "string",
+ * },
+ * },
+ * required: ["name", "age"],
+ * };
+ *
+ * const model = new ChatOpenAI();
+ * const prompt = ChatPromptTemplate.fromMessages([
+ * ["human", "Human description: {description}"],
+ * ]);
+ *
+ * const outputParser = new JsonOutputFunctionsParser();
+ *
+ * // Also works with Zod schema
+ * const runnable = createStructuredOutputRunnable({
+ * outputSchema: jsonSchema,
+ * llm: model,
+ * prompt,
+ * outputParser
+ * });
+ *
+ * const response = await runnable.invoke({
+ * description:
+ * "My name's John Doe and I'm 30 years old. My favorite kind of food are chocolate chip cookies.",
+ * });
+ *
+ * console.log(response);
+ *
+ * // { name: 'John Doe', age: 30, fav_food: 'chocolate chip cookies' }
+ * ```
+ */
+ export declare function createStructuredOutputRunnable<RunInput extends Record<string, any> = Record<string, any>, RunOutput extends Record<string, any> = Record<string, any>>(config: CreateStructuredOutputRunnableConfig<RunInput, RunOutput>): RunnableInterface<RunInput, RunOutput>;
@@ -0,0 +1,139 @@
+ import { zodToJsonSchema } from "zod-to-json-schema";
+ import { JsonOutputFunctionsParser } from "../../output_parsers/openai_functions.js";
+ /**
+ * Creates a runnable sequence that calls OpenAI functions.
+ * @param config - The parameters required to create the runnable.
+ * @returns A runnable sequence that will pass the given functions to the model when run.
+ *
+ * @example
+ * ```typescript
+ * const openAIFunction = {
+ * name: "get_person_details",
+ * description: "Get details about a person",
+ * parameters: {
+ * title: "Person",
+ * description: "Identifying information about a person.",
+ * type: "object",
+ * properties: {
+ * name: { title: "Name", description: "The person's name", type: "string" },
+ * age: { title: "Age", description: "The person's age", type: "integer" },
+ * fav_food: {
+ * title: "Fav Food",
+ * description: "The person's favorite food",
+ * type: "string",
+ * },
+ * },
+ * required: ["name", "age"],
+ * },
+ * };
+ *
+ * const model = new ChatOpenAI();
+ * const prompt = ChatPromptTemplate.fromMessages([
+ * ["human", "Human description: {description}"],
+ * ]);
+ * const outputParser = new JsonOutputFunctionsParser();
+ *
+ * const runnable = createOpenAIFnRunnable({
+ * functions: [openAIFunction],
+ * llm: model,
+ * prompt,
+ * enforceSingleFunctionUsage: true, // Default is true
+ * outputParser
+ * });
+ * const response = await runnable.invoke({
+ * description:
+ * "My name's John Doe and I'm 30 years old. My favorite kind of food are chocolate chip cookies.",
+ * });
+ *
+ * console.log(response);
+ *
+ * // { name: 'John Doe', age: 30, fav_food: 'chocolate chip cookies' }
+ * ```
+ */
+ export function createOpenAIFnRunnable(config) {
+ const { functions, llm, prompt, enforceSingleFunctionUsage = true, outputParser = new JsonOutputFunctionsParser(), } = config;
+ const llmKwargs = {
+ functions,
+ };
+ if (functions.length === 1 && enforceSingleFunctionUsage) {
+ llmKwargs.function_call = {
+ name: functions[0].name,
+ };
+ }
+ const llmWithKwargs = llm.bind(llmKwargs);
+ return prompt.pipe(llmWithKwargs).pipe(outputParser);
+ }
+ function isZodSchema(schema) {
+ return typeof schema.safeParse === "function";
+ }
+ /**
+ * Create a runnable that uses an OpenAI function to get a structured output.
+ * @param config Params required to create the runnable.
+ * @returns A runnable sequence that will pass the given function to the model when run.
+ *
+ * @example
+ * ```typescript
+ * import { createStructuredOutputRunnable } from "langchain/chains/openai_functions";
+ * import { ChatOpenAI } from "@langchain/openai";
+ * import { ChatPromptTemplate } from "@langchain/core/prompts";
+ * import { JsonOutputFunctionsParser } from "langchain/output_parsers";
+ *
+ * const jsonSchema = {
+ * title: "Person",
+ * description: "Identifying information about a person.",
+ * type: "object",
+ * properties: {
+ * name: { title: "Name", description: "The person's name", type: "string" },
+ * age: { title: "Age", description: "The person's age", type: "integer" },
+ * fav_food: {
+ * title: "Fav Food",
+ * description: "The person's favorite food",
+ * type: "string",
+ * },
+ * },
+ * required: ["name", "age"],
+ * };
+ *
+ * const model = new ChatOpenAI();
+ * const prompt = ChatPromptTemplate.fromMessages([
+ * ["human", "Human description: {description}"],
+ * ]);
+ *
+ * const outputParser = new JsonOutputFunctionsParser();
+ *
+ * // Also works with Zod schema
+ * const runnable = createStructuredOutputRunnable({
+ * outputSchema: jsonSchema,
+ * llm: model,
+ * prompt,
+ * outputParser
+ * });
+ *
+ * const response = await runnable.invoke({
+ * description:
+ * "My name's John Doe and I'm 30 years old. My favorite kind of food are chocolate chip cookies.",
+ * });
+ *
+ * console.log(response);
+ *
+ * // { name: 'John Doe', age: 30, fav_food: 'chocolate chip cookies' }
+ * ```
+ */
+ export function createStructuredOutputRunnable(config) {
+ const { outputSchema, llm, prompt, outputParser } = config;
+ const jsonSchema = isZodSchema(outputSchema)
+ ? zodToJsonSchema(outputSchema)
+ : outputSchema;
+ const oaiFunction = {
+ name: "outputFormatter",
+ description: "Output formatter. Should always be used to format your response to the user",
+ parameters: jsonSchema,
+ };
+ return createOpenAIFnRunnable({
+ functions: [oaiFunction],
+ llm,
+ prompt,
+ enforceSingleFunctionUsage: true,
+ outputParser,
+ });
+ }
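The `isZodSchema` check above means `outputSchema` can also be a Zod object, which `createStructuredOutputRunnable` converts with `zodToJsonSchema` before binding it as the OpenAI function's parameters. The JSDoc notes this ("Also works with Zod schema") but only shows a JSON schema, so here is a hedged sketch of the Zod path; the schema shape is illustrative and not part of the diff.

```typescript
// Sketch of the Zod-schema path under the same assumptions as the earlier example.
import { z } from "zod";
import { createStructuredOutputRunnable } from "langchain/chains/openai_functions";
import { ChatOpenAI } from "@langchain/openai";
import { ChatPromptTemplate } from "@langchain/core/prompts";

// Illustrative schema; any z.object(...) passes the isZodSchema check and is
// converted to JSON schema via zodToJsonSchema inside the helper.
const zodSchema = z.object({
  name: z.string().describe("The person's name"),
  age: z.number().int().describe("The person's age"),
  fav_food: z.string().optional().describe("The person's favorite food"),
});

const runnable = createStructuredOutputRunnable({
  outputSchema: zodSchema,
  llm: new ChatOpenAI(),
  prompt: ChatPromptTemplate.fromMessages([
    ["human", "Human description: {description}"],
  ]),
  // outputParser is optional; the helper defaults to JsonOutputFunctionsParser.
});
```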
@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.createStructuredOutputChainFromZod = exports.createStructuredOutputChain = exports.createOpenAPIChain = exports.createTaggingChainFromZod = exports.createTaggingChain = exports.createExtractionChainFromZod = exports.createExtractionChain = void 0;
+ exports.createOpenAIFnRunnable = exports.createStructuredOutputRunnable = exports.createStructuredOutputChainFromZod = exports.createStructuredOutputChain = exports.createOpenAPIChain = exports.createTaggingChainFromZod = exports.createTaggingChain = exports.createExtractionChainFromZod = exports.createExtractionChain = void 0;
  var extraction_js_1 = require("./extraction.cjs");
  Object.defineProperty(exports, "createExtractionChain", { enumerable: true, get: function () { return extraction_js_1.createExtractionChain; } });
  Object.defineProperty(exports, "createExtractionChainFromZod", { enumerable: true, get: function () { return extraction_js_1.createExtractionChainFromZod; } });
@@ -12,3 +12,6 @@ Object.defineProperty(exports, "createOpenAPIChain", { enumerable: true, get: fu
  var structured_output_js_1 = require("./structured_output.cjs");
  Object.defineProperty(exports, "createStructuredOutputChain", { enumerable: true, get: function () { return structured_output_js_1.createStructuredOutputChain; } });
  Object.defineProperty(exports, "createStructuredOutputChainFromZod", { enumerable: true, get: function () { return structured_output_js_1.createStructuredOutputChainFromZod; } });
+ var base_js_1 = require("./base.cjs");
+ Object.defineProperty(exports, "createStructuredOutputRunnable", { enumerable: true, get: function () { return base_js_1.createStructuredOutputRunnable; } });
+ Object.defineProperty(exports, "createOpenAIFnRunnable", { enumerable: true, get: function () { return base_js_1.createOpenAIFnRunnable; } });
@@ -2,3 +2,4 @@ export { createExtractionChain, createExtractionChainFromZod, } from "./extracti
  export { type TaggingChainOptions, createTaggingChain, createTaggingChainFromZod, } from "./tagging.js";
  export { type OpenAPIChainOptions, createOpenAPIChain } from "./openapi.js";
  export { type StructuredOutputChainInput, createStructuredOutputChain, createStructuredOutputChainFromZod, } from "./structured_output.js";
+ export { createStructuredOutputRunnable, createOpenAIFnRunnable, } from "./base.js";
@@ -2,3 +2,4 @@ export { createExtractionChain, createExtractionChainFromZod, } from "./extracti
  export { createTaggingChain, createTaggingChainFromZod, } from "./tagging.js";
  export { createOpenAPIChain } from "./openapi.js";
  export { createStructuredOutputChain, createStructuredOutputChainFromZod, } from "./structured_output.js";
+ export { createStructuredOutputRunnable, createOpenAIFnRunnable, } from "./base.js";
@@ -31,7 +31,7 @@ export declare class OutputFunctionsParser extends BaseLLMOutputParser<string> {
  * Class for parsing the output of an LLM into a JSON object. Uses an
  * instance of `OutputFunctionsParser` to parse the output.
  */
- export declare class JsonOutputFunctionsParser extends BaseCumulativeTransformOutputParser<object> {
+ export declare class JsonOutputFunctionsParser<Output extends object = object> extends BaseCumulativeTransformOutputParser<Output> {
  static lc_name(): string;
  lc_namespace: string[];
  lc_serializable: boolean;
@@ -41,15 +41,15 @@ export declare class JsonOutputFunctionsParser extends BaseCumulativeTransformOu
  argsOnly?: boolean;
  } & BaseCumulativeTransformOutputParserInput);
  protected _diff(prev: unknown | undefined, next: unknown): JSONPatchOperation[] | undefined;
- parsePartialResult(generations: ChatGeneration[]): Promise<object | undefined>;
+ parsePartialResult(generations: ChatGeneration[]): Promise<Output | undefined>;
  /**
  * Parses the output and returns a JSON object. If `argsOnly` is true,
  * only the arguments of the function call are returned.
  * @param generations The output of the LLM to parse.
  * @returns A JSON object representation of the function call or its arguments.
  */
- parseResult(generations: Generation[] | ChatGeneration[]): Promise<object>;
- parse(text: string): Promise<object>;
+ parseResult(generations: Generation[] | ChatGeneration[]): Promise<Output>;
+ parse(text: string): Promise<Output>;
  getFormatInstructions(): string;
  }
  /**
@@ -61,7 +61,7 @@ export declare class JsonKeyOutputFunctionsParser<T = object> extends BaseLLMOut
  static lc_name(): string;
  lc_namespace: string[];
  lc_serializable: boolean;
- outputParser: JsonOutputFunctionsParser;
+ outputParser: JsonOutputFunctionsParser<object>;
  attrName: string;
  constructor(fields: {
  attrName: string;
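The declaration changes above make `JsonOutputFunctionsParser` generic over its parsed output type, so `parseResult`, `parsePartialResult`, and `parse` now resolve to `Promise<Output>` instead of `Promise<object>`. A minimal sketch of how the type parameter might be used; the `Person` shape is an assumption for illustration only.

```typescript
// Sketch only: Person is a hypothetical output shape, not part of the library.
import { JsonOutputFunctionsParser } from "langchain/output_parsers";

type Person = { name: string; age: number; fav_food?: string };

// The new type parameter flows through parseResult/parse, so code consuming the
// parser (e.g. as the outputParser of the new runnables) sees Person, not object.
const outputParser = new JsonOutputFunctionsParser<Person>();
```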
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "langchain",
- "version": "0.1.0",
+ "version": "0.1.1",
  "description": "Typescript bindings for langchain",
  "type": "module",
  "engines": {