langchain 0.0.184 → 0.0.186

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. package/agents/format_scratchpad/openai_tools.cjs +1 -0
  2. package/agents/format_scratchpad/openai_tools.d.ts +1 -0
  3. package/agents/format_scratchpad/openai_tools.js +1 -0
  4. package/dist/agents/format_scratchpad/openai_tools.cjs +19 -0
  5. package/dist/agents/format_scratchpad/openai_tools.d.ts +3 -0
  6. package/dist/agents/format_scratchpad/openai_tools.js +15 -0
  7. package/dist/agents/openai/index.cjs +2 -1
  8. package/dist/agents/openai/index.js +2 -1
  9. package/dist/agents/openai/output_parser.cjs +66 -1
  10. package/dist/agents/openai/output_parser.d.ts +26 -2
  11. package/dist/agents/openai/output_parser.js +65 -1
  12. package/dist/agents/structured_chat/index.cjs +1 -2
  13. package/dist/agents/structured_chat/index.d.ts +2 -0
  14. package/dist/agents/structured_chat/index.js +1 -2
  15. package/dist/agents/types.cjs +8 -1
  16. package/dist/agents/types.d.ts +6 -0
  17. package/dist/agents/types.js +6 -0
  18. package/dist/chains/combine_docs_chain.cjs +1 -1
  19. package/dist/chains/combine_docs_chain.js +1 -1
  20. package/dist/chains/llm_chain.cjs +52 -7
  21. package/dist/chains/llm_chain.d.ts +20 -12
  22. package/dist/chains/llm_chain.js +53 -8
  23. package/dist/chat_models/ollama.cjs +8 -0
  24. package/dist/chat_models/ollama.d.ts +3 -0
  25. package/dist/chat_models/ollama.js +8 -0
  26. package/dist/chat_models/openai.cjs +3 -0
  27. package/dist/chat_models/openai.js +3 -0
  28. package/dist/document_loaders/fs/pdf.cjs +17 -3
  29. package/dist/document_loaders/fs/pdf.js +17 -3
  30. package/dist/document_loaders/fs/unstructured.d.ts +1 -5
  31. package/dist/document_loaders/web/apify_dataset.cjs +12 -6
  32. package/dist/document_loaders/web/apify_dataset.d.ts +9 -6
  33. package/dist/document_loaders/web/apify_dataset.js +12 -6
  34. package/dist/document_loaders/web/pdf.cjs +17 -3
  35. package/dist/document_loaders/web/pdf.js +17 -3
  36. package/dist/document_loaders/web/puppeteer.cjs +37 -0
  37. package/dist/document_loaders/web/puppeteer.d.ts +17 -0
  38. package/dist/document_loaders/web/puppeteer.js +37 -0
  39. package/dist/embeddings/ollama.d.ts +1 -1
  40. package/dist/experimental/plan_and_execute/agent_executor.cjs +28 -2
  41. package/dist/experimental/plan_and_execute/agent_executor.d.ts +10 -3
  42. package/dist/experimental/plan_and_execute/agent_executor.js +26 -1
  43. package/dist/experimental/plan_and_execute/prompt.d.ts +2 -1
  44. package/dist/llms/ollama.cjs +8 -0
  45. package/dist/llms/ollama.d.ts +3 -0
  46. package/dist/llms/ollama.js +8 -0
  47. package/dist/llms/openai.cjs +1 -1
  48. package/dist/llms/openai.js +1 -1
  49. package/dist/load/import_map.cjs +3 -2
  50. package/dist/load/import_map.d.ts +1 -0
  51. package/dist/load/import_map.js +1 -0
  52. package/dist/output_parsers/index.cjs +3 -1
  53. package/dist/output_parsers/index.d.ts +1 -0
  54. package/dist/output_parsers/index.js +1 -0
  55. package/dist/output_parsers/openai_functions.cjs +3 -3
  56. package/dist/output_parsers/openai_functions.js +3 -3
  57. package/dist/output_parsers/openai_tools.cjs +53 -0
  58. package/dist/output_parsers/openai_tools.d.ts +22 -0
  59. package/dist/output_parsers/openai_tools.js +49 -0
  60. package/dist/prompts/base.d.ts +2 -1
  61. package/dist/prompts/chat.cjs +23 -2
  62. package/dist/prompts/chat.d.ts +1 -0
  63. package/dist/prompts/chat.js +23 -2
  64. package/dist/schema/index.d.ts +3 -4
  65. package/dist/schema/runnable/base.d.ts +2 -2
  66. package/dist/tools/convert_to_openai.cjs +2 -1
  67. package/dist/tools/convert_to_openai.js +2 -1
  68. package/dist/tools/index.cjs +2 -1
  69. package/dist/tools/index.d.ts +1 -1
  70. package/dist/tools/index.js +1 -1
  71. package/dist/util/ollama.d.ts +3 -0
  72. package/dist/util/types.cjs +5 -0
  73. package/dist/util/types.d.ts +4 -0
  74. package/dist/util/types.js +4 -0
  75. package/dist/vectorstores/momento_vector_index.cjs +1 -1
  76. package/dist/vectorstores/momento_vector_index.js +1 -1
  77. package/package.json +15 -7
@@ -2,6 +2,7 @@ import { LLM, BaseLLMParams } from "./base.js";
  import { OllamaInput, OllamaCallOptions } from "../util/ollama.js";
  import { CallbackManagerForLLMRun } from "../callbacks/manager.js";
  import { GenerationChunk } from "../schema/index.js";
+ import type { StringWithAutocomplete } from "../util/types.js";
  /**
  * Class that represents the Ollama language model. It extends the base
  * LLM class and implements the OllamaInput interface.
@@ -41,10 +42,12 @@ export declare class Ollama extends LLM<OllamaCallOptions> implements OllamaInpu
  useMLock?: boolean;
  useMMap?: boolean;
  vocabOnly?: boolean;
+ format?: StringWithAutocomplete<"json">;
  constructor(fields: OllamaInput & BaseLLMParams);
  _llmType(): string;
  invocationParams(options?: this["ParsedCallOptions"]): {
  model: string;
+ format: StringWithAutocomplete<"json"> | undefined;
  options: {
  embedding_only: boolean | undefined;
  f16_kv: boolean | undefined;
@@ -209,6 +209,12 @@ export class Ollama extends LLM {
  writable: true,
  value: void 0
  });
+ Object.defineProperty(this, "format", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
  this.model = fields.model ?? this.model;
  this.baseUrl = fields.baseUrl?.endsWith("/")
  ? fields.baseUrl.slice(0, -1)
@@ -243,6 +249,7 @@ export class Ollama extends LLM {
  this.useMLock = fields.useMLock;
  this.useMMap = fields.useMMap;
  this.vocabOnly = fields.vocabOnly;
+ this.format = fields.format;
  }
  _llmType() {
  return "ollama";
@@ -250,6 +257,7 @@ export class Ollama extends LLM {
  invocationParams(options) {
  return {
  model: this.model,
+ format: this.format,
  options: {
  embedding_only: this.embeddingOnly,
  f16_kv: this.f16KV,
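
The hunks above thread a new `format` option through the Ollama LLM: it is stored on the instance and forwarded via `invocationParams`, so callers can ask Ollama for JSON-mode output. A minimal sketch of how it might be used, assuming a local Ollama server and a pulled model (the base URL, model name, and prompt are illustrative):

```ts
import { Ollama } from "langchain/llms/ollama";

// Sketch: constrain Ollama's output to JSON via the new `format` field.
const model = new Ollama({
  baseUrl: "http://localhost:11434", // default local Ollama endpoint
  model: "llama2",                   // any locally pulled model
  format: "json",                    // forwarded as `format` in the request body
});

const text = await model.call(
  "List three primary colors as a JSON array under the key \"colors\"."
);
console.log(JSON.parse(text));
```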
@@ -121,7 +121,7 @@ class OpenAI extends base_js_1.BaseLLM {
  enumerable: true,
  configurable: true,
  writable: true,
- value: "text-davinci-003"
+ value: "gpt-3.5-turbo-instruct"
  });
  Object.defineProperty(this, "modelKwargs", {
  enumerable: true,
@@ -118,7 +118,7 @@ export class OpenAI extends BaseLLM {
  enumerable: true,
  configurable: true,
  writable: true,
- value: "text-davinci-003"
+ value: "gpt-3.5-turbo-instruct"
  });
  Object.defineProperty(this, "modelKwargs", {
  enumerable: true,
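
The default `modelName` of the completions-style `OpenAI` LLM changes from the deprecated `text-davinci-003` to `gpt-3.5-turbo-instruct`. Code that relied on the old implicit default should pin a model explicitly; a small sketch (the prompt is illustrative):

```ts
import { OpenAI } from "langchain/llms/openai";

// After this release, omitting `modelName` resolves to "gpt-3.5-turbo-instruct".
const defaulted = new OpenAI({ temperature: 0 });
console.log(defaulted.modelName);

// To depend on a specific completions model, name it explicitly.
const pinned = new OpenAI({ modelName: "gpt-3.5-turbo-instruct", temperature: 0 });
console.log(await pinned.call("Say hello in one word."));
```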
@@ -24,12 +24,13 @@ var __importStar = (this && this.__importStar) || function (mod) {
  return result;
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.chat_models__cloudflare_workersai = exports.chat_models__anthropic = exports.chat_models__openai = exports.chat_models__base = exports.document_transformers__openai_functions = exports.document_loaders__web__sort_xyz_blockchain = exports.document_loaders__web__serpapi = exports.document_loaders__web__searchapi = exports.document_loaders__base = exports.document = exports.memory = exports.text_splitter = exports.vectorstores__xata = exports.vectorstores__vectara = exports.vectorstores__prisma = exports.vectorstores__memory = exports.vectorstores__base = exports.prompts = exports.llms__fake = exports.llms__yandex = exports.llms__fireworks = exports.llms__ollama = exports.llms__cloudflare_workersai = exports.llms__aleph_alpha = exports.llms__ai21 = exports.llms__openai = exports.llms__base = exports.embeddings__voyage = exports.embeddings__minimax = exports.embeddings__openai = exports.embeddings__ollama = exports.embeddings__fake = exports.embeddings__cache_backed = exports.embeddings__base = exports.chains__openai_functions = exports.chains__combine_documents__reduce = exports.chains = exports.tools__render = exports.tools = exports.base_language = exports.agents__openai__output_parser = exports.agents__xml__output_parser = exports.agents__react__output_parser = exports.agents__format_scratchpad__log_to_message = exports.agents__format_scratchpad__xml = exports.agents__format_scratchpad__log = exports.agents__format_scratchpad = exports.agents__toolkits = exports.agents = exports.load__serializable = void 0;
- exports.runnables__remote = exports.evaluation = exports.experimental__chains__violation_of_expectations = exports.experimental__chat_models__bittensor = exports.experimental__plan_and_execute = exports.experimental__generative_agents = exports.experimental__babyagi = exports.experimental__openai_assistant = exports.experimental__autogpt = exports.util__time = exports.util__math = exports.util__document = exports.storage__in_memory = exports.storage__encoder_backed = exports.stores__message__in_memory = exports.stores__file__in_memory = exports.stores__doc__in_memory = exports.cache = exports.retrievers__vespa = exports.retrievers__score_threshold = exports.retrievers__hyde = exports.retrievers__document_compressors__embeddings_filter = exports.retrievers__document_compressors__chain_extract = exports.retrievers__time_weighted = exports.retrievers__tavily_search_api = exports.retrievers__parent_document = exports.retrievers__multi_vector = exports.retrievers__multi_query = exports.retrievers__document_compressors = exports.retrievers__contextual_compression = exports.retrievers__databerry = exports.retrievers__chaindesk = exports.retrievers__remote = exports.output_parsers = exports.callbacks = exports.schema__storage = exports.schema__runnable = exports.schema__retriever = exports.schema__query_constructor = exports.schema__prompt_template = exports.schema__output_parser = exports.schema__document = exports.schema = exports.chat_models__fake = exports.chat_models__yandex = exports.chat_models__minimax = exports.chat_models__ollama = exports.chat_models__baiduwenxin = exports.chat_models__fireworks = void 0;
+ exports.chat_models__anthropic = exports.chat_models__openai = exports.chat_models__base = exports.document_transformers__openai_functions = exports.document_loaders__web__sort_xyz_blockchain = exports.document_loaders__web__serpapi = exports.document_loaders__web__searchapi = exports.document_loaders__base = exports.document = exports.memory = exports.text_splitter = exports.vectorstores__xata = exports.vectorstores__vectara = exports.vectorstores__prisma = exports.vectorstores__memory = exports.vectorstores__base = exports.prompts = exports.llms__fake = exports.llms__yandex = exports.llms__fireworks = exports.llms__ollama = exports.llms__cloudflare_workersai = exports.llms__aleph_alpha = exports.llms__ai21 = exports.llms__openai = exports.llms__base = exports.embeddings__voyage = exports.embeddings__minimax = exports.embeddings__openai = exports.embeddings__ollama = exports.embeddings__fake = exports.embeddings__cache_backed = exports.embeddings__base = exports.chains__openai_functions = exports.chains__combine_documents__reduce = exports.chains = exports.tools__render = exports.tools = exports.base_language = exports.agents__openai__output_parser = exports.agents__xml__output_parser = exports.agents__react__output_parser = exports.agents__format_scratchpad__log_to_message = exports.agents__format_scratchpad__xml = exports.agents__format_scratchpad__log = exports.agents__format_scratchpad__openai_tools = exports.agents__format_scratchpad = exports.agents__toolkits = exports.agents = exports.load__serializable = void 0;
+ exports.runnables__remote = exports.evaluation = exports.experimental__chains__violation_of_expectations = exports.experimental__chat_models__bittensor = exports.experimental__plan_and_execute = exports.experimental__generative_agents = exports.experimental__babyagi = exports.experimental__openai_assistant = exports.experimental__autogpt = exports.util__time = exports.util__math = exports.util__document = exports.storage__in_memory = exports.storage__encoder_backed = exports.stores__message__in_memory = exports.stores__file__in_memory = exports.stores__doc__in_memory = exports.cache = exports.retrievers__vespa = exports.retrievers__score_threshold = exports.retrievers__hyde = exports.retrievers__document_compressors__embeddings_filter = exports.retrievers__document_compressors__chain_extract = exports.retrievers__time_weighted = exports.retrievers__tavily_search_api = exports.retrievers__parent_document = exports.retrievers__multi_vector = exports.retrievers__multi_query = exports.retrievers__document_compressors = exports.retrievers__contextual_compression = exports.retrievers__databerry = exports.retrievers__chaindesk = exports.retrievers__remote = exports.output_parsers = exports.callbacks = exports.schema__storage = exports.schema__runnable = exports.schema__retriever = exports.schema__query_constructor = exports.schema__prompt_template = exports.schema__output_parser = exports.schema__document = exports.schema = exports.chat_models__fake = exports.chat_models__yandex = exports.chat_models__minimax = exports.chat_models__ollama = exports.chat_models__baiduwenxin = exports.chat_models__fireworks = exports.chat_models__cloudflare_workersai = void 0;
  exports.load__serializable = __importStar(require("../load/serializable.cjs"));
  exports.agents = __importStar(require("../agents/index.cjs"));
  exports.agents__toolkits = __importStar(require("../agents/toolkits/index.cjs"));
  exports.agents__format_scratchpad = __importStar(require("../agents/format_scratchpad/openai_functions.cjs"));
+ exports.agents__format_scratchpad__openai_tools = __importStar(require("../agents/format_scratchpad/openai_tools.cjs"));
  exports.agents__format_scratchpad__log = __importStar(require("../agents/format_scratchpad/log.cjs"));
  exports.agents__format_scratchpad__xml = __importStar(require("../agents/format_scratchpad/xml.cjs"));
  exports.agents__format_scratchpad__log_to_message = __importStar(require("../agents/format_scratchpad/log_to_message.cjs"));
@@ -2,6 +2,7 @@ export * as load__serializable from "../load/serializable.js";
  export * as agents from "../agents/index.js";
  export * as agents__toolkits from "../agents/toolkits/index.js";
  export * as agents__format_scratchpad from "../agents/format_scratchpad/openai_functions.js";
+ export * as agents__format_scratchpad__openai_tools from "../agents/format_scratchpad/openai_tools.js";
  export * as agents__format_scratchpad__log from "../agents/format_scratchpad/log.js";
  export * as agents__format_scratchpad__xml from "../agents/format_scratchpad/xml.js";
  export * as agents__format_scratchpad__log_to_message from "../agents/format_scratchpad/log_to_message.js";
@@ -3,6 +3,7 @@ export * as load__serializable from "../load/serializable.js";
  export * as agents from "../agents/index.js";
  export * as agents__toolkits from "../agents/toolkits/index.js";
  export * as agents__format_scratchpad from "../agents/format_scratchpad/openai_functions.js";
+ export * as agents__format_scratchpad__openai_tools from "../agents/format_scratchpad/openai_tools.js";
  export * as agents__format_scratchpad__log from "../agents/format_scratchpad/log.js";
  export * as agents__format_scratchpad__xml from "../agents/format_scratchpad/xml.js";
  export * as agents__format_scratchpad__log_to_message from "../agents/format_scratchpad/log_to_message.js";
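
The import map now wires up `agents/format_scratchpad/openai_tools` as a subpath entrypoint (the root-level stub files appear in the file list above). This diff does not show what that module exports, so only the namespace import is sketched here, on the assumption that the updated package.json exposes the subpath:

```ts
// Sketch only: the exact exports of the new scratchpad module are not shown in this diff.
import * as openaiToolsScratchpad from "langchain/agents/format_scratchpad/openai_tools";

console.log(Object.keys(openaiToolsScratchpad));
```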
@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.JsonKeyOutputFunctionsParser = exports.JsonOutputFunctionsParser = exports.OutputFunctionsParser = exports.CustomListOutputParser = exports.RouterOutputParser = exports.CombiningOutputParser = exports.OutputFixingParser = exports.JsonMarkdownStructuredOutputParser = exports.AsymmetricStructuredOutputParser = exports.StructuredOutputParser = exports.RegexParser = exports.CommaSeparatedListOutputParser = exports.ListOutputParser = void 0;
+ exports.JsonOutputToolsParser = exports.JsonKeyOutputFunctionsParser = exports.JsonOutputFunctionsParser = exports.OutputFunctionsParser = exports.CustomListOutputParser = exports.RouterOutputParser = exports.CombiningOutputParser = exports.OutputFixingParser = exports.JsonMarkdownStructuredOutputParser = exports.AsymmetricStructuredOutputParser = exports.StructuredOutputParser = exports.RegexParser = exports.CommaSeparatedListOutputParser = exports.ListOutputParser = void 0;
  var list_js_1 = require("./list.cjs");
  Object.defineProperty(exports, "ListOutputParser", { enumerable: true, get: function () { return list_js_1.ListOutputParser; } });
  Object.defineProperty(exports, "CommaSeparatedListOutputParser", { enumerable: true, get: function () { return list_js_1.CommaSeparatedListOutputParser; } });
@@ -22,3 +22,5 @@ var openai_functions_js_1 = require("../output_parsers/openai_functions.cjs");
  Object.defineProperty(exports, "OutputFunctionsParser", { enumerable: true, get: function () { return openai_functions_js_1.OutputFunctionsParser; } });
  Object.defineProperty(exports, "JsonOutputFunctionsParser", { enumerable: true, get: function () { return openai_functions_js_1.JsonOutputFunctionsParser; } });
  Object.defineProperty(exports, "JsonKeyOutputFunctionsParser", { enumerable: true, get: function () { return openai_functions_js_1.JsonKeyOutputFunctionsParser; } });
+ var openai_tools_js_1 = require("../output_parsers/openai_tools.cjs");
+ Object.defineProperty(exports, "JsonOutputToolsParser", { enumerable: true, get: function () { return openai_tools_js_1.JsonOutputToolsParser; } });
@@ -6,3 +6,4 @@ export { CombiningOutputParser } from "./combining.js";
  export { RouterOutputParser, type RouterOutputParserInput } from "./router.js";
  export { CustomListOutputParser } from "./list.js";
  export { type FunctionParameters, OutputFunctionsParser, JsonOutputFunctionsParser, JsonKeyOutputFunctionsParser, } from "../output_parsers/openai_functions.js";
+ export { type ParsedToolCall, JsonOutputToolsParser, } from "../output_parsers/openai_tools.js";
@@ -6,3 +6,4 @@ export { CombiningOutputParser } from "./combining.js";
  export { RouterOutputParser } from "./router.js";
  export { CustomListOutputParser } from "./list.js";
  export { OutputFunctionsParser, JsonOutputFunctionsParser, JsonKeyOutputFunctionsParser, } from "../output_parsers/openai_functions.js";
+ export { JsonOutputToolsParser, } from "../output_parsers/openai_tools.js";
@@ -18,7 +18,7 @@ class OutputFunctionsParser extends output_parser_js_1.BaseLLMOutputParser {
  enumerable: true,
  configurable: true,
  writable: true,
- value: ["langchain", "chains", "openai_functions"]
+ value: ["langchain", "output_parsers"]
  });
  Object.defineProperty(this, "lc_serializable", {
  enumerable: true,
@@ -75,7 +75,7 @@ class JsonOutputFunctionsParser extends output_parser_js_1.BaseCumulativeTransfo
  enumerable: true,
  configurable: true,
  writable: true,
- value: ["langchain", "chains", "openai_functions"]
+ value: ["langchain", "output_parsers"]
  });
  Object.defineProperty(this, "lc_serializable", {
  enumerable: true,
@@ -166,7 +166,7 @@ class JsonKeyOutputFunctionsParser extends output_parser_js_1.BaseLLMOutputParse
  enumerable: true,
  configurable: true,
  writable: true,
- value: ["langchain", "chains", "openai_functions"]
+ value: ["langchain", "output_parsers"]
  });
  Object.defineProperty(this, "lc_serializable", {
  enumerable: true,
@@ -15,7 +15,7 @@ export class OutputFunctionsParser extends BaseLLMOutputParser {
  enumerable: true,
  configurable: true,
  writable: true,
- value: ["langchain", "chains", "openai_functions"]
+ value: ["langchain", "output_parsers"]
  });
  Object.defineProperty(this, "lc_serializable", {
  enumerable: true,
@@ -71,7 +71,7 @@ export class JsonOutputFunctionsParser extends BaseCumulativeTransformOutputPars
  enumerable: true,
  configurable: true,
  writable: true,
- value: ["langchain", "chains", "openai_functions"]
+ value: ["langchain", "output_parsers"]
  });
  Object.defineProperty(this, "lc_serializable", {
  enumerable: true,
@@ -161,7 +161,7 @@ export class JsonKeyOutputFunctionsParser extends BaseLLMOutputParser {
  enumerable: true,
  configurable: true,
  writable: true,
- value: ["langchain", "chains", "openai_functions"]
+ value: ["langchain", "output_parsers"]
  });
  Object.defineProperty(this, "lc_serializable", {
  enumerable: true,
@@ -0,0 +1,53 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.JsonOutputToolsParser = void 0;
+ const output_parser_js_1 = require("../schema/output_parser.cjs");
+ /**
+ * Class for parsing the output of an LLM into a JSON object. Uses an
+ * instance of `OutputToolsParser` to parse the output.
+ */
+ class JsonOutputToolsParser extends output_parser_js_1.BaseLLMOutputParser {
+ constructor() {
+ super(...arguments);
+ Object.defineProperty(this, "lc_namespace", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: ["langchain", "output_parsers"]
+ });
+ Object.defineProperty(this, "lc_serializable", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: true
+ });
+ }
+ static lc_name() {
+ return "JsonOutputToolsParser";
+ }
+ /**
+ * Parses the output and returns a JSON object. If `argsOnly` is true,
+ * only the arguments of the function call are returned.
+ * @param generations The output of the LLM to parse.
+ * @returns A JSON object representation of the function call or its arguments.
+ */
+ async parseResult(generations) {
+ const toolCalls = generations[0].message.additional_kwargs.tool_calls;
+ if (!toolCalls) {
+ throw new Error(`No tools_call in message ${JSON.stringify(generations)}`);
+ }
+ const clonedToolCalls = JSON.parse(JSON.stringify(toolCalls));
+ const parsedToolCalls = [];
+ for (const toolCall of clonedToolCalls) {
+ if (toolCall.function !== undefined) {
+ const functionArgs = toolCall.function.arguments;
+ parsedToolCalls.push({
+ name: toolCall.function.name,
+ arguments: JSON.parse(functionArgs),
+ });
+ }
+ }
+ return parsedToolCalls;
+ }
+ }
+ exports.JsonOutputToolsParser = JsonOutputToolsParser;
@@ -0,0 +1,22 @@
+ import { BaseLLMOutputParser } from "../schema/output_parser.js";
+ import type { ChatGeneration } from "../schema/index.js";
+ export type ParsedToolCall = {
+ name: string;
+ arguments: Record<string, any>;
+ };
+ /**
+ * Class for parsing the output of an LLM into a JSON object. Uses an
+ * instance of `OutputToolsParser` to parse the output.
+ */
+ export declare class JsonOutputToolsParser extends BaseLLMOutputParser<ParsedToolCall[]> {
+ static lc_name(): string;
+ lc_namespace: string[];
+ lc_serializable: boolean;
+ /**
+ * Parses the output and returns a JSON object. If `argsOnly` is true,
+ * only the arguments of the function call are returned.
+ * @param generations The output of the LLM to parse.
+ * @returns A JSON object representation of the function call or its arguments.
+ */
+ parseResult(generations: ChatGeneration[]): Promise<ParsedToolCall[]>;
+ }
@@ -0,0 +1,49 @@
+ import { BaseLLMOutputParser } from "../schema/output_parser.js";
+ /**
+ * Class for parsing the output of an LLM into a JSON object. Uses an
+ * instance of `OutputToolsParser` to parse the output.
+ */
+ export class JsonOutputToolsParser extends BaseLLMOutputParser {
+ constructor() {
+ super(...arguments);
+ Object.defineProperty(this, "lc_namespace", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: ["langchain", "output_parsers"]
+ });
+ Object.defineProperty(this, "lc_serializable", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: true
+ });
+ }
+ static lc_name() {
+ return "JsonOutputToolsParser";
+ }
+ /**
+ * Parses the output and returns a JSON object. If `argsOnly` is true,
+ * only the arguments of the function call are returned.
+ * @param generations The output of the LLM to parse.
+ * @returns A JSON object representation of the function call or its arguments.
+ */
+ async parseResult(generations) {
+ const toolCalls = generations[0].message.additional_kwargs.tool_calls;
+ if (!toolCalls) {
+ throw new Error(`No tools_call in message ${JSON.stringify(generations)}`);
+ }
+ const clonedToolCalls = JSON.parse(JSON.stringify(toolCalls));
+ const parsedToolCalls = [];
+ for (const toolCall of clonedToolCalls) {
+ if (toolCall.function !== undefined) {
+ const functionArgs = toolCall.function.arguments;
+ parsedToolCalls.push({
+ name: toolCall.function.name,
+ arguments: JSON.parse(functionArgs),
+ });
+ }
+ }
+ return parsedToolCalls;
+ }
+ }
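
The new `JsonOutputToolsParser` shown above reads `additional_kwargs.tool_calls` from the first chat generation and returns `{ name, arguments }` pairs with the JSON arguments already parsed. A self-contained sketch that feeds it a hand-built generation instead of a live model response (the tool name and arguments are illustrative):

```ts
import { AIMessage } from "langchain/schema";
import { JsonOutputToolsParser } from "langchain/output_parsers";

const parser = new JsonOutputToolsParser();

// A hand-built ChatGeneration shaped like an OpenAI tool-calling response.
const message = new AIMessage({
  content: "",
  additional_kwargs: {
    tool_calls: [
      {
        id: "call_1",
        type: "function",
        function: {
          name: "get_weather",
          arguments: JSON.stringify({ city: "Paris", unit: "celsius" }),
        },
      },
    ],
  },
});

const parsed = await parser.parseResult([{ text: "", message }]);
// -> [{ name: "get_weather", arguments: { city: "Paris", unit: "celsius" } }]
console.log(parsed);
```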
@@ -5,7 +5,8 @@ import { SerializedBasePromptTemplate } from "./serde.js";
  import { SerializedFields } from "../load/map_keys.js";
  import { Runnable } from "../schema/runnable/index.js";
  import { BaseCallbackConfig } from "../callbacks/manager.js";
- export type TypedPromptInputValues<RunInput> = InputValues<Extract<keyof RunInput, string> | (string & Record<never, never>)>;
+ import type { StringWithAutocomplete } from "../util/types.js";
+ export type TypedPromptInputValues<RunInput> = InputValues<StringWithAutocomplete<Extract<keyof RunInput, string>>>;
  /**
  * Represents a prompt value as a string. It extends the BasePromptValue
  * class and overrides the toString and toChatMessages methods.
@@ -53,7 +53,7 @@ class ChatPromptValue extends index_js_1.BasePromptValue {
  // eslint-disable-next-line no-param-reassign
  fields = { messages: fields };
  }
- super(...arguments);
+ super(fields);
  Object.defineProperty(this, "lc_namespace", {
  enumerable: true,
  configurable: true,
@@ -337,13 +337,34 @@ class ChatPromptTemplate extends BaseChatPromptTemplate {
  _getPromptType() {
  return "chat";
  }
+ async _parseImagePrompts(message, inputValues) {
+ if (typeof message.content === "string") {
+ return message;
+ }
+ const formattedMessageContent = await Promise.all(message.content.map(async (item) => {
+ if (item.type !== "image_url" ||
+ typeof item.image_url === "string" ||
+ !item.image_url?.url) {
+ return item;
+ }
+ const imageUrl = item.image_url.url;
+ const promptTemplatePlaceholder = prompt_js_1.PromptTemplate.fromTemplate(imageUrl);
+ const formattedUrl = await promptTemplatePlaceholder.format(inputValues);
+ // eslint-disable-next-line no-param-reassign
+ item.image_url.url = formattedUrl;
+ return item;
+ }));
+ // eslint-disable-next-line no-param-reassign
+ message.content = formattedMessageContent;
+ return message;
+ }
  async formatMessages(values) {
  const allValues = await this.mergePartialAndUserVariables(values);
  let resultMessages = [];
  for (const promptMessage of this.promptMessages) {
  // eslint-disable-next-line no-instanceof/no-instanceof
  if (promptMessage instanceof index_js_1.BaseMessage) {
- resultMessages.push(promptMessage);
+ resultMessages.push(await this._parseImagePrompts(promptMessage, allValues));
  }
  else {
  const inputValues = promptMessage.inputVariables.reduce((acc, inputVariable) => {
@@ -169,6 +169,7 @@ export declare class ChatPromptTemplate<RunInput extends InputValues = any, Part
  validateTemplate: boolean;
  constructor(input: ChatPromptTemplateInput<RunInput, PartialVariableName>);
  _getPromptType(): "chat";
+ private _parseImagePrompts;
  formatMessages(values: TypedPromptInputValues<RunInput>): Promise<BaseMessage[]>;
  partial<NewPartialVariableName extends string>(values: PartialValues<NewPartialVariableName>): Promise<ChatPromptTemplate<InputValues<Exclude<Extract<keyof RunInput, string>, NewPartialVariableName>>, any>>;
  /**
@@ -49,7 +49,7 @@ export class ChatPromptValue extends BasePromptValue {
  // eslint-disable-next-line no-param-reassign
  fields = { messages: fields };
  }
- super(...arguments);
+ super(fields);
  Object.defineProperty(this, "lc_namespace", {
  enumerable: true,
  configurable: true,
@@ -325,13 +325,34 @@ export class ChatPromptTemplate extends BaseChatPromptTemplate {
  _getPromptType() {
  return "chat";
  }
+ async _parseImagePrompts(message, inputValues) {
+ if (typeof message.content === "string") {
+ return message;
+ }
+ const formattedMessageContent = await Promise.all(message.content.map(async (item) => {
+ if (item.type !== "image_url" ||
+ typeof item.image_url === "string" ||
+ !item.image_url?.url) {
+ return item;
+ }
+ const imageUrl = item.image_url.url;
+ const promptTemplatePlaceholder = PromptTemplate.fromTemplate(imageUrl);
+ const formattedUrl = await promptTemplatePlaceholder.format(inputValues);
+ // eslint-disable-next-line no-param-reassign
+ item.image_url.url = formattedUrl;
+ return item;
+ }));
+ // eslint-disable-next-line no-param-reassign
+ message.content = formattedMessageContent;
+ return message;
+ }
  async formatMessages(values) {
  const allValues = await this.mergePartialAndUserVariables(values);
  let resultMessages = [];
  for (const promptMessage of this.promptMessages) {
  // eslint-disable-next-line no-instanceof/no-instanceof
  if (promptMessage instanceof BaseMessage) {
- resultMessages.push(promptMessage);
+ resultMessages.push(await this._parseImagePrompts(promptMessage, allValues));
  }
  else {
  const inputValues = promptMessage.inputVariables.reduce((acc, inputVariable) => {
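
The new private `_parseImagePrompts` pass runs every `image_url` entry of a pre-built message through `PromptTemplate.fromTemplate`, so an image URL can carry an input variable just like text content. A rough sketch of the behavior, assuming the multimodal content shape shown in this diff (the variable name and URL are illustrative):

```ts
import { HumanMessage } from "langchain/schema";
import { ChatPromptTemplate } from "langchain/prompts";

// The image_url string itself contains a template placeholder; it is filled in
// by the new _parseImagePrompts pass during formatMessages().
const prompt = ChatPromptTemplate.fromMessages([
  new HumanMessage({
    content: [
      { type: "text", text: "Describe this picture in one sentence." },
      { type: "image_url", image_url: { url: "{imageUrl}" } },
    ],
  }),
]);

const messages = await prompt.formatMessages({
  imageUrl: "https://example.com/cat.png",
});
// messages[0].content[1].image_url.url === "https://example.com/cat.png"
```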
@@ -1,6 +1,7 @@
  import type { OpenAI as OpenAIClient } from "openai";
  import { Document } from "../document.js";
  import { Serializable } from "../load/serializable.js";
+ import type { StringWithAutocomplete } from "../util/types.js";
  export declare const RUN_KEY = "__run";
  export type Example = Record<string, string>;
  export type InputValues<K extends string = string> = Record<K, any>;
@@ -70,6 +71,7 @@ export type MessageContent = string | {
  text?: string;
  image_url?: string | {
  url: string;
+ detail?: "low" | "high";
  };
  }[];
  export interface BaseMessageFields {
@@ -251,10 +253,7 @@ export declare class ChatMessage extends BaseMessage implements ChatMessageField
  _getType(): MessageType;
  static isInstance(message: BaseMessage): message is ChatMessage;
  }
- export type BaseMessageLike = BaseMessage | [
- MessageType | "user" | "assistant" | (string & Record<never, never>),
- string
- ] | string;
+ export type BaseMessageLike = BaseMessage | [StringWithAutocomplete<MessageType | "user" | "assistant">, string] | string;
  export declare function isBaseMessage(messageLike?: unknown): messageLike is BaseMessage;
  export declare function isBaseMessageChunk(messageLike?: unknown): messageLike is BaseMessageChunk;
  export declare function coerceMessageLikeToMessage(messageLike: BaseMessageLike): BaseMessage;
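
`MessageContent` image entries also gain an optional `detail` hint, and `BaseMessageLike` is restated in terms of the new `StringWithAutocomplete` helper without changing what it accepts. A small sketch of the extended content shape (the URL is a placeholder):

```ts
import { HumanMessage } from "langchain/schema";

// `detail` lets callers suggest a low- or high-resolution pass over the image.
const msg = new HumanMessage({
  content: [
    { type: "text", text: "What is in this image?" },
    {
      type: "image_url",
      image_url: { url: "https://example.com/diagram.png", detail: "low" },
    },
  ],
});
```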
@@ -317,8 +317,8 @@ export declare class RunnableWithFallbacks<RunInput, RunOutput> extends Runnable
  static lc_name(): string;
  lc_namespace: string[];
  lc_serializable: boolean;
- protected runnable: Runnable<RunInput, RunOutput>;
- protected fallbacks: Runnable<RunInput, RunOutput>[];
+ runnable: Runnable<RunInput, RunOutput>;
+ fallbacks: Runnable<RunInput, RunOutput>[];
  constructor(fields: {
  runnable: Runnable<RunInput, RunOutput>;
  fallbacks: Runnable<RunInput, RunOutput>[];
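
`runnable` and `fallbacks` on `RunnableWithFallbacks` lose their `protected` modifier, so code holding a fallback wrapper can inspect what it is composed of. A sketch assuming two chat models purely for illustration:

```ts
import { ChatOpenAI } from "langchain/chat_models/openai";

const primary = new ChatOpenAI({ modelName: "gpt-4" });
const backup = new ChatOpenAI({ modelName: "gpt-3.5-turbo" });

// withFallbacks wraps `primary` in a RunnableWithFallbacks.
const resilient = primary.withFallbacks({ fallbacks: [backup] });

// These fields were `protected` before this release; now they are readable.
console.log(resilient.runnable === primary); // true
console.log(resilient.fallbacks.length);     // 1
```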
@@ -17,12 +17,13 @@ function formatToOpenAIFunction(tool) {
  }
  exports.formatToOpenAIFunction = formatToOpenAIFunction;
  function formatToOpenAITool(tool) {
+ const schema = (0, zod_to_json_schema_1.zodToJsonSchema)(tool.schema);
  return {
  type: "function",
  function: {
  name: tool.name,
  description: tool.description,
- parameters: (0, zod_to_json_schema_1.zodToJsonSchema)(tool.schema),
+ parameters: schema,
  },
  };
  }
@@ -13,12 +13,13 @@ export function formatToOpenAIFunction(tool) {
  };
  }
  export function formatToOpenAITool(tool) {
+ const schema = zodToJsonSchema(tool.schema);
  return {
  type: "function",
  function: {
  name: tool.name,
  description: tool.description,
- parameters: zodToJsonSchema(tool.schema),
+ parameters: schema,
  },
  };
  }
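
`formatToOpenAITool` complements the existing `formatToOpenAIFunction`, emitting the `{ type: "function", function: { ... } }` wrapper that the OpenAI tools API expects. A minimal sketch with a hypothetical zod-backed tool:

```ts
import { z } from "zod";
import { DynamicStructuredTool, formatToOpenAITool } from "langchain/tools";

// A hypothetical structured tool; only its name, description, and schema matter here.
const weatherTool = new DynamicStructuredTool({
  name: "get_weather",
  description: "Look up the current weather for a city",
  schema: z.object({
    city: z.string(),
    unit: z.enum(["celsius", "fahrenheit"]),
  }),
  func: async ({ city }) => `It is sunny in ${city}.`,
});

const openAITool = formatToOpenAITool(weatherTool);
// -> { type: "function", function: { name, description, parameters: <JSON Schema> } }
console.log(JSON.stringify(openAITool, null, 2));
```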
@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.formatToOpenAIFunction = exports.SearchApi = exports.SearxngSearch = exports.DataForSeoAPISearch = exports.WolframAlphaTool = exports.WikipediaQueryRun = exports.BraveSearch = exports.WriteFileTool = exports.ReadFileTool = exports.AIPluginTool = exports.GoogleCustomSearch = exports.Serper = exports.ZapierNLAWrapper = exports.ZapierNLARunAction = exports.VectorStoreQATool = exports.RequestsPostTool = exports.RequestsGetTool = exports.JsonGetValueTool = exports.JsonListKeysTool = exports.JsonSpec = exports.ChainTool = exports.IFTTTWebhook = exports.DynamicStructuredTool = exports.DynamicTool = exports.StructuredTool = exports.Tool = exports.BingSerpAPI = exports.DadJokeAPI = exports.SerpAPI = void 0;
+ exports.formatToOpenAITool = exports.formatToOpenAIFunction = exports.SearchApi = exports.SearxngSearch = exports.DataForSeoAPISearch = exports.WolframAlphaTool = exports.WikipediaQueryRun = exports.BraveSearch = exports.WriteFileTool = exports.ReadFileTool = exports.AIPluginTool = exports.GoogleCustomSearch = exports.Serper = exports.ZapierNLAWrapper = exports.ZapierNLARunAction = exports.VectorStoreQATool = exports.RequestsPostTool = exports.RequestsGetTool = exports.JsonGetValueTool = exports.JsonListKeysTool = exports.JsonSpec = exports.ChainTool = exports.IFTTTWebhook = exports.DynamicStructuredTool = exports.DynamicTool = exports.StructuredTool = exports.Tool = exports.BingSerpAPI = exports.DadJokeAPI = exports.SerpAPI = void 0;
  var serpapi_js_1 = require("./serpapi.cjs");
  Object.defineProperty(exports, "SerpAPI", { enumerable: true, get: function () { return serpapi_js_1.SerpAPI; } });
  var dadjokeapi_js_1 = require("./dadjokeapi.cjs");
@@ -52,3 +52,4 @@ var searchapi_js_1 = require("./searchapi.cjs");
  Object.defineProperty(exports, "SearchApi", { enumerable: true, get: function () { return searchapi_js_1.SearchApi; } });
  var convert_to_openai_js_1 = require("./convert_to_openai.cjs");
  Object.defineProperty(exports, "formatToOpenAIFunction", { enumerable: true, get: function () { return convert_to_openai_js_1.formatToOpenAIFunction; } });
+ Object.defineProperty(exports, "formatToOpenAITool", { enumerable: true, get: function () { return convert_to_openai_js_1.formatToOpenAITool; } });
@@ -19,4 +19,4 @@ export { WolframAlphaTool } from "./wolframalpha.js";
  export { DataForSeoAPISearch, type DataForSeoApiConfig, } from "./dataforseo_api_search.js";
  export { SearxngSearch } from "./searxng_search.js";
  export { SearchApi, type SearchApiParameters } from "./searchapi.js";
- export { formatToOpenAIFunction } from "./convert_to_openai.js";
+ export { formatToOpenAIFunction, formatToOpenAITool, } from "./convert_to_openai.js";
@@ -19,4 +19,4 @@ export { WolframAlphaTool } from "./wolframalpha.js";
  export { DataForSeoAPISearch, } from "./dataforseo_api_search.js";
  export { SearxngSearch } from "./searxng_search.js";
  export { SearchApi } from "./searchapi.js";
- export { formatToOpenAIFunction } from "./convert_to_openai.js";
+ export { formatToOpenAIFunction, formatToOpenAITool, } from "./convert_to_openai.js";
@@ -1,4 +1,5 @@
  import { BaseLanguageModelCallOptions } from "../base_language/index.js";
+ import type { StringWithAutocomplete } from "./types.js";
  export interface OllamaInput {
  embeddingOnly?: boolean;
  f16KV?: boolean;
@@ -32,10 +33,12 @@ export interface OllamaInput {
  useMLock?: boolean;
  useMMap?: boolean;
  vocabOnly?: boolean;
+ format?: StringWithAutocomplete<"json">;
  }
  export interface OllamaRequestParams {
  model: string;
  prompt: string;
+ format?: StringWithAutocomplete<"json">;
  options: {
  embedding_only?: boolean;
  f16_kv?: boolean;
@@ -0,0 +1,5 @@
+ "use strict";
+ /**
+ * Represents a string value with autocompleted, but not required, suggestions.
+ */
+ Object.defineProperty(exports, "__esModule", { value: true });
@@ -0,0 +1,4 @@
+ /**
+ * Represents a string value with autocompleted, but not required, suggestions.
+ */
+ export type StringWithAutocomplete<T> = T | (string & Record<never, never>);
@@ -0,0 +1,4 @@
+ /**
+ * Represents a string value with autocompleted, but not required, suggestions.
+ */
+ export {};
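
The `StringWithAutocomplete<T>` helper added above keeps literal suggestions visible to editor autocomplete while still accepting any string; the `(string & Record<never, never>)` arm stops TypeScript from widening the union to plain `string`. A small standalone sketch of the behavior (the `Format` alias is illustrative):

```ts
// The helper exactly as added in util/types.d.ts:
type StringWithAutocomplete<T> = T | (string & Record<never, never>);

type Format = StringWithAutocomplete<"json">;

const a: Format = "json";     // the literal is suggested by the editor
const b: Format = "anything"; // but any other string is still accepted
```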
@@ -224,7 +224,7 @@ class MomentoVectorIndex extends base_js_1.VectorStore {
  pageContent: hit.metadata[this.textField]?.toString() ?? "",
  metadata: Object.fromEntries(Object.entries(hit.metadata).filter(([key]) => key !== this.textField)),
  }),
- hit.distance,
+ hit.score,
  ]);
  }
  else if (response instanceof sdk_core_1.VectorSearch.Error) {
@@ -198,7 +198,7 @@ export class MomentoVectorIndex extends VectorStore {
  pageContent: hit.metadata[this.textField]?.toString() ?? "",
  metadata: Object.fromEntries(Object.entries(hit.metadata).filter(([key]) => key !== this.textField)),
  }),
- hit.distance,
+ hit.score,
  ]);
  }
  else if (response instanceof VectorSearch.Error) {
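
The Momento vector store now pairs each document with the SDK hit's `score` field instead of its `distance`, so the numbers surfaced by `similaritySearchWithScore` reflect the new field. A hedged sketch of where the change is visible, assuming the `langchain/vectorstores/momento_vector_index` entrypoint and an already-constructed, populated index:

```ts
import type { MomentoVectorIndex } from "langchain/vectorstores/momento_vector_index";

// Sketch only: takes an already-constructed, populated index as input.
export async function topMatches(vectorStore: MomentoVectorIndex, query: string) {
  const results = await vectorStore.similaritySearchWithScore(query, 3);
  for (const [doc, score] of results) {
    // `score` now comes from the hit's `score` field rather than `distance`.
    console.log(score.toFixed(3), doc.pageContent.slice(0, 60));
  }
}
```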