langchain 0.0.186 → 0.0.188
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/callbacks/handlers/llmonitor.cjs +31 -17
- package/dist/callbacks/handlers/llmonitor.js +31 -17
- package/dist/chat_models/bedrock/web.cjs +5 -3
- package/dist/chat_models/bedrock/web.js +5 -3
- package/dist/embeddings/cohere.cjs +18 -9
- package/dist/embeddings/cohere.d.ts +13 -1
- package/dist/embeddings/cohere.js +18 -9
- package/dist/experimental/chat_models/ollama_functions.cjs +140 -0
- package/dist/experimental/chat_models/ollama_functions.d.ts +76 -0
- package/dist/experimental/chat_models/ollama_functions.js +136 -0
- package/dist/llms/bedrock/web.cjs +5 -3
- package/dist/llms/bedrock/web.js +5 -3
- package/dist/llms/cohere.cjs +9 -7
- package/dist/llms/cohere.d.ts +1 -1
- package/dist/llms/cohere.js +9 -7
- package/dist/load/import_map.cjs +3 -1
- package/dist/load/import_map.d.ts +1 -0
- package/dist/load/import_map.js +1 -0
- package/dist/memory/buffer_token_memory.cjs +92 -0
- package/dist/memory/buffer_token_memory.d.ts +41 -0
- package/dist/memory/buffer_token_memory.js +88 -0
- package/dist/memory/index.cjs +3 -1
- package/dist/memory/index.d.ts +1 -0
- package/dist/memory/index.js +1 -0
- package/dist/output_parsers/http_response.cjs +82 -0
- package/dist/output_parsers/http_response.d.ts +28 -0
- package/dist/output_parsers/http_response.js +78 -0
- package/dist/output_parsers/index.cjs +3 -1
- package/dist/output_parsers/index.d.ts +1 -0
- package/dist/output_parsers/index.js +1 -0
- package/dist/output_parsers/openai_functions.cjs +4 -6
- package/dist/output_parsers/openai_functions.d.ts +1 -1
- package/dist/output_parsers/openai_functions.js +4 -6
- package/dist/prompts/base.cjs +1 -1
- package/dist/prompts/base.js +1 -1
- package/dist/schema/index.cjs +2 -2
- package/dist/schema/index.d.ts +2 -2
- package/dist/schema/index.js +2 -2
- package/dist/schema/output_parser.d.ts +2 -2
- package/dist/util/bedrock.cjs +8 -0
- package/dist/util/bedrock.js +8 -0
- package/dist/util/ollama.cjs +10 -12
- package/dist/util/ollama.js +10 -12
- package/dist/util/openapi.cjs +5 -2
- package/dist/util/openapi.js +5 -2
- package/experimental/chat_models/ollama_functions.cjs +1 -0
- package/experimental/chat_models/ollama_functions.d.ts +1 -0
- package/experimental/chat_models/ollama_functions.js +1 -0
- package/package.json +16 -6
package/dist/output_parsers/http_response.d.ts
ADDED
@@ -0,0 +1,28 @@
+import { BaseMessage } from "../schema/index.js";
+import { BaseTransformOutputParser } from "../schema/output_parser.js";
+export type HttpResponseOutputParserInput = {
+    outputParser?: BaseTransformOutputParser;
+    contentType?: "text/plain" | "text/event-stream";
+};
+/**
+ * OutputParser that formats chunks emitted from an LLM for different HTTP content types.
+ */
+export declare class HttpResponseOutputParser extends BaseTransformOutputParser<Uint8Array> {
+    static lc_name(): string;
+    lc_namespace: string[];
+    lc_serializable: boolean;
+    outputParser: BaseTransformOutputParser;
+    contentType: "text/plain" | "text/event-stream";
+    constructor(fields?: HttpResponseOutputParserInput);
+    _transform(inputGenerator: AsyncGenerator<string | BaseMessage>): AsyncGenerator<Uint8Array>;
+    /**
+     * Parses a string output from an LLM call. This method is meant to be
+     * implemented by subclasses to define how a string output from an LLM
+     * should be parsed.
+     * @param text The string output from an LLM call.
+     * @param callbacks Optional callbacks.
+     * @returns A promise of the parsed output.
+     */
+    parse(text: string): Promise<Uint8Array>;
+    getFormatInstructions(): string;
+}

package/dist/output_parsers/http_response.js
ADDED
@@ -0,0 +1,78 @@
+import { BaseTransformOutputParser, StringOutputParser, } from "../schema/output_parser.js";
+/**
+ * OutputParser that formats chunks emitted from an LLM for different HTTP content types.
+ */
+export class HttpResponseOutputParser extends BaseTransformOutputParser {
+    static lc_name() {
+        return "HttpResponseOutputParser";
+    }
+    constructor(fields) {
+        super(fields);
+        Object.defineProperty(this, "lc_namespace", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: ["langchain", "output_parser"]
+        });
+        Object.defineProperty(this, "lc_serializable", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: true
+        });
+        Object.defineProperty(this, "outputParser", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new StringOutputParser()
+        });
+        Object.defineProperty(this, "contentType", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "text/plain"
+        });
+        this.outputParser = fields?.outputParser ?? this.outputParser;
+        this.contentType = fields?.contentType ?? this.contentType;
+    }
+    async *_transform(inputGenerator) {
+        for await (const chunk of this.outputParser._transform(inputGenerator)) {
+            if (typeof chunk === "string") {
+                yield this.parse(chunk);
+            }
+            else {
+                yield this.parse(JSON.stringify(chunk));
+            }
+        }
+        if (this.contentType === "text/event-stream") {
+            const encoder = new TextEncoder();
+            yield encoder.encode(`event: end\n\n`);
+        }
+    }
+    /**
+     * Parses a string output from an LLM call. This method is meant to be
+     * implemented by subclasses to define how a string output from an LLM
+     * should be parsed.
+     * @param text The string output from an LLM call.
+     * @param callbacks Optional callbacks.
+     * @returns A promise of the parsed output.
+     */
+    async parse(text) {
+        const chunk = await this.outputParser.parse(text);
+        let parsedChunk;
+        if (typeof chunk === "string") {
+            parsedChunk = chunk;
+        }
+        else {
+            parsedChunk = JSON.stringify(chunk);
+        }
+        const encoder = new TextEncoder();
+        if (this.contentType === "text/event-stream") {
+            return encoder.encode(`event: data\ndata: ${parsedChunk}\n\n`);
+        }
+        return encoder.encode(parsedChunk);
+    }
+    getFormatInstructions() {
+        return "";
+    }
+}
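
For orientation, here is a minimal usage sketch of the new parser (not part of the diff; the chat model, prompt, and handler setup are assumptions for illustration). Each streamed chunk is encoded as a `Uint8Array`, so the resulting stream can back an HTTP response body directly.

```ts
// Hedged sketch: assumes a streaming-capable model such as ChatOpenAI.
import { ChatOpenAI } from "langchain/chat_models/openai";
import { HttpResponseOutputParser } from "langchain/output_parsers";

const model = new ChatOpenAI({ temperature: 0 });
// "text/event-stream" wraps each chunk as an SSE `data:` event and emits a final `event: end`.
const parser = new HttpResponseOutputParser({ contentType: "text/event-stream" });

const stream = await model.pipe(parser).stream("Tell me a joke.");
// Each emitted chunk is already a Uint8Array, so the stream can be returned from a handler as-is.
const response = new Response(stream, {
  headers: { "Content-Type": "text/event-stream" },
});
```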

package/dist/output_parsers/index.cjs
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.JsonOutputToolsParser = exports.JsonKeyOutputFunctionsParser = exports.JsonOutputFunctionsParser = exports.OutputFunctionsParser = exports.CustomListOutputParser = exports.RouterOutputParser = exports.CombiningOutputParser = exports.OutputFixingParser = exports.JsonMarkdownStructuredOutputParser = exports.AsymmetricStructuredOutputParser = exports.StructuredOutputParser = exports.RegexParser = exports.CommaSeparatedListOutputParser = exports.ListOutputParser = void 0;
+exports.HttpResponseOutputParser = exports.JsonOutputToolsParser = exports.JsonKeyOutputFunctionsParser = exports.JsonOutputFunctionsParser = exports.OutputFunctionsParser = exports.CustomListOutputParser = exports.RouterOutputParser = exports.CombiningOutputParser = exports.OutputFixingParser = exports.JsonMarkdownStructuredOutputParser = exports.AsymmetricStructuredOutputParser = exports.StructuredOutputParser = exports.RegexParser = exports.CommaSeparatedListOutputParser = exports.ListOutputParser = void 0;
 var list_js_1 = require("./list.cjs");
 Object.defineProperty(exports, "ListOutputParser", { enumerable: true, get: function () { return list_js_1.ListOutputParser; } });
 Object.defineProperty(exports, "CommaSeparatedListOutputParser", { enumerable: true, get: function () { return list_js_1.CommaSeparatedListOutputParser; } });
@@ -24,3 +24,5 @@ Object.defineProperty(exports, "JsonOutputFunctionsParser", { enumerable: true,
 Object.defineProperty(exports, "JsonKeyOutputFunctionsParser", { enumerable: true, get: function () { return openai_functions_js_1.JsonKeyOutputFunctionsParser; } });
 var openai_tools_js_1 = require("../output_parsers/openai_tools.cjs");
 Object.defineProperty(exports, "JsonOutputToolsParser", { enumerable: true, get: function () { return openai_tools_js_1.JsonOutputToolsParser; } });
+var http_response_js_1 = require("./http_response.cjs");
+Object.defineProperty(exports, "HttpResponseOutputParser", { enumerable: true, get: function () { return http_response_js_1.HttpResponseOutputParser; } });

package/dist/output_parsers/index.d.ts
CHANGED
@@ -7,3 +7,4 @@ export { RouterOutputParser, type RouterOutputParserInput } from "./router.js";
 export { CustomListOutputParser } from "./list.js";
 export { type FunctionParameters, OutputFunctionsParser, JsonOutputFunctionsParser, JsonKeyOutputFunctionsParser, } from "../output_parsers/openai_functions.js";
 export { type ParsedToolCall, JsonOutputToolsParser, } from "../output_parsers/openai_tools.js";
+export { HttpResponseOutputParser, type HttpResponseOutputParserInput, } from "./http_response.js";

package/dist/output_parsers/index.js
CHANGED
@@ -7,3 +7,4 @@ export { RouterOutputParser } from "./router.js";
 export { CustomListOutputParser } from "./list.js";
 export { OutputFunctionsParser, JsonOutputFunctionsParser, JsonKeyOutputFunctionsParser, } from "../output_parsers/openai_functions.js";
 export { JsonOutputToolsParser, } from "../output_parsers/openai_tools.js";
+export { HttpResponseOutputParser, } from "./http_response.js";

package/dist/output_parsers/openai_functions.cjs
CHANGED
@@ -134,18 +134,16 @@ class JsonOutputFunctionsParser extends output_parser_js_1.BaseCumulativeTransfo
         if (!result) {
             throw new Error(`No result from "OutputFunctionsParser" ${JSON.stringify(generations)}`);
         }
-        const parsedResult = JSON.parse(result);
+        return this.parse(result);
+    }
+    async parse(text) {
+        const parsedResult = JSON.parse(text);
         if (this.argsOnly) {
             return parsedResult;
         }
         parsedResult.arguments = JSON.parse(parsedResult.arguments);
         return parsedResult;
     }
-    // This method would be called by the default implementation of `parse_result`
-    // but we're overriding that method so it's not needed.
-    async parse(_text) {
-        throw new Error("Not implemented.");
-    }
     getFormatInstructions() {
         return "";
     }

package/dist/output_parsers/openai_functions.d.ts
CHANGED
@@ -49,7 +49,7 @@ export declare class JsonOutputFunctionsParser extends BaseCumulativeTransformOu
      * @returns A JSON object representation of the function call or its arguments.
      */
     parseResult(generations: Generation[] | ChatGeneration[]): Promise<object>;
-    parse(
+    parse(text: string): Promise<object>;
     getFormatInstructions(): string;
 }
 /**

package/dist/output_parsers/openai_functions.js
CHANGED
@@ -130,18 +130,16 @@ export class JsonOutputFunctionsParser extends BaseCumulativeTransformOutputPars
         if (!result) {
             throw new Error(`No result from "OutputFunctionsParser" ${JSON.stringify(generations)}`);
         }
-        const parsedResult = JSON.parse(result);
+        return this.parse(result);
+    }
+    async parse(text) {
+        const parsedResult = JSON.parse(text);
         if (this.argsOnly) {
             return parsedResult;
         }
         parsedResult.arguments = JSON.parse(parsedResult.arguments);
         return parsedResult;
     }
-    // This method would be called by the default implementation of `parse_result`
-    // but we're overriding that method so it's not needed.
-    async parse(_text) {
-        throw new Error("Not implemented.");
-    }
     getFormatInstructions() {
         return "";
     }
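
In plain terms, `parse` is no longer a stub that throws; `parseResult` now delegates to it, and it JSON-parses a serialized function call, also decoding the stringified `arguments` unless `argsOnly` is set. A rough sketch of the new behaviour follows; the payload is invented, and `argsOnly` as a constructor field is an assumption based on the `this.argsOnly` branch in the hunk.

```ts
// Hedged sketch of the new parse() behaviour; the function-call payload is invented.
import { JsonOutputFunctionsParser } from "langchain/output_parsers";

const parser = new JsonOutputFunctionsParser({ argsOnly: false });
const parsed = await parser.parse(
  JSON.stringify({ name: "get_weather", arguments: '{"city":"Paris"}' })
);
// parsed is roughly { name: "get_weather", arguments: { city: "Paris" } }
```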

package/dist/prompts/base.cjs
CHANGED
@@ -12,7 +12,7 @@ const index_js_2 = require("../schema/runnable/index.cjs");
  */
 class StringPromptValue extends index_js_1.BasePromptValue {
     constructor(value) {
-        super(
+        super({ value });
         Object.defineProperty(this, "lc_namespace", {
             enumerable: true,
             configurable: true,

package/dist/prompts/base.js
CHANGED
@@ -9,7 +9,7 @@ import { Runnable } from "../schema/runnable/index.js";
  */
 export class StringPromptValue extends BasePromptValue {
     constructor(value) {
-        super(
+        super({ value });
         Object.defineProperty(this, "lc_namespace", {
             enumerable: true,
             configurable: true,

package/dist/schema/index.cjs
CHANGED
@@ -396,8 +396,8 @@ class ToolMessage extends BaseMessage {
     }
 exports.ToolMessage = ToolMessage;
 /**
- * Represents a chunk of a
- * with other
+ * Represents a chunk of a tool message, which can be concatenated
+ * with other tool message chunks.
  */
 class ToolMessageChunk extends BaseMessageChunk {
     constructor(fields) {

package/dist/schema/index.d.ts
CHANGED
@@ -232,8 +232,8 @@ export declare class ToolMessage extends BaseMessage {
     _getType(): MessageType;
 }
 /**
- * Represents a chunk of a
- * with other
+ * Represents a chunk of a tool message, which can be concatenated
+ * with other tool message chunks.
  */
 export declare class ToolMessageChunk extends BaseMessageChunk {
     tool_call_id: string;

package/dist/schema/index.js
CHANGED
@@ -381,8 +381,8 @@ export class ToolMessage extends BaseMessage {
     }
 }
 /**
- * Represents a chunk of a
- * with other
+ * Represents a chunk of a tool message, which can be concatenated
+ * with other tool message chunks.
  */
 export class ToolMessageChunk extends BaseMessageChunk {
     constructor(fields) {

package/dist/schema/output_parser.d.ts
CHANGED
@@ -77,7 +77,7 @@ export declare abstract class BaseOutputParser<T = unknown> extends BaseLLMOutpu
  * Class to parse the output of an LLM call that also allows streaming inputs.
  */
 export declare abstract class BaseTransformOutputParser<T = unknown> extends BaseOutputParser<T> {
-
+    _transform(inputGenerator: AsyncGenerator<string | BaseMessage>): AsyncGenerator<T>;
     /**
      * Transforms an asynchronous generator of input into an asynchronous
      * generator of parsed output.
@@ -100,7 +100,7 @@ export declare abstract class BaseCumulativeTransformOutputParser<T = unknown> e
     constructor(fields?: BaseCumulativeTransformOutputParserInput);
     protected abstract _diff(prev: any | undefined, next: any): any;
     abstract parsePartialResult(generations: Generation[] | ChatGeneration[]): Promise<T | undefined>;
-
+    _transform(inputGenerator: AsyncGenerator<string | BaseMessage>): AsyncGenerator<T>;
 }
 /**
  * OutputParser that parses LLMResult into the top likely string.

package/dist/util/bedrock.cjs
CHANGED
@@ -25,6 +25,11 @@ class BedrockLLMInputOutputAdapter {
             inputBody.temperature = temperature;
             inputBody.stopSequences = stopSequences;
         }
+        else if (provider === "meta") {
+            inputBody.prompt = prompt;
+            inputBody.max_gen_len = maxTokens;
+            inputBody.temperature = temperature;
+        }
         else if (provider === "amazon") {
             inputBody.inputText = prompt;
             inputBody.textGenerationConfig = {
@@ -60,6 +65,9 @@ class BedrockLLMInputOutputAdapter {
         else if (provider === "cohere") {
            return responseBody?.generations?.[0]?.text ?? responseBody?.text ?? "";
         }
+        else if (provider === "meta") {
+            return responseBody.generation;
+        }
         // I haven't been able to get a response with more than one result in it.
         return responseBody.results?.[0]?.outputText;
     }

package/dist/util/bedrock.js
CHANGED
@@ -22,6 +22,11 @@ export class BedrockLLMInputOutputAdapter {
             inputBody.temperature = temperature;
             inputBody.stopSequences = stopSequences;
         }
+        else if (provider === "meta") {
+            inputBody.prompt = prompt;
+            inputBody.max_gen_len = maxTokens;
+            inputBody.temperature = temperature;
+        }
         else if (provider === "amazon") {
             inputBody.inputText = prompt;
             inputBody.textGenerationConfig = {
@@ -57,6 +62,9 @@ export class BedrockLLMInputOutputAdapter {
         else if (provider === "cohere") {
            return responseBody?.generations?.[0]?.text ?? responseBody?.text ?? "";
         }
+        else if (provider === "meta") {
+            return responseBody.generation;
+        }
         // I haven't been able to get a response with more than one result in it.
         return responseBody.results?.[0]?.outputText;
     }
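
The two new branches above add Bedrock's `meta` provider (the Llama 2 family). As a standalone illustration only (the adapter's method names are not visible in these hunks), the request body gains `prompt`, `max_gen_len`, and `temperature`, and the completion is read back from the `generation` field:

```ts
// Illustrative helpers mirroring the new `provider === "meta"` branches; not the package's API.
function prepareMetaInput(prompt: string, maxTokens: number, temperature: number) {
  // Llama 2 on Bedrock is assumed to accept these three request fields.
  return { prompt, max_gen_len: maxTokens, temperature };
}

function readMetaOutput(responseBody: { generation: string }): string {
  // The completion text comes back on `generation`.
  return responseBody.generation;
}
```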

package/dist/util/ollama.cjs
CHANGED
@@ -29,20 +29,18 @@ async function* createOllamaStream(baseUrl, params, options) {
     }
     const stream = stream_js_1.IterableReadableStream.fromReadableStream(response.body);
     const decoder = new TextDecoder();
+    let extra = "";
     for await (const chunk of stream) {
-
-
-
-
-
-
-
-
-            }
+        const decoded = extra + decoder.decode(chunk);
+        const lines = decoded.split("\n");
+        extra = lines.pop() || "";
+        for (const line of lines) {
+            try {
+                yield JSON.parse(line);
+            }
+            catch (e) {
+                console.warn(`Received a non-JSON parseable chunk: ${line}`);
             }
-        }
-        catch (e) {
-            console.warn(`Received a non-JSON parseable chunk: ${decoder.decode(chunk)}`);
         }
     }
 }

package/dist/util/ollama.js
CHANGED
@@ -26,20 +26,18 @@ export async function* createOllamaStream(baseUrl, params, options) {
     }
     const stream = IterableReadableStream.fromReadableStream(response.body);
     const decoder = new TextDecoder();
+    let extra = "";
     for await (const chunk of stream) {
-
-
-
-
-
-
-
-
-            }
+        const decoded = extra + decoder.decode(chunk);
+        const lines = decoded.split("\n");
+        extra = lines.pop() || "";
+        for (const line of lines) {
+            try {
+                yield JSON.parse(line);
+            }
+            catch (e) {
+                console.warn(`Received a non-JSON parseable chunk: ${line}`);
             }
-        }
-        catch (e) {
-            console.warn(`Received a non-JSON parseable chunk: ${decoder.decode(chunk)}`);
         }
     }
 }
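
Both Ollama stream readers now carry the trailing partial line across network chunks in `extra`, so a JSON object split between two chunks is parsed once the remainder arrives instead of triggering the non-JSON warning. A self-contained sketch of that buffering, using made-up chunks:

```ts
// Standalone illustration of the carry-over buffering above; the chunks are hypothetical.
const encoder = new TextEncoder();
const decoder = new TextDecoder();
const chunks = [
  encoder.encode('{"response":"Hel'),      // first half of a JSON line
  encoder.encode('lo"}\n{"done":true}\n'), // remainder plus a complete line
];

let extra = "";
const parsed: object[] = [];
for (const chunk of chunks) {
  const decoded = extra + decoder.decode(chunk);
  const lines = decoded.split("\n");
  extra = lines.pop() || ""; // keep the (possibly incomplete) last line for the next chunk
  for (const line of lines) {
    if (line.trim()) parsed.push(JSON.parse(line));
  }
}
// parsed: [ { response: "Hello" }, { done: true } ]
```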

package/dist/util/openapi.cjs
CHANGED
@@ -177,10 +177,13 @@ class OpenAPISpec {
     static getCleanedOperationId(operation, path, method) {
         let { operationId } = operation;
         if (operationId === undefined) {
-            const updatedPath = path.
+            const updatedPath = path.replaceAll(/[^a-zA-Z0-9]/, "_");
             operationId = `${updatedPath.startsWith("/") ? updatedPath.slice(1) : updatedPath}_${method}`;
         }
-        return operationId
+        return operationId
+            .replaceAll("-", "_")
+            .replaceAll(".", "_")
+            .replaceAll("/", "_");
     }
     // eslint-disable-next-line @typescript-eslint/no-explicit-any
     static alertUnsupportedSpec(document) {

package/dist/util/openapi.js
CHANGED
@@ -151,10 +151,13 @@ export class OpenAPISpec {
     static getCleanedOperationId(operation, path, method) {
         let { operationId } = operation;
         if (operationId === undefined) {
-            const updatedPath = path.
+            const updatedPath = path.replaceAll(/[^a-zA-Z0-9]/, "_");
             operationId = `${updatedPath.startsWith("/") ? updatedPath.slice(1) : updatedPath}_${method}`;
         }
-        return operationId
+        return operationId
+            .replaceAll("-", "_")
+            .replaceAll(".", "_")
+            .replaceAll("/", "_");
     }
     // eslint-disable-next-line @typescript-eslint/no-explicit-any
     static alertUnsupportedSpec(document) {
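
The added `replaceAll` chain simply strips characters that are not valid in generated function names. For example (illustration only, with an invented operationId):

```ts
// "orders.get-by/id" becomes "orders_get_by_id"
const cleaned = "orders.get-by/id"
  .replaceAll("-", "_")
  .replaceAll(".", "_")
  .replaceAll("/", "_");
```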

package/experimental/chat_models/ollama_functions.cjs
ADDED
@@ -0,0 +1 @@
+module.exports = require('../../dist/experimental/chat_models/ollama_functions.cjs');

package/experimental/chat_models/ollama_functions.d.ts
ADDED
@@ -0,0 +1 @@
+export * from '../../dist/experimental/chat_models/ollama_functions.js'

package/experimental/chat_models/ollama_functions.js
ADDED
@@ -0,0 +1 @@
+export * from '../../dist/experimental/chat_models/ollama_functions.js'

package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "langchain",
-  "version": "0.0.186",
+  "version": "0.0.188",
   "description": "Typescript bindings for langchain",
   "type": "module",
   "engines": {
@@ -778,6 +778,9 @@
     "experimental/chat_models/bittensor.cjs",
     "experimental/chat_models/bittensor.js",
     "experimental/chat_models/bittensor.d.ts",
+    "experimental/chat_models/ollama_functions.cjs",
+    "experimental/chat_models/ollama_functions.js",
+    "experimental/chat_models/ollama_functions.d.ts",
     "experimental/llms/bittensor.cjs",
     "experimental/llms/bittensor.js",
     "experimental/llms/bittensor.d.ts",
@@ -806,7 +809,8 @@
     "build:esm": "NODE_OPTIONS=--max-old-space-size=4096 tsc --outDir dist/ && rimraf dist/tests dist/**/tests",
     "build:cjs": "NODE_OPTIONS=--max-old-space-size=4096 tsc --outDir dist-cjs/ -p tsconfig.cjs.json && node scripts/move-cjs-to-dist.js && rimraf dist-cjs",
     "build:watch": "node scripts/create-entrypoints.js && tsc --outDir dist/ --watch",
-    "build:scripts": "node scripts/create-entrypoints.js && node scripts/check-tree-shaking.js && node scripts/generate-docs-llm-compatibility-table",
+    "build:scripts": "node scripts/create-entrypoints.js && node scripts/check-tree-shaking.js && yarn conditional:api_refs && node scripts/generate-docs-llm-compatibility-table",
+    "conditional:api_refs": "bash scripts/build-api-refs.sh",
     "lint": "NODE_OPTIONS=--max-old-space-size=4096 eslint src && dpdm --exit-code circular:1 --no-warning --no-tree src/*.ts src/**/*.ts",
     "lint:fix": "yarn lint --fix",
     "precommit": "lint-staged",
@@ -898,7 +902,7 @@
     "closevector-common": "0.1.0-alpha.1",
     "closevector-node": "0.1.0-alpha.10",
     "closevector-web": "0.1.0-alpha.15",
-    "cohere-ai": "
+    "cohere-ai": "^7.2.0",
     "convex": "^1.3.1",
     "d3-dsv": "^2.0.0",
     "dotenv": "^16.0.3",
@@ -923,7 +927,7 @@
     "jest": "^29.5.0",
     "jest-environment-node": "^29.6.4",
     "jsdom": "^22.1.0",
-    "llmonitor": "^0.5.
+    "llmonitor": "^0.5.9",
     "lodash": "^4.17.21",
     "mammoth": "^1.5.1",
     "ml-matrix": "^6.10.4",
@@ -950,6 +954,7 @@
     "sqlite3": "^5.1.4",
     "srt-parser-2": "^1.2.2",
     "ts-jest": "^29.1.0",
+    "ts-morph": "^20.0.0",
     "typeorm": "^0.3.12",
     "typescript": "^5.0.0",
     "typesense": "^1.5.3",
@@ -1015,7 +1020,7 @@
     "closevector-common": "0.1.0-alpha.1",
     "closevector-node": "0.1.0-alpha.10",
     "closevector-web": "0.1.0-alpha.16",
-    "cohere-ai": "
+    "cohere-ai": "^7.2.0",
     "convex": "^1.3.1",
     "d3-dsv": "^2.0.0",
     "epub2": "^3.0.1",
@@ -1029,7 +1034,7 @@
     "ignore": "^5.2.0",
     "ioredis": "^5.3.2",
     "jsdom": "*",
-    "llmonitor": "^0.5.
+    "llmonitor": "^0.5.9",
     "lodash": "^4.17.21",
     "mammoth": "*",
     "mongodb": "^5.2.0",
@@ -2674,6 +2679,11 @@
     "import": "./experimental/chat_models/bittensor.js",
     "require": "./experimental/chat_models/bittensor.cjs"
   },
+  "./experimental/chat_models/ollama_functions": {
+    "types": "./experimental/chat_models/ollama_functions.d.ts",
+    "import": "./experimental/chat_models/ollama_functions.js",
+    "require": "./experimental/chat_models/ollama_functions.cjs"
+  },
   "./experimental/llms/bittensor": {
     "types": "./experimental/llms/bittensor.d.ts",
     "import": "./experimental/llms/bittensor.js",