langchain 0.0.197 → 0.0.198
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chat_models/llama_cpp.cjs +24 -0
- package/dist/chat_models/llama_cpp.d.ts +3 -1
- package/dist/chat_models/llama_cpp.js +24 -0
- package/dist/document_loaders/fs/pptx.cjs +39 -0
- package/dist/document_loaders/fs/pptx.d.ts +23 -0
- package/dist/document_loaders/fs/pptx.js +35 -0
- package/dist/experimental/openai_assistant/index.cjs +32 -0
- package/dist/experimental/openai_assistant/index.d.ts +26 -0
- package/dist/experimental/openai_assistant/index.js +32 -0
- package/dist/experimental/tools/pyinterpreter.cjs +248 -0
- package/dist/experimental/tools/pyinterpreter.d.ts +18 -0
- package/dist/experimental/tools/pyinterpreter.js +244 -0
- package/dist/graphs/neo4j_graph.cjs +49 -14
- package/dist/graphs/neo4j_graph.d.ts +30 -0
- package/dist/graphs/neo4j_graph.js +49 -14
- package/dist/llms/hf.cjs +13 -2
- package/dist/llms/hf.d.ts +5 -0
- package/dist/llms/hf.js +13 -2
- package/dist/llms/llama_cpp.cjs +17 -3
- package/dist/llms/llama_cpp.d.ts +4 -1
- package/dist/llms/llama_cpp.js +17 -3
- package/dist/load/import_constants.cjs +3 -0
- package/dist/load/import_constants.js +3 -0
- package/dist/output_parsers/json.cjs +4 -0
- package/dist/output_parsers/json.js +4 -0
- package/dist/vectorstores/clickhouse.cjs +286 -0
- package/dist/vectorstores/clickhouse.d.ts +126 -0
- package/dist/vectorstores/clickhouse.js +259 -0
- package/dist/vectorstores/pgvector.cjs +142 -18
- package/dist/vectorstores/pgvector.d.ts +21 -0
- package/dist/vectorstores/pgvector.js +142 -18
- package/dist/vectorstores/weaviate.cjs +45 -2
- package/dist/vectorstores/weaviate.d.ts +27 -1
- package/dist/vectorstores/weaviate.js +45 -2
- package/document_loaders/fs/pptx.cjs +1 -0
- package/document_loaders/fs/pptx.d.ts +1 -0
- package/document_loaders/fs/pptx.js +1 -0
- package/experimental/tools/pyinterpreter.cjs +1 -0
- package/experimental/tools/pyinterpreter.d.ts +1 -0
- package/experimental/tools/pyinterpreter.js +1 -0
- package/package.json +39 -4
- package/vectorstores/clickhouse.cjs +1 -0
- package/vectorstores/clickhouse.d.ts +1 -0
- package/vectorstores/clickhouse.js +1 -0
@@ -4,6 +4,7 @@ exports.ChatLlamaCpp = void 0;
 const node_llama_cpp_1 = require("node-llama-cpp");
 const base_js_1 = require("./base.cjs");
 const llama_cpp_js_1 = require("../util/llama_cpp.cjs");
+const index_js_1 = require("../schema/index.cjs");
 /**
  * To use this model you need to have the `node-llama-cpp` module installed.
  * This can be installed using `npm install -S node-llama-cpp` and the minimum
@@ -139,6 +140,29 @@ class ChatLlamaCpp extends base_js_1.SimpleChatModel {
             throw new Error("Error getting prompt completion.");
         }
     }
+    async *_streamResponseChunks(input, _options, runManager) {
+        if (input.length !== 1) {
+            throw new Error("Only one human message should be provided.");
+        }
+        else {
+            const promptOptions = {
+                temperature: this?.temperature,
+                topK: this?.topK,
+                topP: this?.topP,
+            };
+            const stream = await this.caller.call(async () => this._context.evaluate(this._context.encode(`${input[0].content}`), promptOptions));
+            for await (const chunk of stream) {
+                yield new index_js_1.ChatGenerationChunk({
+                    text: this._context.decode([chunk]),
+                    message: new index_js_1.AIMessageChunk({
+                        content: this._context.decode([chunk]),
+                    }),
+                    generationInfo: {},
+                });
+                await runManager?.handleLLMNewToken(this._context.decode([chunk]) ?? "");
+            }
+        }
+    }
     // This constructs a new session if we need to adding in any sys messages or previous chats
     _buildSession(messages) {
         let prompt = "";
@@ -2,7 +2,8 @@ import { LlamaModel, LlamaContext, LlamaChatSession, type ConversationInteractio
 import { SimpleChatModel, BaseChatModelParams } from "./base.js";
 import { LlamaBaseCppInputs } from "../util/llama_cpp.js";
 import { BaseLanguageModelCallOptions } from "../base_language/index.js";
-import
+import { CallbackManagerForLLMRun } from "../callbacks/manager.js";
+import { BaseMessage, ChatGenerationChunk } from "../schema/index.js";
 /**
  * Note that the modelPath is the only required parameter. For testing you
  * can set this in the environment variable `LLAMA_PATH`.
@@ -63,6 +64,7 @@ export declare class ChatLlamaCpp extends SimpleChatModel<LlamaCppCallOptions> {
     };
     /** @ignore */
     _call(messages: BaseMessage[], _options: this["ParsedCallOptions"]): Promise<string>;
+    _streamResponseChunks(input: BaseMessage[], _options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
     protected _buildSession(messages: BaseMessage[]): string;
     protected _convertMessagesToInteractions(messages: BaseMessage[]): ConversationInteraction[];
 }
@@ -1,6 +1,7 @@
 import { LlamaChatSession, } from "node-llama-cpp";
 import { SimpleChatModel } from "./base.js";
 import { createLlamaModel, createLlamaContext, } from "../util/llama_cpp.js";
+import { ChatGenerationChunk, AIMessageChunk, } from "../schema/index.js";
 /**
  * To use this model you need to have the `node-llama-cpp` module installed.
  * This can be installed using `npm install -S node-llama-cpp` and the minimum
@@ -136,6 +137,29 @@ export class ChatLlamaCpp extends SimpleChatModel {
             throw new Error("Error getting prompt completion.");
         }
     }
+    async *_streamResponseChunks(input, _options, runManager) {
+        if (input.length !== 1) {
+            throw new Error("Only one human message should be provided.");
+        }
+        else {
+            const promptOptions = {
+                temperature: this?.temperature,
+                topK: this?.topK,
+                topP: this?.topP,
+            };
+            const stream = await this.caller.call(async () => this._context.evaluate(this._context.encode(`${input[0].content}`), promptOptions));
+            for await (const chunk of stream) {
+                yield new ChatGenerationChunk({
+                    text: this._context.decode([chunk]),
+                    message: new AIMessageChunk({
+                        content: this._context.decode([chunk]),
+                    }),
+                    generationInfo: {},
+                });
+                await runManager?.handleLLMNewToken(this._context.decode([chunk]) ?? "");
+            }
+        }
+    }
     // This constructs a new session if we need to adding in any sys messages or previous chats
     _buildSession(messages) {
         let prompt = "";
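
The three chat_models/llama_cpp hunks above add a `_streamResponseChunks` generator (plus the schema imports it needs), which is what the public `.stream()` API delegates to. A minimal usage sketch, assuming `node-llama-cpp` is installed; the model path is a placeholder:

```typescript
import { ChatLlamaCpp } from "langchain/chat_models/llama_cpp";
import { HumanMessage } from "langchain/schema";

// Placeholder model path: modelPath is the only required parameter.
const model = new ChatLlamaCpp({
  modelPath: "/path/to/your-model.gguf",
  temperature: 0.7,
});

// .stream() is backed by the new _streamResponseChunks generator, so tokens
// arrive one AIMessageChunk at a time; only a single human message is allowed.
const stream = await model.stream([
  new HumanMessage("Tell me a short story about a happy llama."),
]);

for await (const chunk of stream) {
  process.stdout.write(chunk.content);
}
```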
@@ -0,0 +1,39 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PPTXLoader = void 0;
+const officeparser_1 = require("officeparser");
+const document_js_1 = require("../../document.cjs");
+const buffer_js_1 = require("./buffer.cjs");
+/**
+ * A class that extends the `BufferLoader` class. It represents a document
+ * loader that loads documents from PDF files.
+ */
+class PPTXLoader extends buffer_js_1.BufferLoader {
+    constructor(filePathOrBlob) {
+        super(filePathOrBlob);
+    }
+    /**
+     * A method that takes a `raw` buffer and `metadata` as parameters and
+     * returns a promise that resolves to an array of `Document` instances. It
+     * uses the `parseOfficeAsync` function from the `officeparser` module to extract
+     * the raw text content from the buffer. If the extracted powerpoint content is
+     * empty, it returns an empty array. Otherwise, it creates a new
+     * `Document` instance with the extracted powerpoint content and the provided
+     * metadata, and returns it as an array.
+     * @param raw The buffer to be parsed.
+     * @param metadata The metadata of the document.
+     * @returns A promise that resolves to an array of `Document` instances.
+     */
+    async parse(raw, metadata) {
+        const pptx = await (0, officeparser_1.parseOfficeAsync)(raw, { outputErrorToConsole: true });
+        if (!pptx)
+            return [];
+        return [
+            new document_js_1.Document({
+                pageContent: pptx,
+                metadata,
+            }),
+        ];
+    }
+}
+exports.PPTXLoader = PPTXLoader;
@@ -0,0 +1,23 @@
+/// <reference types="node" resolution-mode="require"/>
+import { Document } from "../../document.js";
+import { BufferLoader } from "./buffer.js";
+/**
+ * A class that extends the `BufferLoader` class. It represents a document
+ * loader that loads documents from PDF files.
+ */
+export declare class PPTXLoader extends BufferLoader {
+    constructor(filePathOrBlob: string | Blob);
+    /**
+     * A method that takes a `raw` buffer and `metadata` as parameters and
+     * returns a promise that resolves to an array of `Document` instances. It
+     * uses the `parseOfficeAsync` function from the `officeparser` module to extract
+     * the raw text content from the buffer. If the extracted powerpoint content is
+     * empty, it returns an empty array. Otherwise, it creates a new
+     * `Document` instance with the extracted powerpoint content and the provided
+     * metadata, and returns it as an array.
+     * @param raw The buffer to be parsed.
+     * @param metadata The metadata of the document.
+     * @returns A promise that resolves to an array of `Document` instances.
+     */
+    parse(raw: Buffer, metadata: Document["metadata"]): Promise<Document[]>;
+}
@@ -0,0 +1,35 @@
+import { parseOfficeAsync } from "officeparser";
+import { Document } from "../../document.js";
+import { BufferLoader } from "./buffer.js";
+/**
+ * A class that extends the `BufferLoader` class. It represents a document
+ * loader that loads documents from PDF files.
+ */
+export class PPTXLoader extends BufferLoader {
+    constructor(filePathOrBlob) {
+        super(filePathOrBlob);
+    }
+    /**
+     * A method that takes a `raw` buffer and `metadata` as parameters and
+     * returns a promise that resolves to an array of `Document` instances. It
+     * uses the `parseOfficeAsync` function from the `officeparser` module to extract
+     * the raw text content from the buffer. If the extracted powerpoint content is
+     * empty, it returns an empty array. Otherwise, it creates a new
+     * `Document` instance with the extracted powerpoint content and the provided
+     * metadata, and returns it as an array.
+     * @param raw The buffer to be parsed.
+     * @param metadata The metadata of the document.
+     * @returns A promise that resolves to an array of `Document` instances.
+     */
+    async parse(raw, metadata) {
+        const pptx = await parseOfficeAsync(raw, { outputErrorToConsole: true });
+        if (!pptx)
+            return [];
+        return [
+            new Document({
+                pageContent: pptx,
+                metadata,
+            }),
+        ];
+    }
+}
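
The new document_loaders/fs/pptx files add a PPTXLoader that delegates to officeparser's `parseOfficeAsync` and wraps the extracted slide text in a single Document. A rough sketch of how it could be used; the file path is illustrative and the optional `officeparser` dependency is assumed to be installed:

```typescript
import { PPTXLoader } from "langchain/document_loaders/fs/pptx";

// Illustrative path; the loader also accepts a Blob.
const loader = new PPTXLoader("./example-data/slides.pptx");

// parse() wraps the text extracted by officeparser in a single Document,
// or yields no documents when nothing could be extracted.
const docs = await loader.load();
console.log(docs[0]?.pageContent);
```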
@@ -109,6 +109,38 @@ class OpenAIAssistantRunnable extends base_js_1.Runnable {
         }
         return this._getResponse(run.id, run.thread_id);
     }
+    /**
+     * Delete an assistant.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/assistants/deleteAssistant}
+     * @returns {Promise<AssistantDeleted>}
+     */
+    async deleteAssistant() {
+        return await this.client.beta.assistants.del(this.assistantId);
+    }
+    /**
+     * Retrieves an assistant.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/assistants/getAssistant}
+     * @returns {Promise<OpenAIClient.Beta.Assistants.Assistant>}
+     */
+    async getAssistant() {
+        return await this.client.beta.assistants.retrieve(this.assistantId);
+    }
+    /**
+     * Modifies an assistant.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/assistants/modifyAssistant}
+     * @returns {Promise<OpenAIClient.Beta.Assistants.Assistant>}
+     */
+    async modifyAssistant({ model, name, instructions, fileIds, }) {
+        return await this.client.beta.assistants.update(this.assistantId, {
+            name,
+            instructions,
+            model,
+            file_ids: fileIds,
+        });
+    }
     async _parseStepsInput(input) {
         const { action: { runId, threadId }, } = input.steps[input.steps.length - 1];
         const run = await this._waitForRun(runId, threadId);
@@ -28,6 +28,32 @@ export declare class OpenAIAssistantRunnable<AsAgent extends boolean | undefined
         fileIds?: string[];
     }): Promise<OpenAIAssistantRunnable<AsAgent, Record<string, any>>>;
     invoke(input: RunInput, _options?: RunnableConfig): Promise<ExtractRunOutput<AsAgent>>;
+    /**
+     * Delete an assistant.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/assistants/deleteAssistant}
+     * @returns {Promise<AssistantDeleted>}
+     */
+    deleteAssistant(): Promise<OpenAIClient.Beta.Assistants.AssistantDeleted>;
+    /**
+     * Retrieves an assistant.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/assistants/getAssistant}
+     * @returns {Promise<OpenAIClient.Beta.Assistants.Assistant>}
+     */
+    getAssistant(): Promise<OpenAIClient.Beta.Assistants.Assistant>;
+    /**
+     * Modifies an assistant.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/assistants/modifyAssistant}
+     * @returns {Promise<OpenAIClient.Beta.Assistants.Assistant>}
+     */
+    modifyAssistant<AsAgent extends boolean>({ model, name, instructions, fileIds, }: Omit<OpenAIAssistantRunnableInput<AsAgent>, "assistantId" | "tools"> & {
+        model?: string;
+        name?: string;
+        instructions?: string;
+        fileIds?: string[];
+    }): Promise<OpenAIClient.Beta.Assistants.Assistant>;
     private _parseStepsInput;
     private _createRun;
     private _createThreadAndRun;
@@ -106,6 +106,38 @@ export class OpenAIAssistantRunnable extends Runnable {
         }
         return this._getResponse(run.id, run.thread_id);
     }
+    /**
+     * Delete an assistant.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/assistants/deleteAssistant}
+     * @returns {Promise<AssistantDeleted>}
+     */
+    async deleteAssistant() {
+        return await this.client.beta.assistants.del(this.assistantId);
+    }
+    /**
+     * Retrieves an assistant.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/assistants/getAssistant}
+     * @returns {Promise<OpenAIClient.Beta.Assistants.Assistant>}
+     */
+    async getAssistant() {
+        return await this.client.beta.assistants.retrieve(this.assistantId);
+    }
+    /**
+     * Modifies an assistant.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/assistants/modifyAssistant}
+     * @returns {Promise<OpenAIClient.Beta.Assistants.Assistant>}
+     */
+    async modifyAssistant({ model, name, instructions, fileIds, }) {
+        return await this.client.beta.assistants.update(this.assistantId, {
+            name,
+            instructions,
+            model,
+            file_ids: fileIds,
+        });
+    }
     async _parseStepsInput(input) {
         const { action: { runId, threadId }, } = input.steps[input.steps.length - 1];
         const run = await this._waitForRun(runId, threadId);
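
The openai_assistant hunks add three thin wrappers over the OpenAI Assistants API to OpenAIAssistantRunnable: `deleteAssistant`, `getAssistant`, and `modifyAssistant`. A hedged sketch of calling them on an existing assistant; the assistant id is a placeholder and an `OPENAI_API_KEY` environment variable is assumed:

```typescript
import { OpenAIAssistantRunnable } from "langchain/experimental/openai_assistant";

// Placeholder id; the client reads OPENAI_API_KEY from the environment by default.
const assistant = new OpenAIAssistantRunnable({ assistantId: "asst_..." });

// Fetch the assistant as currently stored on the OpenAI side.
const current = await assistant.getAssistant();
console.log(current.name, current.model);

// Update a subset of fields via the Assistants "modify" endpoint.
await assistant.modifyAssistant({
  name: "Renamed assistant",
  instructions: "You are a terse assistant.",
});

// Remove the assistant once it is no longer needed.
await assistant.deleteAssistant();
```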
@@ -0,0 +1,248 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PythonInterpreterTool = void 0;
+const pyodide_1 = require("pyodide");
+const base_js_1 = require("../../tools/base.cjs");
+class PythonInterpreterTool extends base_js_1.Tool {
+    static lc_name() {
+        return "PythonInterpreterTool";
+    }
+    constructor(options) {
+        super(options);
+        Object.defineProperty(this, "name", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "python_interpreter"
+        });
+        Object.defineProperty(this, "description", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: `Evaluates python code in a sandbox environment. The environment resets on every execution. You must send the whole script every time and print your outputs. Script should be pure python code that can be evaluated. Packages available:
+${this.availableDefaultPackages}`
+        });
+        Object.defineProperty(this, "pyodideInstance", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "stdout", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: ""
+        });
+        Object.defineProperty(this, "stderr", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: ""
+        });
+        this.pyodideInstance = options.instance;
+        this.pyodideInstance.setStderr({
+            batched: (text) => {
+                this.stderr += text;
+            },
+        });
+        this.pyodideInstance.setStdout({
+            batched: (text) => {
+                this.stdout += text;
+            },
+        });
+    }
+    async addPackage(packageName) {
+        await this.pyodideInstance.loadPackage(packageName);
+        this.description += `, ${packageName}`;
+    }
+    get availableDefaultPackages() {
+        return [
+            "asciitree",
+            "astropy",
+            "atomicwrites",
+            "attrs",
+            "autograd",
+            "awkward-cpp",
+            "bcrypt",
+            "beautifulsoup4",
+            "biopython",
+            "bitarray",
+            "bitstring",
+            "bleach",
+            "bokeh",
+            "boost-histogram",
+            "brotli",
+            "cachetools",
+            "Cartopy",
+            "cbor-diag",
+            "certifi",
+            "cffi",
+            "cffi_example",
+            "cftime",
+            "click",
+            "cligj",
+            "cloudpickle",
+            "cmyt",
+            "colorspacious",
+            "contourpy",
+            "coolprop",
+            "coverage",
+            "cramjam",
+            "cryptography",
+            "cssselect",
+            "cycler",
+            "cytoolz",
+            "decorator",
+            "demes",
+            "deprecation",
+            "distlib",
+            "docutils",
+            "exceptiongroup",
+            "fastparquet",
+            "fiona",
+            "fonttools",
+            "freesasa",
+            "fsspec",
+            "future",
+            "galpy",
+            "gensim",
+            "geopandas",
+            "gmpy2",
+            "gsw",
+            "h5py",
+            "html5lib",
+            "idna",
+            "igraph",
+            "imageio",
+            "iniconfig",
+            "jedi",
+            "Jinja2",
+            "joblib",
+            "jsonschema",
+            "kiwisolver",
+            "lazy-object-proxy",
+            "lazy_loader",
+            "lightgbm",
+            "logbook",
+            "lxml",
+            "MarkupSafe",
+            "matplotlib",
+            "matplotlib-pyodide",
+            "micropip",
+            "mne",
+            "more-itertools",
+            "mpmath",
+            "msgpack",
+            "msprime",
+            "multidict",
+            "munch",
+            "mypy",
+            "netcdf4",
+            "networkx",
+            "newick",
+            "nlopt",
+            "nltk",
+            "nose",
+            "numcodecs",
+            "numpy",
+            "opencv-python",
+            "optlang",
+            "orjson",
+            "packaging",
+            "pandas",
+            "parso",
+            "patsy",
+            "peewee",
+            "Pillow",
+            "pillow_heif",
+            "pkgconfig",
+            "pluggy",
+            "protobuf",
+            "py",
+            "pyb2d",
+            "pyclipper",
+            "pycparser",
+            "pycryptodome",
+            "pydantic",
+            "pyerfa",
+            "Pygments",
+            "pyheif",
+            "pyinstrument",
+            "pynacl",
+            "pyodide-http",
+            "pyodide-tblib",
+            "pyparsing",
+            "pyproj",
+            "pyrsistent",
+            "pyshp",
+            "pytest",
+            "pytest-benchmark",
+            "python-dateutil",
+            "python-magic",
+            "python-sat",
+            "python_solvespace",
+            "pytz",
+            "pywavelets",
+            "pyxel",
+            "pyyaml",
+            "rebound",
+            "reboundx",
+            "regex",
+            "retrying",
+            "RobotRaconteur",
+            "ruamel.yaml",
+            "rust-panic-test",
+            "scikit-image",
+            "scikit-learn",
+            "scipy",
+            "screed",
+            "setuptools",
+            "shapely",
+            "simplejson",
+            "six",
+            "smart_open",
+            "soupsieve",
+            "sourmash",
+            "sparseqr",
+            "sqlalchemy",
+            "statsmodels",
+            "svgwrite",
+            "swiglpk",
+            "sympy",
+            "termcolor",
+            "texttable",
+            "threadpoolctl",
+            "tomli",
+            "tomli-w",
+            "toolz",
+            "tqdm",
+            "traits",
+            "tskit",
+            "typing-extensions",
+            "uncertainties",
+            "unyt",
+            "webencodings",
+            "wordcloud",
+            "wrapt",
+            "xarray",
+            "xgboost",
+            "xlrd",
+            "xyzservices",
+            "yarl",
+            "yt",
+            "zarr",
+        ].join(", ");
+    }
+    static async initialize(options) {
+        const instance = await (0, pyodide_1.loadPyodide)(options);
+        return new this({ ...options, instance });
+    }
+    async _call(script) {
+        this.stdout = "";
+        this.stderr = "";
+        await this.pyodideInstance.runPythonAsync(script);
+        return JSON.stringify({ stdout: this.stdout, stderr: this.stderr });
+    }
+}
+exports.PythonInterpreterTool = PythonInterpreterTool;
@@ -0,0 +1,18 @@
+import { loadPyodide, type PyodideInterface } from "pyodide";
+import { Tool, ToolParams } from "../../tools/base.js";
+export type PythonInterpreterToolParams = Parameters<typeof loadPyodide>[0] & ToolParams & {
+    instance: PyodideInterface;
+};
+export declare class PythonInterpreterTool extends Tool {
+    static lc_name(): string;
+    name: string;
+    description: string;
+    pyodideInstance: PyodideInterface;
+    stdout: string;
+    stderr: string;
+    constructor(options: PythonInterpreterToolParams);
+    addPackage(packageName: string): Promise<void>;
+    get availableDefaultPackages(): string;
+    static initialize(options: Omit<PythonInterpreterToolParams, "instance">): Promise<PythonInterpreterTool>;
+    _call(script: string): Promise<string>;
+}