langchain 0.0.164 → 0.0.165
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/chat_models/portkey.cjs +1 -0
- package/chat_models/portkey.d.ts +1 -0
- package/chat_models/portkey.js +1 -0
- package/dist/chat_models/portkey.cjs +159 -0
- package/dist/chat_models/portkey.d.ts +17 -0
- package/dist/chat_models/portkey.js +155 -0
- package/dist/embeddings/minimax.cjs +1 -1
- package/dist/embeddings/minimax.js +1 -1
- package/dist/llms/portkey.cjs +147 -0
- package/dist/llms/portkey.d.ts +33 -0
- package/dist/llms/portkey.js +138 -0
- package/dist/llms/sagemaker_endpoint.cjs +76 -14
- package/dist/llms/sagemaker_endpoint.d.ts +39 -20
- package/dist/llms/sagemaker_endpoint.js +77 -15
- package/dist/load/import_constants.cjs +3 -0
- package/dist/load/import_constants.js +3 -0
- package/dist/output_parsers/list.cjs +1 -1
- package/dist/output_parsers/list.js +1 -1
- package/dist/vectorstores/cassandra.cjs +212 -0
- package/dist/vectorstores/cassandra.d.ts +98 -0
- package/dist/vectorstores/cassandra.js +208 -0
- package/dist/vectorstores/mongodb_atlas.cjs +29 -39
- package/dist/vectorstores/mongodb_atlas.js +29 -39
- package/llms/portkey.cjs +1 -0
- package/llms/portkey.d.ts +1 -0
- package/llms/portkey.js +1 -0
- package/package.json +42 -2
- package/vectorstores/cassandra.cjs +1 -0
- package/vectorstores/cassandra.d.ts +1 -0
- package/vectorstores/cassandra.js +1 -0

package/chat_models/portkey.cjs
@@ -0,0 +1 @@
+module.exports = require('../dist/chat_models/portkey.cjs');

package/chat_models/portkey.d.ts
@@ -0,0 +1 @@
+export * from '../dist/chat_models/portkey.js'

package/chat_models/portkey.js
@@ -0,0 +1 @@
+export * from '../dist/chat_models/portkey.js'
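
These three one-line shims expose the new Portkey chat model through the package's public entrypoints by re-exporting the compiled sources under `dist/`. A minimal consumer-side sketch, assuming the usual `langchain/<path>` subpath mapping that the accompanying `package.json` changes wire up (the import paths below are that assumption, not part of this diff):

```ts
// Illustrative imports via the new entrypoints (assumed subpath mapping).
import { PortkeyChat } from "langchain/chat_models/portkey";
import { Portkey } from "langchain/llms/portkey";
```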

package/dist/chat_models/portkey.cjs
@@ -0,0 +1,159 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PortkeyChat = void 0;
+const portkey_js_1 = require("../llms/portkey.cjs");
+const index_js_1 = require("../schema/index.cjs");
+const base_js_1 = require("./base.cjs");
+function portkeyResponseToChatMessage(message) {
+    switch (message.role) {
+        case "user":
+            return new index_js_1.HumanMessage(message.content || "");
+        case "assistant":
+            return new index_js_1.AIMessage(message.content || "");
+        case "system":
+            return new index_js_1.SystemMessage(message.content || "");
+        default:
+            return new index_js_1.ChatMessage(message.content || "", message.role ?? "unknown");
+    }
+}
+function _convertDeltaToMessageChunk(
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+delta) {
+    const { role } = delta;
+    const content = delta.content ?? "";
+    let additional_kwargs;
+    if (delta.function_call) {
+        additional_kwargs = {
+            function_call: delta.function_call,
+        };
+    }
+    else {
+        additional_kwargs = {};
+    }
+    if (role === "user") {
+        return new index_js_1.HumanMessageChunk({ content });
+    }
+    else if (role === "assistant") {
+        return new index_js_1.AIMessageChunk({ content, additional_kwargs });
+    }
+    else if (role === "system") {
+        return new index_js_1.SystemMessageChunk({ content });
+    }
+    else if (role === "function") {
+        return new index_js_1.FunctionMessageChunk({
+            content,
+            additional_kwargs,
+            name: delta.name,
+        });
+    }
+    else {
+        return new index_js_1.ChatMessageChunk({ content, role });
+    }
+}
+class PortkeyChat extends base_js_1.BaseChatModel {
+    constructor(init) {
+        super(init ?? {});
+        Object.defineProperty(this, "apiKey", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "baseURL", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "mode", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "llms", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "session", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.apiKey = init?.apiKey;
+        this.baseURL = init?.baseURL;
+        this.mode = init?.mode;
+        this.llms = init?.llms;
+        this.session = (0, portkey_js_1.getPortkeySession)({
+            apiKey: this.apiKey,
+            baseURL: this.baseURL,
+            llms: this.llms,
+            mode: this.mode,
+        });
+    }
+    _llmType() {
+        return "portkey";
+    }
+    async _generate(messages, options, _) {
+        const messagesList = messages.map((message) => ({
+            role: message._getType(),
+            content: message.content,
+        }));
+        const response = await this.session.portkey.chatCompletions.create({
+            messages: messagesList,
+            ...options,
+            stream: false,
+        });
+        const generations = [];
+        for (const data of response.choices ?? []) {
+            const text = data.message?.content ?? "";
+            const generation = {
+                text,
+                message: portkeyResponseToChatMessage(data.message ?? {}),
+            };
+            if (data.finish_reason) {
+                generation.generationInfo = { finish_reason: data.finish_reason };
+            }
+            generations.push(generation);
+        }
+        return {
+            generations,
+        };
+    }
+    async *_streamResponseChunks(messages, options, runManager) {
+        const messagesList = messages.map((message) => ({
+            role: message._getType(),
+            content: message.content,
+        }));
+        const response = await this.session.portkey.chatCompletions.create({
+            messages: messagesList,
+            ...options,
+            stream: true,
+        });
+        for await (const data of response) {
+            const choice = data?.choices[0];
+            if (!choice) {
+                continue;
+            }
+            const chunk = new index_js_1.ChatGenerationChunk({
+                message: _convertDeltaToMessageChunk(choice.delta ?? {}),
+                text: choice.message?.content ?? "",
+                generationInfo: {
+                    finishReason: choice.finish_reason,
+                },
+            });
+            yield chunk;
+            void runManager?.handleLLMNewToken(chunk.text ?? "");
+        }
+        if (options.signal?.aborted) {
+            throw new Error("AbortError");
+        }
+    }
+    _combineLLMOutput() {
+        return {};
+    }
+}
+exports.PortkeyChat = PortkeyChat;

package/dist/chat_models/portkey.d.ts
@@ -0,0 +1,17 @@
+import { LLMOptions } from "portkey-ai";
+import { CallbackManagerForLLMRun } from "../callbacks/manager.js";
+import { PortkeySession } from "../llms/portkey.js";
+import { BaseMessage, ChatGenerationChunk, ChatResult } from "../schema/index.js";
+import { BaseChatModel } from "./base.js";
+export declare class PortkeyChat extends BaseChatModel {
+    apiKey?: string;
+    baseURL?: string;
+    mode?: string;
+    llms?: [LLMOptions] | null;
+    session: PortkeySession;
+    constructor(init?: Partial<PortkeyChat>);
+    _llmType(): string;
+    _generate(messages: BaseMessage[], options: this["ParsedCallOptions"], _?: CallbackManagerForLLMRun): Promise<ChatResult>;
+    _streamResponseChunks(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
+    _combineLLMOutput(): {};
+}

package/dist/chat_models/portkey.js
@@ -0,0 +1,155 @@
+import { getPortkeySession } from "../llms/portkey.js";
+import { AIMessage, AIMessageChunk, ChatGenerationChunk, ChatMessage, ChatMessageChunk, FunctionMessageChunk, HumanMessage, HumanMessageChunk, SystemMessage, SystemMessageChunk, } from "../schema/index.js";
+import { BaseChatModel } from "./base.js";
+function portkeyResponseToChatMessage(message) {
+    switch (message.role) {
+        case "user":
+            return new HumanMessage(message.content || "");
+        case "assistant":
+            return new AIMessage(message.content || "");
+        case "system":
+            return new SystemMessage(message.content || "");
+        default:
+            return new ChatMessage(message.content || "", message.role ?? "unknown");
+    }
+}
+function _convertDeltaToMessageChunk(
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+delta) {
+    const { role } = delta;
+    const content = delta.content ?? "";
+    let additional_kwargs;
+    if (delta.function_call) {
+        additional_kwargs = {
+            function_call: delta.function_call,
+        };
+    }
+    else {
+        additional_kwargs = {};
+    }
+    if (role === "user") {
+        return new HumanMessageChunk({ content });
+    }
+    else if (role === "assistant") {
+        return new AIMessageChunk({ content, additional_kwargs });
+    }
+    else if (role === "system") {
+        return new SystemMessageChunk({ content });
+    }
+    else if (role === "function") {
+        return new FunctionMessageChunk({
+            content,
+            additional_kwargs,
+            name: delta.name,
+        });
+    }
+    else {
+        return new ChatMessageChunk({ content, role });
+    }
+}
+export class PortkeyChat extends BaseChatModel {
+    constructor(init) {
+        super(init ?? {});
+        Object.defineProperty(this, "apiKey", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "baseURL", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "mode", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "llms", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "session", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.apiKey = init?.apiKey;
+        this.baseURL = init?.baseURL;
+        this.mode = init?.mode;
+        this.llms = init?.llms;
+        this.session = getPortkeySession({
+            apiKey: this.apiKey,
+            baseURL: this.baseURL,
+            llms: this.llms,
+            mode: this.mode,
+        });
+    }
+    _llmType() {
+        return "portkey";
+    }
+    async _generate(messages, options, _) {
+        const messagesList = messages.map((message) => ({
+            role: message._getType(),
+            content: message.content,
+        }));
+        const response = await this.session.portkey.chatCompletions.create({
+            messages: messagesList,
+            ...options,
+            stream: false,
+        });
+        const generations = [];
+        for (const data of response.choices ?? []) {
+            const text = data.message?.content ?? "";
+            const generation = {
+                text,
+                message: portkeyResponseToChatMessage(data.message ?? {}),
+            };
+            if (data.finish_reason) {
+                generation.generationInfo = { finish_reason: data.finish_reason };
+            }
+            generations.push(generation);
+        }
+        return {
+            generations,
+        };
+    }
+    async *_streamResponseChunks(messages, options, runManager) {
+        const messagesList = messages.map((message) => ({
+            role: message._getType(),
+            content: message.content,
+        }));
+        const response = await this.session.portkey.chatCompletions.create({
+            messages: messagesList,
+            ...options,
+            stream: true,
+        });
+        for await (const data of response) {
+            const choice = data?.choices[0];
+            if (!choice) {
+                continue;
+            }
+            const chunk = new ChatGenerationChunk({
+                message: _convertDeltaToMessageChunk(choice.delta ?? {}),
+                text: choice.message?.content ?? "",
+                generationInfo: {
+                    finishReason: choice.finish_reason,
+                },
+            });
+            yield chunk;
+            void runManager?.handleLLMNewToken(chunk.text ?? "");
+        }
+        if (options.signal?.aborted) {
+            throw new Error("AbortError");
+        }
+    }
+    _combineLLMOutput() {
+        return {};
+    }
+}
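
Taken together, `PortkeyChat` maps LangChain messages onto Portkey's `chatCompletions.create` call and converts the returned choices (or streamed deltas) back into LangChain message types. A hedged usage sketch; the entrypoint path, the `mode` value, and the `llms` entry are illustrative stand-ins for portkey-ai's `LLMOptions`, not values taken from this diff:

```ts
import { PortkeyChat } from "langchain/chat_models/portkey"; // assumed entrypoint
import { HumanMessage } from "langchain/schema";

const chat = new PortkeyChat({
  apiKey: process.env.PORTKEY_API_KEY, // otherwise the session falls back to PORTKEY_API_KEY itself
  mode: "single",                      // illustrative Portkey mode
  llms: [{ provider: "openai", api_key: process.env.OPENAI_API_KEY, model: "gpt-3.5-turbo" }], // illustrative LLMOptions
});

// call() returns an AIMessage assembled from the first-party response choices.
const reply = await chat.call([new HumanMessage("What is an AI gateway?")]);
console.log(reply.content);
```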

package/dist/embeddings/minimax.cjs
@@ -81,8 +81,8 @@ class MinimaxEmbeddings extends base_js_1.Embeddings {
         this.type = fieldsWithDefaults?.type ?? this.type;
         this.stripNewLines =
             fieldsWithDefaults?.stripNewLines ?? this.stripNewLines;
-        this.apiUrl = `${this.basePath}/embeddings`;
         this.basePath = fields?.configuration?.basePath ?? this.basePath;
+        this.apiUrl = `${this.basePath}/embeddings`;
         this.headers = fields?.configuration?.headers ?? this.headers;
     }
     /**

package/dist/embeddings/minimax.js
@@ -78,8 +78,8 @@ export class MinimaxEmbeddings extends Embeddings {
         this.type = fieldsWithDefaults?.type ?? this.type;
         this.stripNewLines =
             fieldsWithDefaults?.stripNewLines ?? this.stripNewLines;
-        this.apiUrl = `${this.basePath}/embeddings`;
         this.basePath = fields?.configuration?.basePath ?? this.basePath;
+        this.apiUrl = `${this.basePath}/embeddings`;
         this.headers = fields?.configuration?.headers ?? this.headers;
     }
     /**
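
The MiniMax change only swaps two assignments, but it fixes an ordering bug: `apiUrl` used to be computed from the default `basePath` before any user-supplied `configuration.basePath` was applied, so custom base paths were ignored for the embeddings URL. A small sketch of the corrected behaviour (the proxy URL is hypothetical, and MiniMax credentials are assumed to come from the usual environment variables):

```ts
import { MinimaxEmbeddings } from "langchain/embeddings/minimax";

// With the reordering, the basePath override below is reflected in apiUrl.
const embeddings = new MinimaxEmbeddings({
  configuration: { basePath: "https://minimax-proxy.example.com/v1" }, // hypothetical proxy
});
// embeddings.apiUrl === "https://minimax-proxy.example.com/v1/embeddings"
```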

package/dist/llms/portkey.cjs
@@ -0,0 +1,147 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Portkey = exports.getPortkeySession = exports.PortkeySession = void 0;
+const lodash_1 = __importDefault(require("lodash"));
+const portkey_ai_1 = require("portkey-ai");
+const index_js_1 = require("../schema/index.cjs");
+const env_js_1 = require("../util/env.cjs");
+const base_js_1 = require("./base.cjs");
+const readEnv = (env, default_val) => (0, env_js_1.getEnvironmentVariable)(env) ?? default_val;
+class PortkeySession {
+    constructor(options = {}) {
+        Object.defineProperty(this, "portkey", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        if (!options.apiKey) {
+            /* eslint-disable no-param-reassign */
+            options.apiKey = readEnv("PORTKEY_API_KEY");
+        }
+        if (!options.baseURL) {
+            /* eslint-disable no-param-reassign */
+            options.baseURL = readEnv("PORTKEY_BASE_URL", "https://api.portkey.ai");
+        }
+        this.portkey = new portkey_ai_1.Portkey({});
+        this.portkey.llms = [{}];
+        if (!options.apiKey) {
+            throw new Error("Set Portkey ApiKey in PORTKEY_API_KEY env variable");
+        }
+        this.portkey = new portkey_ai_1.Portkey(options);
+    }
+}
+exports.PortkeySession = PortkeySession;
+const defaultPortkeySession = [];
+/**
+ * Get a session for the Portkey API. If one already exists with the same options,
+ * it will be returned. Otherwise, a new session will be created.
+ * @param options
+ * @returns
+ */
+function getPortkeySession(options = {}) {
+    let session = defaultPortkeySession.find((session) => lodash_1.default.isEqual(session.options, options))?.session;
+    if (!session) {
+        session = new PortkeySession(options);
+        defaultPortkeySession.push({ session, options });
+    }
+    return session;
+}
+exports.getPortkeySession = getPortkeySession;
+class Portkey extends base_js_1.BaseLLM {
+    constructor(init) {
+        super(init ?? {});
+        Object.defineProperty(this, "apiKey", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "baseURL", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "mode", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "llms", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "session", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.apiKey = init?.apiKey;
+        this.baseURL = init?.baseURL;
+        this.mode = init?.mode;
+        this.llms = init?.llms;
+        this.session = getPortkeySession({
+            apiKey: this.apiKey,
+            baseURL: this.baseURL,
+            llms: this.llms,
+            mode: this.mode,
+        });
+    }
+    _llmType() {
+        return "portkey";
+    }
+    async _generate(prompts, options, _) {
+        const choices = [];
+        for (let i = 0; i < prompts.length; i += 1) {
+            const response = await this.session.portkey.completions.create({
+                prompt: prompts[i],
+                ...options,
+                stream: false,
+            });
+            choices.push(response.choices);
+        }
+        const generations = choices.map((promptChoices) => promptChoices.map((choice) => ({
+            text: choice.text ?? "",
+            generationInfo: {
+                finishReason: choice.finish_reason,
+                logprobs: choice.logprobs,
+            },
+        })));
+        return {
+            generations,
+        };
+    }
+    async *_streamResponseChunks(input, options, runManager) {
+        const response = await this.session.portkey.completions.create({
+            prompt: input,
+            ...options,
+            stream: true,
+        });
+        for await (const data of response) {
+            const choice = data?.choices[0];
+            if (!choice) {
+                continue;
+            }
+            const chunk = new index_js_1.GenerationChunk({
+                text: choice.text ?? "",
+                generationInfo: {
+                    finishReason: choice.finish_reason,
+                },
+            });
+            yield chunk;
+            void runManager?.handleLLMNewToken(chunk.text ?? "");
+        }
+        if (options.signal?.aborted) {
+            throw new Error("AbortError");
+        }
+    }
+}
+exports.Portkey = Portkey;
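
`getPortkeySession` memoizes sessions by deep equality of their options (lodash `isEqual`), so repeated construction with identical configuration reuses a single `Portkey` client instead of creating one per model instance. A small sketch of that behaviour, assuming the `langchain/llms/portkey` entrypoint re-exports the function (as the shims above do for the chat model) and that `PORTKEY_API_KEY` is set, since the session throws without it:

```ts
import { getPortkeySession } from "langchain/llms/portkey"; // assumed entrypoint

const a = getPortkeySession({ mode: "single" }); // "single" is an illustrative mode value
const b = getPortkeySession({ mode: "single" });
console.log(a === b); // true: deep-equal options return the cached session

const c = getPortkeySession({ mode: "fallback" }); // different options => a new session
console.log(a === c); // false
```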

package/dist/llms/portkey.d.ts
@@ -0,0 +1,33 @@
+import { LLMOptions, Portkey as _Portkey } from "portkey-ai";
+import { CallbackManagerForLLMRun } from "../callbacks/manager.js";
+import { GenerationChunk, LLMResult } from "../schema/index.js";
+import { BaseLLM } from "./base.js";
+interface PortkeyOptions {
+    apiKey?: string;
+    baseURL?: string;
+    mode?: string;
+    llms?: [LLMOptions] | null;
+}
+export declare class PortkeySession {
+    portkey: _Portkey;
+    constructor(options?: PortkeyOptions);
+}
+/**
+ * Get a session for the Portkey API. If one already exists with the same options,
+ * it will be returned. Otherwise, a new session will be created.
+ * @param options
+ * @returns
+ */
+export declare function getPortkeySession(options?: PortkeyOptions): PortkeySession;
+export declare class Portkey extends BaseLLM {
+    apiKey?: string;
+    baseURL?: string;
+    mode?: string;
+    llms?: [LLMOptions] | null;
+    session: PortkeySession;
+    constructor(init?: Partial<Portkey>);
+    _llmType(): string;
+    _generate(prompts: string[], options: this["ParsedCallOptions"], _?: CallbackManagerForLLMRun): Promise<LLMResult>;
+    _streamResponseChunks(input: string, options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<GenerationChunk>;
+}
+export {};

package/dist/llms/portkey.js
@@ -0,0 +1,138 @@
+import _ from "lodash";
+import { Portkey as _Portkey } from "portkey-ai";
+import { GenerationChunk } from "../schema/index.js";
+import { getEnvironmentVariable } from "../util/env.js";
+import { BaseLLM } from "./base.js";
+const readEnv = (env, default_val) => getEnvironmentVariable(env) ?? default_val;
+export class PortkeySession {
+    constructor(options = {}) {
+        Object.defineProperty(this, "portkey", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        if (!options.apiKey) {
+            /* eslint-disable no-param-reassign */
+            options.apiKey = readEnv("PORTKEY_API_KEY");
+        }
+        if (!options.baseURL) {
+            /* eslint-disable no-param-reassign */
+            options.baseURL = readEnv("PORTKEY_BASE_URL", "https://api.portkey.ai");
+        }
+        this.portkey = new _Portkey({});
+        this.portkey.llms = [{}];
+        if (!options.apiKey) {
+            throw new Error("Set Portkey ApiKey in PORTKEY_API_KEY env variable");
+        }
+        this.portkey = new _Portkey(options);
+    }
+}
+const defaultPortkeySession = [];
+/**
+ * Get a session for the Portkey API. If one already exists with the same options,
+ * it will be returned. Otherwise, a new session will be created.
+ * @param options
+ * @returns
+ */
+export function getPortkeySession(options = {}) {
+    let session = defaultPortkeySession.find((session) => _.isEqual(session.options, options))?.session;
+    if (!session) {
+        session = new PortkeySession(options);
+        defaultPortkeySession.push({ session, options });
+    }
+    return session;
+}
+export class Portkey extends BaseLLM {
+    constructor(init) {
+        super(init ?? {});
+        Object.defineProperty(this, "apiKey", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "baseURL", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "mode", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "llms", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "session", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.apiKey = init?.apiKey;
+        this.baseURL = init?.baseURL;
+        this.mode = init?.mode;
+        this.llms = init?.llms;
+        this.session = getPortkeySession({
+            apiKey: this.apiKey,
+            baseURL: this.baseURL,
+            llms: this.llms,
+            mode: this.mode,
+        });
+    }
+    _llmType() {
+        return "portkey";
+    }
+    async _generate(prompts, options, _) {
+        const choices = [];
+        for (let i = 0; i < prompts.length; i += 1) {
+            const response = await this.session.portkey.completions.create({
+                prompt: prompts[i],
+                ...options,
+                stream: false,
+            });
+            choices.push(response.choices);
+        }
+        const generations = choices.map((promptChoices) => promptChoices.map((choice) => ({
+            text: choice.text ?? "",
+            generationInfo: {
+                finishReason: choice.finish_reason,
+                logprobs: choice.logprobs,
+            },
+        })));
+        return {
+            generations,
+        };
+    }
+    async *_streamResponseChunks(input, options, runManager) {
+        const response = await this.session.portkey.completions.create({
+            prompt: input,
+            ...options,
+            stream: true,
+        });
+        for await (const data of response) {
+            const choice = data?.choices[0];
+            if (!choice) {
+                continue;
+            }
+            const chunk = new GenerationChunk({
+                text: choice.text ?? "",
+                generationInfo: {
+                    finishReason: choice.finish_reason,
+                },
+            });
+            yield chunk;
+            void runManager?.handleLLMNewToken(chunk.text ?? "");
+        }
+        if (options.signal?.aborted) {
+            throw new Error("AbortError");
+        }
+    }
+}
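
For completion-style prompts, the `Portkey` LLM issues one `completions.create` call per prompt and flattens each returned choice into a generation, with `finish_reason` and `logprobs` carried in `generationInfo`; streaming goes through `_streamResponseChunks` and surfaces tokens via the callback manager. A hedged usage sketch; the entrypoint path and the `LLMOptions` fields are illustrative assumptions, not taken from this diff:

```ts
import { Portkey } from "langchain/llms/portkey"; // assumed entrypoint

const llm = new Portkey({
  mode: "single", // illustrative Portkey routing mode
  llms: [{ provider: "openai", api_key: process.env.OPENAI_API_KEY, model: "text-davinci-003" }], // illustrative LLMOptions
});

// One completion per prompt; each choice becomes a generation.
const res = await llm.generate(["Write a haiku about API gateways."]);
console.log(res.generations[0][0].text);
```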