llm-proxy 1.3.0 → 1.3.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +4 -4
- package/dist/index.js +32 -25
- package/dist/index.js.map +1 -1
- package/dist/middleware/InputFormatAdapter.d.ts +2 -3
- package/dist/middleware/InputFormatAdapter.js +24 -26
- package/dist/middleware/InputFormatAdapter.js.map +1 -1
- package/dist/services/AwsBedrockAnthropicService.d.ts +2 -2
- package/dist/services/AwsBedrockAnthropicService.js +16 -6
- package/dist/services/AwsBedrockAnthropicService.js.map +1 -1
- package/dist/services/ClientService.d.ts +2 -2
- package/dist/services/OpenAIService.d.ts +2 -5
- package/dist/services/OpenAIService.js +2 -7
- package/dist/services/OpenAIService.js.map +1 -1
- package/dist/types/index.js.map +1 -1
- package/package.json +2 -2
package/dist/index.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import { OpenAIResponse } from "./types";
+import { Messages, OpenAIResponse } from "./types";
 interface Credentials {
     apiKey?: string;
     awsConfig?: {
@@ -7,6 +7,6 @@ interface Credentials {
         region: string;
     };
 }
-export declare function generateLLMResponse(messages:
-export declare function generateLLMStreamResponse(messages:
-export
+export declare function generateLLMResponse(messages: Messages, model: string, maxTokens: number, temperature: number, credentials: Credentials): Promise<OpenAIResponse>;
+export declare function generateLLMStreamResponse(messages: Messages, model: string, maxTokens: number, temperature: number, credentials: Credentials): Promise<AsyncGenerator<OpenAIResponse>>;
+export * from "./types";
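The new declarations pin down the 1.3.2 public surface: both entry points now take a Messages array plus a Credentials object, and the package re-exports its types. A minimal usage sketch against these signatures; the model id is hypothetical (ProviderFinder's model-to-provider mapping is not part of this diff), and carrying the system prompt as the first message follows the index.js changes below:

    import { generateLLMResponse, generateLLMStreamResponse } from "llm-proxy";

    // Hypothetical model id; ProviderFinder derives the provider from it.
    const model = "gpt-4o-mini";

    // The system prompt now travels inside the messages array.
    const messages = [
      { role: "system", content: "You are a terse assistant." },
      { role: "user", content: "Summarize this release in one sentence." },
    ];

    // Non-streaming call: resolves to a unified OpenAI-shaped response.
    const response = await generateLLMResponse(messages, model, 256, 0.2, {
      apiKey: process.env.OPENAI_API_KEY,
    });

    // Streaming call: resolves to an async generator of response chunks.
    const stream = await generateLLMStreamResponse(messages, model, 256, 0.2, {
      apiKey: process.env.OPENAI_API_KEY,
    });
    for await (const chunk of stream) {
      // consume each OpenAIResponse chunk
    }
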
package/dist/index.js
CHANGED
@@ -1,4 +1,18 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
 var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
     return new (P || (P = Promise))(function (resolve, reject) {
@@ -38,15 +52,11 @@ const AwsBedrockAnthropicService_1 = require("./services/AwsBedrockAnthropicServ
 const OpenAIService_1 = require("./services/OpenAIService");
 const types_1 = require("./types");
 // Main function for non-streaming requests
-function generateLLMResponse(messages, model, maxTokens, temperature,
+function generateLLMResponse(messages, model, maxTokens, temperature, credentials) {
     return __awaiter(this, void 0, void 0, function* () {
-        // Step 1:
-        if (systemPrompt) {
-            messages = [{ role: "system", content: systemPrompt }, ...messages];
-        }
-        // Step 2: Identify the provider based on the model
+        // Step 1: Identify the provider based on the model
         const provider = ProviderFinder_1.ProviderFinder.getProvider(model);
-        //
+        // Initialize the correct service based on the provider
         let service;
         if (provider === types_1.Providers.OPENAI) {
             if (!credentials.apiKey) {
@@ -64,27 +74,22 @@ function generateLLMResponse(messages, model, maxTokens, temperature, systemProm
         else {
            throw new Error("Unsupported provider");
         }
-        // Step
-        const {
-        // Step
-        const
-
-
-        // Step 6: Make the request
-        const response = yield service.generateCompletion(formattedMessages, model, maxTokens, temperature, extractedPrompt || systemPrompt, tools);
-        // Step 7: Return the unified response
+        // Step 2: Adapt messages and extract the system prompt
+        const { adaptedMessages, systemPrompt } = InputFormatAdapter_1.InputFormatAdapter.adaptMessages(messages, provider);
+        // Step 3: Generate the completion
+        const response = yield service.generateCompletion(adaptedMessages, // TODO: fix this any
+        model, maxTokens, temperature, systemPrompt);
+        // Step 4: Adapt the response if needed
         return provider === types_1.Providers.OPENAI
             ? response
             : OutputFormatAdapter_1.OutputFormatAdapter.adaptResponse(response, provider);
     });
 }
 // Main function for streaming requests
-function generateLLMStreamResponse(messages, model, maxTokens, temperature,
+function generateLLMStreamResponse(messages, model, maxTokens, temperature, credentials) {
     return __awaiter(this, void 0, void 0, function* () {
-        if (systemPrompt) {
-            messages = [{ role: "system", content: systemPrompt }, ...messages];
-        }
         const provider = ProviderFinder_1.ProviderFinder.getProvider(model);
+        // Initialize the correct service based on the provider
         let service;
         if (provider === types_1.Providers.OPENAI) {
             if (!credentials.apiKey) {
@@ -102,11 +107,12 @@ function generateLLMStreamResponse(messages, model, maxTokens, temperature, syst
         else {
             throw new Error("Unsupported provider");
         }
-
-        const
-
-
-
+        // Adapt messages and extract the system prompt
+        const { adaptedMessages, systemPrompt } = InputFormatAdapter_1.InputFormatAdapter.adaptMessages(messages, provider);
+        // Generate the streaming completion
+        const stream = service.generateStreamCompletion(adaptedMessages, // TODO: Fix this any
+        model, maxTokens, temperature, systemPrompt);
+        // Create and return the async generator
         function streamGenerator() {
             return __asyncGenerator(this, arguments, function* streamGenerator_1() {
                 var _a, e_1, _b, _c;
@@ -132,4 +138,5 @@ function generateLLMStreamResponse(messages, model, maxTokens, temperature, syst
         return streamGenerator();
     });
 }
+__exportStar(require("./types"), exports);
 //# sourceMappingURL=index.js.map
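The net effect in this file: systemPrompt (and the tools pass-through) disappear from both signatures, and the system prompt is instead recovered from the messages array by InputFormatAdapter.adaptMessages. A hedged migration sketch; the exact 1.3.0 parameter order is truncated in the old lines above, so the "before" call is an assumption:

    // 1.3.0 (assumed order; the old signature is truncated in this diff):
    // await generateLLMResponse(messages, model, maxTokens, temperature, systemPrompt, credentials, tools);

    // 1.3.2: fold the system prompt into the messages array instead.
    await generateLLMResponse(
      [{ role: "system", content: systemPrompt }, ...messages],
      model,
      maxTokens,
      temperature,
      credentials
    );
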
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAcA,kDAkDC;AAGD,8DA0DC;AA7HD,gEAA6D;AAC7D,wEAAqE;AACrE,0EAAuE;AACvE,sFAAmF;AACnF,4DAAyD;AACzD,mCAA8D;AAQ9D,2CAA2C;AAC3C,SAAsB,mBAAmB,CACvC,QAAkB,EAClB,KAAa,EACb,SAAiB,EACjB,WAAmB,EACnB,WAAwB;;QAExB,mDAAmD;QACnD,MAAM,QAAQ,GAAG,+BAAc,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;QAEnD,uDAAuD;QACvD,IAAI,OAAmD,CAAC;QACxD,IAAI,QAAQ,KAAK,iBAAS,CAAC,MAAM,EAAE,CAAC;YAClC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,CAAC;gBACxB,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;YACnE,CAAC;YACD,OAAO,GAAG,IAAI,6BAAa,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;QAClD,CAAC;aAAM,IAAI,QAAQ,KAAK,iBAAS,CAAC,iBAAiB,EAAE,CAAC;YACpD,MAAM,SAAS,GAAG,WAAW,CAAC,SAAS,CAAC;YACxC,IAAI,CAAC,SAAS,EAAE,CAAC;gBACf,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAC;YACtE,CAAC;YACD,OAAO,GAAG,IAAI,uDAA0B,CACtC,SAAS,CAAC,WAAW,EACrB,SAAS,CAAC,eAAe,EACzB,SAAS,CAAC,MAAM,CACjB,CAAC;QACJ,CAAC;aAAM,CAAC;YACN,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;QAC1C,CAAC;QAED,uDAAuD;QACvD,MAAM,EAAE,eAAe,EAAE,YAAY,EAAE,GAAG,uCAAkB,CAAC,aAAa,CACxE,QAAQ,EACR,QAAQ,CACT,CAAC;QAEF,kCAAkC;QAClC,MAAM,QAAQ,GAAG,MAAM,OAAO,CAAC,kBAAkB,CAC/C,eAAsB,EAAE,qBAAqB;QAC7C,KAAK,EACL,SAAS,EACT,WAAW,EACX,YAAY,CACb,CAAC;QAEF,uCAAuC;QACvC,OAAO,QAAQ,KAAK,iBAAS,CAAC,MAAM;YAClC,CAAC,CAAE,QAA2B;YAC9B,CAAC,CAAE,yCAAmB,CAAC,aAAa,CAAC,QAAQ,EAAE,QAAQ,CAAoB,CAAC;IAChF,CAAC;CAAA;AAED,uCAAuC;AACvC,SAAsB,yBAAyB,CAC7C,QAAkB,EAClB,KAAa,EACb,SAAiB,EACjB,WAAmB,EACnB,WAAwB;;QAExB,MAAM,QAAQ,GAAG,+BAAc,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;QAEnD,uDAAuD;QACvD,IAAI,OAAmD,CAAC;QACxD,IAAI,QAAQ,KAAK,iBAAS,CAAC,MAAM,EAAE,CAAC;YAClC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,CAAC;gBACxB,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;YACnE,CAAC;YACD,OAAO,GAAG,IAAI,6BAAa,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;QAClD,CAAC;aAAM,IAAI,QAAQ,KAAK,iBAAS,CAAC,iBAAiB,EAAE,CAAC;YACpD,MAAM,SAAS,GAAG,WAAW,CAAC,SAAS,CAAC;YACxC,IAAI,CAAC,SAAS,EAAE,CAAC;gBACf,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAC;YACtE,CAAC;YACD,OAAO,GAAG,IAAI,uDAA0B,CACtC,SAAS,CAAC,WAAW,EACrB,SAAS,CAAC,eAAe,EACzB,SAAS,CAAC,MAAM,CACjB,CAAC;QACJ,CAAC;aAAM,CAAC;YACN,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;QAC1C,CAAC;QAED,+CAA+C;QAC/C,MAAM,EAAE,eAAe,EAAE,YAAY,EAAE,GAAG,uCAAkB,CAAC,aAAa,CACxE,QAAQ,EACR,QAAQ,CACT,CAAC;QAEF,oCAAoC;QACpC,MAAM,MAAM,GAAG,OAAO,CAAC,wBAAwB,CAC7C,eAAsB,EAAE,qBAAqB;QAC7C,KAAK,EACL,SAAS,EACT,WAAW,EACX,YAAY,CACb,CAAC;QAEF,wCAAwC;QACxC,SAAgB,eAAe;;;;oBAC7B,KAA0B,eAAA,WAAA,cAAA,MAAM,CAAA,YAAA,qFAAE,CAAC;wBAAT,sBAAM;wBAAN,WAAM;wBAArB,MAAM,KAAK,KAAA,CAAA;wBACpB,oBAAM,QAAQ,KAAK,iBAAS,CAAC,MAAM;4BACjC,CAAC,CAAE,KAAwB;4BAC3B,CAAC,CAAE,yCAAmB,CAAC,aAAa,CAChC,KAAK,EACL,QAAQ,CACU,CAAA,CAAC;oBAC3B,CAAC;;;;;;;;;YACH,CAAC;SAAA;QAED,OAAO,eAAe,EAAE,CAAC;IAC3B,CAAC;CAAA;AAED,0CAAwB"}

package/dist/middleware/InputFormatAdapter.d.ts
CHANGED
@@ -1,8 +1,7 @@
 import { BedrockAnthropicMessage, Messages, OpenAIMessages, Providers } from "../types";
 export declare class InputFormatAdapter {
-    static adaptMessages(messages: Messages, provider: Providers):
-
-        messages: Messages;
+    static adaptMessages(messages: Messages, provider: Providers): {
+        adaptedMessages: OpenAIMessages | BedrockAnthropicMessage[];
         systemPrompt?: string;
     };
 }
package/dist/middleware/InputFormatAdapter.js
CHANGED
@@ -6,24 +6,32 @@ class InputFormatAdapter {
     static adaptMessages(messages, provider) {
         switch (provider) {
             case types_1.Providers.OPENAI:
-                return
-
+                return {
+                    adaptedMessages: messages.map((msg) => {
+                        if (msg.role === "function") {
+                            return {
+                                role: msg.role,
+                                content: msg.content,
+                                name: msg.name,
+                            };
+                        }
                         return {
                             role: msg.role,
                             content: msg.content,
-                            name: msg.name,
                         };
-                }
-
-
-
-
-                }
-
-
-
-
+                    }),
+                };
+            case types_1.Providers.ANTHROPIC_BEDROCK: {
+                if (!messages.length) {
+                    throw new Error("Messages array cannot be empty for Anthropic.");
+                }
+                // Extract the first message as the system prompt
+                const [firstMessage, ...restMessages] = messages;
+                if (firstMessage.role !== "system") {
+                    throw new Error("The first message must have a role of 'system' for Anthropic.");
+                }
+                const systemPrompt = firstMessage.content;
+                const adaptedMessages = restMessages.map((msg) => ({
                     role: msg.role === "user" ? "user" : "assistant",
                     content: [
                         {
@@ -32,22 +40,12 @@ class InputFormatAdapter {
                         },
                     ],
                 }));
+                return { adaptedMessages, systemPrompt };
+            }
             default:
                 throw new Error(`Unsupported provider: ${provider}`);
         }
     }
-    static extractSystemPrompt(messages, provider) {
-        if (provider === types_1.Providers.ANTHROPIC_BEDROCK) {
-            const [systemMessage, ...restMessages] = messages;
-            if ((systemMessage === null || systemMessage === void 0 ? void 0 : systemMessage.role) === "system") {
-                return {
-                    messages: restMessages,
-                    systemPrompt: systemMessage.content,
-                };
-            }
-        }
-        return { messages, systemPrompt: undefined };
-    }
 }
 exports.InputFormatAdapter = InputFormatAdapter;
 //# sourceMappingURL=InputFormatAdapter.js.map
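With extractSystemPrompt gone, adaptMessages is now the single entry point: the Bedrock path fails fast on an empty array or a non-system first message, then returns the remainder as Bedrock-formatted messages plus the extracted prompt. A sketch of the expected round trip; the deep import path and the exact text-block field names live in unchanged lines, so treat both as assumptions:

    import { InputFormatAdapter } from "llm-proxy/dist/middleware/InputFormatAdapter"; // assumed path
    import { Providers } from "llm-proxy";

    const { adaptedMessages, systemPrompt } = InputFormatAdapter.adaptMessages(
      [
        { role: "system", content: "Be brief." }, // must come first for Bedrock
        { role: "user", content: "Hi" },
      ],
      Providers.ANTHROPIC_BEDROCK
    );

    // systemPrompt    -> "Be brief."
    // adaptedMessages -> [{ role: "user", content: [ /* one text block for "Hi" */ ] }]
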
package/dist/middleware/InputFormatAdapter.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"InputFormatAdapter.js","sourceRoot":"","sources":["../../src/middleware/InputFormatAdapter.ts"],"names":[],"mappings":";;;AAAA,oCAOkB;AAElB,MAAa,kBAAkB;IAC7B,MAAM,CAAC,aAAa,CAClB,QAAkB,EAClB,QAAmB;
+{"version":3,"file":"InputFormatAdapter.js","sourceRoot":"","sources":["../../src/middleware/InputFormatAdapter.ts"],"names":[],"mappings":";;;AAAA,oCAOkB;AAElB,MAAa,kBAAkB;IAC7B,MAAM,CAAC,aAAa,CAClB,QAAkB,EAClB,QAAmB;QAKnB,QAAQ,QAAQ,EAAE,CAAC;YACjB,KAAK,iBAAS,CAAC,MAAM;gBACnB,OAAO;oBACL,eAAe,EAAE,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE;wBACpC,IAAI,GAAG,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;4BAC5B,OAAO;gCACL,IAAI,EAAE,GAAG,CAAC,IAAI;gCACd,OAAO,EAAE,GAAG,CAAC,OAAO;gCACpB,IAAI,EAAG,GAA6B,CAAC,IAAI;6BAC1C,CAAC;wBACJ,CAAC;wBACD,OAAO;4BACL,IAAI,EAAE,GAAG,CAAC,IAAI;4BACd,OAAO,EAAE,GAAG,CAAC,OAAiB;yBAC/B,CAAC;oBACJ,CAAC,CAAmB;iBACrB,CAAC;YAEJ,KAAK,iBAAS,CAAC,iBAAiB,CAAC,CAAC,CAAC;gBACjC,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC;oBACrB,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;gBACnE,CAAC;gBAED,iDAAiD;gBACjD,MAAM,CAAC,YAAY,EAAE,GAAG,YAAY,CAAC,GAAG,QAAQ,CAAC;gBAEjD,IAAI,YAAY,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;oBACnC,MAAM,IAAI,KAAK,CACb,+DAA+D,CAChE,CAAC;gBACJ,CAAC;gBAED,MAAM,YAAY,GAAG,YAAY,CAAC,OAAiB,CAAC;gBAEpD,MAAM,eAAe,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,CAAC;oBACjD,IAAI,EAAE,GAAG,CAAC,IAAI,KAAK,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,WAAW;oBAChD,OAAO,EAAE;wBACP;4BACE,IAAI,EAAE,mCAA2B,CAAC,IAAI;4BACtC,IAAI,EAAE,GAAG,CAAC,OAAiB;yBAC5B;qBACF;iBACF,CAAC,CAA8B,CAAC;gBAEjC,OAAO,EAAE,eAAe,EAAE,YAAY,EAAE,CAAC;YAC3C,CAAC;YAED;gBACE,MAAM,IAAI,KAAK,CAAC,yBAAyB,QAAQ,EAAE,CAAC,CAAC;QACzD,CAAC;IACH,CAAC;CACF;AA3DD,gDA2DC"}
package/dist/services/AwsBedrockAnthropicService.d.ts
CHANGED
@@ -3,6 +3,6 @@ import { ClientService } from "./ClientService";
 export declare class AwsBedrockAnthropicService implements ClientService {
     private bedrock;
     constructor(awsAccessKey: string, awsSecretKey: string, region: string);
-    generateCompletion(messages: Messages, model?: string, maxTokens?: number, temperature?: number, systemPrompt?: string
-    generateStreamCompletion(messages: Messages, model?: string, maxTokens?: number, temperature?: number, systemPrompt?: string
+    generateCompletion(messages: Messages, model?: string, maxTokens?: number, temperature?: number, systemPrompt?: string): Promise<BedrockAnthropicResponse>;
+    generateStreamCompletion(messages: Messages, model?: string, maxTokens?: number, temperature?: number, systemPrompt?: string): AsyncGenerator<BedrockAnthropicParsedChunk, void, unknown>;
 }
package/dist/services/AwsBedrockAnthropicService.js
CHANGED
@@ -41,13 +41,18 @@ class AwsBedrockAnthropicService {
             },
         });
     }
-    generateCompletion(messages, model, maxTokens, temperature, systemPrompt
+    generateCompletion(messages, model, maxTokens, temperature, systemPrompt) {
         return __awaiter(this, void 0, void 0, function* () {
             if (!model) {
                 throw new Error("Model ID is required for AwsBedrockAnthropicService");
             }
-            const body = JSON.stringify(
-
+            const body = JSON.stringify({
+                anthropic_version: "bedrock-2023-05-31",
+                max_tokens: maxTokens,
+                temperature,
+                messages,
+                system: systemPrompt,
+            });
             const command = new client_bedrock_runtime_1.InvokeModelCommand({
                 modelId: model,
                 body,
@@ -58,15 +63,20 @@ class AwsBedrockAnthropicService {
             return JSON.parse(new TextDecoder().decode(response.body));
         });
     }
-    generateStreamCompletion(messages, model, maxTokens, temperature, systemPrompt
+    generateStreamCompletion(messages, model, maxTokens, temperature, systemPrompt) {
         return __asyncGenerator(this, arguments, function* generateStreamCompletion_1() {
             var _a, e_1, _b, _c;
             var _d;
             if (!model) {
                 throw new Error("Model ID is required for AwsBedrockAnthropicService");
             }
-            const body = JSON.stringify(
-
+            const body = JSON.stringify({
+                anthropic_version: "bedrock-2023-05-31",
+                max_tokens: maxTokens,
+                temperature,
+                messages,
+                system: systemPrompt,
+            });
             const command = new client_bedrock_runtime_1.InvokeModelWithResponseStreamCommand({
                 modelId: model,
                 body,
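The previously empty JSON.stringify argument is now filled in, so both Bedrock calls send a complete Anthropic Messages-API body, including the anthropic_version pin and the system prompt extracted by the adapter. The shape of that body, with illustrative values:

    const body = JSON.stringify({
      anthropic_version: "bedrock-2023-05-31",
      max_tokens: 1024,          // maxTokens argument
      temperature: 0.7,          // temperature argument
      messages: adaptedMessages, // Bedrock-formatted messages from InputFormatAdapter
      system: "Be brief.",       // systemPrompt extracted from the first message
    });
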
package/dist/services/AwsBedrockAnthropicService.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"AwsBedrockAnthropicService.js","sourceRoot":"","sources":["../../src/services/AwsBedrockAnthropicService.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAKA,4EAIyC;AAGzC,MAAa,0BAA0B;IAGrC,YAAY,YAAoB,EAAE,YAAoB,EAAE,MAAc;QACpE,IAAI,CAAC,OAAO,GAAG,IAAI,6CAAoB,CAAC;YACtC,MAAM;YACN,WAAW,EAAE;gBACX,WAAW,EAAE,YAAY;gBACzB,eAAe,EAAE,YAAY;aAC9B;SACF,CAAC,CAAC;IACL,CAAC;IAEK,kBAAkB,CACtB,QAAkB,EAClB,KAAc,EACd,SAAkB,EAClB,WAAoB,EACpB,YAAqB
+{"version":3,"file":"AwsBedrockAnthropicService.js","sourceRoot":"","sources":["../../src/services/AwsBedrockAnthropicService.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAKA,4EAIyC;AAGzC,MAAa,0BAA0B;IAGrC,YAAY,YAAoB,EAAE,YAAoB,EAAE,MAAc;QACpE,IAAI,CAAC,OAAO,GAAG,IAAI,6CAAoB,CAAC;YACtC,MAAM;YACN,WAAW,EAAE;gBACX,WAAW,EAAE,YAAY;gBACzB,eAAe,EAAE,YAAY;aAC9B;SACF,CAAC,CAAC;IACL,CAAC;IAEK,kBAAkB,CACtB,QAAkB,EAClB,KAAc,EACd,SAAkB,EAClB,WAAoB,EACpB,YAAqB;;YAErB,IAAI,CAAC,KAAK,EAAE,CAAC;gBACX,MAAM,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;YACzE,CAAC;YAED,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC;gBAC1B,iBAAiB,EAAE,oBAAoB;gBACvC,UAAU,EAAE,SAAS;gBACrB,WAAW;gBACX,QAAQ;gBACR,MAAM,EAAE,YAAY;aACrB,CAAC,CAAC;YAEH,MAAM,OAAO,GAAG,IAAI,2CAAkB,CAAC;gBACrC,OAAO,EAAE,KAAK;gBACd,IAAI;gBACJ,WAAW,EAAE,kBAAkB;gBAC/B,MAAM,EAAE,kBAAkB;aAC3B,CAAC,CAAC;YAEH,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;YAClD,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,WAAW,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC;QAC7D,CAAC;KAAA;IAEM,wBAAwB,CAC7B,QAAkB,EAClB,KAAc,EACd,SAAkB,EAClB,WAAoB,EACpB,YAAqB;;;;YAErB,IAAI,CAAC,KAAK,EAAE,CAAC;gBACX,MAAM,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;YACzE,CAAC;YAED,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC;gBAC1B,iBAAiB,EAAE,oBAAoB;gBACvC,UAAU,EAAE,SAAS;gBACrB,WAAW;gBACX,QAAQ;gBACR,MAAM,EAAE,YAAY;aACrB,CAAC,CAAC;YAEH,MAAM,OAAO,GAAG,IAAI,6DAAoC,CAAC;gBACvD,OAAO,EAAE,KAAK;gBACd,IAAI;gBACJ,WAAW,EAAE,kBAAkB;gBAC/B,MAAM,EAAE,kBAAkB;aAC3B,CAAC,CAAC;YAEH,MAAM,QAAQ,GAAG,cAAM,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA,CAAC;YAElD,IAAI,QAAQ,CAAC,IAAI,EAAE,CAAC;gBAClB,MAAM,OAAO,GAAG,IAAI,WAAW,CAAC,OAAO,CAAC,CAAC;;oBAEzC,KAA4B,eAAA,KAAA,cAAA,QAAQ,CAAC,IAAI,CAAA,IAAA,+DAAE,CAAC;wBAAhB,cAAa;wBAAb,WAAa;wBAA9B,MAAM,OAAO,KAAA,CAAA;wBACtB,MAAM,aAAa,GAAG,OAAO,CAAC,MAAM,CAAC,MAAA,OAAO,CAAC,KAAK,0CAAE,KAAK,EAAE;4BACzD,MAAM,EAAE,IAAI;yBACb,CAAC,CAAC;wBAEH,IAAI,CAAC;4BACH,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,aAAa,CAAC,CAAC;4BAC7C,oBAAM,UAAU,CAAA,CAAC;wBACnB,CAAC;wBAAC,OAAO,KAAK,EAAE,CAAC;4BACf,OAAO,CAAC,KAAK,CAAC,gCAAgC,EAAE,KAAK,CAAC,CAAC;wBACzD,CAAC;oBACH,CAAC;;;;;;;;;YACH,CAAC;QACH,CAAC;KAAA;CACF;AAxFD,gEAwFC"}
package/dist/services/ClientService.d.ts
CHANGED
@@ -1,5 +1,5 @@
 import { BedrockAnthropicParsedChunk, LLMResponse, Messages } from "../types";
 export interface ClientService {
-    generateCompletion(messages: Messages, model?: string, maxTokens?: number, temperature?: number, systemPrompt?: string
-    generateStreamCompletion(messages: Messages, model?: string, maxTokens?: number, temperature?: number, systemPrompt?: string
+    generateCompletion(messages: Messages, model?: string, maxTokens?: number, temperature?: number, systemPrompt?: string): Promise<LLMResponse>;
+    generateStreamCompletion(messages: Messages, model?: string, maxTokens?: number, temperature?: number, systemPrompt?: string): AsyncGenerator<BedrockAnthropicParsedChunk, void, unknown>;
 }
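The interface now spells out its return types, so a conforming service must resolve to an LLMResponse and stream BedrockAnthropicParsedChunk values. A hypothetical stub showing the narrowed contract, assuming the named types are public through the new export * from "./types":

    import type { BedrockAnthropicParsedChunk, LLMResponse, Messages } from "llm-proxy";

    // Hypothetical skeleton of a custom ClientService implementation.
    class StubService {
      async generateCompletion(
        messages: Messages,
        model?: string,
        maxTokens?: number,
        temperature?: number,
        systemPrompt?: string
      ): Promise<LLMResponse> {
        throw new Error("not implemented");
      }

      async *generateStreamCompletion(
        messages: Messages,
        model?: string,
        maxTokens?: number,
        temperature?: number,
        systemPrompt?: string
      ): AsyncGenerator<BedrockAnthropicParsedChunk, void, unknown> {
        // yield parsed chunks as they are produced
      }
    }
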
package/dist/services/OpenAIService.d.ts
CHANGED
@@ -3,9 +3,6 @@ import { ClientService } from "./ClientService";
 export declare class OpenAIService implements ClientService {
     private openai;
     constructor(apiKey: string);
-    generateCompletion(messages: OpenAIMessages, model: string, maxTokens: number, temperature: number, systemPrompt?: string
-
-    generateStreamCompletion(messages: OpenAIMessages, model: string, maxTokens: number, temperature: number, systemPrompt?: string, // Optional parameter
-    tools?: any, // Optional parameter
-    stream?: boolean): AsyncGenerator<any, void, unknown>;
+    generateCompletion(messages: OpenAIMessages, model: string, maxTokens: number, temperature: number, systemPrompt?: string): Promise<OpenAIResponse>;
+    generateStreamCompletion(messages: OpenAIMessages, model: string, maxTokens: number, temperature: number, systemPrompt?: string): AsyncGenerator<any, void, unknown>;
 }
package/dist/services/OpenAIService.js
CHANGED
@@ -38,9 +38,7 @@ class OpenAIService {
     constructor(apiKey) {
         this.openai = new openai_1.default({ apiKey });
     }
-    generateCompletion(messages, model, maxTokens, temperature, systemPrompt
-    tools // Optional parameter -- tools are part of the system prompt or the message are being managed on the client side
-    ) {
+    generateCompletion(messages, model, maxTokens, temperature, systemPrompt) {
        return __awaiter(this, void 0, void 0, function* () {
             if (!model) {
                 throw new Error("Model ID is required for OpenAIService.");
@@ -60,10 +58,7 @@ class OpenAIService {
             }
         });
     }
-    generateStreamCompletion(messages, model, maxTokens, temperature, systemPrompt
-    tools, // Optional parameter
-    stream // Optional parameter
-    ) {
+    generateStreamCompletion(messages, model, maxTokens, temperature, systemPrompt) {
         return __asyncGenerator(this, arguments, function* generateStreamCompletion_1() {
             var _a, e_1, _b, _c;
             if (!model) {
package/dist/services/OpenAIService.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"OpenAIService.js","sourceRoot":"","sources":["../../src/services/OpenAIService.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAA4B;AAI5B,MAAa,aAAa;IAGxB,YAAY,MAAc;QACxB,IAAI,CAAC,MAAM,GAAG,IAAI,gBAAM,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC;IACvC,CAAC;IAEK,kBAAkB,CACtB,QAAwB,EACxB,KAAa,EACb,SAAiB,EACjB,WAAmB,EACnB,YAAqB,
+{"version":3,"file":"OpenAIService.js","sourceRoot":"","sources":["../../src/services/OpenAIService.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAA4B;AAI5B,MAAa,aAAa;IAGxB,YAAY,MAAc;QACxB,IAAI,CAAC,MAAM,GAAG,IAAI,gBAAM,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC;IACvC,CAAC;IAEK,kBAAkB,CACtB,QAAwB,EACxB,KAAa,EACb,SAAiB,EACjB,WAAmB,EACnB,YAAqB;;YAErB,IAAI,CAAC,KAAK,EAAE,CAAC;gBACX,MAAM,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;YAC7D,CAAC;YAED,IAAI,CAAC;gBACH,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;oBACzD,KAAK,EAAE,0BAA0B;oBACjC,QAAQ;oBACR,UAAU,EAAE,SAAS;oBACrB,WAAW;iBACZ,CAAC,CAAC;gBACH,OAAO,QAA0B,CAAC;YACpC,CAAC;YAAC,OAAO,KAAK,EAAE,CAAC;gBACf,OAAO,CAAC,KAAK,CAAC,wBAAwB,EAAE,KAAK,CAAC,CAAC;gBAC/C,MAAM,KAAK,CAAC;YACd,CAAC;QACH,CAAC;KAAA;IAEM,wBAAwB,CAC7B,QAAwB,EACxB,KAAa,EACb,SAAiB,EACjB,WAAmB,EACnB,YAAqB;;;YAErB,IAAI,CAAC,KAAK,EAAE,CAAC;gBACX,MAAM,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;YAC7D,CAAC;YAED,IAAI,CAAC;gBACH,MAAM,MAAM,GAAG,cAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;oBACvD,KAAK;oBACL,QAAQ;oBACR,UAAU,EAAE,SAAS;oBACrB,WAAW;oBACX,MAAM,EAAE,IAAI;oBACZ,cAAc,EAAE;wBACd,aAAa,EAAE,IAAI;qBACpB;iBACF,CAAC,CAAA,CAAC;;oBAEH,KAA0B,eAAA,WAAA,cAAA,MAAM,CAAA,YAAA,qFAAE,CAAC;wBAAT,sBAAM;wBAAN,WAAM;wBAArB,MAAM,KAAK,KAAA,CAAA;wBACpB,oBAAM,KAAK,CAAA,CAAC;oBACd,CAAC;;;;;;;;;YACH,CAAC;YAAC,OAAO,KAAK,EAAE,CAAC;gBACf,OAAO,CAAC,KAAK,CAAC,6BAA6B,EAAE,KAAK,CAAC,CAAC;gBACpD,MAAM,KAAK,CAAC;YACd,CAAC;QACH,CAAC;KAAA;CACF;AA/DD,sCA+DC"}
package/dist/types/index.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/types/index.ts"],"names":[],"mappings":";;;AAAA,UAAU;AACV,IAAY,SAIX;AAJD,WAAY,SAAS;IACnB,8BAAiB,CAAA;IACjB,mDAAsC,CAAA;IACtC,6CAAgC,CAAA;AAClC,CAAC,EAJW,SAAS,yBAAT,SAAS,QAIpB;
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/types/index.ts"],"names":[],"mappings":";;;AAAA,UAAU;AACV,IAAY,SAIX;AAJD,WAAY,SAAS;IACnB,8BAAiB,CAAA;IACjB,mDAAsC,CAAA;IACtC,6CAAgC,CAAA;AAClC,CAAC,EAJW,SAAS,yBAAT,SAAS,QAIpB;AA6DD,IAAY,mBAOX;AAPD,WAAY,mBAAmB;IAC7B,0DAAmC,CAAA;IACnC,yCAAkB,CAAA;IAClB,mDAA4B,CAAA;IAC5B,kDAA2B,CAAA;IAC3B,kEAA2C,CAAA;IAC3C,sDAA+B,CAAA;AACjC,CAAC,EAPW,mBAAmB,mCAAnB,mBAAmB,QAO9B;AAyCD,cAAc;AAEd,wBAAwB;AAExB,IAAY,6BAKX;AALD,WAAY,6BAA6B;IACvC,0FAAyD,CAAA;IACzD,4FAA2D,CAAA;IAC3D,wFAAuD,CAAA;IACvD,gGAA+D,CAAA;AACjE,CAAC,EALW,6BAA6B,6CAA7B,6BAA6B,QAKxC;AAED,IAAY,2BAKX;AALD,WAAY,2BAA2B;IACrC,4CAAa,CAAA;IACb,8CAAe,CAAA;IACf,oDAAqB,CAAA;IACrB,0DAA2B,CAAA;AAC7B,CAAC,EALW,2BAA2B,2CAA3B,2BAA2B,QAKtC"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "llm-proxy",
-  "version": "1.3.
+  "version": "1.3.2",
   "description": "An LLM Proxy that allows the user to interact with different language models from different providers using unified request and response formats.",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -30,7 +30,7 @@
     "aws-sdk": "^2.1691.0",
     "axios": "^1.7.7",
     "dotenv": "^16.4.5",
-    "llm-proxy": "^1.3.
+    "llm-proxy": "^1.3.2",
     "openai": "^4.69.0"
   }
 }