llm-proxy 1.2.0 → 1.3.0
package/dist/index.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import {
+import { OpenAIResponse } from "./types";
 interface Credentials {
     apiKey?: string;
     awsConfig?: {
@@ -7,6 +7,6 @@ interface Credentials {
         region: string;
     };
 }
-export declare function generateLLMResponse(messages:
-export declare function generateLLMStreamResponse(messages:
-export
+export declare function generateLLMResponse(messages: any, model: string, maxTokens: number, temperature: number, systemPrompt: string, tools: any, credentials: Credentials): Promise<OpenAIResponse>;
+export declare function generateLLMStreamResponse(messages: any, model: string, maxTokens: number, temperature: number, systemPrompt: string, tools: any, credentials: Credentials): Promise<AsyncGenerator<OpenAIResponse>>;
+export {};
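Based on the updated declarations above, calling the non-streaming entry point might look like the following minimal sketch; the model id, message content, and environment variable are illustrative assumptions rather than values documented by the package:

import { generateLLMResponse } from "llm-proxy";

// Minimal sketch against the 1.3.0 index.d.ts above. As of 1.3.0 the
// systemPrompt argument is prepended to the messages array before dispatch.
async function example() {
  const response = await generateLLMResponse(
    [{ role: "user", content: "Summarize the plot of Hamlet." }], // messages
    "gpt-4o",                                                     // model (assumed id)
    512,                                                          // maxTokens
    0.2,                                                          // temperature
    "You are a concise assistant.",                               // systemPrompt
    undefined,                                                    // tools
    { apiKey: process.env.OPENAI_API_KEY }                        // credentials (OpenAI path)
  );
  console.log(response); // unified OpenAIResponse shape
}

example();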
package/dist/index.js
CHANGED
@@ -1,18 +1,4 @@
 "use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
-    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
 var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
     return new (P || (P = Promise))(function (resolve, reject) {
@@ -54,9 +40,13 @@ const types_1 = require("./types");
 // Main function for non-streaming requests
 function generateLLMResponse(messages, model, maxTokens, temperature, systemPrompt, tools, credentials) {
     return __awaiter(this, void 0, void 0, function* () {
+        // Step 1: Prepend the system prompt if provided
+        if (systemPrompt) {
+            messages = [{ role: "system", content: systemPrompt }, ...messages];
+        }
         // Step 2: Identify the provider based on the model
         const provider = ProviderFinder_1.ProviderFinder.getProvider(model);
-        // Initialize the correct service based on the provider
+        // Step 3: Initialize the correct service based on the provider
        let service;
         if (provider === types_1.Providers.OPENAI) {
             if (!credentials.apiKey) {
@@ -74,14 +64,15 @@ function generateLLMResponse(messages, model, maxTokens, temperature, systemProm
         else {
             throw new Error("Unsupported provider");
         }
-        // Step
-        const adaptedMessages
-
-
-
-
-
-
+        // Step 4: Extract system prompt for Anthropic
+        const { messages: adaptedMessages, systemPrompt: extractedPrompt } = InputFormatAdapter_1.InputFormatAdapter.extractSystemPrompt(messages, provider);
+        // Step 5: Adapt the input messages to the provider's format
+        const formattedMessages = provider !== types_1.Providers.OPENAI
+            ? InputFormatAdapter_1.InputFormatAdapter.adaptMessages(adaptedMessages, provider)
+            : adaptedMessages;
+        // Step 6: Make the request
+        const response = yield service.generateCompletion(formattedMessages, model, maxTokens, temperature, extractedPrompt || systemPrompt, tools);
+        // Step 7: Return the unified response
         return provider === types_1.Providers.OPENAI
             ? response
             : OutputFormatAdapter_1.OutputFormatAdapter.adaptResponse(response, provider);
@@ -90,6 +81,9 @@ function generateLLMResponse(messages, model, maxTokens, temperature, systemProm
 // Main function for streaming requests
 function generateLLMStreamResponse(messages, model, maxTokens, temperature, systemPrompt, tools, credentials) {
     return __awaiter(this, void 0, void 0, function* () {
+        if (systemPrompt) {
+            messages = [{ role: "system", content: systemPrompt }, ...messages];
+        }
         const provider = ProviderFinder_1.ProviderFinder.getProvider(model);
         let service;
         if (provider === types_1.Providers.OPENAI) {
@@ -108,13 +102,11 @@ function generateLLMStreamResponse(messages, model, maxTokens, temperature, syst
         else {
             throw new Error("Unsupported provider");
         }
-        const adaptedMessages
-
-
-
-
-            : adaptedMessages, model, maxTokens, temperature, systemPrompt, tools, true);
-        // Create and return the async generator
+        const { messages: adaptedMessages, systemPrompt: extractedPrompt } = InputFormatAdapter_1.InputFormatAdapter.extractSystemPrompt(messages, provider);
+        const formattedMessages = provider !== types_1.Providers.OPENAI
+            ? InputFormatAdapter_1.InputFormatAdapter.adaptMessages(adaptedMessages, provider)
+            : adaptedMessages;
+        const stream = service.generateStreamCompletion(formattedMessages, model, maxTokens, temperature, extractedPrompt || systemPrompt, tools, true);
         function streamGenerator() {
             return __asyncGenerator(this, arguments, function* streamGenerator_1() {
                 var _a, e_1, _b, _c;
@@ -140,5 +132,4 @@ function generateLLMStreamResponse(messages, model, maxTokens, temperature, syst
         return streamGenerator();
     });
 }
-__exportStar(require("./types"), exports);
 //# sourceMappingURL=index.js.map
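Given the stream plumbing above, the streaming variant resolves to an async generator that can be consumed with for await; a minimal sketch under the same assumptions as the previous example (model id and message content are illustrative):

import { generateLLMStreamResponse } from "llm-proxy";

// Minimal sketch: each yielded chunk is already adapted to the unified
// OpenAI-style response shape, regardless of the underlying provider.
async function streamExample() {
  const stream = await generateLLMStreamResponse(
    [{ role: "user", content: "Stream a two-line poem." }], // messages
    "gpt-4o",                                               // model (assumed id)
    256,                                                    // maxTokens
    0.7,                                                    // temperature
    "You are a helpful assistant.",                         // systemPrompt
    undefined,                                              // tools
    { apiKey: process.env.OPENAI_API_KEY }                  // credentials
  );
  for await (const chunk of stream) {
    console.log(JSON.stringify(chunk));
  }
}

streamExample();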
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAmBA,kDA8DC;AAGD,8DAiEC;AArJD,gEAA6D;AAC7D,wEAAqE;AACrE,0EAAuE;AACvE,sFAAmF;AACnF,4DAAyD;AACzD,mCAMiB;AAOjB,2CAA2C;AAC3C,SAAsB,mBAAmB,CACvC,QAAa,EACb,KAAa,EACb,SAAiB,EACjB,WAAmB,EACnB,YAAoB,EACpB,KAAU,EACV,WAAwB;;QAExB,gDAAgD;QAChD,IAAI,YAAY,EAAE,CAAC;YACjB,QAAQ,GAAG,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,YAAY,EAAE,EAAE,GAAG,QAAQ,CAAC,CAAC;QACtE,CAAC;QAED,mDAAmD;QACnD,MAAM,QAAQ,GAAG,+BAAc,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;QAEnD,+DAA+D;QAC/D,IAAI,OAAmD,CAAC;QACxD,IAAI,QAAQ,KAAK,iBAAS,CAAC,MAAM,EAAE,CAAC;YAClC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,CAAC;gBACxB,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;YACnE,CAAC;YACD,OAAO,GAAG,IAAI,6BAAa,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;QAClD,CAAC;aAAM,IAAI,QAAQ,KAAK,iBAAS,CAAC,iBAAiB,EAAE,CAAC;YACpD,MAAM,SAAS,GAAG,WAAW,CAAC,SAAS,CAAC;YACxC,IAAI,CAAC,SAAS,EAAE,CAAC;gBACf,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAC;YACtE,CAAC;YACD,OAAO,GAAG,IAAI,uDAA0B,CACtC,SAAS,CAAC,WAAW,EACrB,SAAS,CAAC,eAAe,EACzB,SAAS,CAAC,MAAM,CACjB,CAAC;QACJ,CAAC;aAAM,CAAC;YACN,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;QAC1C,CAAC;QAED,8CAA8C;QAC9C,MAAM,EAAE,QAAQ,EAAE,eAAe,EAAE,YAAY,EAAE,eAAe,EAAE,GAChE,uCAAkB,CAAC,mBAAmB,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;QAE7D,4DAA4D;QAC5D,MAAM,iBAAiB,GACrB,QAAQ,KAAK,iBAAS,CAAC,MAAM;YAC3B,CAAC,CAAC,uCAAkB,CAAC,aAAa,CAAC,eAAe,EAAE,QAAQ,CAAC;YAC7D,CAAC,CAAC,eAAe,CAAC;QAEtB,2BAA2B;QAC3B,MAAM,QAAQ,GAAG,MAAM,OAAO,CAAC,kBAAkB,CAC/C,iBAAwB,EACxB,KAAK,EACL,SAAS,EACT,WAAW,EACX,eAAe,IAAI,YAAY,EAC/B,KAAK,CACN,CAAC;QAEF,sCAAsC;QACtC,OAAO,QAAQ,KAAK,iBAAS,CAAC,MAAM;YAClC,CAAC,CAAE,QAA2B;YAC9B,CAAC,CAAE,yCAAmB,CAAC,aAAa,CAAC,QAAQ,EAAE,QAAQ,CAAoB,CAAC;IAChF,CAAC;CAAA;AAED,uCAAuC;AACvC,SAAsB,yBAAyB,CAC7C,QAAa,EACb,KAAa,EACb,SAAiB,EACjB,WAAmB,EACnB,YAAoB,EACpB,KAAU,EACV,WAAwB;;QAExB,IAAI,YAAY,EAAE,CAAC;YACjB,QAAQ,GAAG,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,YAAY,EAAE,EAAE,GAAG,QAAQ,CAAC,CAAC;QACtE,CAAC;QAED,MAAM,QAAQ,GAAG,+BAAc,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;QAEnD,IAAI,OAAmD,CAAC;QACxD,IAAI,QAAQ,KAAK,iBAAS,CAAC,MAAM,EAAE,CAAC;YAClC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,CAAC;gBACxB,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;YACnE,CAAC;YACD,OAAO,GAAG,IAAI,6BAAa,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;QAClD,CAAC;aAAM,IAAI,QAAQ,KAAK,iBAAS,CAAC,iBAAiB,EAAE,CAAC;YACpD,MAAM,SAAS,GAAG,WAAW,CAAC,SAAS,CAAC;YACxC,IAAI,CAAC,SAAS,EAAE,CAAC;gBACf,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAC;YACtE,CAAC;YACD,OAAO,GAAG,IAAI,uDAA0B,CACtC,SAAS,CAAC,WAAW,EACrB,SAAS,CAAC,eAAe,EACzB,SAAS,CAAC,MAAM,CACjB,CAAC;QACJ,CAAC;aAAM,CAAC;YACN,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;QAC1C,CAAC;QAED,MAAM,EAAE,QAAQ,EAAE,eAAe,EAAE,YAAY,EAAE,eAAe,EAAE,GAChE,uCAAkB,CAAC,mBAAmB,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;QAE7D,MAAM,iBAAiB,GACrB,QAAQ,KAAK,iBAAS,CAAC,MAAM;YAC3B,CAAC,CAAC,uCAAkB,CAAC,aAAa,CAAC,eAAe,EAAE,QAAQ,CAAC;YAC7D,CAAC,CAAC,eAAe,CAAC;QAEtB,MAAM,MAAM,GAAG,OAAO,CAAC,wBAAwB,CAC7C,iBAAwB,EACxB,KAAK,EACL,SAAS,EACT,WAAW,EACX,eAAe,IAAI,YAAY,EAC/B,KAAK,EACL,IAAI,CACL,CAAC;QAEF,SAAgB,eAAe;;;;oBAC7B,KAA0B,eAAA,WAAA,cAAA,MAAM,CAAA,YAAA,qFAAE,CAAC;wBAAT,sBAAM;wBAAN,WAAM;wBAArB,MAAM,KAAK,KAAA,CAAA;wBACpB,oBAAM,QAAQ,KAAK,iBAAS,CAAC,MAAM;4BACjC,CAAC,CAAE,KAAwB;4BAC3B,CAAC,CAAE,yCAAmB,CAAC,aAAa,CAChC,KAAK,EACL,QAAQ,CACU,CAAA,CAAC;oBAC3B,CAAC;;;;;;;;;YACH,CAAC;SAAA;QAED,OAAO,eAAe,EAAE,CAAC;IAC3B,CAAC;CAAA"}
package/dist/middleware/InputFormatAdapter.d.ts
CHANGED
@@ -1,4 +1,8 @@
 import { BedrockAnthropicMessage, Messages, OpenAIMessages, Providers } from "../types";
 export declare class InputFormatAdapter {
     static adaptMessages(messages: Messages, provider: Providers): OpenAIMessages | BedrockAnthropicMessage[];
+    static extractSystemPrompt(messages: any, provider: Providers): {
+        messages: Messages;
+        systemPrompt?: string;
+    };
 }
package/dist/middleware/InputFormatAdapter.js
CHANGED
@@ -20,7 +20,10 @@
                     };
                 });
             case types_1.Providers.ANTHROPIC_BEDROCK:
-
+                // Map messages for Anthropic, ignoring 'system' role in messages
+                return messages
+                    .filter((msg) => msg.role !== "system")
+                    .map((msg) => ({
                     role: msg.role === "user" ? "user" : "assistant",
                     content: [
                         {
@@ -33,6 +36,18 @@
                 throw new Error(`Unsupported provider: ${provider}`);
         }
     }
+    static extractSystemPrompt(messages, provider) {
+        if (provider === types_1.Providers.ANTHROPIC_BEDROCK) {
+            const [systemMessage, ...restMessages] = messages;
+            if ((systemMessage === null || systemMessage === void 0 ? void 0 : systemMessage.role) === "system") {
+                return {
+                    messages: restMessages,
+                    systemPrompt: systemMessage.content,
+                };
+            }
+        }
+        return { messages, systemPrompt: undefined };
+    }
 }
 exports.InputFormatAdapter = InputFormatAdapter;
 //# sourceMappingURL=InputFormatAdapter.js.map
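For reference, a minimal sketch of the behavior the new extractSystemPrompt helper compiles to, based on the code above; the deep dist import paths and the sample messages are assumptions, since the class is no longer re-exported from the package root in 1.3.0:

// Assumed deep imports; adjust the paths if the build layout differs.
import { InputFormatAdapter } from "llm-proxy/dist/middleware/InputFormatAdapter";
import { Providers } from "llm-proxy/dist/types";

const messages = [
  { role: "system", content: "You are terse." },
  { role: "user", content: "Hi" },
];

// For ANTHROPIC_BEDROCK a leading system message is split out of the array...
const bedrock = InputFormatAdapter.extractSystemPrompt(messages, Providers.ANTHROPIC_BEDROCK);
// bedrock.systemPrompt === "You are terse."
// bedrock.messages     deep-equals [{ role: "user", content: "Hi" }]

// ...while for the OpenAI provider the input passes through unchanged.
const openai = InputFormatAdapter.extractSystemPrompt(messages, Providers.OPENAI);
// openai.systemPrompt === undefined
// openai.messages     === messages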
package/dist/middleware/InputFormatAdapter.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"InputFormatAdapter.js","sourceRoot":"","sources":["../../src/middleware/InputFormatAdapter.ts"],"names":[],"mappings":";;;AAAA,oCAOkB;AAElB,MAAa,kBAAkB;IAC7B,MAAM,CAAC,aAAa,CAClB,QAAkB,EAClB,QAAmB;QAEnB,QAAQ,QAAQ,EAAE,CAAC;YACjB,KAAK,iBAAS,CAAC,MAAM;gBACnB,OAAO,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE;oBAC1B,IAAI,GAAG,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;wBAC5B,OAAO;4BACL,IAAI,EAAE,GAAG,CAAC,IAAI;4BACd,OAAO,EAAE,GAAG,CAAC,OAAO;4BACpB,IAAI,EAAG,GAA6B,CAAC,IAAI;yBAC1C,CAAC;oBACJ,CAAC;oBACD,OAAO;wBACL,IAAI,EAAE,GAAG,CAAC,IAAI;wBACd,OAAO,EAAE,GAAG,CAAC,OAAiB;qBAC/B,CAAC;gBACJ,CAAC,CAAmB,CAAC;YAEvB,KAAK,iBAAS,CAAC,iBAAiB;gBAC9B,OAAO,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,CAAC;
+
{"version":3,"file":"InputFormatAdapter.js","sourceRoot":"","sources":["../../src/middleware/InputFormatAdapter.ts"],"names":[],"mappings":";;;AAAA,oCAOkB;AAElB,MAAa,kBAAkB;IAC7B,MAAM,CAAC,aAAa,CAClB,QAAkB,EAClB,QAAmB;QAEnB,QAAQ,QAAQ,EAAE,CAAC;YACjB,KAAK,iBAAS,CAAC,MAAM;gBACnB,OAAO,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE;oBAC1B,IAAI,GAAG,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;wBAC5B,OAAO;4BACL,IAAI,EAAE,GAAG,CAAC,IAAI;4BACd,OAAO,EAAE,GAAG,CAAC,OAAO;4BACpB,IAAI,EAAG,GAA6B,CAAC,IAAI;yBAC1C,CAAC;oBACJ,CAAC;oBACD,OAAO;wBACL,IAAI,EAAE,GAAG,CAAC,IAAI;wBACd,OAAO,EAAE,GAAG,CAAC,OAAiB;qBAC/B,CAAC;gBACJ,CAAC,CAAmB,CAAC;YAEvB,KAAK,iBAAS,CAAC,iBAAiB;gBAC9B,iEAAiE;gBACjE,OAAO,QAAQ;qBACZ,MAAM,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,IAAI,KAAK,QAAQ,CAAC;qBACtC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,CAAC;oBACb,IAAI,EAAE,GAAG,CAAC,IAAI,KAAK,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,WAAW;oBAChD,OAAO,EAAE;wBACP;4BACE,IAAI,EAAE,mCAA2B,CAAC,IAAI;4BACtC,IAAI,EAAE,GAAG,CAAC,OAAiB;yBAC5B;qBACF;iBACF,CAAC,CAA8B,CAAC;YAErC;gBACE,MAAM,IAAI,KAAK,CAAC,yBAAyB,QAAQ,EAAE,CAAC,CAAC;QACzD,CAAC;IACH,CAAC;IAED,MAAM,CAAC,mBAAmB,CACxB,QAAa,EACb,QAAmB;QAEnB,IAAI,QAAQ,KAAK,iBAAS,CAAC,iBAAiB,EAAE,CAAC;YAC7C,MAAM,CAAC,aAAa,EAAE,GAAG,YAAY,CAAC,GAAG,QAAQ,CAAC;YAClD,IAAI,CAAA,aAAa,aAAb,aAAa,uBAAb,aAAa,CAAE,IAAI,MAAK,QAAQ,EAAE,CAAC;gBACrC,OAAO;oBACL,QAAQ,EAAE,YAAY;oBACtB,YAAY,EAAE,aAAa,CAAC,OAAiB;iBAC9C,CAAC;YACJ,CAAC;QACH,CAAC;QACD,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,SAAS,EAAE,CAAC;IAC/C,CAAC;CACF;AAvDD,gDAuDC"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "llm-proxy",
-  "version": "1.2.0",
+  "version": "1.3.0",
   "description": "An LLM Proxy that allows the user to interact with different language models from different providers using unified request and response formats.",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -30,7 +30,7 @@
     "aws-sdk": "^2.1691.0",
     "axios": "^1.7.7",
     "dotenv": "^16.4.5",
-    "llm-proxy": "^1.
+    "llm-proxy": "^1.3.0",
     "openai": "^4.69.0"
   }
 }