llm-proxy 1.3.13 → 1.3.14
package/dist/index.js
CHANGED
@@ -48,9 +48,9 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.generateLLMStreamResponse = exports.generateLLMResponse = void 0;
 const ProviderFinder_1 = require("./middleware/ProviderFinder");
 const InputFormatAdapter_1 = require("./middleware/InputFormatAdapter");
-const OutputFormatAdapter_1 = require("./middleware/OutputFormatAdapter");
 const types_1 = require("./types");
 const OpenAIService_1 = __importDefault(require("./services/OpenAIService"));
+const OutputFormatAdapter_1 = require("./middleware/OutputFormatAdapter");
 const AwsBedrockAnthropicService_1 = __importDefault(require("./services/AwsBedrockAnthropicService"));
 // Main function for non-streaming requests
 function generateLLMResponse(params) {
@@ -82,7 +82,7 @@ function generateLLMResponse(params) {
         const response = yield service.generateCompletion({
             messages: adaptedMessages,
             model,
-            max_tokens
+            max_tokens,
             temperature: temperature || 0,
             functions,
             systemPrompt,
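For orientation, a minimal consumer-side sketch of the export this hunk touches. The public shape of params is not visible in this diff, so the request fields below (messages, model, max_tokens, credentials) are illustrative assumptions, not the package's documented API.

// Hypothetical usage sketch; field names are assumptions, not the documented API.
const { generateLLMResponse } = require("llm-proxy");

async function demo() {
  const response = await generateLLMResponse({
    messages: [{ role: "user", content: "Hello" }], // assumed request shape
    model: "gpt-4o-mini",                           // any model the proxy can route
    max_tokens: 256,                                // assumed field name
    temperature: 0,
    credentials: { apiKey: process.env.OPENAI_API_KEY }, // assumed credentials shape
  });
  // Responses are normalized to an OpenAI-style completion object.
  console.log(response.choices[0].message.content);
}

demo().catch(console.error);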
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,gEAA6D;AAC7D,wEAAqE;AACrE,
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,gEAA6D;AAC7D,wEAAqE;AACrE,mCAA8D;AAC9D,6EAAqD;AACrD,0EAAuE;AACvE,uGAA+E;AAkB/E,2CAA2C;AAC3C,SAAsB,mBAAmB,CACvC,MAAiC;;QAEjC,MAAM,EAAE,QAAQ,EAAE,KAAK,EAAE,SAAS,EAAE,UAAU,EAAE,WAAW,EAAE,WAAW,EAAE,GACxE,MAAM,CAAC;QAET,mDAAmD;QACnD,MAAM,QAAQ,GAAG,+BAAc,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;QAEnD,uDAAuD;QACvD,IAAI,OAAmD,CAAC;QACxD,IAAI,QAAQ,KAAK,iBAAS,CAAC,MAAM,EAAE;YACjC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE;gBACvB,OAAO,OAAO,CAAC,MAAM,CAAC,+CAA+C,CAAC,CAAC;aACxE;YACD,OAAO,GAAG,IAAI,uBAAa,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;SACjD;aAAM,IAAI,QAAQ,KAAK,iBAAS,CAAC,iBAAiB,EAAE;YACnD,MAAM,SAAS,GAAG,WAAW,CAAC,SAAS,CAAC;YACxC,IAAI,CAAC,SAAS,EAAE;gBACd,OAAO,OAAO,CAAC,MAAM,CAAC,kDAAkD,CAAC,CAAC;aAC3E;YACD,OAAO,GAAG,IAAI,oCAA0B,CACtC,SAAS,CAAC,WAAW,EACrB,SAAS,CAAC,eAAe,EACzB,SAAS,CAAC,MAAM,CACjB,CAAC;SACH;aAAM;YACL,OAAO,OAAO,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;SAC/C;QAED,uDAAuD;QACvD,MAAM,EAAE,eAAe,EAAE,YAAY,EAAE,GAAG,uCAAkB,CAAC,aAAa,CACxE,QAAQ,EACR,QAAQ,CACT,CAAC;QAEF,kCAAkC;QAClC,MAAM,QAAQ,GAAG,MAAM,OAAO,CAAC,kBAAkB,CAAC;YAChD,QAAQ,EAAE,eAAsB;YAChC,KAAK;YACL,UAAU;YACV,WAAW,EAAE,WAAW,IAAI,CAAC;YAC7B,SAAS;YACT,YAAY;SACb,CAAC,CAAC;QAEH,uCAAuC;QACvC,OAAO,QAAQ,KAAK,iBAAS,CAAC,MAAM;YAClC,CAAC,CAAE,QAA2B;YAC9B,CAAC,CAAE,yCAAmB,CAAC,aAAa,CAAC,QAAQ,EAAE,QAAQ,CAAoB,CAAC;IAChF,CAAC;CAAA;AAlDD,kDAkDC;AAED,uCAAuC;AACvC,SAAsB,yBAAyB,CAC7C,MAAiC;;QAEjC,MAAM,EAAE,QAAQ,EAAE,KAAK,EAAE,SAAS,EAAE,UAAU,EAAE,WAAW,EAAE,WAAW,EAAE,GACxE,MAAM,CAAC;QAET,mDAAmD;QACnD,MAAM,QAAQ,GAAG,+BAAc,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;QAEnD,uDAAuD;QACvD,IAAI,OAAmD,CAAC;QACxD,IAAI,QAAQ,KAAK,iBAAS,CAAC,MAAM,EAAE;YACjC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE;gBACvB,OAAO,OAAO,CAAC,MAAM,CAAC,+CAA+C,CAAC,CAAC;aACxE;YACD,OAAO,GAAG,IAAI,uBAAa,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;SACjD;aAAM,IAAI,QAAQ,KAAK,iBAAS,CAAC,iBAAiB,EAAE;YACnD,MAAM,SAAS,GAAG,WAAW,CAAC,SAAS,CAAC;YACxC,IAAI,CAAC,SAAS,EAAE;gBACd,OAAO,OAAO,CAAC,MAAM,CAAC,kDAAkD,CAAC,CAAC;aAC3E;YACD,OAAO,GAAG,IAAI,oCAA0B,CACtC,SAAS,CAAC,WAAW,EACrB,SAAS,CAAC,eAAe,EACzB,SAAS,CAAC,MAAM,CACjB,CAAC;SACH;aAAM;YACL,OAAO,OAAO,CAAC,MAAM,CAAC,sBAAsB,CAAC,CAAC;SAC/C;QAED,uDAAuD;QACvD,MAAM,EAAE,eAAe,EAAE,YAAY,EAAE,GAAG,uCAAkB,CAAC,aAAa,CACxE,QAAQ,EACR,QAAQ,CACT,CAAC;QAEF,4CAA4C;QAC5C,MAAM,MAAM,GAAG,OAAO,CAAC,wBAAwB,CAAC;YAC9C,QAAQ,EAAE,eAAsB;YAChC,KAAK;YACL,UAAU;YACV,WAAW,EAAE,WAAW,IAAI,CAAC;YAC7B,SAAS;YACT,YAAY;SACb,CAAC,CAAC;QAEH,gDAAgD;QAChD,SAAgB,eAAe;;;;oBAC7B,KAA0B,eAAA,WAAA,cAAA,MAAM,CAAA,YAAA;wBAAN,sBAAM;wBAAN,WAAM;;4BAArB,MAAM,KAAK,KAAA,CAAA;4BACpB,oBAAM,QAAQ,KAAK,iBAAS,CAAC,MAAM;gCACjC,CAAC,CAAE,KAAwB;gCAC3B,CAAC,CAAE,yCAAmB,CAAC,aAAa,CAChC,KAAK,EACL,QAAQ,CACU,CAAA,CAAC;;;;;qBAC1B;;;;;;;;;YACH,CAAC;SAAA;QAED,OAAO,eAAe,EAAE,CAAC;IAC3B,CAAC;CAAA;AA3DD,8DA2DC;AAED,0CAAwB"}
package/dist/middleware/OutputFormatAdapter.d.ts
CHANGED
@@ -1,8 +1,10 @@
 import { LLMResponse, Providers } from "../types";
 export declare class OutputFormatAdapter {
+    private static cachedModel;
     static adaptResponse(response: any, provider: Providers): LLMResponse;
     private static adaptCompleteResponse;
     private static adaptStreamingResponse;
+    private static getModel;
     private static mapRole;
     private static extractContent;
 }
package/dist/middleware/OutputFormatAdapter.js
CHANGED
@@ -12,7 +12,6 @@ class OutputFormatAdapter {
                 case types_1.Providers.OPENAI:
                     return response;
                 case types_1.Providers.ANTHROPIC_BEDROCK:
-                    // Check if it's a streaming chunk or complete response
                     if (response.type === "message" && !response.delta) {
                         return this.adaptCompleteResponse(response);
                     }
@@ -28,11 +27,13 @@ class OutputFormatAdapter {
         }
     }
     static adaptCompleteResponse(response) {
+        var _a, _b, _c, _d;
+        const model = this.getModel(response);
         return {
             id: response.id,
             object: "text_completion",
             created: Date.now(),
-            model
+            model,
             choices: response.content.map((contentBlock, index) => ({
                 index,
                 message: {
@@ -43,74 +44,66 @@ class OutputFormatAdapter {
                 finish_reason: response.stop_reason || null,
             })),
             usage: {
-                prompt_tokens: response.usage.input_tokens,
-                completion_tokens: response.usage.output_tokens,
-                total_tokens: response.usage.input_tokens +
+                prompt_tokens: ((_a = response.usage) === null || _a === void 0 ? void 0 : _a.input_tokens) || 0,
+                completion_tokens: ((_b = response.usage) === null || _b === void 0 ? void 0 : _b.output_tokens) || 0,
+                total_tokens: (((_c = response.usage) === null || _c === void 0 ? void 0 : _c.input_tokens) || 0) +
+                    (((_d = response.usage) === null || _d === void 0 ? void 0 : _d.output_tokens) || 0),
                 prompt_tokens_details: { cached_tokens: 0 },
                 completion_tokens_details: { reasoning_tokens: 0 },
             },
-            system_fingerprint: "
+            system_fingerprint: response.system_fingerprint || "default_fingerprint",
         };
     }
     static adaptStreamingResponse(chunk) {
-        var _a, _b;
-
-
-
-
-
-            object: "text_completion",
-            created: Date.now(),
-            model: "anthropic.claude-3-haiku",
-            choices: [
-                {
-                    index: 0,
-                    delta: {
-                        role: "assistant",
-                        content: "",
-                    },
-                    logprobs: null,
-                    finish_reason: "stop",
-                },
-            ],
-            usage: {
-                prompt_tokens: (metrics === null || metrics === void 0 ? void 0 : metrics.inputTokenCount) || 0,
-                completion_tokens: (metrics === null || metrics === void 0 ? void 0 : metrics.outputTokenCount) || 0,
-                total_tokens: ((metrics === null || metrics === void 0 ? void 0 : metrics.inputTokenCount) || 0) + ((metrics === null || metrics === void 0 ? void 0 : metrics.outputTokenCount) || 0),
-                prompt_tokens_details: { cached_tokens: 0 },
-                completion_tokens_details: { reasoning_tokens: 0 },
-            },
-            system_fingerprint: "anthropic_translation",
-        };
+        var _a, _b, _c;
+        const metrics = chunk["amazon-bedrock-invocationMetrics"];
+        const isStop = chunk.type === "content_block_stop" || chunk.type === "message_stop";
+        // Cache model on the first message_start chunk
+        if (chunk.type === "message_start" && ((_a = chunk.message) === null || _a === void 0 ? void 0 : _a.model)) {
+            this.cachedModel = chunk.message.model;
         }
-        //
-        const content = ((
+        // Extract content properly
+        const content = ((_b = chunk.content_block) === null || _b === void 0 ? void 0 : _b.text) || ((_c = chunk.delta) === null || _c === void 0 ? void 0 : _c.text) || "";
+        // Generate the adapted chunk
         return {
            id: `stream-${Date.now()}`,
-            object: "
+            object: "chat.completion.chunk",
             created: Date.now(),
-            model: "
+            model: this.cachedModel || "unknown-model",
             choices: [
                 {
                     index: 0,
-                    delta:
-
-                        content,
-                    },
+                    delta: isStop
+                        ? {} // Send an empty delta for stop messages
+                        : { content },
                     logprobs: null,
-                    finish_reason: "null"
+                    finish_reason: isStop ? "stop" : null, // Properly use `null` or `"stop"`
                 },
             ],
-            usage:
-
-
-
-
-
-
+            usage: isStop
+                ? {
+                    prompt_tokens: (metrics === null || metrics === void 0 ? void 0 : metrics.inputTokenCount) || 0,
+                    completion_tokens: (metrics === null || metrics === void 0 ? void 0 : metrics.outputTokenCount) || 0,
+                    total_tokens: ((metrics === null || metrics === void 0 ? void 0 : metrics.inputTokenCount) || 0) +
+                        ((metrics === null || metrics === void 0 ? void 0 : metrics.outputTokenCount) || 0),
+                    prompt_tokens_details: { cached_tokens: 0 },
+                    completion_tokens_details: { reasoning_tokens: 0 },
+                }
+                : null,
             system_fingerprint: "anthropic_translation",
         };
     }
+    static getModel(response) {
+        var _a;
+        // Try to retrieve the model from the response
+        if ((_a = response === null || response === void 0 ? void 0 : response.message) === null || _a === void 0 ? void 0 : _a.model) {
+            return response.message.model;
+        }
+        if (response === null || response === void 0 ? void 0 : response.model) {
+            return response.model;
+        }
+        return "unknown-model";
+    }
     static mapRole(content) {
         if (!content || !content.type) {
             throw new Error("Invalid content block structure");
@@ -122,7 +115,7 @@ class OutputFormatAdapter {
             case types_1.BedrockAnthropicContentType.TEXT:
                 return "assistant";
             default:
-                return "assistant";
+                return "assistant";
         }
     }
     static extractContent(content) {
@@ -131,8 +124,7 @@ class OutputFormatAdapter {
         }
         switch (content.type) {
             case types_1.BedrockAnthropicContentType.TEXT:
-
-                return textContent.text || "";
+                return content.text || "";
             case types_1.BedrockAnthropicContentType.TOOL_RESULT:
                 return content.content || "";
             case types_1.BedrockAnthropicContentType.TOOL_USE:
@@ -143,4 +135,5 @@ class OutputFormatAdapter {
     }
 }
 exports.OutputFormatAdapter = OutputFormatAdapter;
+OutputFormatAdapter.cachedModel = null; // Cache the model for streaming responses
 //# sourceMappingURL=OutputFormatAdapter.js.map
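To make the reworked adaptStreamingResponse concrete, here is a small illustrative driver that feeds synthetic Bedrock Anthropic chunks through the adapter. The deep require paths are assumptions made to keep the sketch self-contained; the chunk fields mirror the ones the code above reads (message.model, content_block.text, delta.text, and amazon-bedrock-invocationMetrics).

// Illustrative sketch only; the deep require paths are assumptions.
const { OutputFormatAdapter } = require("llm-proxy/dist/middleware/OutputFormatAdapter");
const { Providers } = require("llm-proxy/dist/types");

const chunks = [
  // message_start carries the model name, which the adapter now caches.
  { type: "message_start", message: { model: "anthropic.claude-3-haiku" } },
  // Delta text is copied into choices[0].delta.content with finish_reason null.
  { type: "content_block_delta", delta: { text: "Hello" } },
  // Stop chunks get an empty delta, finish_reason "stop", and usage built from the invocation metrics.
  {
    type: "message_stop",
    "amazon-bedrock-invocationMetrics": { inputTokenCount: 12, outputTokenCount: 3 },
  },
];

for (const chunk of chunks) {
  const adapted = OutputFormatAdapter.adaptResponse(chunk, Providers.ANTHROPIC_BEDROCK);
  console.log(adapted.model, adapted.choices[0].delta, adapted.choices[0].finish_reason, adapted.usage);
}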
package/dist/middleware/OutputFormatAdapter.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"OutputFormatAdapter.js","sourceRoot":"","sources":["../../src/middleware/OutputFormatAdapter.ts"],"names":[],"mappings":";;;AAAA,
+
{"version":3,"file":"OutputFormatAdapter.js","sourceRoot":"","sources":["../../src/middleware/OutputFormatAdapter.ts"],"names":[],"mappings":";;;AAAA,oCAQkB;AAElB,MAAa,mBAAmB;IAG9B,MAAM,CAAC,aAAa,CAAC,QAAa,EAAE,QAAmB;QACrD,IAAI,CAAC,QAAQ,EAAE;YACb,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;SACzD;QAED,IAAI;YACF,QAAQ,QAAQ,EAAE;gBAChB,KAAK,iBAAS,CAAC,MAAM;oBACnB,OAAO,QAAuB,CAAC;gBACjC,KAAK,iBAAS,CAAC,iBAAiB;oBAC9B,IAAI,QAAQ,CAAC,IAAI,KAAK,SAAS,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE;wBAClD,OAAO,IAAI,CAAC,qBAAqB,CAAC,QAAQ,CAAC,CAAC;qBAC7C;yBAAM;wBACL,OAAO,IAAI,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC;qBAC9C;gBACH;oBACE,MAAM,IAAI,KAAK,CAAC,yBAAyB,QAAQ,EAAE,CAAC,CAAC;aACxD;SACF;QAAC,OAAO,KAAK,EAAE;YACd,MAAM,IAAI,KAAK,CAAC,6BAA8B,KAAe,CAAC,OAAO,EAAE,CAAC,CAAC;SAC1E;IACH,CAAC;IAEO,MAAM,CAAC,qBAAqB,CAAC,QAAa;;QAChD,MAAM,KAAK,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;QACtC,OAAO;YACL,EAAE,EAAE,QAAQ,CAAC,EAAE;YACf,MAAM,EAAE,iBAAiB;YACzB,OAAO,EAAE,IAAI,CAAC,GAAG,EAAE;YACnB,KAAK;YACL,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,YAAqC,EAAE,KAAU,EAAE,EAAE,CAAC,CAAC;gBACpF,KAAK;gBACL,OAAO,EAAE;oBACP,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,YAAY,CAAC;oBAChC,OAAO,EAAE,IAAI,CAAC,cAAc,CAAC,YAAY,CAAC;iBAC3C;gBACD,QAAQ,EAAE,IAAI;gBACd,aAAa,EAAE,QAAQ,CAAC,WAAW,IAAI,IAAI;aAC5C,CAAC,CAAC;YACH,KAAK,EAAE;gBACL,aAAa,EAAE,CAAA,MAAA,QAAQ,CAAC,KAAK,0CAAE,YAAY,KAAI,CAAC;gBAChD,iBAAiB,EAAE,CAAA,MAAA,QAAQ,CAAC,KAAK,0CAAE,aAAa,KAAI,CAAC;gBACrD,YAAY,EACV,CAAC,CAAA,MAAA,QAAQ,CAAC,KAAK,0CAAE,YAAY,KAAI,CAAC,CAAC;oBACnC,CAAC,CAAA,MAAA,QAAQ,CAAC,KAAK,0CAAE,aAAa,KAAI,CAAC,CAAC;gBACtC,qBAAqB,EAAE,EAAE,aAAa,EAAE,CAAC,EAAE;gBAC3C,yBAAyB,EAAE,EAAE,gBAAgB,EAAE,CAAC,EAAE;aACnD;YACD,kBAAkB,EAAE,QAAQ,CAAC,kBAAkB,IAAI,qBAAqB;SACzE,CAAC;IACJ,CAAC;IAEO,MAAM,CAAC,sBAAsB,CAAC,KAAU;;QAC9C,MAAM,OAAO,GAAG,KAAK,CAAC,kCAAkC,CAAC,CAAC;QAC1D,MAAM,MAAM,GACV,KAAK,CAAC,IAAI,KAAK,oBAAoB,IAAI,KAAK,CAAC,IAAI,KAAK,cAAc,CAAC;QAEvE,+CAA+C;QAC/C,IAAI,KAAK,CAAC,IAAI,KAAK,eAAe,KAAI,MAAA,KAAK,CAAC,OAAO,0CAAE,KAAK,CAAA,EAAE;YAC1D,IAAI,CAAC,WAAW,GAAG,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;SACxC;QAED,2BAA2B;QAC3B,MAAM,OAAO,GAAG,CAAA,MAAA,KAAK,CAAC,aAAa,0CAAE,IAAI,MAAI,MAAA,KAAK,CAAC,KAAK,0CAAE,IAAI,CAAA,IAAI,EAAE,CAAC;QAErE,6BAA6B;QAC7B,OAAO;YACL,EAAE,EAAE,UAAU,IAAI,CAAC,GAAG,EAAE,EAAE;YAC1B,MAAM,EAAE,uBAAuB;YAC/B,OAAO,EAAE,IAAI,CAAC,GAAG,EAAE;YACnB,KAAK,EAAE,IAAI,CAAC,WAAW,IAAI,eAAe;YAC1C,OAAO,EAAE;gBACP;oBACE,KAAK,EAAE,CAAC;oBACR,KAAK,EAAE,MAAM;wBACX,CAAC,CAAC,EAAE,CAAC,wCAAwC;wBAC7C,CAAC,CAAC,EAAE,OAAO,EAAE;oBACf,QAAQ,EAAE,IAAI;oBACd,aAAa,EAAE,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,EAAE,kCAAkC;iBAC1E;aACF;YACD,KAAK,EAAE,MAAM;gBACX,CAAC,CAAC;oBACE,aAAa,EAAE,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,eAAe,KAAI,CAAC;oBAC5C,iBAAiB,EAAE,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,KAAI,CAAC;oBACjD,YAAY,EACV,CAAC,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,eAAe,KAAI,CAAC,CAAC;wBAC/B,CAAC,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,KAAI,CAAC,CAAC;oBAClC,qBAAqB,EAAE,EAAE,aAAa,EAAE,CAAC,EAAE;oBAC3C,yBAAyB,EAAE,EAAE,gBAAgB,EAAE,CAAC,EAAE;iBACnD;gBACH,CAAC,CAAC,IAAI;YACR,kBAAkB,EAAE,uBAAuB;SAC5C,CAAC;IACJ,CAAC;IAEO,MAAM,CAAC,QAAQ,CAAC,QAAa;;QACnC,8CAA8C;QAC9C,IAAI,MAAA,QAAQ,aAAR,QAAQ,uBAAR,QAAQ,CAAE,OAAO,0CAAE,KAAK,EAAE;YAC5B,OAAO,QAAQ,CAAC,OAAO,CAAC,KAAK,CAAC;SAC/B;QACD,IAAI,QAAQ,aAAR,QAAQ,uBAAR,QAAQ,CAAE,KAAK,EAAE;YACnB,OAAO,QAAQ,CAAC,KAAK,CAAC;SACvB;QACD,OAAO,eAAe,CAAC;IACzB,CAAC;IAEO,MAAM,CAAC,OAAO,CAAC,OAAgC;QACrD,IAAI,CAAC,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE;YAC7B,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;SACpD;QAED,QAAQ,OAAO,CAAC,IAAI,EAAE;YACpB,KAAK,mCAA2B,CAAC,QAAQ,CAAC;YAC1C,KAAK,mCAA2B,CAAC,WAAW;gBAC1C,OAAO,MAAM,CAAC;YAChB,KAAK,
mCAA2B,CAAC,IAAI;gBACnC,OAAO,WAAW,CAAC;YACrB;gBACE,OAAO,WAAW,CAAC;SACtB;IACH,CAAC;IAEO,MAAM,CAAC,cAAc,CAAC,OAAgC;QAC5D,IAAI,CAAC,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE;YAC7B,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;SACpD;QAED,QAAQ,OAAO,CAAC,IAAI,EAAE;YACpB,KAAK,mCAA2B,CAAC,IAAI;gBACnC,OAAQ,OAAuC,CAAC,IAAI,IAAI,EAAE,CAAC;YAC7D,KAAK,mCAA2B,CAAC,WAAW;gBAC1C,OAAQ,OAA6C,CAAC,OAAO,IAAI,EAAE,CAAC;YACtE,KAAK,mCAA2B,CAAC,QAAQ;gBACvC,OAAQ,OAA0C,CAAC,EAAE,IAAI,EAAE,CAAC;YAC9D;gBACE,OAAO,EAAE,CAAC;SACb;IACH,CAAC;;AA7IH,kDA8IC;AA7IgB,+BAAW,GAAkB,IAAI,CAAC,CAAC,0CAA0C"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "llm-proxy",
-  "version": "1.3.13",
+  "version": "1.3.14",
   "description": "An LLM Proxy that allows the user to interact with different language models from different providers using unified request and response formats.",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -30,7 +30,7 @@
     "aws-sdk": "^2.1691.0",
     "axios": "^1.7.7",
     "dotenv": "^16.4.5",
-    "llm-proxy": "^1.3.
+    "llm-proxy": "^1.3.14",
     "openai": "^4.69.0"
   }
 }