llm-proxy 1.3.7 → 1.3.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +1 -0
- package/dist/index.js +4 -4
- package/dist/index.js.map +1 -1
- package/dist/services/ClientService.d.ts +4 -2
- package/dist/services/ClientService.js +6 -0
- package/dist/services/ClientService.js.map +1 -1
- package/dist/services/OpenAIService.d.ts +2 -2
- package/dist/services/OpenAIService.js +4 -2
- package/dist/services/OpenAIService.js.map +1 -1
- package/package.json +2 -2
package/dist/index.d.ts
CHANGED
package/dist/index.js
CHANGED
|
@@ -54,7 +54,7 @@ const types_1 = require("./types");
|
|
|
54
54
|
// Main function for non-streaming requests
|
|
55
55
|
function generateLLMResponse(params) {
|
|
56
56
|
return __awaiter(this, void 0, void 0, function* () {
|
|
57
|
-
const { messages, model, max_tokens, temperature, credentials } = params;
|
|
57
|
+
const { messages, model, functions, max_tokens, temperature, credentials } = params;
|
|
58
58
|
// Step 1: Identify the provider based on the model
|
|
59
59
|
const provider = ProviderFinder_1.ProviderFinder.getProvider(model);
|
|
60
60
|
// Initialize the correct service based on the provider
|
|
@@ -79,7 +79,7 @@ function generateLLMResponse(params) {
|
|
|
79
79
|
const { adaptedMessages, systemPrompt } = InputFormatAdapter_1.InputFormatAdapter.adaptMessages(messages, provider);
|
|
80
80
|
// Step 3: Generate the completion
|
|
81
81
|
const response = yield service.generateCompletion(adaptedMessages, // TODO: fix this any
|
|
82
|
-
model, max_tokens, temperature, systemPrompt);
|
|
82
|
+
model, max_tokens, temperature, functions, systemPrompt);
|
|
83
83
|
// Step 4: Adapt the response if needed
|
|
84
84
|
return provider === types_1.Providers.OPENAI
|
|
85
85
|
? response
|
|
@@ -89,7 +89,7 @@ function generateLLMResponse(params) {
|
|
|
89
89
|
// Main function for streaming requests
|
|
90
90
|
function generateLLMStreamResponse(params) {
|
|
91
91
|
return __awaiter(this, void 0, void 0, function* () {
|
|
92
|
-
const { messages, model, max_tokens, temperature, credentials } = params;
|
|
92
|
+
const { messages, model, functions, max_tokens, temperature, credentials } = params;
|
|
93
93
|
// Step 1: Identify the provider based on the model
|
|
94
94
|
const provider = ProviderFinder_1.ProviderFinder.getProvider(model);
|
|
95
95
|
// Initialize the correct service based on the provider
|
|
@@ -114,7 +114,7 @@ function generateLLMStreamResponse(params) {
|
|
|
114
114
|
const { adaptedMessages, systemPrompt } = InputFormatAdapter_1.InputFormatAdapter.adaptMessages(messages, provider);
|
|
115
115
|
// Step 3: Generate the streaming completion
|
|
116
116
|
const stream = service.generateStreamCompletion(adaptedMessages, // TODO: Fix this any
|
|
117
|
-
model, max_tokens, temperature, systemPrompt);
|
|
117
|
+
model, max_tokens, temperature, functions, systemPrompt);
|
|
118
118
|
// Step 4: Create and return the async generator
|
|
119
119
|
function streamGenerator() {
|
|
120
120
|
return __asyncGenerator(this, arguments, function* streamGenerator_1() {
|
package/dist/index.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAwBA,kDAkDC;AAGD,8DA2DC;AAxID,gEAA6D;AAC7D,wEAAqE;AACrE,0EAAuE;AACvE,sFAAmF;AACnF,4DAAyD;AACzD,mCAA8D;AAkB9D,2CAA2C;AAC3C,SAAsB,mBAAmB,CACvC,MAAiC;;QAEjC,MAAM,EAAE,QAAQ,EAAE,KAAK,EAAE,SAAS,EAAE,UAAU,EAAE,WAAW,EAAE,WAAW,EAAE,GACxE,MAAM,CAAC;QAET,mDAAmD;QACnD,MAAM,QAAQ,GAAG,+BAAc,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;QAEnD,uDAAuD;QACvD,IAAI,OAAmD,CAAC;QACxD,IAAI,QAAQ,KAAK,iBAAS,CAAC,MAAM,EAAE,CAAC;YAClC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,CAAC;gBACxB,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;YACnE,CAAC;YACD,OAAO,GAAG,IAAI,6BAAa,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;QAClD,CAAC;aAAM,IAAI,QAAQ,KAAK,iBAAS,CAAC,iBAAiB,EAAE,CAAC;YACpD,MAAM,SAAS,GAAG,WAAW,CAAC,SAAS,CAAC;YACxC,IAAI,CAAC,SAAS,EAAE,CAAC;gBACf,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAC;YACtE,CAAC;YACD,OAAO,GAAG,IAAI,uDAA0B,CACtC,SAAS,CAAC,WAAW,EACrB,SAAS,CAAC,eAAe,EACzB,SAAS,CAAC,MAAM,CACjB,CAAC;QACJ,CAAC;aAAM,CAAC;YACN,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;QAC1C,CAAC;QAED,uDAAuD;QACvD,MAAM,EAAE,eAAe,EAAE,YAAY,EAAE,GAAG,uCAAkB,CAAC,aAAa,CACxE,QAAQ,EACR,QAAQ,CACT,CAAC;QAEF,kCAAkC;QAClC,MAAM,QAAQ,GAAG,MAAM,OAAO,CAAC,kBAAkB,CAC/C,eAAsB,EAAE,qBAAqB;QAC7C,KAAK,EACL,UAAU,EACV,WAAW,EACX,SAAS,EACT,YAAY,CACb,CAAC;QAEF,uCAAuC;QACvC,OAAO,QAAQ,KAAK,iBAAS,CAAC,MAAM;YAClC,CAAC,CAAE,QAA2B;YAC9B,CAAC,CAAE,yCAAmB,CAAC,aAAa,CAAC,QAAQ,EAAE,QAAQ,CAAoB,CAAC;IAChF,CAAC;CAAA;AAED,uCAAuC;AACvC,SAAsB,yBAAyB,CAC7C,MAAiC;;QAEjC,MAAM,EAAE,QAAQ,EAAE,KAAK,EAAE,SAAS,EAAE,UAAU,EAAE,WAAW,EAAE,WAAW,EAAE,GACxE,MAAM,CAAC;QAET,mDAAmD;QACnD,MAAM,QAAQ,GAAG,+BAAc,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;QAEnD,uDAAuD;QACvD,IAAI,OAAmD,CAAC;QACxD,IAAI,QAAQ,KAAK,iBAAS,CAAC,MAAM,EAAE,CAAC;YAClC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,CAAC;gBACxB,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;YACnE,CAAC;YACD,OAAO,GAAG,IAAI,6BAAa,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;QAClD,CAAC;aAAM,IAAI,QAAQ,KAAK,iBAAS,CAAC,iBAAiB,EAAE,CAAC;YACpD,MAAM,SAAS,GAAG,WAAW,CAAC,SAAS,CAAC;YACxC,IAAI,CAAC,SAAS,
EAAE,CAAC;gBACf,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAC;YACtE,CAAC;YACD,OAAO,GAAG,IAAI,uDAA0B,CACtC,SAAS,CAAC,WAAW,EACrB,SAAS,CAAC,eAAe,EACzB,SAAS,CAAC,MAAM,CACjB,CAAC;QACJ,CAAC;aAAM,CAAC;YACN,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;QAC1C,CAAC;QAED,uDAAuD;QACvD,MAAM,EAAE,eAAe,EAAE,YAAY,EAAE,GAAG,uCAAkB,CAAC,aAAa,CACxE,QAAQ,EACR,QAAQ,CACT,CAAC;QAEF,4CAA4C;QAC5C,MAAM,MAAM,GAAG,OAAO,CAAC,wBAAwB,CAC7C,eAAsB,EAAE,qBAAqB;QAC7C,KAAK,EACL,UAAU,EACV,WAAW,EACX,SAAS,EACT,YAAY,CACb,CAAC;QAEF,gDAAgD;QAChD,SAAgB,eAAe;;;;oBAC7B,KAA0B,eAAA,WAAA,cAAA,MAAM,CAAA,YAAA,qFAAE,CAAC;wBAAT,sBAAM;wBAAN,WAAM;wBAArB,MAAM,KAAK,KAAA,CAAA;wBACpB,oBAAM,QAAQ,KAAK,iBAAS,CAAC,MAAM;4BACjC,CAAC,CAAE,KAAwB;4BAC3B,CAAC,CAAE,yCAAmB,CAAC,aAAa,CAChC,KAAK,EACL,QAAQ,CACU,CAAA,CAAC;oBAC3B,CAAC;;;;;;;;;YACH,CAAC;SAAA;QAED,OAAO,eAAe,EAAE,CAAC;IAC3B,CAAC;CAAA;AAED,0CAAwB"}
|
|
@@ -1,5 +1,7 @@
|
|
|
1
1
|
import { BedrockAnthropicParsedChunk, LLMResponse, Messages } from "../types";
|
|
2
2
|
export interface ClientService {
|
|
3
|
-
generateCompletion(messages: Messages, model?: string, max_tokens?: number, temperature?: number,
|
|
4
|
-
|
|
3
|
+
generateCompletion(messages: Messages, model?: string, max_tokens?: number, temperature?: number, functions?: any, // todo: sort out the type
|
|
4
|
+
systemPrompt?: string): Promise<LLMResponse>;
|
|
5
|
+
generateStreamCompletion(messages: Messages, model?: string, max_tokens?: number, temperature?: number, functions?: any, // todo: sort out the type it might be like this i guess(down)
|
|
6
|
+
systemPrompt?: string): AsyncGenerator<BedrockAnthropicParsedChunk, void, unknown>;
|
|
5
7
|
}
|
|
@@ -1,3 +1,9 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
// functions: [
|
|
4
|
+
// {
|
|
5
|
+
// name: "function1",
|
|
6
|
+
// description: "Description of function1",
|
|
7
|
+
// parameters: {/* JSON schema for function1 */}
|
|
8
|
+
// },
|
|
3
9
|
//# sourceMappingURL=ClientService.js.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"ClientService.js","sourceRoot":"","sources":["../../src/services/ClientService.ts"],"names":[],"mappings":""}
|
|
1
|
+
{"version":3,"file":"ClientService.js","sourceRoot":"","sources":["../../src/services/ClientService.ts"],"names":[],"mappings":";;AAwBA,gBAAgB;AAChB,MAAM;AACN,yBAAyB;AACzB,+CAA+C;AAC/C,oDAAoD;AACpD,OAAO"}
|
|
@@ -3,6 +3,6 @@ import { ClientService } from "./ClientService";
|
|
|
3
3
|
export declare class OpenAIService implements ClientService {
|
|
4
4
|
private openai;
|
|
5
5
|
constructor(apiKey: string);
|
|
6
|
-
generateCompletion(messages: OpenAIMessages, model: string, max_tokens: number, temperature: number, systemPrompt?: string): Promise<OpenAIResponse>;
|
|
7
|
-
generateStreamCompletion(messages: OpenAIMessages, model: string, max_tokens: number, temperature: number, systemPrompt?: string): AsyncGenerator<any, void, unknown>;
|
|
6
|
+
generateCompletion(messages: OpenAIMessages, model: string, max_tokens: number, temperature: number, systemPrompt?: string, functions?: any): Promise<OpenAIResponse>;
|
|
7
|
+
generateStreamCompletion(messages: OpenAIMessages, model: string, max_tokens: number, temperature: number, systemPrompt?: string, functions?: any): AsyncGenerator<any, void, unknown>;
|
|
8
8
|
}
|
|
@@ -38,7 +38,7 @@ class OpenAIService {
|
|
|
38
38
|
constructor(apiKey) {
|
|
39
39
|
this.openai = new openai_1.default({ apiKey });
|
|
40
40
|
}
|
|
41
|
-
generateCompletion(messages, model, max_tokens, temperature, systemPrompt) {
|
|
41
|
+
generateCompletion(messages, model, max_tokens, temperature, systemPrompt, functions) {
|
|
42
42
|
return __awaiter(this, void 0, void 0, function* () {
|
|
43
43
|
if (!model) {
|
|
44
44
|
throw new Error("Model ID is required for OpenAIService.");
|
|
@@ -49,6 +49,7 @@ class OpenAIService {
|
|
|
49
49
|
messages,
|
|
50
50
|
max_tokens,
|
|
51
51
|
temperature,
|
|
52
|
+
functions,
|
|
52
53
|
});
|
|
53
54
|
return response;
|
|
54
55
|
}
|
|
@@ -58,7 +59,7 @@ class OpenAIService {
|
|
|
58
59
|
}
|
|
59
60
|
});
|
|
60
61
|
}
|
|
61
|
-
generateStreamCompletion(messages, model, max_tokens, temperature, systemPrompt) {
|
|
62
|
+
generateStreamCompletion(messages, model, max_tokens, temperature, systemPrompt, functions) {
|
|
62
63
|
return __asyncGenerator(this, arguments, function* generateStreamCompletion_1() {
|
|
63
64
|
var _a, e_1, _b, _c;
|
|
64
65
|
if (!model) {
|
|
@@ -70,6 +71,7 @@ class OpenAIService {
|
|
|
70
71
|
messages,
|
|
71
72
|
max_tokens,
|
|
72
73
|
temperature,
|
|
74
|
+
functions,
|
|
73
75
|
stream: true,
|
|
74
76
|
stream_options: {
|
|
75
77
|
include_usage: true,
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"OpenAIService.js","sourceRoot":"","sources":["../../src/services/OpenAIService.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAA4B;AAI5B,MAAa,aAAa;IAGxB,YAAY,MAAc;QACxB,IAAI,CAAC,MAAM,GAAG,IAAI,gBAAM,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC;IACvC,CAAC;IAEK,kBAAkB,CACtB,QAAwB,EACxB,KAAa,EACb,UAAkB,EAClB,WAAmB,EACnB,YAAqB;;
|
|
1
|
+
{"version":3,"file":"OpenAIService.js","sourceRoot":"","sources":["../../src/services/OpenAIService.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAA4B;AAI5B,MAAa,aAAa;IAGxB,YAAY,MAAc;QACxB,IAAI,CAAC,MAAM,GAAG,IAAI,gBAAM,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC;IACvC,CAAC;IAEK,kBAAkB,CACtB,QAAwB,EACxB,KAAa,EACb,UAAkB,EAClB,WAAmB,EACnB,YAAqB,EACrB,SAAe;;YAEf,IAAI,CAAC,KAAK,EAAE,CAAC;gBACX,MAAM,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;YAC7D,CAAC;YAED,IAAI,CAAC;gBACH,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;oBACzD,KAAK,EAAE,0BAA0B;oBACjC,QAAQ;oBACR,UAAU;oBACV,WAAW;oBACX,SAAS;iBACV,CAAC,CAAC;gBACH,OAAO,QAA0B,CAAC;YACpC,CAAC;YAAC,OAAO,KAAK,EAAE,CAAC;gBACf,OAAO,CAAC,KAAK,CAAC,wBAAwB,EAAE,KAAK,CAAC,CAAC;gBAC/C,MAAM,KAAK,CAAC;YACd,CAAC;QACH,CAAC;KAAA;IAEM,wBAAwB,CAC7B,QAAwB,EACxB,KAAa,EACb,UAAkB,EAClB,WAAmB,EACnB,YAAqB,EACrB,SAAe;;;YAEf,IAAI,CAAC,KAAK,EAAE,CAAC;gBACX,MAAM,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;YAC7D,CAAC;YAED,IAAI,CAAC;gBACH,MAAM,MAAM,GAAG,cAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;oBACvD,KAAK;oBACL,QAAQ;oBACR,UAAU;oBACV,WAAW;oBACX,SAAS;oBACT,MAAM,EAAE,IAAI;oBACZ,cAAc,EAAE;wBACd,aAAa,EAAE,IAAI;qBACpB;iBACF,CAAC,CAAA,CAAC;;oBAEH,KAA0B,eAAA,WAAA,cAAA,MAAM,CAAA,YAAA,qFAAE,CAAC;wBAAT,sBAAM;wBAAN,WAAM;wBAArB,MAAM,KAAK,KAAA,CAAA;wBACpB,oBAAM,KAAK,CAAA,CAAC;oBACd,CAAC;;;;;;;;;YACH,CAAC;YAAC,OAAO,KAAK,EAAE,CAAC;gBACf,OAAO,CAAC,KAAK,CAAC,6BAA6B,EAAE,KAAK,CAAC,CAAC;gBACpD,MAAM,KAAK,CAAC;YACd,CAAC;QACH,CAAC;KAAA;CACF;AAnED,sCAmEC"}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "llm-proxy",
|
|
3
|
-
"version": "1.3.7",
|
|
3
|
+
"version": "1.3.9",
|
|
4
4
|
"description": "An LLM Proxy that allows the user to interact with different language models from different providers using unified request and response formats.",
|
|
5
5
|
"main": "dist/index.js",
|
|
6
6
|
"types": "dist/index.d.ts",
|
|
@@ -30,7 +30,7 @@
|
|
|
30
30
|
"aws-sdk": "^2.1691.0",
|
|
31
31
|
"axios": "^1.7.7",
|
|
32
32
|
"dotenv": "^16.4.5",
|
|
33
|
-
"llm-proxy": "^1.3.7",
|
|
33
|
+
"llm-proxy": "^1.3.9",
|
|
34
34
|
"openai": "^4.69.0"
|
|
35
35
|
}
|
|
36
36
|
}
|