llm-proxy 1.3.6 → 1.3.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/services/ClientService.d.ts +4 -2
- package/dist/services/ClientService.js +6 -0
- package/dist/services/ClientService.js.map +1 -1
- package/dist/services/OpenAIService.d.ts +2 -2
- package/dist/services/OpenAIService.js +4 -2
- package/dist/services/OpenAIService.js.map +1 -1
- package/dist/types/index.d.ts +7 -3
- package/dist/types/index.js.map +1 -1
- package/package.json +2 -2
|
@@ -1,5 +1,7 @@
|
|
|
1
1
|
import { BedrockAnthropicParsedChunk, LLMResponse, Messages } from "../types";
|
|
2
2
|
export interface ClientService {
|
|
3
|
-
generateCompletion(messages: Messages, model?: string, max_tokens?: number, temperature?: number,
|
|
4
|
-
|
|
3
|
+
generateCompletion(messages: Messages, model?: string, max_tokens?: number, temperature?: number, functions?: any, // todo: sort out the type
|
|
4
|
+
systemPrompt?: string): Promise<LLMResponse>;
|
|
5
|
+
generateStreamCompletion(messages: Messages, model?: string, max_tokens?: number, temperature?: number, functions?: any, // todo: sort out the type it might be like this i guess(down)
|
|
6
|
+
systemPrompt?: string): AsyncGenerator<BedrockAnthropicParsedChunk, void, unknown>;
|
|
5
7
|
}
|
|
@@ -1,3 +1,9 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
// functions: [
|
|
4
|
+
// {
|
|
5
|
+
// name: "function1",
|
|
6
|
+
// description: "Description of function1",
|
|
7
|
+
// parameters: {/* JSON schema for function1 */}
|
|
8
|
+
// },
|
|
3
9
|
//# sourceMappingURL=ClientService.js.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"ClientService.js","sourceRoot":"","sources":["../../src/services/ClientService.ts"],"names":[],"mappings":""}
|
|
1
|
+
{"version":3,"file":"ClientService.js","sourceRoot":"","sources":["../../src/services/ClientService.ts"],"names":[],"mappings":";;AAwBA,gBAAgB;AAChB,MAAM;AACN,yBAAyB;AACzB,+CAA+C;AAC/C,oDAAoD;AACpD,OAAO"}
|
|
@@ -3,6 +3,6 @@ import { ClientService } from "./ClientService";
|
|
|
3
3
|
export declare class OpenAIService implements ClientService {
|
|
4
4
|
private openai;
|
|
5
5
|
constructor(apiKey: string);
|
|
6
|
-
generateCompletion(messages: OpenAIMessages, model: string, max_tokens: number, temperature: number, systemPrompt?: string): Promise<OpenAIResponse>;
|
|
7
|
-
generateStreamCompletion(messages: OpenAIMessages, model: string, max_tokens: number, temperature: number, systemPrompt?: string): AsyncGenerator<any, void, unknown>;
|
|
6
|
+
generateCompletion(messages: OpenAIMessages, model: string, max_tokens: number, temperature: number, systemPrompt?: string, functions?: any): Promise<OpenAIResponse>;
|
|
7
|
+
generateStreamCompletion(messages: OpenAIMessages, model: string, max_tokens: number, temperature: number, systemPrompt?: string, functions?: any): AsyncGenerator<any, void, unknown>;
|
|
8
8
|
}
|
|
@@ -38,7 +38,7 @@ class OpenAIService {
|
|
|
38
38
|
constructor(apiKey) {
|
|
39
39
|
this.openai = new openai_1.default({ apiKey });
|
|
40
40
|
}
|
|
41
|
-
generateCompletion(messages, model, max_tokens, temperature, systemPrompt) {
|
|
41
|
+
generateCompletion(messages, model, max_tokens, temperature, systemPrompt, functions) {
|
|
42
42
|
return __awaiter(this, void 0, void 0, function* () {
|
|
43
43
|
if (!model) {
|
|
44
44
|
throw new Error("Model ID is required for OpenAIService.");
|
|
@@ -49,6 +49,7 @@ class OpenAIService {
|
|
|
49
49
|
messages,
|
|
50
50
|
max_tokens,
|
|
51
51
|
temperature,
|
|
52
|
+
functions,
|
|
52
53
|
});
|
|
53
54
|
return response;
|
|
54
55
|
}
|
|
@@ -58,7 +59,7 @@ class OpenAIService {
|
|
|
58
59
|
}
|
|
59
60
|
});
|
|
60
61
|
}
|
|
61
|
-
generateStreamCompletion(messages, model, max_tokens, temperature, systemPrompt) {
|
|
62
|
+
generateStreamCompletion(messages, model, max_tokens, temperature, systemPrompt, functions) {
|
|
62
63
|
return __asyncGenerator(this, arguments, function* generateStreamCompletion_1() {
|
|
63
64
|
var _a, e_1, _b, _c;
|
|
64
65
|
if (!model) {
|
|
@@ -70,6 +71,7 @@ class OpenAIService {
|
|
|
70
71
|
messages,
|
|
71
72
|
max_tokens,
|
|
72
73
|
temperature,
|
|
74
|
+
functions,
|
|
73
75
|
stream: true,
|
|
74
76
|
stream_options: {
|
|
75
77
|
include_usage: true,
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"OpenAIService.js","sourceRoot":"","sources":["../../src/services/OpenAIService.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAA4B;AAI5B,MAAa,aAAa;IAGxB,YAAY,MAAc;QACxB,IAAI,CAAC,MAAM,GAAG,IAAI,gBAAM,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC;IACvC,CAAC;IAEK,kBAAkB,CACtB,QAAwB,EACxB,KAAa,EACb,UAAkB,EAClB,WAAmB,EACnB,YAAqB;;
|
|
1
|
+
{"version":3,"file":"OpenAIService.js","sourceRoot":"","sources":["../../src/services/OpenAIService.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAA4B;AAI5B,MAAa,aAAa;IAGxB,YAAY,MAAc;QACxB,IAAI,CAAC,MAAM,GAAG,IAAI,gBAAM,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC;IACvC,CAAC;IAEK,kBAAkB,CACtB,QAAwB,EACxB,KAAa,EACb,UAAkB,EAClB,WAAmB,EACnB,YAAqB,EACrB,SAAe;;YAEf,IAAI,CAAC,KAAK,EAAE,CAAC;gBACX,MAAM,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;YAC7D,CAAC;YAED,IAAI,CAAC;gBACH,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;oBACzD,KAAK,EAAE,0BAA0B;oBACjC,QAAQ;oBACR,UAAU;oBACV,WAAW;oBACX,SAAS;iBACV,CAAC,CAAC;gBACH,OAAO,QAA0B,CAAC;YACpC,CAAC;YAAC,OAAO,KAAK,EAAE,CAAC;gBACf,OAAO,CAAC,KAAK,CAAC,wBAAwB,EAAE,KAAK,CAAC,CAAC;gBAC/C,MAAM,KAAK,CAAC;YACd,CAAC;QACH,CAAC;KAAA;IAEM,wBAAwB,CAC7B,QAAwB,EACxB,KAAa,EACb,UAAkB,EAClB,WAAmB,EACnB,YAAqB,EACrB,SAAe;;;YAEf,IAAI,CAAC,KAAK,EAAE,CAAC;gBACX,MAAM,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;YAC7D,CAAC;YAED,IAAI,CAAC;gBACH,MAAM,MAAM,GAAG,cAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;oBACvD,KAAK;oBACL,QAAQ;oBACR,UAAU;oBACV,WAAW;oBACX,SAAS;oBACT,MAAM,EAAE,IAAI;oBACZ,cAAc,EAAE;wBACd,aAAa,EAAE,IAAI;qBACpB;iBACF,CAAC,CAAA,CAAC;;oBAEH,KAA0B,eAAA,WAAA,cAAA,MAAM,CAAA,YAAA,qFAAE,CAAC;wBAAT,sBAAM;wBAAN,WAAM;wBAArB,MAAM,KAAK,KAAA,CAAA;wBACpB,oBAAM,KAAK,CAAA,CAAC;oBACd,CAAC;;;;;;;;;YACH,CAAC;YAAC,OAAO,KAAK,EAAE,CAAC;gBACf,OAAO,CAAC,KAAK,CAAC,6BAA6B,EAAE,KAAK,CAAC,CAAC;gBACpD,MAAM,KAAK,CAAC;YACd,CAAC;QACH,CAAC;KAAA;CACF;AAnED,sCAmEC"}
|
package/dist/types/index.d.ts
CHANGED
|
@@ -14,7 +14,7 @@ export interface OpenAIStreamResponse {
|
|
|
14
14
|
index: number;
|
|
15
15
|
delta: {
|
|
16
16
|
content?: string;
|
|
17
|
-
|
|
17
|
+
function_call?: {
|
|
18
18
|
id?: string;
|
|
19
19
|
name?: string;
|
|
20
20
|
arguments?: string;
|
|
@@ -94,6 +94,10 @@ export interface OpenAIResponse {
|
|
|
94
94
|
message: {
|
|
95
95
|
role: string;
|
|
96
96
|
content: string;
|
|
97
|
+
function_call?: {
|
|
98
|
+
name: string;
|
|
99
|
+
arguments: any;
|
|
100
|
+
};
|
|
97
101
|
};
|
|
98
102
|
logprobs: null | object;
|
|
99
103
|
finish_reason: string;
|
|
@@ -102,10 +106,10 @@ export interface OpenAIResponse {
|
|
|
102
106
|
prompt_tokens: number;
|
|
103
107
|
completion_tokens: number;
|
|
104
108
|
total_tokens: number;
|
|
105
|
-
prompt_tokens_details
|
|
109
|
+
prompt_tokens_details?: {
|
|
106
110
|
cached_tokens: number;
|
|
107
111
|
};
|
|
108
|
-
completion_tokens_details
|
|
112
|
+
completion_tokens_details?: {
|
|
109
113
|
reasoning_tokens: number;
|
|
110
114
|
};
|
|
111
115
|
};
|
package/dist/types/index.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/types/index.ts"],"names":[],"mappings":";;;AAAA,UAAU;AACV,IAAY,SAIX;AAJD,WAAY,SAAS;IACnB,8BAAiB,CAAA;IACjB,mDAAsC,CAAA;IACtC,6CAAgC,CAAA;AAClC,CAAC,EAJW,SAAS,yBAAT,SAAS,QAIpB;AA8ED,IAAY,mBAOX;AAPD,WAAY,mBAAmB;IAC7B,0DAAmC,CAAA;IACnC,yCAAkB,CAAA;IAClB,mDAA4B,CAAA;IAC5B,kDAA2B,CAAA;IAC3B,kEAA2C,CAAA;IAC3C,sDAA+B,CAAA;AACjC,CAAC,EAPW,mBAAmB,mCAAnB,mBAAmB,QAO9B;
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/types/index.ts"],"names":[],"mappings":";;;AAAA,UAAU;AACV,IAAY,SAIX;AAJD,WAAY,SAAS;IACnB,8BAAiB,CAAA;IACjB,mDAAsC,CAAA;IACtC,6CAAgC,CAAA;AAClC,CAAC,EAJW,SAAS,yBAAT,SAAS,QAIpB;AA8ED,IAAY,mBAOX;AAPD,WAAY,mBAAmB;IAC7B,0DAAmC,CAAA;IACnC,yCAAkB,CAAA;IAClB,mDAA4B,CAAA;IAC5B,kDAA2B,CAAA;IAC3B,kEAA2C,CAAA;IAC3C,sDAA+B,CAAA;AACjC,CAAC,EAPW,mBAAmB,mCAAnB,mBAAmB,QAO9B;AA6CD,cAAc;AAEd,wBAAwB;AAExB,IAAY,6BAKX;AALD,WAAY,6BAA6B;IACvC,0FAAyD,CAAA;IACzD,4FAA2D,CAAA;IAC3D,wFAAuD,CAAA;IACvD,gGAA+D,CAAA;AACjE,CAAC,EALW,6BAA6B,6CAA7B,6BAA6B,QAKxC;AAED,IAAY,2BAKX;AALD,WAAY,2BAA2B;IACrC,4CAAa,CAAA;IACb,8CAAe,CAAA;IACf,oDAAqB,CAAA;IACrB,0DAA2B,CAAA;AAC7B,CAAC,EALW,2BAA2B,2CAA3B,2BAA2B,QAKtC"}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "llm-proxy",
|
|
3
|
-
"version": "1.3.6",
|
|
3
|
+
"version": "1.3.8",
|
|
4
4
|
"description": "An LLM Proxy that allows the user to interact with different language models from different providers using unified request and response formats.",
|
|
5
5
|
"main": "dist/index.js",
|
|
6
6
|
"types": "dist/index.d.ts",
|
|
@@ -30,7 +30,7 @@
|
|
|
30
30
|
"aws-sdk": "^2.1691.0",
|
|
31
31
|
"axios": "^1.7.7",
|
|
32
32
|
"dotenv": "^16.4.5",
|
|
33
|
-
"llm-proxy": "^1.3.6",
|
|
33
|
+
"llm-proxy": "^1.3.8",
|
|
34
34
|
"openai": "^4.69.0"
|
|
35
35
|
}
|
|
36
36
|
}
|