macrocosmos 1.2.13 → 1.2.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/__tests__/apex/client-chat.test.js +20 -45
- package/dist/lib/apex/Client.d.ts +15 -3
- package/dist/lib/apex/Client.js +25 -30
- package/dist/lib/apex/Stream.d.ts +3 -0
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/package.json +1 -1
package/dist/__tests__/apex/client-chat.test.js
CHANGED

@@ -6,34 +6,32 @@ describe("ApexClient", () => {
     if (!API_KEY) {
         throw new Error("MACROCOSMOS_API_KEY environment variable is required");
     }
-    it("should make a streaming chat completion call", async () => {
-        // Create ApexClient
-        const client = new macrocosmos_1.ApexClient({
+    const messages = [
+        {
+            role: "user",
+            content: "What is the capital of France?",
+        },
+    ];
+    const samplingParameters = {
+        temperature: 0.7,
+        topP: 0.9,
+        maxNewTokens: 100,
+        doSample: true,
+    };
+    let client;
+    beforeEach(() => {
+        client = new macrocosmos_1.ApexClient({
             apiKey: API_KEY,
             appName: "apex-client.test.ts",
         });
-        // Create request with the proper message type
-        const messages = [
-            {
-                role: "user",
-                content: "What is the capital of France?",
-            },
-        ];
+    });
+    it("should make a streaming chat completion call", async () => {
         // Create streaming completion
         const result = await client.chat.completions.create({
             messages,
             stream: true,
-            samplingParameters: {
-                temperature: 0.7,
-                topP: 0.9,
-                maxNewTokens: 100,
-                doSample: true,
-            },
+            samplingParameters,
         });
-        // Check if it's a Stream
-        if (!(result instanceof macrocosmos_1.ApexStream)) {
-            throw new Error("Expected a Stream but got a regular response");
-        }
         // Handle streaming response
         let fullResponse = "";
         const stream = result;
@@ -48,35 +46,12 @@ describe("ApexClient", () => {
         expect(fullResponse.toLowerCase()).toContain("paris");
     }, 30000); // Increase timeout to 30 seconds for streaming
     it("should make a non-streaming chat completion call", async () => {
-        // Create ApexClient
-        const client = new macrocosmos_1.ApexClient({
-            apiKey: API_KEY,
-            appName: "apex-client.test.ts",
-        });
-        // Create request with the proper message type
-        const messages = [
-            {
-                role: "user",
-                content: "What is the capital of France?",
-            },
-        ];
         // Create non-streaming completion
-        const result = await client.chat.completions.create({
+        const response = await client.chat.completions.create({
            messages,
            stream: false,
-            samplingParameters: {
-                temperature: 0.7,
-                topP: 0.9,
-                maxNewTokens: 100,
-                doSample: true,
-            },
+            samplingParameters,
        });
-        // Check if it's a regular response
-        if (result instanceof macrocosmos_1.ApexStream) {
-            throw new Error("Expected a regular response but got a Stream");
-        }
-        // Cast to the correct type
-        const response = result;
         console.log("Response:", response.choices?.[0]?.message?.content);
         expect(response.choices?.[0]?.message?.content).toBeTruthy();
         expect(response.choices?.[0]?.message?.content?.toLowerCase()).toContain("paris");
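The test refactor above moves the shared fixtures (`messages`, `samplingParameters`, and the client) into module-scope constants plus a `beforeEach`, drops the manual `instanceof ApexStream` guards, and passes `samplingParameters` as a single request field instead of spelling out its properties inline. A minimal sketch of the resulting call pattern, assuming the package's top-level `ApexClient` export and a key in `MACROCOSMOS_API_KEY` (the import path is assumed from the package name):

```ts
import { ApexClient } from "macrocosmos"; // import path assumed, not shown in this diff

const client = new ApexClient({
    apiKey: process.env.MACROCOSMOS_API_KEY!,
    appName: "apex-client.test.ts",
});

const messages = [{ role: "user" as const, content: "What is the capital of France?" }];
const samplingParameters = { temperature: 0.7, topP: 0.9, maxNewTokens: 100, doSample: true };

// Non-streaming call: resolves directly with a ChatCompletionResponse.
const response = await client.chat.completions.create({
    messages,
    stream: false,
    samplingParameters,
});
console.log(response.choices?.[0]?.message?.content);
```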
package/dist/lib/apex/Client.d.ts
CHANGED

@@ -16,6 +16,18 @@ export interface ApexProtoClient {
         new (address: string, credentials: grpc.ChannelCredentials): ApexService;
     };
 }
+export interface ChatCompletionsCreate {
+    (params: ChatCompletionRequest & {
+        stream: true;
+    },
+    /** options are not used, but are accepted for compatibility with the OpenAI API */
+    _options?: unknown): Promise<ApexStream<ChatCompletionChunkResponse>>;
+    (params: ChatCompletionRequest & {
+        stream?: false | undefined;
+    },
+    /** options are not used, but are accepted for compatibility with the OpenAI API */
+    _options?: unknown): Promise<ChatCompletionResponse>;
+}
 /**
  * Client for interacting with the Apex API
  * Provides OpenAI-compatible interface over gRPC
@@ -24,17 +36,17 @@ export declare class ApexClient extends BaseClient {
@@ -24,17 +36,17 @@ export declare class ApexClient extends BaseClient {
     private _grpcClient?;
     private defaultTimeout;
     constructor(options: ApexClientOptions, grpcClient?: ApexServiceClient);
-
+    protected createGrpcClient(): ApexServiceClient;
     /**
      * Get the default timeout for chat completions
      */
-
+    protected getDefaultTimeout(): number;
     /**
      * OpenAI-compatible chat completions API
      */
     chat: {
         completions: {
-            create:
+            create: ChatCompletionsCreate;
         };
     };
     /**
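The new `ChatCompletionsCreate` interface gives `create` two call signatures, so the compiler narrows the return type from the `stream` flag: `stream: true` resolves to `ApexStream<ChatCompletionChunkResponse>`, while `stream: false` or an omitted flag resolves to `ChatCompletionResponse`. This is why the tests above no longer need the `instanceof` checks. A hedged sketch of the narrowing from the caller's side, reusing `client`, `messages`, and `samplingParameters` from the sketch above (the chunk fields are not shown in this diff, so chunks are only logged):

```ts
// stream: true selects the first overload; `stream` is an ApexStream<ChatCompletionChunkResponse>.
const stream = await client.chat.completions.create({
    messages,
    stream: true,
    samplingParameters,
});
for await (const chunk of stream) {
    console.log(chunk); // chunk shape follows ChatCompletionChunkResponse
}

// stream omitted (or false) selects the second overload; result is a ChatCompletionResponse.
const completion = await client.chat.completions.create({
    messages,
    samplingParameters,
});
console.log(completion.choices?.[0]?.message?.content);
```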
package/dist/lib/apex/Client.js
CHANGED
@@ -44,6 +44,30 @@ const grpc = __importStar(require("@grpc/grpc-js"));
 const BaseClient_1 = require("../BaseClient");
 const Stream_1 = require("./Stream");
 Object.defineProperty(exports, "ApexStream", { enumerable: true, get: function () { return Stream_1.ApexStream; } });
+function chatCompletionsCreate(params, _options) {
+    const client = this.createGrpcClient();
+    const requestParams = {
+        ...params,
+        uids: params.uids ?? [],
+        timeout: params.timeout || this.getDefaultTimeout(),
+    };
+    if (requestParams.stream) {
+        const stream = client.chatCompletionStream(requestParams);
+        const controller = new AbortController();
+        return Promise.resolve(Stream_1.ApexStream.fromGrpcStream(stream, controller));
+    }
+    else {
+        return new Promise((resolve, reject) => {
+            client.chatCompletion(requestParams, (error, response) => {
+                if (error) {
+                    reject(error);
+                    return;
+                }
+                resolve(response);
+            });
+        });
+    }
+}
 /**
  * Client for interacting with the Apex API
  * Provides OpenAI-compatible interface over gRPC
@@ -56,36 +80,7 @@ class ApexClient extends BaseClient_1.BaseClient {
          */
         this.chat = {
             completions: {
-                create:
-                const client = this.createGrpcClient();
-                // Apply default timeout if not specified in params
-                const requestParams = {
-                    ...params,
-                    uids: params.uids ?? [],
-                    timeout: params.timeout || this.getDefaultTimeout(),
-                };
-                // Handle streaming vs non-streaming
-                if (requestParams.stream) {
-                    // Create a streaming call
-                    const stream = client.chatCompletionStream(requestParams);
-                    // Create controller for abort capability
-                    const controller = new AbortController();
-                    // Return a Stream object that wraps the gRPC stream
-                    return Stream_1.ApexStream.fromGrpcStream(stream, controller);
-                }
-                else {
-                    // For non-streaming, return a promise that resolves with the completion
-                    return new Promise((resolve, reject) => {
-                        client.chatCompletion(requestParams, (error, response) => {
-                            if (error) {
-                                reject(error);
-                                return;
-                            }
-                            resolve(response);
-                        });
-                    });
-                }
-            },
+                create: chatCompletionsCreate.bind(this),
             },
         };
         /**
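The body previously defined inline on `create` inside the constructor is hoisted into the module-level `chatCompletionsCreate` function and attached with `.bind(this)`, so its `this.createGrpcClient()` and `this.getDefaultTimeout()` calls still resolve against the `ApexClient` instance; the streaming branch is also wrapped in `Promise.resolve(...)` so both branches return a Promise, matching the `ChatCompletionsCreate` overloads. A minimal sketch of the bind pattern itself (names here are illustrative, not from the package):

```ts
// A plain function that relies on `this` for instance state.
function report(this: { label: string }): string {
    return `bound to ${this.label}`;
}

class Holder {
    label = "ApexClient instance";
    // Binding at construction time fixes `this`, even when the function is later
    // called as a detached property (e.g. client.chat.completions.create(...)).
    api = { report: report.bind(this) };
}

console.log(new Holder().api.report()); // "bound to ApexClient instance"
```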
package/dist/lib/apex/Stream.d.ts
CHANGED

@@ -5,6 +5,9 @@ export type ReadableStreamInterface = ReadableStream<Uint8Array>;
  */
 export declare class ApexStream<Item> implements AsyncIterable<Item> {
     private iterator;
+    /**
+     * The AbortController for this stream. Call `abort()` to cancel the stream.
+     */
     controller: AbortController;
     constructor(iterator: () => AsyncIterator<Item>, controller: AbortController);
     /**
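The only change to `Stream.d.ts` is documentation: the public `controller` field is the stream's `AbortController`, and calling `abort()` cancels the stream. A hedged sketch of cancelling a streaming completion early, reusing `client`, `messages`, and `samplingParameters` from the first sketch (the chunk handling is illustrative):

```ts
const stream = await client.chat.completions.create({
    messages,
    stream: true,
    samplingParameters,
});

let chunks = 0;
for await (const _chunk of stream) {
    chunks += 1;
    if (chunks >= 5) {
        // Cancel the underlying gRPC stream once we have enough output.
        stream.controller.abort();
        break;
    }
}
```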
package/dist/version.d.ts
CHANGED
@@ -1 +1 @@
-export declare const VERSION = "1.2.13";
+export declare const VERSION = "1.2.14";
package/dist/version.js
CHANGED