@simfinity/constellation-client 1.0.2 → 1.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -39,8 +39,8 @@ try {
   /* ... */
 
   // Start a chat session
-  await startSession("You are a useful assistant", true);
-  await connect(true, {
+  await client.startSession("You are a useful assistant", true);
+  await client.connect(true, {
     onStreamClosed: (reason: string) => {
      console.log("Stream connection lost");
     },
@@ -57,8 +57,8 @@ try {
 
   /* ... */
 
-  sendAudioChunk("{PCM16 Base64-encoded data}");
-  commitAudioChunksSent();
+  client.sendAudioChunk("{PCM16 Base64-encoded data}");
+  client.commitAudioChunksSent();
 
   /* ... */
 }
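
Taken together, the README changes move the free-standing helpers onto the `client` instance. The sketch below shows the updated flow end to end; it assumes `WebClient` is the exported entry point and that its constructor takes the `WebClientConfig` fields visible in the typings (`key`, `streamingEndpoint`, `llm`, `model`). Note that the typings shipped in this release rename `connect` to `joinSession`, so the sketch uses that name even though the README snippet above still calls `client.connect`.

```ts
import { WebClient } from "@simfinity/constellation-client"; // export name assumed

// Config fields follow the WebClientConfig fragment in the typings;
// the concrete values here are placeholders.
const client = new WebClient({
  key: "<api-key>",
  streamingEndpoint: "wss://example.invalid",
  llm: "openai",    // LlmType value assumed
  model: "gpt-4o",  // model name assumed
});

try {
  // Start a chat session, then join its streaming connection with audio enabled.
  await client.startSession("You are a useful assistant", true);
  await client.joinSession(true, {
    onStreamClosed: (reason: string) => {
      console.log("Stream connection lost", reason);
    },
  });

  // Stream microphone input as PCM16, Base64-encoded chunks, then commit them.
  client.sendAudioChunk("<PCM16 Base64-encoded data>");
  client.commitAudioChunksSent();
} catch (err) {
  console.error(err);
}
```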
package/dist/index.cjs CHANGED
@@ -133,12 +133,12 @@ var WebClient = class {
    * })
    * ```
    */
-  async connect(audio = false, handlers) {
+  async joinSession(audio = false, handlers) {
     if (!this.sessionId) {
       throw new Error("No open session");
     }
     const ws = new WebSocket(
-      `${this.config.streamingEndpoint}/web/${this.sessionId}`,
+      `${this.config.streamingEndpoint}/player/${this.sessionId}`,
       ["key", this.config.key]
     );
     if (!await this.serverHandShake(ws, audio)) {
@@ -183,6 +183,23 @@ var WebClient = class {
     };
     this.ws = ws;
   }
+  /**
+   * With an opened streaming connection: send a "system" update of the session settings.
+   * Allows to change some behavioral parameters like the temperature or system instructions.
+   * This does not trigger a model response.
+   *
+   * @remarks
+   * With openai for example, this triggers (pseudo-code):
+   * webSocket.send({ type: "session.update", session: { ... }})
+   *
+   * @param settings complete definition of the settings. Same values must be re-provided for unchanged parameters.
+   *
+   * @exception
+   * This method throws new Error(...) if unable to execute successfully for any reason.
+   */
+  async configureSession(settings) {
+    this.send("session.configure", settings);
+  }
   /**
    * With an opened streaming connection: send a text input message to the LLM. This will trigger a
    * text response as well as an audio response if the session was opened with audio mode active.
package/dist/index.d.cts CHANGED
@@ -29,10 +29,26 @@ interface WebClientConfig {
     llm: LlmType;
     model: string;
 }
+/**
+ * System settings influencing the model behavior:
+ * @audio: to activate voice conversation
+ * @voice: depending on the LLM solution, this is the voice name to be used in audio
+ * @temperature: LLM creativity factor in 0-1 range
+ * @instructions: system instructions giving context, rules and directions to guide the LLM behavior
+ * @maxResponseToken: 1-4096 value, maximum number of token used for a single response. Undefined means unlimited.
+ */
+interface SessionSettings {
+    audio: boolean;
+    voice: string;
+    temperature: number;
+    instructions: string;
+    maxResponseToken?: number;
+}
 /**
  * Callback functions to catch all the propagated server events.
  *
  * @onStreamClosed the streaming session (web socket) shut down
+ * @onSessionConfigured received in response to a session settings update from the client
  * @onAudioResponseStart the LLM service is about to respond with streaming audio data
  * @onAudioResponseChunk a new chunk of response audio data was received
  * @onAudioResponseEnd the model has finished responding. Audio response has been entirely streamed
@@ -50,6 +66,7 @@ interface WebClientConfig {
  */
 interface EventHandlers {
     onStreamClosed: (reason: string) => void;
+    onSessionConfigured?: (settings: SessionSettings) => void;
     onAudioResponseStart?: () => void;
     onAudioResponseChunk?: (audioChunk: string) => void;
     onAudioResponseEnd?: () => void;
@@ -128,7 +145,22 @@ declare class WebClient {
      * })
      * ```
      */
-    connect(audio: boolean | undefined, handlers: EventHandlers): Promise<void>;
+    joinSession(audio: boolean | undefined, handlers: EventHandlers): Promise<void>;
+    /**
+     * With an opened streaming connection: send a "system" update of the session settings.
+     * Allows to change some behavioral parameters like the temperature or system instructions.
+     * This does not trigger a model response.
+     *
+     * @remarks
+     * With openai for example, this triggers (pseudo-code):
+     * webSocket.send({ type: "session.update", session: { ... }})
+     *
+     * @param settings complete definition of the settings. Same values must be re-provided for unchanged parameters.
+     *
+     * @exception
+     * This method throws new Error(...) if unable to execute successfully for any reason.
+     */
+    configureSession(settings: SessionSettings): Promise<void>;
     /**
      * With an opened streaming connection: send a text input message to the LLM. This will trigger a
      * text response as well as an audio response if the session was opened with audio mode active.
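
The new `SessionSettings` interface and `configureSession` method let a client reconfigure a live session without triggering a model response; per the handler docs, the optional `onSessionConfigured` callback fires when the server responds to such an update. A brief sketch against these declarations (the type exports and concrete values are assumptions):

```ts
import type { SessionSettings, WebClient } from "@simfinity/constellation-client"; // type exports assumed

declare const client: WebClient; // an instance with an open streaming connection

// Per the JSDoc, the complete settings object must be re-sent,
// including values that have not changed.
const settings: SessionSettings = {
  audio: true,
  voice: "alloy",          // provider-specific voice name (illustrative)
  temperature: 0.7,        // creativity factor in the 0-1 range
  instructions: "You are a useful assistant. Keep answers short.",
  maxResponseToken: 1024,  // omit to leave the response length unlimited
};

await client.configureSession(settings);
```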
package/dist/index.d.ts CHANGED
@@ -29,10 +29,26 @@ interface WebClientConfig {
     llm: LlmType;
     model: string;
 }
+/**
+ * System settings influencing the model behavior:
+ * @audio: to activate voice conversation
+ * @voice: depending on the LLM solution, this is the voice name to be used in audio
+ * @temperature: LLM creativity factor in 0-1 range
+ * @instructions: system instructions giving context, rules and directions to guide the LLM behavior
+ * @maxResponseToken: 1-4096 value, maximum number of token used for a single response. Undefined means unlimited.
+ */
+interface SessionSettings {
+    audio: boolean;
+    voice: string;
+    temperature: number;
+    instructions: string;
+    maxResponseToken?: number;
+}
 /**
  * Callback functions to catch all the propagated server events.
  *
  * @onStreamClosed the streaming session (web socket) shut down
+ * @onSessionConfigured received in response to a session settings update from the client
  * @onAudioResponseStart the LLM service is about to respond with streaming audio data
  * @onAudioResponseChunk a new chunk of response audio data was received
  * @onAudioResponseEnd the model has finished responding. Audio response has been entirely streamed
@@ -50,6 +66,7 @@ interface WebClientConfig {
  */
 interface EventHandlers {
     onStreamClosed: (reason: string) => void;
+    onSessionConfigured?: (settings: SessionSettings) => void;
     onAudioResponseStart?: () => void;
     onAudioResponseChunk?: (audioChunk: string) => void;
     onAudioResponseEnd?: () => void;
@@ -128,7 +145,22 @@ declare class WebClient {
      * })
      * ```
      */
-    connect(audio: boolean | undefined, handlers: EventHandlers): Promise<void>;
+    joinSession(audio: boolean | undefined, handlers: EventHandlers): Promise<void>;
+    /**
+     * With an opened streaming connection: send a "system" update of the session settings.
+     * Allows to change some behavioral parameters like the temperature or system instructions.
+     * This does not trigger a model response.
+     *
+     * @remarks
+     * With openai for example, this triggers (pseudo-code):
+     * webSocket.send({ type: "session.update", session: { ... }})
+     *
+     * @param settings complete definition of the settings. Same values must be re-provided for unchanged parameters.
+     *
+     * @exception
+     * This method throws new Error(...) if unable to execute successfully for any reason.
+     */
+    configureSession(settings: SessionSettings): Promise<void>;
     /**
      * With an opened streaming connection: send a text input message to the LLM. This will trigger a
      * text response as well as an audio response if the session was opened with audio mode active.
package/dist/index.js CHANGED
@@ -107,12 +107,12 @@ var WebClient = class {
    * })
    * ```
    */
-  async connect(audio = false, handlers) {
+  async joinSession(audio = false, handlers) {
     if (!this.sessionId) {
       throw new Error("No open session");
     }
     const ws = new WebSocket(
-      `${this.config.streamingEndpoint}/web/${this.sessionId}`,
+      `${this.config.streamingEndpoint}/player/${this.sessionId}`,
       ["key", this.config.key]
     );
     if (!await this.serverHandShake(ws, audio)) {
@@ -157,6 +157,23 @@ var WebClient = class {
     };
     this.ws = ws;
   }
+  /**
+   * With an opened streaming connection: send a "system" update of the session settings.
+   * Allows to change some behavioral parameters like the temperature or system instructions.
+   * This does not trigger a model response.
+   *
+   * @remarks
+   * With openai for example, this triggers (pseudo-code):
+   * webSocket.send({ type: "session.update", session: { ... }})
+   *
+   * @param settings complete definition of the settings. Same values must be re-provided for unchanged parameters.
+   *
+   * @exception
+   * This method throws new Error(...) if unable to execute successfully for any reason.
+   */
+  async configureSession(settings) {
+    this.send("session.configure", settings);
+  }
   /**
    * With an opened streaming connection: send a text input message to the LLM. This will trigger a
    * text response as well as an audio response if the session was opened with audio mode active.
package/package.json CHANGED
@@ -1,10 +1,11 @@
 {
   "name": "@simfinity/constellation-client",
-  "version": "1.0.2",
+  "version": "1.0.4",
   "type": "module",
   "exports": {
     ".": {
-      "import": "./dist/index.mjs",
+      "types": "./dist/index.d.ts",
+      "import": "./dist/index.js",
       "require": "./dist/index.cjs"
     }
   },
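
With the updated exports map, the `import` condition now points at `./dist/index.js` (the ESM build present in `dist/`, consistent with `"type": "module"`), and the new `types` condition points TypeScript at the bundled declarations. Consumers resolve the package roughly as in the sketch below (the `WebClient` export name is assumed):

```ts
// ESM / TypeScript: resolved via the "import" and "types" conditions.
import { WebClient } from "@simfinity/constellation-client";

// CommonJS: resolved via the "require" condition to ./dist/index.cjs.
// const { WebClient } = require("@simfinity/constellation-client");
```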