@simfinity/constellation-client 1.0.20 → 1.0.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -41,15 +41,22 @@ var WebClient = class {
41
41
  *
42
42
  * @param voiceEnabled whether this Model-session can receive & produce audio as well as text
43
43
  * @param voiceName LLM specific voice name e.g. with OpenAI this could be 'alloy'
44
+ * @param behaviour model behaviour parameters. This is optional: default settings
45
+ * will be used if omitted and can be changed mid-session with configureSession().
46
+ * WARNING: some LLMs may not support mid-session updates, so it is
47
+ * advisable to provide them here at startSession time.
44
48
  *
45
49
  * @exception
46
50
  * This method throws new Error(...) if unable to execute successfully for any reason.
47
51
  */
48
- async startSession(voiceEnabled, voiceName) {
52
+ async startSession(voiceEnabled, voiceName, behaviour) {
49
53
  const prepareBody = {
50
54
  llmProvider: this.config.llm,
51
55
  audioEnabled: voiceEnabled,
52
- voiceName
56
+ voiceName,
57
+ temperature: behaviour == null ? void 0 : behaviour.temperature,
58
+ instructions: behaviour == null ? void 0 : behaviour.instructions,
59
+ maxResponseToken: behaviour == null ? void 0 : behaviour.maxResponseToken
53
60
  };
54
61
  const response = await fetch(`${this.config.sessionEndpoint}/prepare_session`, {
55
62
  method: "POST",
package/dist/index.d.cts CHANGED
@@ -157,11 +157,15 @@ declare class WebClient {
157
157
  *
158
158
  * @param voiceEnabled whether this Model-session can receive & produce audio as well as text
159
159
  * @param voiceName LLM specific voice name e.g. with OpenAI this could be 'alloy'
160
+ * @param behaviour model behaviour parameters. This is optional: default settings
161
+ * will be used if omitted and can be changed mid-session with configureSession().
162
+ * WARNING: some LLMs may not support mid-session updates, so it is
163
+ * advisable to provide them here at startSession time.
160
164
  *
161
165
  * @exception
162
166
  * This method throws new Error(...) if unable to execute successfully for any reason.
163
167
  */
164
- startSession(voiceEnabled: boolean, voiceName?: string): Promise<void>;
168
+ startSession(voiceEnabled: boolean, voiceName?: string, behaviour?: SessionConfig): Promise<void>;
165
169
  /**
166
170
  * Close an opened, persistent chat room, effectively killing the streaming as well if still opened.
167
171
  * If there is no active session, this method does nothing.
package/dist/index.d.ts CHANGED
@@ -157,11 +157,15 @@ declare class WebClient {
157
157
  *
158
158
  * @param voiceEnabled whether this Model-session can receive & produce audio as well as text
159
159
  * @param voiceName LLM specific voice name e.g. with OpenAI this could be 'alloy'
160
+ * @param behaviour model behaviour parameters. This is optional: default settings
161
+ * will be used if omitted and can be changed mid-session with configureSession().
162
+ * WARNING: some LLMs may not support mid-session updates, so it is
163
+ * advisable to provide them here at startSession time.
160
164
  *
161
165
  * @exception
162
166
  * This method throws new Error(...) if unable to execute successfully for any reason.
163
167
  */
164
- startSession(voiceEnabled: boolean, voiceName?: string): Promise<void>;
168
+ startSession(voiceEnabled: boolean, voiceName?: string, behaviour?: SessionConfig): Promise<void>;
165
169
  /**
166
170
  * Close an opened, persistent chat room, effectively killing the streaming as well if still opened.
167
171
  * If there is no active session, this method does nothing.
package/dist/index.js CHANGED
@@ -15,15 +15,22 @@ var WebClient = class {
15
15
  *
16
16
  * @param voiceEnabled whether this Model-session can receive & produce audio as well as text
17
17
  * @param voiceName LLM specific voice name e.g. with OpenAI this could be 'alloy'
18
+ * @param behaviour model behaviour parameters. This is optional: default settings
19
+ * will be used if omitted and can be changed mid-session with configureSession().
20
+ * WARNING: some LLMs may not support mid-session updates, so it is
21
+ * advisable to provide them here at startSession time.
18
22
  *
19
23
  * @exception
20
24
  * This method throws new Error(...) if unable to execute successfully for any reason.
21
25
  */
22
- async startSession(voiceEnabled, voiceName) {
26
+ async startSession(voiceEnabled, voiceName, behaviour) {
23
27
  const prepareBody = {
24
28
  llmProvider: this.config.llm,
25
29
  audioEnabled: voiceEnabled,
26
- voiceName
30
+ voiceName,
31
+ temperature: behaviour == null ? void 0 : behaviour.temperature,
32
+ instructions: behaviour == null ? void 0 : behaviour.instructions,
33
+ maxResponseToken: behaviour == null ? void 0 : behaviour.maxResponseToken
27
34
  };
28
35
  const response = await fetch(`${this.config.sessionEndpoint}/prepare_session`, {
29
36
  method: "POST",
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@simfinity/constellation-client",
3
- "version": "1.0.20",
3
+ "version": "1.0.21",
4
4
  "type": "module",
5
5
  "exports": {
6
6
  ".": {