syllable-sdk 0.1.0-alpha.67 → 0.1.0-alpha.69
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/docs/sdks/prompts/README.md +8 -0
- package/jsr.json +1 -1
- package/lib/config.d.ts +2 -2
- package/lib/config.js +2 -2
- package/models/components/promptcreaterequest.d.ts +1 -1
- package/models/components/promptllmconfig.d.ts +10 -0
- package/models/components/promptllmconfig.d.ts.map +1 -1
- package/models/components/promptllmconfig.js +4 -0
- package/models/components/promptllmconfig.js.map +1 -1
- package/models/components/promptupdaterequest.d.ts +1 -1
- package/openapi.json +32 -2
- package/package.json +2 -2
- package/src/lib/config.ts +2 -2
- package/src/models/components/promptcreaterequest.ts +1 -1
- package/src/models/components/promptllmconfig.ts +14 -0
- package/src/models/components/promptupdaterequest.ts +1 -1
|
@@ -132,6 +132,8 @@ async function run() {
|
|
|
132
132
|
llmConfig: {
|
|
133
133
|
version: "2024-05-13",
|
|
134
134
|
apiVersion: "2024-06-01",
|
|
135
|
+
temperature: 1,
|
|
136
|
+
seed: 123,
|
|
135
137
|
},
|
|
136
138
|
});
|
|
137
139
|
|
|
@@ -168,6 +170,8 @@ async function run() {
|
|
|
168
170
|
llmConfig: {
|
|
169
171
|
version: "2024-05-13",
|
|
170
172
|
apiVersion: "2024-06-01",
|
|
173
|
+
temperature: 1,
|
|
174
|
+
seed: 123,
|
|
171
175
|
},
|
|
172
176
|
});
|
|
173
177
|
|
|
@@ -229,6 +233,8 @@ async function run() {
|
|
|
229
233
|
llmConfig: {
|
|
230
234
|
version: "2024-05-13",
|
|
231
235
|
apiVersion: "2024-06-01",
|
|
236
|
+
temperature: 1,
|
|
237
|
+
seed: 123,
|
|
232
238
|
},
|
|
233
239
|
id: 1,
|
|
234
240
|
editComments: "Updated prompt text to include requirement to not answer questions that aren't about weather.",
|
|
@@ -267,6 +273,8 @@ async function run() {
|
|
|
267
273
|
llmConfig: {
|
|
268
274
|
version: "2024-05-13",
|
|
269
275
|
apiVersion: "2024-06-01",
|
|
276
|
+
temperature: 1,
|
|
277
|
+
seed: 123,
|
|
270
278
|
},
|
|
271
279
|
id: 1,
|
|
272
280
|
editComments: "Updated prompt text to include requirement to not answer questions that aren't about weather.",
|
package/jsr.json
CHANGED
package/lib/config.d.ts
CHANGED
|
@@ -27,8 +27,8 @@ export declare function serverURLFromOptions(options: SDKOptions): URL | null;
|
|
|
27
27
|
export declare const SDK_METADATA: {
|
|
28
28
|
readonly language: "typescript";
|
|
29
29
|
readonly openapiDocVersion: "0.0.2";
|
|
30
|
-
readonly sdkVersion: "0.1.0-alpha.67";
|
|
30
|
+
readonly sdkVersion: "0.1.0-alpha.69";
|
|
31
31
|
readonly genVersion: "2.512.4";
|
|
32
|
-
readonly userAgent: "speakeasy-sdk/typescript 0.1.0-alpha.67 2.512.4 0.0.2 syllable-sdk";
|
|
32
|
+
readonly userAgent: "speakeasy-sdk/typescript 0.1.0-alpha.69 2.512.4 0.0.2 syllable-sdk";
|
|
33
33
|
};
|
|
34
34
|
//# sourceMappingURL=config.d.ts.map
|
package/lib/config.js
CHANGED
|
@@ -31,8 +31,8 @@ function serverURLFromOptions(options) {
|
|
|
31
31
|
exports.SDK_METADATA = {
|
|
32
32
|
language: "typescript",
|
|
33
33
|
openapiDocVersion: "0.0.2",
|
|
34
|
-
sdkVersion: "0.1.0-alpha.67",
|
|
34
|
+
sdkVersion: "0.1.0-alpha.69",
|
|
35
35
|
genVersion: "2.512.4",
|
|
36
|
-
userAgent: "speakeasy-sdk/typescript 0.1.0-alpha.67 2.512.4 0.0.2 syllable-sdk",
|
|
36
|
+
userAgent: "speakeasy-sdk/typescript 0.1.0-alpha.69 2.512.4 0.0.2 syllable-sdk",
|
|
37
37
|
};
|
|
38
38
|
//# sourceMappingURL=config.js.map
|
|
@@ -31,7 +31,7 @@ export type PromptCreateRequest = {
|
|
|
31
31
|
*/
|
|
32
32
|
llmConfig: PromptLlmConfig;
|
|
33
33
|
/**
|
|
34
|
-
* Whether to include the default tools (`
|
|
34
|
+
* Whether to include the default tools (`hangup`, `summary`) in the list of tools for the prompt. If you disable this during creation, you might want to disable it during updates as well, otherwise the default tools will be added when updating the prompt.
|
|
35
35
|
*/
|
|
36
36
|
includeDefaultTools?: boolean | undefined;
|
|
37
37
|
};
|
|
@@ -22,6 +22,14 @@ export type PromptLlmConfig = {
|
|
|
22
22
|
* Version of the API. (Currently only used for Azure OpenAI.)
|
|
23
23
|
*/
|
|
24
24
|
apiVersion?: string | null | undefined;
|
|
25
|
+
/**
|
|
26
|
+
* Temperature parameter for the model. Determines randomness of responses - higher is more random, lower is more focused. Must be between 0.0 and 2.0, inclusive.
|
|
27
|
+
*/
|
|
28
|
+
temperature?: number | null | undefined;
|
|
29
|
+
/**
|
|
30
|
+
* Controls the reproducibility of the job. The LLM will give the same or similar responses given the same inputs in multiple conversations with the same seed.
|
|
31
|
+
*/
|
|
32
|
+
seed?: number | null | undefined;
|
|
25
33
|
};
|
|
26
34
|
/** @internal */
|
|
27
35
|
export declare const PromptLlmConfig$inboundSchema: z.ZodType<PromptLlmConfig, z.ZodTypeDef, unknown>;
|
|
@@ -31,6 +39,8 @@ export type PromptLlmConfig$Outbound = {
|
|
|
31
39
|
model: string;
|
|
32
40
|
version?: string | null | undefined;
|
|
33
41
|
api_version?: string | null | undefined;
|
|
42
|
+
temperature?: number | null | undefined;
|
|
43
|
+
seed?: number | null | undefined;
|
|
34
44
|
};
|
|
35
45
|
/** @internal */
|
|
36
46
|
export declare const PromptLlmConfig$outboundSchema: z.ZodType<PromptLlmConfig$Outbound, z.ZodTypeDef, PromptLlmConfig>;
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"promptllmconfig.d.ts","sourceRoot":"","sources":["../../src/models/components/promptllmconfig.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,CAAC,MAAM,KAAK,CAAC;AAGzB,OAAO,EAAE,MAAM,IAAI,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAC9D,OAAO,EAAE,kBAAkB,EAAE,MAAM,iCAAiC,CAAC;AACrE,OAAO,EACL,iBAAiB,EAGlB,MAAM,wBAAwB,CAAC;AAEhC;;GAEG;AACH,MAAM,MAAM,eAAe,GAAG;IAC5B;;OAEG;IACH,QAAQ,CAAC,EAAE,iBAAiB,GAAG,SAAS,CAAC;IACzC;;OAEG;IACH,KAAK,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC3B;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;IACpC;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;
|
|
1
|
+
{"version":3,"file":"promptllmconfig.d.ts","sourceRoot":"","sources":["../../src/models/components/promptllmconfig.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,CAAC,MAAM,KAAK,CAAC;AAGzB,OAAO,EAAE,MAAM,IAAI,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAC9D,OAAO,EAAE,kBAAkB,EAAE,MAAM,iCAAiC,CAAC;AACrE,OAAO,EACL,iBAAiB,EAGlB,MAAM,wBAAwB,CAAC;AAEhC;;GAEG;AACH,MAAM,MAAM,eAAe,GAAG;IAC5B;;OAEG;IACH,QAAQ,CAAC,EAAE,iBAAiB,GAAG,SAAS,CAAC;IACzC;;OAEG;IACH,KAAK,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC3B;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;IACpC;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;IACvC;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;IACxC;;OAEG;IACH,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;CAClC,CAAC;AAEF,gBAAgB;AAChB,eAAO,MAAM,6BAA6B,EAAE,CAAC,CAAC,OAAO,CACnD,eAAe,EACf,CAAC,CAAC,UAAU,EACZ,OAAO,CAYP,CAAC;AAEH,gBAAgB;AAChB,MAAM,MAAM,wBAAwB,GAAG;IACrC,QAAQ,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC9B,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;IACpC,WAAW,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;IACxC,WAAW,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;IACxC,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;CAClC,CAAC;AAEF,gBAAgB;AAChB,eAAO,MAAM,8BAA8B,EAAE,CAAC,CAAC,OAAO,CACpD,wBAAwB,EACxB,CAAC,CAAC,UAAU,EACZ,eAAe,CAYf,CAAC;AAEH;;;GAGG;AACH,yBAAiB,gBAAgB,CAAC;IAChC,+DAA+D;IACxD,MAAM,aAAa,mDAAgC,CAAC;IAC3D,gEAAgE;IACzD,MAAM,cAAc,oEAAiC,CAAC;IAC7D,0DAA0D;IAC1D,KAAY,QAAQ,GAAG,wBAAwB,CAAC;CACjD;AAED,wBAAgB,qBAAqB,CACnC,eAAe,EAAE,eAAe,GAC/B,MAAM,CAER;AAED,wBAAgB,uBAAuB,CACrC,UAAU,EAAE,MAAM,GACjB,eAAe,CAAC,eAAe,EAAE,kBAAkB,CAAC,CAMtD"}
|
|
@@ -39,6 +39,8 @@ exports.PromptLlmConfig$inboundSchema = z.object({
|
|
|
39
39
|
model: z.string().default("gpt-4o"),
|
|
40
40
|
version: z.nullable(z.string()).optional(),
|
|
41
41
|
api_version: z.nullable(z.string()).optional(),
|
|
42
|
+
temperature: z.nullable(z.number()).optional(),
|
|
43
|
+
seed: z.nullable(z.number().int()).optional(),
|
|
42
44
|
}).transform((v) => {
|
|
43
45
|
return (0, primitives_js_1.remap)(v, {
|
|
44
46
|
"api_version": "apiVersion",
|
|
@@ -50,6 +52,8 @@ exports.PromptLlmConfig$outboundSchema = z.object({
|
|
|
50
52
|
model: z.string().default("gpt-4o"),
|
|
51
53
|
version: z.nullable(z.string()).optional(),
|
|
52
54
|
apiVersion: z.nullable(z.string()).optional(),
|
|
55
|
+
temperature: z.nullable(z.number()).optional(),
|
|
56
|
+
seed: z.nullable(z.number().int()).optional(),
|
|
53
57
|
}).transform((v) => {
|
|
54
58
|
return (0, primitives_js_1.remap)(v, {
|
|
55
59
|
apiVersion: "api_version",
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"promptllmconfig.js","sourceRoot":"","sources":["../../src/models/components/promptllmconfig.ts"],"names":[],"mappings":";AAAA;;GAEG;;;;;;;;;;;;;;;;;;;;;;;;;;
|
|
1
|
+
{"version":3,"file":"promptllmconfig.js","sourceRoot":"","sources":["../../src/models/components/promptllmconfig.ts"],"names":[],"mappings":";AAAA;;GAEG;;;;;;;;;;;;;;;;;;;;;;;;;;AAsGH,sDAIC;AAED,0DAQC;AAlHD,uCAAyB;AACzB,2DAA0D;AAC1D,qDAAiD;AAGjD,iEAIgC;AAgChC,gBAAgB;AACH,QAAA,6BAA6B,GAItC,CAAC,CAAC,MAAM,CAAC;IACX,QAAQ,EAAE,sDAA+B,CAAC,QAAQ,EAAE;IACpD,KAAK,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,OAAO,CAAC,QAAQ,CAAC;IACnC,OAAO,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,QAAQ,EAAE;IAC1C,WAAW,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,QAAQ,EAAE;IAC9C,WAAW,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,QAAQ,EAAE;IAC9C,IAAI,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,QAAQ,EAAE;CAC9C,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,EAAE;IACjB,OAAO,IAAA,qBAAM,EAAC,CAAC,EAAE;QACf,aAAa,EAAE,YAAY;KAC5B,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAYH,gBAAgB;AACH,QAAA,8BAA8B,GAIvC,CAAC,CAAC,MAAM,CAAC;IACX,QAAQ,EAAE,uDAAgC,CAAC,QAAQ,EAAE;IACrD,KAAK,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,OAAO,CAAC,QAAQ,CAAC;IACnC,OAAO,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,QAAQ,EAAE;IAC1C,UAAU,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,QAAQ,EAAE;IAC7C,WAAW,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,QAAQ,EAAE;IAC9C,IAAI,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,QAAQ,EAAE;CAC9C,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,EAAE;IACjB,OAAO,IAAA,qBAAM,EAAC,CAAC,EAAE;QACf,UAAU,EAAE,aAAa;KAC1B,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH;;;GAGG;AACH,IAAiB,gBAAgB,CAOhC;AAPD,WAAiB,gBAAgB;IAC/B,+DAA+D;IAClD,8BAAa,GAAG,qCAA6B,CAAC;IAC3D,gEAAgE;IACnD,+BAAc,GAAG,sCAA8B,CAAC;AAG/D,CAAC,EAPgB,gBAAgB,gCAAhB,gBAAgB,QAOhC;AAED,SAAgB,qBAAqB,CACnC,eAAgC;IAEhC,OAAO,IAAI,CAAC,SAAS,CAAC,sCAA8B,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC,CAAC;AAC/E,CAAC;AAED,SAAgB,uBAAuB,CACrC,UAAkB;IAElB,OAAO,IAAA,sBAAS,EACd,UAAU,EACV,CAAC,CAAC,EAAE,EAAE,CAAC,qCAA6B,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EACzD,6CAA6C,CAC9C,CAAC;AACJ,CAAC"}
|
|
@@ -39,7 +39,7 @@ export type PromptUpdateRequest = {
|
|
|
39
39
|
*/
|
|
40
40
|
editComments?: string | null | undefined;
|
|
41
41
|
/**
|
|
42
|
-
* Whether to include the default tools (`
|
|
42
|
+
* Whether to include the default tools (`hangup`, `summary`) in the list of tools for the prompt. If you remove one of the default tools from your prompt, you might want to disable this option so that the tool is not added again when updated.
|
|
43
43
|
*/
|
|
44
44
|
includeDefaultTools?: boolean | undefined;
|
|
45
45
|
};
|
package/openapi.json
CHANGED
|
@@ -14206,7 +14206,7 @@
|
|
|
14206
14206
|
"include_default_tools": {
|
|
14207
14207
|
"type": "boolean",
|
|
14208
14208
|
"title": "Include Default Tools",
|
|
14209
|
-
"description": "Whether to include the default tools (`
|
|
14209
|
+
"description": "Whether to include the default tools (`hangup`, `summary`) in the list of tools for the prompt. If you disable this during creation, you might want to disable it during updates as well, otherwise the default tools will be added when updating the prompt.",
|
|
14210
14210
|
"default": true,
|
|
14211
14211
|
"examples": [
|
|
14212
14212
|
true
|
|
@@ -14369,6 +14369,36 @@
|
|
|
14369
14369
|
"examples": [
|
|
14370
14370
|
"2024-06-01"
|
|
14371
14371
|
]
|
|
14372
|
+
},
|
|
14373
|
+
"temperature": {
|
|
14374
|
+
"anyOf": [
|
|
14375
|
+
{
|
|
14376
|
+
"type": "number"
|
|
14377
|
+
},
|
|
14378
|
+
{
|
|
14379
|
+
"type": "null"
|
|
14380
|
+
}
|
|
14381
|
+
],
|
|
14382
|
+
"title": "Temperature",
|
|
14383
|
+
"description": "Temperature parameter for the model. Determines randomness of responses - higher is more random, lower is more focused. Must be between 0.0 and 2.0, inclusive.",
|
|
14384
|
+
"examples": [
|
|
14385
|
+
1.0
|
|
14386
|
+
]
|
|
14387
|
+
},
|
|
14388
|
+
"seed": {
|
|
14389
|
+
"anyOf": [
|
|
14390
|
+
{
|
|
14391
|
+
"type": "integer"
|
|
14392
|
+
},
|
|
14393
|
+
{
|
|
14394
|
+
"type": "null"
|
|
14395
|
+
}
|
|
14396
|
+
],
|
|
14397
|
+
"title": "Seed",
|
|
14398
|
+
"description": "Controls the reproducibility of the job. The LLM will give the same or similar responses given the same inputs in multiple conversations with the same seed.",
|
|
14399
|
+
"examples": [
|
|
14400
|
+
123
|
|
14401
|
+
]
|
|
14372
14402
|
}
|
|
14373
14403
|
},
|
|
14374
14404
|
"type": "object",
|
|
@@ -14668,7 +14698,7 @@
|
|
|
14668
14698
|
"include_default_tools": {
|
|
14669
14699
|
"type": "boolean",
|
|
14670
14700
|
"title": "Include Default Tools",
|
|
14671
|
-
"description": "Whether to include the default tools (`
|
|
14701
|
+
"description": "Whether to include the default tools (`hangup`, `summary`) in the list of tools for the prompt. If you remove one of the default tools from your prompt, you might want to disable this option so that the tool is not added again when updated.",
|
|
14672
14702
|
"default": true,
|
|
14673
14703
|
"examples": [
|
|
14674
14704
|
true
|
package/package.json
CHANGED
package/src/lib/config.ts
CHANGED
|
@@ -57,8 +57,8 @@ export function serverURLFromOptions(options: SDKOptions): URL | null {
|
|
|
57
57
|
export const SDK_METADATA = {
|
|
58
58
|
language: "typescript",
|
|
59
59
|
openapiDocVersion: "0.0.2",
|
|
60
|
-
sdkVersion: "0.1.0-alpha.67",
|
|
60
|
+
sdkVersion: "0.1.0-alpha.69",
|
|
61
61
|
genVersion: "2.512.4",
|
|
62
62
|
userAgent:
|
|
63
|
-
"speakeasy-sdk/typescript 0.1.0-alpha.67 2.512.4 0.0.2 syllable-sdk",
|
|
63
|
+
"speakeasy-sdk/typescript 0.1.0-alpha.69 2.512.4 0.0.2 syllable-sdk",
|
|
64
64
|
} as const;
|
|
@@ -43,7 +43,7 @@ export type PromptCreateRequest = {
|
|
|
43
43
|
*/
|
|
44
44
|
llmConfig: PromptLlmConfig;
|
|
45
45
|
/**
|
|
46
|
-
* Whether to include the default tools (`
|
|
46
|
+
* Whether to include the default tools (`hangup`, `summary`) in the list of tools for the prompt. If you disable this during creation, you might want to disable it during updates as well, otherwise the default tools will be added when updating the prompt.
|
|
47
47
|
*/
|
|
48
48
|
includeDefaultTools?: boolean | undefined;
|
|
49
49
|
};
|
|
@@ -33,6 +33,14 @@ export type PromptLlmConfig = {
|
|
|
33
33
|
* Version of the API. (Currently only used for Azure OpenAI.)
|
|
34
34
|
*/
|
|
35
35
|
apiVersion?: string | null | undefined;
|
|
36
|
+
/**
|
|
37
|
+
* Temperature parameter for the model. Determines randomness of responses - higher is more random, lower is more focused. Must be between 0.0 and 2.0, inclusive.
|
|
38
|
+
*/
|
|
39
|
+
temperature?: number | null | undefined;
|
|
40
|
+
/**
|
|
41
|
+
* Controls the reproducibility of the job. The LLM will give the same or similar responses given the same inputs in multiple conversations with the same seed.
|
|
42
|
+
*/
|
|
43
|
+
seed?: number | null | undefined;
|
|
36
44
|
};
|
|
37
45
|
|
|
38
46
|
/** @internal */
|
|
@@ -45,6 +53,8 @@ export const PromptLlmConfig$inboundSchema: z.ZodType<
|
|
|
45
53
|
model: z.string().default("gpt-4o"),
|
|
46
54
|
version: z.nullable(z.string()).optional(),
|
|
47
55
|
api_version: z.nullable(z.string()).optional(),
|
|
56
|
+
temperature: z.nullable(z.number()).optional(),
|
|
57
|
+
seed: z.nullable(z.number().int()).optional(),
|
|
48
58
|
}).transform((v) => {
|
|
49
59
|
return remap$(v, {
|
|
50
60
|
"api_version": "apiVersion",
|
|
@@ -57,6 +67,8 @@ export type PromptLlmConfig$Outbound = {
|
|
|
57
67
|
model: string;
|
|
58
68
|
version?: string | null | undefined;
|
|
59
69
|
api_version?: string | null | undefined;
|
|
70
|
+
temperature?: number | null | undefined;
|
|
71
|
+
seed?: number | null | undefined;
|
|
60
72
|
};
|
|
61
73
|
|
|
62
74
|
/** @internal */
|
|
@@ -69,6 +81,8 @@ export const PromptLlmConfig$outboundSchema: z.ZodType<
|
|
|
69
81
|
model: z.string().default("gpt-4o"),
|
|
70
82
|
version: z.nullable(z.string()).optional(),
|
|
71
83
|
apiVersion: z.nullable(z.string()).optional(),
|
|
84
|
+
temperature: z.nullable(z.number()).optional(),
|
|
85
|
+
seed: z.nullable(z.number().int()).optional(),
|
|
72
86
|
}).transform((v) => {
|
|
73
87
|
return remap$(v, {
|
|
74
88
|
apiVersion: "api_version",
|
|
@@ -51,7 +51,7 @@ export type PromptUpdateRequest = {
|
|
|
51
51
|
*/
|
|
52
52
|
editComments?: string | null | undefined;
|
|
53
53
|
/**
|
|
54
|
-
* Whether to include the default tools (`
|
|
54
|
+
* Whether to include the default tools (`hangup`, `summary`) in the list of tools for the prompt. If you remove one of the default tools from your prompt, you might want to disable this option so that the tool is not added again when updated.
|
|
55
55
|
*/
|
|
56
56
|
includeDefaultTools?: boolean | undefined;
|
|
57
57
|
};
|