llama-stack-client 0.2.10 → 0.2.11-rc1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. package/index.d.mts +9 -0
  2. package/index.d.ts +9 -0
  3. package/index.d.ts.map +1 -1
  4. package/index.js +9 -0
  5. package/index.js.map +1 -1
  6. package/index.mjs +9 -0
  7. package/index.mjs.map +1 -1
  8. package/package.json +1 -1
  9. package/resource.d.ts +1 -1
  10. package/resource.d.ts.map +1 -1
  11. package/resource.js.map +1 -1
  12. package/resource.mjs.map +1 -1
  13. package/resources/completions.d.ts +4 -0
  14. package/resources/completions.d.ts.map +1 -1
  15. package/resources/datasets.d.ts +4 -0
  16. package/resources/datasets.d.ts.map +1 -1
  17. package/resources/embeddings.d.ts +94 -0
  18. package/resources/embeddings.d.ts.map +1 -0
  19. package/resources/embeddings.js +16 -0
  20. package/resources/embeddings.js.map +1 -0
  21. package/resources/embeddings.mjs +12 -0
  22. package/resources/embeddings.mjs.map +1 -0
  23. package/resources/files.d.ts +130 -0
  24. package/resources/files.d.ts.map +1 -0
  25. package/resources/files.js +68 -0
  26. package/resources/files.js.map +1 -0
  27. package/resources/files.mjs +41 -0
  28. package/resources/files.mjs.map +1 -0
  29. package/resources/index.d.ts +3 -0
  30. package/resources/index.d.ts.map +1 -1
  31. package/resources/index.js +7 -1
  32. package/resources/index.js.map +1 -1
  33. package/resources/index.mjs +3 -0
  34. package/resources/index.mjs.map +1 -1
  35. package/resources/responses/input-items.d.ts +8 -1
  36. package/resources/responses/input-items.d.ts.map +1 -1
  37. package/resources/responses/responses.d.ts +391 -6
  38. package/resources/responses/responses.d.ts.map +1 -1
  39. package/resources/responses/responses.js.map +1 -1
  40. package/resources/responses/responses.mjs.map +1 -1
  41. package/resources/shared.d.ts +37 -1
  42. package/resources/shared.d.ts.map +1 -1
  43. package/resources/vector-io.d.ts +29 -3
  44. package/resources/vector-io.d.ts.map +1 -1
  45. package/resources/vector-stores/files.d.ts +74 -0
  46. package/resources/vector-stores/files.d.ts.map +1 -0
  47. package/resources/vector-stores/files.js +15 -0
  48. package/resources/vector-stores/files.js.map +1 -0
  49. package/resources/vector-stores/files.mjs +11 -0
  50. package/resources/vector-stores/files.mjs.map +1 -0
  51. package/resources/vector-stores/index.d.ts +3 -0
  52. package/resources/vector-stores/index.d.ts.map +1 -0
  53. package/resources/vector-stores/index.js +9 -0
  54. package/resources/vector-stores/index.js.map +1 -0
  55. package/resources/vector-stores/index.mjs +4 -0
  56. package/resources/vector-stores/index.mjs.map +1 -0
  57. package/resources/vector-stores/vector-stores.d.ts +198 -0
  58. package/resources/vector-stores/vector-stores.d.ts.map +1 -0
  59. package/resources/vector-stores/vector-stores.js +77 -0
  60. package/resources/vector-stores/vector-stores.js.map +1 -0
  61. package/resources/vector-stores/vector-stores.mjs +50 -0
  62. package/resources/vector-stores/vector-stores.mjs.map +1 -0
  63. package/resources/vector-stores.d.ts +2 -0
  64. package/resources/vector-stores.d.ts.map +1 -0
  65. package/resources/vector-stores.js +19 -0
  66. package/resources/vector-stores.js.map +1 -0
  67. package/resources/vector-stores.mjs +3 -0
  68. package/resources/vector-stores.mjs.map +1 -0
  69. package/src/index.ts +55 -0
  70. package/src/resource.ts +1 -1
  71. package/src/resources/completions.ts +5 -0
  72. package/src/resources/datasets.ts +5 -0
  73. package/src/resources/embeddings.ts +119 -0
  74. package/src/resources/files.ts +184 -0
  75. package/src/resources/index.ts +21 -0
  76. package/src/resources/responses/input-items.ts +13 -0
  77. package/src/resources/responses/responses.ts +616 -1
  78. package/src/resources/shared.ts +42 -1
  79. package/src/resources/vector-io.ts +31 -3
  80. package/src/resources/vector-stores/files.ts +111 -0
  81. package/src/resources/vector-stores/index.ts +14 -0
  82. package/src/resources/vector-stores/vector-stores.ts +306 -0
  83. package/src/resources/vector-stores.ts +3 -0
  84. package/src/version.ts +1 -1
  85. package/version.d.ts +1 -1
  86. package/version.d.ts.map +1 -1
  87. package/version.js +1 -1
  88. package/version.js.map +1 -1
  89. package/version.mjs +1 -1
  90. package/version.mjs.map +1 -1
@@ -0,0 +1,3 @@
1
+ // File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2
+ export * from "./vector-stores/index.mjs";
3
+ //# sourceMappingURL=vector-stores.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"vector-stores.mjs","sourceRoot":"","sources":["../src/resources/vector-stores.ts"],"names":[],"mappings":"AAAA,sFAAsF"}
package/src/index.ts CHANGED
@@ -32,6 +32,16 @@ import {
32
32
  Datasets,
33
33
  ListDatasetsResponse,
34
34
  } from './resources/datasets';
35
+ import { CreateEmbeddingsResponse, EmbeddingCreateParams, Embeddings } from './resources/embeddings';
36
+ import {
37
+ DeleteFileResponse,
38
+ File,
39
+ FileContentResponse,
40
+ FileCreateParams,
41
+ FileListParams,
42
+ Files,
43
+ ListFilesResponse,
44
+ } from './resources/files';
35
45
  import {
36
46
  ChatCompletionResponseStreamChunk,
37
47
  CompletionResponse,
@@ -174,6 +184,17 @@ import {
174
184
  ToolRuntimeListToolsParams,
175
185
  ToolRuntimeListToolsResponse,
176
186
  } from './resources/tool-runtime/tool-runtime';
187
+ import {
188
+ ListVectorStoresResponse,
189
+ VectorStore,
190
+ VectorStoreCreateParams,
191
+ VectorStoreDeleteResponse,
192
+ VectorStoreListParams,
193
+ VectorStoreSearchParams,
194
+ VectorStoreSearchResponse,
195
+ VectorStoreUpdateParams,
196
+ VectorStores,
197
+ } from './resources/vector-stores/vector-stores';
177
198
 
178
199
  export interface ClientOptions {
179
200
  /**
@@ -291,10 +312,12 @@ export class LlamaStackClient extends Core.APIClient {
291
312
  eval: API.Eval = new API.Eval(this);
292
313
  inspect: API.Inspect = new API.Inspect(this);
293
314
  inference: API.Inference = new API.Inference(this);
315
+ embeddings: API.Embeddings = new API.Embeddings(this);
294
316
  chat: API.Chat = new API.Chat(this);
295
317
  completions: API.Completions = new API.Completions(this);
296
318
  vectorIo: API.VectorIo = new API.VectorIo(this);
297
319
  vectorDBs: API.VectorDBs = new API.VectorDBs(this);
320
+ vectorStores: API.VectorStores = new API.VectorStores(this);
298
321
  models: API.Models = new API.Models(this);
299
322
  postTraining: API.PostTraining = new API.PostTraining(this);
300
323
  providers: API.Providers = new API.Providers(this);
@@ -306,6 +329,7 @@ export class LlamaStackClient extends Core.APIClient {
306
329
  scoring: API.Scoring = new API.Scoring(this);
307
330
  scoringFunctions: API.ScoringFunctions = new API.ScoringFunctions(this);
308
331
  benchmarks: API.Benchmarks = new API.Benchmarks(this);
332
+ files: API.Files = new API.Files(this);
309
333
 
310
334
  protected override defaultQuery(): Core.DefaultQuery | undefined {
311
335
  return this._options.defaultQuery;
@@ -359,10 +383,12 @@ LlamaStackClient.Datasets = Datasets;
359
383
  LlamaStackClient.Eval = Eval;
360
384
  LlamaStackClient.Inspect = Inspect;
361
385
  LlamaStackClient.Inference = Inference;
386
+ LlamaStackClient.Embeddings = Embeddings;
362
387
  LlamaStackClient.Chat = Chat;
363
388
  LlamaStackClient.Completions = Completions;
364
389
  LlamaStackClient.VectorIo = VectorIo;
365
390
  LlamaStackClient.VectorDBs = VectorDBs;
391
+ LlamaStackClient.VectorStores = VectorStores;
366
392
  LlamaStackClient.Models = Models;
367
393
  LlamaStackClient.PostTraining = PostTraining;
368
394
  LlamaStackClient.Providers = Providers;
@@ -374,6 +400,7 @@ LlamaStackClient.Telemetry = Telemetry;
374
400
  LlamaStackClient.Scoring = Scoring;
375
401
  LlamaStackClient.ScoringFunctions = ScoringFunctions;
376
402
  LlamaStackClient.Benchmarks = Benchmarks;
403
+ LlamaStackClient.Files = Files;
377
404
  export declare namespace LlamaStackClient {
378
405
  export type RequestOptions = Core.RequestOptions;
379
406
 
@@ -479,6 +506,12 @@ export declare namespace LlamaStackClient {
479
506
  type InferenceEmbeddingsParams as InferenceEmbeddingsParams,
480
507
  };
481
508
 
509
+ export {
510
+ Embeddings as Embeddings,
511
+ type CreateEmbeddingsResponse as CreateEmbeddingsResponse,
512
+ type EmbeddingCreateParams as EmbeddingCreateParams,
513
+ };
514
+
482
515
  export { Chat as Chat, type ChatCompletionChunk as ChatCompletionChunk };
483
516
 
484
517
  export {
@@ -505,6 +538,18 @@ export declare namespace LlamaStackClient {
505
538
  type VectorDBRegisterParams as VectorDBRegisterParams,
506
539
  };
507
540
 
541
+ export {
542
+ VectorStores as VectorStores,
543
+ type ListVectorStoresResponse as ListVectorStoresResponse,
544
+ type VectorStore as VectorStore,
545
+ type VectorStoreDeleteResponse as VectorStoreDeleteResponse,
546
+ type VectorStoreSearchResponse as VectorStoreSearchResponse,
547
+ type VectorStoreCreateParams as VectorStoreCreateParams,
548
+ type VectorStoreUpdateParams as VectorStoreUpdateParams,
549
+ type VectorStoreListParams as VectorStoreListParams,
550
+ type VectorStoreSearchParams as VectorStoreSearchParams,
551
+ };
552
+
508
553
  export {
509
554
  Models as Models,
510
555
  type ListModelsResponse as ListModelsResponse,
@@ -597,6 +642,16 @@ export declare namespace LlamaStackClient {
597
642
  type BenchmarkRegisterParams as BenchmarkRegisterParams,
598
643
  };
599
644
 
645
+ export {
646
+ Files as Files,
647
+ type DeleteFileResponse as DeleteFileResponse,
648
+ type File as File,
649
+ type ListFilesResponse as ListFilesResponse,
650
+ type FileContentResponse as FileContentResponse,
651
+ type FileCreateParams as FileCreateParams,
652
+ type FileListParams as FileListParams,
653
+ };
654
+
600
655
  export type AgentConfig = API.AgentConfig;
601
656
  export type BatchCompletion = API.BatchCompletion;
602
657
  export type ChatCompletionResponse = API.ChatCompletionResponse;
package/src/resource.ts CHANGED
@@ -2,7 +2,7 @@
2
2
 
3
3
  import type { LlamaStackClient } from './index';
4
4
 
5
- export class APIResource {
5
+ export abstract class APIResource {
6
6
  protected _client: LlamaStackClient;
7
7
 
8
8
  constructor(client: LlamaStackClient) {
@@ -223,6 +223,11 @@ export interface CompletionCreateParamsBase {
223
223
  */
224
224
  stream_options?: Record<string, boolean | number | string | Array<unknown> | unknown | null>;
225
225
 
226
+ /**
227
+ * (Optional) The suffix that should be appended to the completion.
228
+ */
229
+ suffix?: string;
230
+
226
231
  /**
227
232
  * (Optional) The temperature to use.
228
233
  */
@@ -195,6 +195,11 @@ export interface DatasetIterrowsResponse {
195
195
  * Whether there are more items available after this set
196
196
  */
197
197
  has_more: boolean;
198
+
199
+ /**
200
+ * The URL for accessing this list
201
+ */
202
+ url?: string;
198
203
  }
199
204
 
200
205
  export interface DatasetRegisterResponse {
@@ -0,0 +1,119 @@
1
+ // File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2
+
3
+ import { APIResource } from '../resource';
4
+ import * as Core from '../core';
5
+
6
+ export class Embeddings extends APIResource {
7
+ /**
8
+ * Generate OpenAI-compatible embeddings for the given input using the specified
9
+ * model.
10
+ */
11
+ create(
12
+ body: EmbeddingCreateParams,
13
+ options?: Core.RequestOptions,
14
+ ): Core.APIPromise<CreateEmbeddingsResponse> {
15
+ return this._client.post('/v1/openai/v1/embeddings', { body, ...options });
16
+ }
17
+ }
18
+
19
+ /**
20
+ * Response from an OpenAI-compatible embeddings request.
21
+ */
22
+ export interface CreateEmbeddingsResponse {
23
+ /**
24
+ * List of embedding data objects
25
+ */
26
+ data: Array<CreateEmbeddingsResponse.Data>;
27
+
28
+ /**
29
+ * The model that was used to generate the embeddings
30
+ */
31
+ model: string;
32
+
33
+ /**
34
+ * The object type, which will be "list"
35
+ */
36
+ object: 'list';
37
+
38
+ /**
39
+ * Usage information
40
+ */
41
+ usage: CreateEmbeddingsResponse.Usage;
42
+ }
43
+
44
+ export namespace CreateEmbeddingsResponse {
45
+ /**
46
+ * A single embedding data object from an OpenAI-compatible embeddings response.
47
+ */
48
+ export interface Data {
49
+ /**
50
+ * The embedding vector as a list of floats (when encoding_format="float") or as a
51
+ * base64-encoded string (when encoding_format="base64")
52
+ */
53
+ embedding: Array<number> | string;
54
+
55
+ /**
56
+ * The index of the embedding in the input list
57
+ */
58
+ index: number;
59
+
60
+ /**
61
+ * The object type, which will be "embedding"
62
+ */
63
+ object: 'embedding';
64
+ }
65
+
66
+ /**
67
+ * Usage information
68
+ */
69
+ export interface Usage {
70
+ /**
71
+ * The number of tokens in the input
72
+ */
73
+ prompt_tokens: number;
74
+
75
+ /**
76
+ * The total number of tokens used
77
+ */
78
+ total_tokens: number;
79
+ }
80
+ }
81
+
82
+ export interface EmbeddingCreateParams {
83
+ /**
84
+ * Input text to embed, encoded as a string or array of strings. To embed multiple
85
+ * inputs in a single request, pass an array of strings.
86
+ */
87
+ input: string | Array<string>;
88
+
89
+ /**
90
+ * The identifier of the model to use. The model must be an embedding model
91
+ * registered with Llama Stack and available via the /models endpoint.
92
+ */
93
+ model: string;
94
+
95
+ /**
96
+ * (Optional) The number of dimensions the resulting output embeddings should have.
97
+ * Only supported in text-embedding-3 and later models.
98
+ */
99
+ dimensions?: number;
100
+
101
+ /**
102
+ * (Optional) The format to return the embeddings in. Can be either "float" or
103
+ * "base64". Defaults to "float".
104
+ */
105
+ encoding_format?: string;
106
+
107
+ /**
108
+ * (Optional) A unique identifier representing your end-user, which can help OpenAI
109
+ * to monitor and detect abuse.
110
+ */
111
+ user?: string;
112
+ }
113
+
114
+ export declare namespace Embeddings {
115
+ export {
116
+ type CreateEmbeddingsResponse as CreateEmbeddingsResponse,
117
+ type EmbeddingCreateParams as EmbeddingCreateParams,
118
+ };
119
+ }
@@ -0,0 +1,184 @@
1
+ // File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2
+
3
+ import { APIResource } from '../resource';
4
+ import { isRequestOptions } from '../core';
5
+ import * as Core from '../core';
6
+
7
+ export class Files extends APIResource {
8
+ /**
9
+ * Upload a file that can be used across various endpoints. The file upload should
10
+ * be a multipart form request with:
11
+ *
12
+ * - file: The File object (not file name) to be uploaded.
13
+ * - purpose: The intended purpose of the uploaded file.
14
+ */
15
+ create(body: FileCreateParams, options?: Core.RequestOptions): Core.APIPromise<File> {
16
+ return this._client.post('/v1/openai/v1/files', Core.multipartFormRequestOptions({ body, ...options }));
17
+ }
18
+
19
+ /**
20
+ * Returns information about a specific file.
21
+ */
22
+ retrieve(fileId: string, options?: Core.RequestOptions): Core.APIPromise<File> {
23
+ return this._client.get(`/v1/openai/v1/files/${fileId}`, options);
24
+ }
25
+
26
+ /**
27
+ * Returns a list of files that belong to the user's organization.
28
+ */
29
+ list(query?: FileListParams, options?: Core.RequestOptions): Core.APIPromise<ListFilesResponse>;
30
+ list(options?: Core.RequestOptions): Core.APIPromise<ListFilesResponse>;
31
+ list(
32
+ query: FileListParams | Core.RequestOptions = {},
33
+ options?: Core.RequestOptions,
34
+ ): Core.APIPromise<ListFilesResponse> {
35
+ if (isRequestOptions(query)) {
36
+ return this.list({}, query);
37
+ }
38
+ return this._client.get('/v1/openai/v1/files', { query, ...options });
39
+ }
40
+
41
+ /**
42
+ * Delete a file.
43
+ */
44
+ delete(fileId: string, options?: Core.RequestOptions): Core.APIPromise<DeleteFileResponse> {
45
+ return this._client.delete(`/v1/openai/v1/files/${fileId}`, options);
46
+ }
47
+
48
+ /**
49
+ * Returns the contents of the specified file.
50
+ */
51
+ content(fileId: string, options?: Core.RequestOptions): Core.APIPromise<unknown> {
52
+ return this._client.get(`/v1/openai/v1/files/${fileId}/content`, options);
53
+ }
54
+ }
55
+
56
+ /**
57
+ * Response for deleting a file in OpenAI Files API.
58
+ */
59
+ export interface DeleteFileResponse {
60
+ /**
61
+ * The file identifier that was deleted
62
+ */
63
+ id: string;
64
+
65
+ /**
66
+ * Whether the file was successfully deleted
67
+ */
68
+ deleted: boolean;
69
+
70
+ /**
71
+ * The object type, which is always "file"
72
+ */
73
+ object: 'file';
74
+ }
75
+
76
+ /**
77
+ * OpenAI File object as defined in the OpenAI Files API.
78
+ */
79
+ export interface File {
80
+ /**
81
+ * The file identifier, which can be referenced in the API endpoints
82
+ */
83
+ id: string;
84
+
85
+ /**
86
+ * The size of the file, in bytes
87
+ */
88
+ bytes: number;
89
+
90
+ /**
91
+ * The Unix timestamp (in seconds) for when the file was created
92
+ */
93
+ created_at: number;
94
+
95
+ /**
96
+ * The Unix timestamp (in seconds) for when the file expires
97
+ */
98
+ expires_at: number;
99
+
100
+ /**
101
+ * The name of the file
102
+ */
103
+ filename: string;
104
+
105
+ /**
106
+ * The object type, which is always "file"
107
+ */
108
+ object: 'file';
109
+
110
+ /**
111
+ * The intended purpose of the file
112
+ */
113
+ purpose: 'assistants';
114
+ }
115
+
116
+ /**
117
+ * Response for listing files in OpenAI Files API.
118
+ */
119
+ export interface ListFilesResponse {
120
+ /**
121
+ * List of file objects
122
+ */
123
+ data: Array<File>;
124
+
125
+ first_id: string;
126
+
127
+ has_more: boolean;
128
+
129
+ last_id: string;
130
+
131
+ /**
132
+ * The object type, which is always "list"
133
+ */
134
+ object: 'list';
135
+ }
136
+
137
+ export type FileContentResponse = unknown;
138
+
139
+ export interface FileCreateParams {
140
+ file: Core.Uploadable;
141
+
142
+ /**
143
+ * Valid purpose values for OpenAI Files API.
144
+ */
145
+ purpose: 'assistants';
146
+ }
147
+
148
+ export interface FileListParams {
149
+ /**
150
+ * A cursor for use in pagination. `after` is an object ID that defines your place
151
+ * in the list. For instance, if you make a list request and receive 100 objects,
152
+ * ending with obj_foo, your subsequent call can include after=obj_foo in order to
153
+ * fetch the next page of the list.
154
+ */
155
+ after?: string;
156
+
157
+ /**
158
+ * A limit on the number of objects to be returned. Limit can range between 1 and
159
+ * 10,000, and the default is 10,000.
160
+ */
161
+ limit?: number;
162
+
163
+ /**
164
+ * Sort order by the `created_at` timestamp of the objects. `asc` for ascending
165
+ * order and `desc` for descending order.
166
+ */
167
+ order?: 'asc' | 'desc';
168
+
169
+ /**
170
+ * Only return files with the given purpose.
171
+ */
172
+ purpose?: 'assistants';
173
+ }
174
+
175
+ export declare namespace Files {
176
+ export {
177
+ type DeleteFileResponse as DeleteFileResponse,
178
+ type File as File,
179
+ type ListFilesResponse as ListFilesResponse,
180
+ type FileContentResponse as FileContentResponse,
181
+ type FileCreateParams as FileCreateParams,
182
+ type FileListParams as FileListParams,
183
+ };
184
+ }
@@ -36,6 +36,7 @@ export {
36
36
  type DatasetIterrowsParams,
37
37
  type DatasetRegisterParams,
38
38
  } from './datasets';
39
+ export { Embeddings, type CreateEmbeddingsResponse, type EmbeddingCreateParams } from './embeddings';
39
40
  export {
40
41
  Eval,
41
42
  type BenchmarkConfig,
@@ -47,6 +48,15 @@ export {
47
48
  type EvalRunEvalParams,
48
49
  type EvalRunEvalAlphaParams,
49
50
  } from './eval/eval';
51
+ export {
52
+ Files,
53
+ type DeleteFileResponse,
54
+ type File,
55
+ type ListFilesResponse,
56
+ type FileContentResponse,
57
+ type FileCreateParams,
58
+ type FileListParams,
59
+ } from './files';
50
60
  export {
51
61
  Inference,
52
62
  type ChatCompletionResponseStreamChunk,
@@ -173,3 +183,14 @@ export {
173
183
  type VectorIoInsertParams,
174
184
  type VectorIoQueryParams,
175
185
  } from './vector-io';
186
+ export {
187
+ VectorStores,
188
+ type ListVectorStoresResponse,
189
+ type VectorStore,
190
+ type VectorStoreDeleteResponse,
191
+ type VectorStoreSearchResponse,
192
+ type VectorStoreCreateParams,
193
+ type VectorStoreUpdateParams,
194
+ type VectorStoreListParams,
195
+ type VectorStoreSearchParams,
196
+ } from './vector-stores/vector-stores';
@@ -29,6 +29,7 @@ export class InputItems extends APIResource {
29
29
  export interface InputItemListResponse {
30
30
  data: Array<
31
31
  | InputItemListResponse.OpenAIResponseOutputMessageWebSearchToolCall
32
+ | InputItemListResponse.OpenAIResponseOutputMessageFileSearchToolCall
32
33
  | InputItemListResponse.OpenAIResponseOutputMessageFunctionToolCall
33
34
  | InputItemListResponse.OpenAIResponseInputFunctionToolCallOutput
34
35
  | InputItemListResponse.OpenAIResponseMessage
@@ -46,6 +47,18 @@ export namespace InputItemListResponse {
46
47
  type: 'web_search_call';
47
48
  }
48
49
 
50
+ export interface OpenAIResponseOutputMessageFileSearchToolCall {
51
+ id: string;
52
+
53
+ queries: Array<string>;
54
+
55
+ status: string;
56
+
57
+ type: 'file_search_call';
58
+
59
+ results?: Array<Record<string, boolean | number | string | Array<unknown> | unknown | null>>;
60
+ }
61
+
49
62
  export interface OpenAIResponseOutputMessageFunctionToolCall {
50
63
  arguments: string;
51
64