@agentuity/core 0.0.33 → 0.0.35
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +5 -0
- package/dist/index.js.map +1 -0
- package/dist/json.js +23 -0
- package/dist/json.js.map +1 -0
- package/dist/services/_util.js +107 -0
- package/dist/services/_util.js.map +1 -0
- package/dist/services/adapter.js +2 -0
- package/dist/services/adapter.js.map +1 -0
- package/dist/services/exception.js +8 -0
- package/dist/services/exception.js.map +1 -0
- package/dist/services/index.js +8 -0
- package/dist/services/index.js.map +1 -0
- package/dist/services/keyvalue.js +85 -0
- package/dist/services/keyvalue.js.map +1 -0
- package/dist/services/objectstore.js +218 -0
- package/dist/services/objectstore.js.map +1 -0
- package/dist/services/stream.js +392 -0
- package/dist/services/stream.js.map +1 -0
- package/dist/services/vector.js +242 -0
- package/dist/services/vector.js.map +1 -0
- package/dist/standard_schema.js +2 -0
- package/dist/standard_schema.js.map +1 -0
- package/dist/typehelper.js +2 -0
- package/dist/typehelper.js.map +1 -0
- package/package.json +5 -4
- package/src/index.ts +4 -0
- package/src/json.ts +26 -0
- package/src/services/__test__/keyvalue.test.ts +402 -0
- package/src/services/__test__/mock-adapter.ts +114 -0
- package/src/services/__test__/objectstore.test.ts +431 -0
- package/src/services/__test__/stream.test.ts +554 -0
- package/src/services/__test__/vector.test.ts +813 -0
- package/src/services/_util.ts +117 -0
- package/src/services/adapter.ts +33 -0
- package/src/services/exception.ts +7 -0
- package/src/services/index.ts +7 -0
- package/src/services/keyvalue.ts +185 -0
- package/src/services/objectstore.ts +466 -0
- package/src/services/stream.ts +614 -0
- package/src/services/vector.ts +599 -0
- package/src/standard_schema.ts +69 -0
- package/src/typehelper.ts +5 -0
|
@@ -0,0 +1,614 @@
|
|
|
1
|
+
import { safeStringify } from '../json';
|
|
2
|
+
import { FetchAdapter, FetchResponse } from './adapter';
|
|
3
|
+
import { buildUrl, toServiceException } from './_util';
|
|
4
|
+
|
|
5
|
+
/**
 * Properties for creating a stream
 */
export interface CreateStreamProps {
	/**
	 * optional metadata for the stream
	 */
	metadata?: Record<string, string>;

	/**
	 * optional contentType for the stream data. If not set, defaults to application/octet-stream
	 */
	contentType?: string;

	/**
	 * optional flag to enable gzip compression of stream data during upload. if true, will also
	 * add a Content-Encoding: gzip header to responses. The client MUST be able to accept gzip
	 * compression for this to work or must be able to uncompress the raw data it receives.
	 */
	compress?: true;
}
|
|
26
|
+
|
|
27
|
+
/**
 * Parameters for listing streams
 */
export interface ListStreamsParams {
	/**
	 * optional name filter to search for streams
	 */
	name?: string;

	/**
	 * optional metadata filters to match streams
	 * (presumably all key/value pairs must match — confirm against the service API)
	 */
	metadata?: Record<string, string>;

	/**
	 * maximum number of streams to return (default: 100, max: 1000)
	 */
	limit?: number;

	/**
	 * number of streams to skip for pagination
	 */
	offset?: number;
}
|
|
51
|
+
|
|
52
|
+
/**
 * Stream information returned by the list operation
 */
export interface StreamInfo {
	/**
	 * unique stream identifier
	 */
	id: string;

	/**
	 * the name of the stream
	 */
	name: string;

	/**
	 * the stream metadata
	 */
	metadata: Record<string, string>;

	/**
	 * the public URL to access the stream
	 */
	url: string;

	/**
	 * the size of the stream in bytes
	 */
	sizeBytes: number;
}
|
|
81
|
+
|
|
82
|
+
/**
 * Response from listing streams
 */
export interface ListStreamsResponse {
	/**
	 * whether the request was successful
	 */
	success: boolean;

	/**
	 * optional error message if not successful
	 */
	message?: string;

	/**
	 * array of streams matching the filter criteria
	 */
	streams: StreamInfo[];

	/**
	 * total count of streams matching the filter (useful for pagination)
	 */
	total: number;
}
|
|
106
|
+
|
|
107
|
+
/**
 * A durable and resumable stream that can be written to and read many times.
 * The underlying stream is backed by a durable storage system and the URL
 * returned is public and guaranteed to return the same data every time it is accessed.
 * You can read from this stream internally in the agent using the getReader() method or
 * return the URL to the stream to be used externally.
 *
 * You must write and close the stream before it can be read, but if you attempt to read
 * before any data is written, the reader will block until the first write occurs.
 */
export interface Stream extends WritableStream {
	/**
	 * unique stream identifier
	 */
	id: string;
	/**
	 * the unique stream url to consume the stream
	 */
	url: string;
	/**
	 * the total number of bytes written to the stream (before any compression)
	 */
	readonly bytesWritten: number;
	/**
	 * whether the stream is using gzip compression
	 */
	readonly compressed: boolean;
	/**
	 * write data to the stream; strings/objects are encoded to bytes automatically
	 */
	write(chunk: string | Uint8Array | ArrayBuffer | Buffer | object): Promise<void>;
	/**
	 * close the stream gracefully, handling already closed streams without error
	 */
	close(): Promise<void>;
	/**
	 * get a ReadableStream that streams from the internal URL
	 *
	 * Note: This method will block waiting for data until writes start to the Stream.
	 * The returned ReadableStream will remain open until the Stream is closed or an error occurs.
	 *
	 * @returns a ReadableStream that can be passed to response.stream()
	 */
	getReader(): ReadableStream<Uint8Array>;
}
|
|
152
|
+
|
|
153
|
+
/**
 * Stream API for creating and managing streams
 */
export interface StreamStorage {
	/**
	 * create a new stream
	 *
	 * @param name - the name of the stream (1-254 characters). you can group streams by name to organize them.
	 * @param props - optional properties for creating the stream
	 * @returns a Promise that resolves to the created Stream
	 */
	create(name: string, props?: CreateStreamProps): Promise<Stream>;

	/**
	 * list streams with optional filtering and pagination
	 *
	 * @param params - optional parameters for filtering and pagination
	 * @returns a Promise that resolves to the list of streams
	 */
	list(params?: ListStreamsParams): Promise<ListStreamsResponse>;

	/**
	 * delete a stream by id
	 *
	 * @param id - the stream id to delete
	 * @returns a Promise that resolves when the stream is deleted
	 */
	delete(id: string): Promise<void>;
}
|
|
182
|
+
|
|
183
|
+
// Module-level encoder reused for all string-to-bytes conversions.
const encoder = new TextEncoder();
|
|
184
|
+
|
|
185
|
+
/**
|
|
186
|
+
* A writable stream implementation that extends WritableStream
|
|
187
|
+
*/
|
|
188
|
+
class StreamImpl extends WritableStream implements Stream {
|
|
189
|
+
public readonly id: string;
|
|
190
|
+
public readonly url: string;
|
|
191
|
+
private activeWriter: WritableStreamDefaultWriter<Uint8Array> | null = null;
|
|
192
|
+
private _compressed: boolean;
|
|
193
|
+
private _adapter: FetchAdapter;
|
|
194
|
+
private _sink: UnderlyingSink;
|
|
195
|
+
|
|
196
|
+
constructor(
|
|
197
|
+
id: string,
|
|
198
|
+
url: string,
|
|
199
|
+
compressed: boolean,
|
|
200
|
+
underlyingSink: UnderlyingSink,
|
|
201
|
+
adapter: FetchAdapter
|
|
202
|
+
) {
|
|
203
|
+
super(underlyingSink);
|
|
204
|
+
this.id = id;
|
|
205
|
+
this.url = url;
|
|
206
|
+
this._compressed = compressed;
|
|
207
|
+
this._adapter = adapter;
|
|
208
|
+
this._sink = underlyingSink;
|
|
209
|
+
}
|
|
210
|
+
|
|
211
|
+
get bytesWritten(): number {
|
|
212
|
+
return this._sink.total;
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
get compressed(): boolean {
|
|
216
|
+
return this._compressed;
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
/**
|
|
220
|
+
* Write data to the stream
|
|
221
|
+
*/
|
|
222
|
+
async write(chunk: string | Uint8Array | ArrayBuffer | Buffer | object): Promise<void> {
|
|
223
|
+
let binaryChunk: Uint8Array;
|
|
224
|
+
if (chunk instanceof Uint8Array) {
|
|
225
|
+
binaryChunk = chunk;
|
|
226
|
+
} else if (typeof chunk === 'string') {
|
|
227
|
+
binaryChunk = encoder.encode(chunk);
|
|
228
|
+
} else if (chunk instanceof ArrayBuffer) {
|
|
229
|
+
binaryChunk = new Uint8Array(chunk);
|
|
230
|
+
} else if (typeof chunk === 'object' && chunk !== null) {
|
|
231
|
+
binaryChunk = encoder.encode(safeStringify(chunk));
|
|
232
|
+
} else {
|
|
233
|
+
binaryChunk = encoder.encode(String(chunk));
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
if (!this.activeWriter) {
|
|
237
|
+
this.activeWriter = this.getWriter();
|
|
238
|
+
}
|
|
239
|
+
await this.activeWriter.write(binaryChunk);
|
|
240
|
+
}
|
|
241
|
+
|
|
242
|
+
/**
|
|
243
|
+
* Override close to handle already closed streams gracefully
|
|
244
|
+
* This method safely closes the stream, or silently returns if already closed
|
|
245
|
+
*/
|
|
246
|
+
async close(): Promise<void> {
|
|
247
|
+
try {
|
|
248
|
+
// If we have an active writer from write() calls, use that
|
|
249
|
+
if (this.activeWriter) {
|
|
250
|
+
const writer = this.activeWriter;
|
|
251
|
+
this.activeWriter = null;
|
|
252
|
+
await writer.close();
|
|
253
|
+
return;
|
|
254
|
+
}
|
|
255
|
+
|
|
256
|
+
// Otherwise, get a writer and close it
|
|
257
|
+
const writer = this.getWriter();
|
|
258
|
+
await writer.close();
|
|
259
|
+
} catch (error) {
|
|
260
|
+
// If we get a TypeError about the stream being closed, locked, or errored,
|
|
261
|
+
// that means pipeTo() or another operation already closed it or it's in use
|
|
262
|
+
if (
|
|
263
|
+
error instanceof TypeError &&
|
|
264
|
+
(error.message.includes('closed') ||
|
|
265
|
+
error.message.includes('errored') ||
|
|
266
|
+
error.message.includes('Cannot close'))
|
|
267
|
+
) {
|
|
268
|
+
// Silently return - this is the desired behavior
|
|
269
|
+
return Promise.resolve();
|
|
270
|
+
}
|
|
271
|
+
// If the stream is locked, try to close the underlying writer
|
|
272
|
+
if (error instanceof TypeError && error.message.includes('locked')) {
|
|
273
|
+
// Best-effort closure for locked streams
|
|
274
|
+
return Promise.resolve();
|
|
275
|
+
}
|
|
276
|
+
// Re-throw any other errors
|
|
277
|
+
throw error;
|
|
278
|
+
}
|
|
279
|
+
}
|
|
280
|
+
|
|
281
|
+
/**
|
|
282
|
+
* Get a ReadableStream that streams from the internal URL
|
|
283
|
+
*
|
|
284
|
+
* Note: This method will block waiting for data until writes start to the Stream.
|
|
285
|
+
* The returned ReadableStream will remain open until the Stream is closed or an error occurs.
|
|
286
|
+
*
|
|
287
|
+
* @returns a ReadableStream that can be passed to response.stream()
|
|
288
|
+
*/
|
|
289
|
+
getReader(): ReadableStream<Uint8Array> {
|
|
290
|
+
const url = this.url;
|
|
291
|
+
const adapter = this._adapter;
|
|
292
|
+
let ac: AbortController | null = null;
|
|
293
|
+
return new ReadableStream({
|
|
294
|
+
async start(controller) {
|
|
295
|
+
try {
|
|
296
|
+
ac = new AbortController();
|
|
297
|
+
const res = await adapter.invoke(url, {
|
|
298
|
+
method: 'GET',
|
|
299
|
+
signal: ac.signal,
|
|
300
|
+
binary: true,
|
|
301
|
+
});
|
|
302
|
+
|
|
303
|
+
const response = res.response;
|
|
304
|
+
|
|
305
|
+
if (!res.ok) {
|
|
306
|
+
controller.error(
|
|
307
|
+
new Error(`Failed to read stream: ${response.status} ${response.statusText}`)
|
|
308
|
+
);
|
|
309
|
+
return;
|
|
310
|
+
}
|
|
311
|
+
|
|
312
|
+
if (!response.body) {
|
|
313
|
+
controller.error(new Error('Response body is null'));
|
|
314
|
+
return;
|
|
315
|
+
}
|
|
316
|
+
|
|
317
|
+
const reader = response.body.getReader();
|
|
318
|
+
try {
|
|
319
|
+
// Iterative read to avoid recursive promise chains
|
|
320
|
+
while (true) {
|
|
321
|
+
const { done, value } = await reader.read();
|
|
322
|
+
if (done) break;
|
|
323
|
+
if (value) controller.enqueue(value);
|
|
324
|
+
}
|
|
325
|
+
controller.close();
|
|
326
|
+
} catch (error) {
|
|
327
|
+
controller.error(error);
|
|
328
|
+
}
|
|
329
|
+
} catch (error) {
|
|
330
|
+
controller.error(error);
|
|
331
|
+
}
|
|
332
|
+
},
|
|
333
|
+
cancel(reason?: unknown) {
|
|
334
|
+
if (ac) {
|
|
335
|
+
ac.abort(reason);
|
|
336
|
+
ac = null;
|
|
337
|
+
}
|
|
338
|
+
},
|
|
339
|
+
});
|
|
340
|
+
}
|
|
341
|
+
}
|
|
342
|
+
|
|
343
|
+
// Underlying sink that bridges a WritableStream to an HTTP PUT request:
// bytes written to the sink are piped (optionally through gzip) into the
// body of a single streaming PUT against the stream's URL.
class UnderlyingSink {
	adapter: FetchAdapter;
	// Aborts the in-flight PUT when the sink is aborted or a pipe fails.
	abortController: AbortController | null = null;
	// Writer side of the transform stream whose readable side is the PUT body.
	writer: WritableStreamDefaultWriter<Uint8Array> | null = null;
	// Resolves when the streaming PUT completes; awaited in close().
	putRequestPromise: Promise<FetchResponse<unknown>> | null = null;
	// Total bytes accepted by write() (counted before compression).
	total = 0;
	// Guards close() against double invocation.
	closed = false;
	url: string;
	props?: CreateStreamProps;

	constructor(url: string, adapter: FetchAdapter, props?: CreateStreamProps) {
		this.url = url;
		this.adapter = adapter;
		this.props = props;
	}

	// Invoked by the WritableStream machinery before the first write: wires up
	// the (optionally gzip-compressed) pipeline and starts the streaming PUT.
	async start() {
		// Create AbortController for the fetch request
		this.abortController = new AbortController();

		// Create a ReadableStream to pipe data to the PUT request
		// eslint-disable-next-line prefer-const
		let { readable, writable } = new TransformStream<Uint8Array, Uint8Array>();

		// If compression is enabled, add gzip transform
		if (this.props?.compress) {
			const { Readable, Writable } = await import('node:stream');

			// Create a new transform for the compressed output
			const { readable: compressedReadable, writable: compressedWritable } = new TransformStream<
				Uint8Array,
				Uint8Array
			>();

			// Set up compression pipeline
			const { createGzip } = await import('node:zlib');
			const gzipStream = createGzip();
			const nodeWritable = Writable.toWeb(gzipStream) as WritableStream<Uint8Array>;

			// Pipe gzip output to the compressed readable
			const gzipReader = Readable.toWeb(gzipStream) as ReadableStream<Uint8Array>;
			gzipReader.pipeTo(compressedWritable).catch((error) => {
				// Propagate pipeline failure: cancel the PUT and error the writer.
				this.abortController?.abort(error);
				this.writer?.abort(error).catch(() => {});
			});

			// Chain: writable -> gzip -> compressedReadable
			readable.pipeTo(nodeWritable).catch((error) => {
				this.abortController?.abort(error);
				this.writer?.abort(error).catch(() => {});
			});
			readable = compressedReadable;
		}

		this.writer = writable.getWriter();

		// Start the PUT request with the readable stream as body
		const headers: Record<string, string> = {
			'Content-Type': this.props?.contentType || 'application/octet-stream',
		};

		if (this.props?.compress) {
			headers['Content-Encoding'] = 'gzip';
		}

		this.putRequestPromise = this.adapter.invoke(this.url, {
			method: 'PUT',
			headers,
			body: readable,
			signal: this.abortController.signal,
			// half-duplex: the request body streams while the response is pending
			duplex: 'half',
		});
	}

	// Convert a chunk to bytes and forward it into the PUT body pipeline.
	async write(chunk: string | Uint8Array | ArrayBuffer | Buffer | object) {
		if (!this.writer) {
			throw new Error('Stream writer not initialized');
		}
		// Convert input to Uint8Array if needed
		let binaryChunk: Uint8Array;
		if (chunk instanceof Uint8Array) {
			binaryChunk = chunk;
		} else if (typeof chunk === 'string') {
			binaryChunk = new TextEncoder().encode(chunk);
		} else if (chunk instanceof ArrayBuffer) {
			binaryChunk = new Uint8Array(chunk);
		} else if (typeof chunk === 'object' && chunk !== null) {
			// Convert objects to JSON string, then to bytes
			binaryChunk = new TextEncoder().encode(safeStringify(chunk));
		} else {
			// Handle primitive types (number, boolean, etc.)
			binaryChunk = new TextEncoder().encode(String(chunk));
		}
		// Write the chunk to the transform stream, which pipes to the PUT request
		await this.writer.write(binaryChunk);
		this.total += binaryChunk.length;
	}

	// Finish the upload: close the body stream, then wait for the PUT to
	// complete, surfacing any non-2xx response as an error. Idempotent.
	async close() {
		if (this.closed) {
			return;
		}
		this.closed = true;
		if (this.writer) {
			await this.writer.close();
			this.writer = null;
		}
		// Wait for the PUT request to complete
		if (this.putRequestPromise) {
			try {
				const res = await this.putRequestPromise;
				if (!res.ok) {
					throw new Error(
						`PUT request failed: ${res.response.status} ${res.response.statusText}`
					);
				}
			} catch (error) {
				// AbortError means the upload was deliberately cancelled; swallow it.
				if (error instanceof Error && error.name !== 'AbortError') {
					throw error;
				}
			}
			this.putRequestPromise = null;
		}
		this.abortController = null;
	}

	// Abort the upload: error the body stream and cancel the PUT request.
	async abort(reason?: unknown) {
		if (this.writer) {
			await this.writer.abort(reason);
			this.writer = null;
		}
		// Abort the fetch request
		if (this.abortController) {
			this.abortController.abort(reason);
			this.abortController = null;
		}
		this.putRequestPromise = null;
	}
}
|
|
482
|
+
|
|
483
|
+
export class StreamStorageService implements StreamStorage {
|
|
484
|
+
#adapter: FetchAdapter;
|
|
485
|
+
#baseUrl: string;
|
|
486
|
+
|
|
487
|
+
constructor(baseUrl: string, adapter: FetchAdapter) {
|
|
488
|
+
this.#adapter = adapter;
|
|
489
|
+
this.#baseUrl = baseUrl;
|
|
490
|
+
}
|
|
491
|
+
|
|
492
|
+
async create(name: string, props?: CreateStreamProps): Promise<Stream> {
|
|
493
|
+
if (!name || name.length < 1 || name.length > 254) {
|
|
494
|
+
throw new Error('Stream name must be between 1 and 254 characters');
|
|
495
|
+
}
|
|
496
|
+
const url = this.#baseUrl;
|
|
497
|
+
const signal = AbortSignal.timeout(10_000);
|
|
498
|
+
const attributes: Record<string, string> = {
|
|
499
|
+
name,
|
|
500
|
+
};
|
|
501
|
+
if (!props?.contentType) {
|
|
502
|
+
props = props ?? {};
|
|
503
|
+
props.contentType = 'application/octet-stream';
|
|
504
|
+
}
|
|
505
|
+
if (props?.metadata) {
|
|
506
|
+
attributes['metadata'] = JSON.stringify(props.metadata);
|
|
507
|
+
}
|
|
508
|
+
if (props?.contentType) {
|
|
509
|
+
attributes['stream.content_type'] = props.contentType;
|
|
510
|
+
}
|
|
511
|
+
const body = JSON.stringify({
|
|
512
|
+
name,
|
|
513
|
+
...(props?.metadata && { metadata: props.metadata }),
|
|
514
|
+
...(props?.contentType && { contentType: props.contentType }),
|
|
515
|
+
});
|
|
516
|
+
const res = await this.#adapter.invoke<{ id: string }>(url, {
|
|
517
|
+
method: 'POST',
|
|
518
|
+
body,
|
|
519
|
+
contentType: 'application/json',
|
|
520
|
+
signal,
|
|
521
|
+
telemetry: {
|
|
522
|
+
name: 'agentuity.stream.create',
|
|
523
|
+
attributes,
|
|
524
|
+
},
|
|
525
|
+
});
|
|
526
|
+
if (res.ok) {
|
|
527
|
+
const streamUrl = buildUrl(this.#baseUrl, res.data.id);
|
|
528
|
+
const underlyingSink = new UnderlyingSink(streamUrl, this.#adapter, props);
|
|
529
|
+
|
|
530
|
+
const stream = new StreamImpl(
|
|
531
|
+
res.data.id,
|
|
532
|
+
streamUrl,
|
|
533
|
+
props?.compress ?? false,
|
|
534
|
+
underlyingSink,
|
|
535
|
+
this.#adapter
|
|
536
|
+
);
|
|
537
|
+
|
|
538
|
+
return stream;
|
|
539
|
+
}
|
|
540
|
+
throw await toServiceException(res.response);
|
|
541
|
+
}
|
|
542
|
+
|
|
543
|
+
async list(params?: ListStreamsParams): Promise<ListStreamsResponse> {
|
|
544
|
+
const attributes: Record<string, string> = {};
|
|
545
|
+
if (params?.limit !== undefined) {
|
|
546
|
+
if (params.limit <= 0 || params.limit > 1000) {
|
|
547
|
+
throw new Error('limit must be greater than 0 and less than or equal to 1000');
|
|
548
|
+
}
|
|
549
|
+
attributes['limit'] = String(params.limit);
|
|
550
|
+
}
|
|
551
|
+
if (params?.offset !== undefined) {
|
|
552
|
+
attributes['offset'] = String(params.offset);
|
|
553
|
+
}
|
|
554
|
+
if (params?.name) {
|
|
555
|
+
attributes['name'] = params.name;
|
|
556
|
+
}
|
|
557
|
+
if (params?.metadata) {
|
|
558
|
+
attributes['metadata'] = JSON.stringify(params.metadata);
|
|
559
|
+
}
|
|
560
|
+
|
|
561
|
+
const requestBody: Record<string, unknown> = {};
|
|
562
|
+
if (params?.name) {
|
|
563
|
+
requestBody.name = params.name;
|
|
564
|
+
}
|
|
565
|
+
if (params?.metadata) {
|
|
566
|
+
requestBody.metadata = params.metadata;
|
|
567
|
+
}
|
|
568
|
+
if (params?.limit) {
|
|
569
|
+
requestBody.limit = params.limit;
|
|
570
|
+
}
|
|
571
|
+
if (params?.offset) {
|
|
572
|
+
requestBody.offset = params.offset;
|
|
573
|
+
}
|
|
574
|
+
|
|
575
|
+
const signal = AbortSignal.timeout(30_000);
|
|
576
|
+
const url = buildUrl(this.#baseUrl, 'list');
|
|
577
|
+
const res = await this.#adapter.invoke<ListStreamsResponse>(url, {
|
|
578
|
+
method: 'POST',
|
|
579
|
+
signal,
|
|
580
|
+
body: JSON.stringify(requestBody),
|
|
581
|
+
contentType: 'application/json',
|
|
582
|
+
telemetry: {
|
|
583
|
+
name: 'agentuity.stream.list',
|
|
584
|
+
attributes,
|
|
585
|
+
},
|
|
586
|
+
});
|
|
587
|
+
if (res.ok) {
|
|
588
|
+
return res.data;
|
|
589
|
+
}
|
|
590
|
+
throw await toServiceException(res.response);
|
|
591
|
+
}
|
|
592
|
+
|
|
593
|
+
async delete(id: string): Promise<void> {
|
|
594
|
+
if (!id || typeof id !== 'string' || id.trim().length === 0) {
|
|
595
|
+
throw new Error('Stream id is required and must be a non-empty string');
|
|
596
|
+
}
|
|
597
|
+
const signal = AbortSignal.timeout(30_000);
|
|
598
|
+
const url = buildUrl(this.#baseUrl, id);
|
|
599
|
+
const res = await this.#adapter.invoke<void>(url, {
|
|
600
|
+
method: 'DELETE',
|
|
601
|
+
signal,
|
|
602
|
+
telemetry: {
|
|
603
|
+
name: 'agentuity.stream.delete',
|
|
604
|
+
attributes: {
|
|
605
|
+
'stream.id': id,
|
|
606
|
+
},
|
|
607
|
+
},
|
|
608
|
+
});
|
|
609
|
+
if (res.ok) {
|
|
610
|
+
return;
|
|
611
|
+
}
|
|
612
|
+
throw await toServiceException(res.response);
|
|
613
|
+
}
|
|
614
|
+
}
|