durable-cf-streams 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +134 -0
- package/dist/chunk-5N2UNHIW.js +306 -0
- package/dist/chunk-AU5YTPHF.js +239 -0
- package/dist/chunk-E7XMLPFW.js +275 -0
- package/dist/chunk-KHGAPSJ7.js +271 -0
- package/dist/chunk-LK4KY5UI.js +285 -0
- package/dist/chunk-VLDLQ6TP.js +15096 -0
- package/dist/index.d.ts +76 -0
- package/dist/index.js +66 -0
- package/dist/interface-DFfkkkVM.d.ts +63 -0
- package/dist/storage/d1.d.ts +21 -0
- package/dist/storage/d1.js +8 -0
- package/dist/storage/index.d.ts +5 -0
- package/dist/storage/index.js +20 -0
- package/dist/storage/kv.d.ts +21 -0
- package/dist/storage/kv.js +8 -0
- package/dist/storage/memory.d.ts +16 -0
- package/dist/storage/memory.js +8 -0
- package/dist/storage/r2.d.ts +23 -0
- package/dist/storage/r2.js +8 -0
- package/package.json +74 -0
package/README.md
ADDED
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
# durable-cf-streams
|
|
2
|
+
|
|
3
|
+
building blocks for [durable streams](https://github.com/durable-streams) on cloudflare: storage backends and protocol utilities. the idea is that you can borrow the utilities and wire up http (or whatever transport) however you want.
|
|
4
|
+
|
|
5
|
+
## install
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
pnpm add durable-cf-streams
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## storage backends
|
|
12
|
+
|
|
13
|
+
```typescript
|
|
14
|
+
import { MemoryStore } from "durable-cf-streams/storage/memory";
|
|
15
|
+
import { D1Store } from "durable-cf-streams/storage/d1";
|
|
16
|
+
import { KVStore } from "durable-cf-streams/storage/kv";
|
|
17
|
+
import { R2Store } from "durable-cf-streams/storage/r2";
|
|
18
|
+
|
|
19
|
+
// in-memory (for durable objects)
|
|
20
|
+
const store = new MemoryStore();
|
|
21
|
+
|
|
22
|
+
// d1 database
|
|
23
|
+
const store = new D1Store(env.DB);
|
|
24
|
+
await store.initialize(); // creates table
|
|
25
|
+
|
|
26
|
+
// workers kv
|
|
27
|
+
const store = new KVStore(env.KV);
|
|
28
|
+
|
|
29
|
+
// r2 bucket
|
|
30
|
+
const store = new R2Store(env.BUCKET);
|
|
31
|
+
```
|
|
32
|
+
|
|
33
|
+
## streamstore interface
|
|
34
|
+
|
|
35
|
+
```typescript
|
|
36
|
+
interface StreamStore {
|
|
37
|
+
put(path: string, options: PutOptions): Promise<PutResult>;
|
|
38
|
+
append(path: string, data: Uint8Array, options?: AppendOptions): Promise<AppendResult>;
|
|
39
|
+
get(path: string, options?: GetOptions): Promise<GetResult>;
|
|
40
|
+
head(path: string): Promise<HeadResult | null>;
|
|
41
|
+
delete(path: string): Promise<void>;
|
|
42
|
+
has(path: string): boolean;
|
|
43
|
+
waitForData(path: string, offset: string, timeoutMs: number): Promise<WaitResult>;
|
|
44
|
+
formatResponse(path: string, messages: StreamMessage[]): Uint8Array;
|
|
45
|
+
}
|
|
46
|
+
```
|
|
47
|
+
|
|
48
|
+
## utilities
|
|
49
|
+
|
|
50
|
+
```typescript
|
|
51
|
+
import {
|
|
52
|
+
// offsets
|
|
53
|
+
parseOffset,
|
|
54
|
+
formatOffset,
|
|
55
|
+
compareOffsets,
|
|
56
|
+
isValidOffset,
|
|
57
|
+
initialOffset,
|
|
58
|
+
|
|
59
|
+
// cursors
|
|
60
|
+
generateCursor,
|
|
61
|
+
getNextCursor,
|
|
62
|
+
|
|
63
|
+
// protocol
|
|
64
|
+
normalizeContentType,
|
|
65
|
+
validateTTL,
|
|
66
|
+
validateExpiresAt,
|
|
67
|
+
generateETag,
|
|
68
|
+
isJsonContentType,
|
|
69
|
+
} from "durable-cf-streams";
|
|
70
|
+
```
|
|
71
|
+
|
|
72
|
+
## errors
|
|
73
|
+
|
|
74
|
+
tagged errors for pattern matching:
|
|
75
|
+
|
|
76
|
+
```typescript
|
|
77
|
+
import {
|
|
78
|
+
StreamNotFoundError,
|
|
79
|
+
SequenceConflictError,
|
|
80
|
+
ContentTypeMismatchError,
|
|
81
|
+
StreamConflictError,
|
|
82
|
+
InvalidJsonError,
|
|
83
|
+
PayloadTooLargeError,
|
|
84
|
+
} from "durable-cf-streams";
|
|
85
|
+
|
|
86
|
+
// check error type
|
|
87
|
+
if (error instanceof StreamNotFoundError) {
|
|
88
|
+
return new Response("not found", { status: 404 });
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
// or use _tag for switch
|
|
92
|
+
switch (error._tag) {
|
|
93
|
+
case "StreamNotFoundError": return new Response("not found", { status: 404 });
|
|
94
|
+
case "SequenceConflictError": return new Response("conflict", { status: 409 });
|
|
95
|
+
}
|
|
96
|
+
```
|
|
97
|
+
|
|
98
|
+
## example
|
|
99
|
+
|
|
100
|
+
```typescript
|
|
101
|
+
import { MemoryStore } from "durable-cf-streams/storage/memory";
|
|
102
|
+
import { normalizeContentType } from "durable-cf-streams";
|
|
103
|
+
|
|
104
|
+
export class StreamDO implements DurableObject {
|
|
105
|
+
private store = new MemoryStore();
|
|
106
|
+
|
|
107
|
+
async fetch(request: Request): Promise<Response> {
|
|
108
|
+
const path = new URL(request.url).pathname;
|
|
109
|
+
|
|
110
|
+
if (request.method === "PUT") {
|
|
111
|
+
const contentType = request.headers.get("content-type") ?? "application/octet-stream";
|
|
112
|
+
const body = new Uint8Array(await request.arrayBuffer());
|
|
113
|
+
|
|
114
|
+
const result = await this.store.put(path, {
|
|
115
|
+
contentType: normalizeContentType(contentType),
|
|
116
|
+
data: body.length > 0 ? body : undefined,
|
|
117
|
+
});
|
|
118
|
+
|
|
119
|
+
return new Response(null, {
|
|
120
|
+
status: result.created ? 201 : 200,
|
|
121
|
+
headers: { "Stream-Next-Offset": result.nextOffset },
|
|
122
|
+
});
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
// ... handle other methods
|
|
126
|
+
}
|
|
127
|
+
}
|
|
128
|
+
```
|
|
129
|
+
|
|
130
|
+
see [examples](../../examples) for complete implementations.
|
|
131
|
+
|
|
132
|
+
## license
|
|
133
|
+
|
|
134
|
+
mit
|
|
@@ -0,0 +1,306 @@
|
|
|
1
|
+
// esbuild bundler preamble (auto-generated module-shim helpers).
var __defProp = Object.defineProperty;
// Copy every key of `all` onto `target` as a lazy, enumerable getter.
// `all` maps export names to zero-arg accessor functions, so bindings are
// resolved at access time rather than eagerly at module evaluation.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
|
|
6
|
+
|
|
7
|
+
// src/cursor.ts
//
// Cursors are coarse time buckets used to collapse long-poll requests: a
// cursor is floor(now / CURSOR_INTERVAL_MS) rendered as a decimal string.
var CURSOR_INTERVAL_MS = 2e4;
// Exact shape generateCursor emits: decimal digits only (no sign, no
// whitespace, no trailing characters).
var CURSOR_REGEX = /^[0-9]+$/;
/**
 * Generate a cursor for the current time interval.
 * @returns the current interval number as a base-10 string
 */
var generateCursor = () => {
  const interval = Math.floor(Date.now() / CURSOR_INTERVAL_MS);
  return interval.toString(10);
};
/**
 * Parse a client-supplied cursor into its interval number.
 *
 * Fix: the previous implementation relied on bare Number.parseInt, which
 * accepts leading whitespace/signs and ignores trailing garbage (e.g.
 * "12abc" parsed as 12, so isValidCursor("12abc") was true). Cursors are
 * now validated against the exact decimal format generateCursor produces.
 *
 * @returns the positive interval number, or null when the cursor is invalid
 */
var parseCursor = (cursor) => {
  if (!CURSOR_REGEX.test(cursor)) {
    return null;
  }
  const parsed = Number.parseInt(cursor, 10);
  return Number.isNaN(parsed) || parsed <= 0 ? null : parsed;
};
/** True when `cursor` is a well-formed, positive interval cursor. */
var isValidCursor = (cursor) => parseCursor(cursor) !== null;
/**
 * Numeric comparison of two cursors (negative / zero / positive).
 * Both arguments are assumed to already be valid cursors.
 */
var compareCursors = (a, b) => {
  const parsedA = Number.parseInt(a, 10);
  const parsedB = Number.parseInt(b, 10);
  return parsedA - parsedB;
};
/**
 * True when `cursor` is strictly newer than the client's last-seen cursor.
 * A missing client cursor always counts as "newer".
 */
var isNewerCursor = (cursor, clientCursor) => {
  if (!clientCursor) {
    return true;
  }
  return compareCursors(cursor, clientCursor) > 0;
};
/**
 * Produce the next cursor to hand back to a client: the current interval,
 * bumped by one when the client's cursor is already at (or ahead of) the
 * current interval so the returned cursor is always strictly increasing.
 */
var getNextCursor = (clientCursor) => {
  const current = generateCursor();
  if (!clientCursor) {
    return current;
  }
  const clientInterval = Number.parseInt(clientCursor, 10);
  const currentInterval = Number.parseInt(current, 10);
  if (currentInterval <= clientInterval) {
    return (clientInterval + 1).toString(10);
  }
  return current;
};
|
|
41
|
+
|
|
42
|
+
// src/errors.ts
//
// Tagged error hierarchy. Every error carries a literal `_tag` discriminant
// so callers can pattern-match with `switch (err._tag)` in addition to
// `instanceof` checks.

/** Raised when an operation targets a stream path that does not exist. */
var StreamNotFoundError = class extends Error {
  _tag = "StreamNotFoundError";
  path;
  constructor(path) {
    super(`Stream not found: ${path}`);
    this.name = this._tag;
    this.path = path;
  }
};

/** Raised when an idempotent create conflicts with the existing stream. */
var StreamConflictError = class extends Error {
  _tag = "StreamConflictError";
  constructor(message) {
    super(message);
    this.name = this._tag;
  }
};

/** Raised when an append's sequence number is not newer than the last one. */
var SequenceConflictError = class extends Error {
  _tag = "SequenceConflictError";
  expected;
  received;
  constructor(expected, received) {
    super(`Sequence conflict: expected ${expected}, received ${received}`);
    this.name = this._tag;
    this.expected = expected;
    this.received = received;
  }
};

/** Raised when a request's content type disagrees with the stream's. */
var ContentTypeMismatchError = class extends Error {
  _tag = "ContentTypeMismatchError";
  expected;
  received;
  constructor(expected, received) {
    super(`Content-Type mismatch: expected ${expected}, received ${received}`);
    this.name = this._tag;
    this.expected = expected;
    this.received = received;
  }
};

/** Raised when a JSON stream receives an unparseable or empty payload. */
var InvalidJsonError = class extends Error {
  _tag = "InvalidJsonError";
  constructor(message) {
    super(message);
    this.name = this._tag;
  }
};

/** Raised when an offset string does not match the expected format. */
var InvalidOffsetError = class extends Error {
  _tag = "InvalidOffsetError";
  constructor(offset) {
    super(`Invalid offset format: ${offset}`);
    this.name = this._tag;
  }
};

/** Raised when a payload exceeds the configured size limit. */
var PayloadTooLargeError = class extends Error {
  _tag = "PayloadTooLargeError";
  maxBytes;
  receivedBytes;
  constructor(maxBytes, receivedBytes) {
    super(
      `Payload too large: max ${maxBytes} bytes, received ${receivedBytes} bytes`
    );
    this.name = this._tag;
    this.maxBytes = maxBytes;
    this.receivedBytes = receivedBytes;
  }
};
|
|
108
|
+
|
|
109
|
+
// src/offsets.ts
//
// An offset is "<seq>_<pos>" where both halves are 16-digit lowercase hex:
// `seq` counts appends, `pos` is the absolute byte position in the stream.
// The sentinel "-1" means "read from the beginning" and normalizes to the
// initial offset.
var OFFSET_REGEX = /^[0-9a-f]{16}_[0-9a-f]{16}$/;
var INITIAL_OFFSET = "0000000000000000_0000000000000000";
var SENTINEL_OFFSET = "-1";

/** The offset that addresses the very start of a stream. */
var initialOffset = () => INITIAL_OFFSET;

/** True when `offset` is the "-1" read-from-start sentinel. */
var isSentinelOffset = (offset) => offset === SENTINEL_OFFSET;

/** The sentinel and well-formed "<hex16>_<hex16>" strings are valid. */
var isValidOffset = (offset) =>
  isSentinelOffset(offset) || OFFSET_REGEX.test(offset);

/** Map the sentinel to the initial offset; pass everything else through. */
var normalizeOffset = (offset) =>
  isSentinelOffset(offset) ? INITIAL_OFFSET : offset;

/**
 * Decode an offset string into its numeric parts.
 * @returns `{ seq, pos }`, or null when the string is malformed
 */
var parseOffset = (offset) => {
  if (!OFFSET_REGEX.test(offset)) {
    return null;
  }
  // The regex guarantees exactly one underscore at position 16.
  const separator = offset.indexOf("_");
  return {
    seq: Number.parseInt(offset.slice(0, separator), 16),
    pos: Number.parseInt(offset.slice(separator + 1), 16)
  };
};

// Render a number as 16-digit zero-padded lowercase hex.
var toHex16 = (value) => value.toString(16).padStart(16, "0");

/** Encode an append count and byte position as an offset string. */
var formatOffset = (seq, pos) => `${toHex16(seq)}_${toHex16(pos)}`;

/**
 * Total order on offsets: compare by seq, then by pos.
 * @returns -1 / 0 / 1; malformed input on either side compares equal (0)
 */
var compareOffsets = (a, b) => {
  const left = parseOffset(a);
  const right = parseOffset(b);
  if (!left || !right) {
    return 0;
  }
  if (left.seq !== right.seq) {
    return left.seq < right.seq ? -1 : 1;
  }
  if (left.pos !== right.pos) {
    return left.pos < right.pos ? -1 : 1;
  }
  return 0;
};

/** Byte position encoded in an offset; 0 for malformed input. */
var offsetToBytePos = (offset) => parseOffset(offset)?.pos ?? 0;

/**
 * Advance an offset's byte position by `byteCount` (same seq).
 * Malformed offsets are returned unchanged.
 */
var advanceOffset = (offset, byteCount) => {
  const parsed = parseOffset(offset);
  return parsed ? formatOffset(parsed.seq, parsed.pos + byteCount) : offset;
};

/**
 * Bump an offset's append count by one (same pos).
 * Malformed offsets are returned unchanged.
 */
var incrementSeq = (offset) => {
  const parsed = parseOffset(offset);
  return parsed ? formatOffset(parsed.seq + 1, parsed.pos) : offset;
};
|
|
164
|
+
|
|
165
|
+
// src/protocol.ts
var TTL_REGEX = /^[1-9][0-9]*$/;
var EXPIRES_AT_REGEX = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?(Z|[+-]\d{2}:\d{2})$/;
// ETag wire shape: "<base64-path>:<start-offset>:<end-offset>" in quotes.
// The first two fields can never contain a colon (base64 alphabet and offset
// format exclude it), so the greedy split below is unambiguous.
var ETAG_REGEX = /^"([^:]+):([^:]+):([^"]+)"$/;
/**
 * Lowercase a content type and strip any parameters (";charset=..." etc.).
 */
var normalizeContentType = (contentType) => {
  const semicolonIndex = contentType.indexOf(";");
  if (semicolonIndex !== -1) {
    return contentType.slice(0, semicolonIndex).trim().toLowerCase();
  }
  return contentType.trim().toLowerCase();
};
/** True for "application/json" and any "+json" structured suffix type. */
var isJsonContentType = (contentType) => {
  const normalized = normalizeContentType(contentType);
  return normalized === "application/json" || normalized.endsWith("+json");
};
/**
 * Validate a TTL header value.
 * @returns the positive integer TTL in seconds, or null when invalid
 */
var validateTTL = (ttl) => {
  if (!TTL_REGEX.test(ttl)) {
    return null;
  }
  const parsed = Number.parseInt(ttl, 10);
  return Number.isNaN(parsed) || parsed <= 0 ? null : parsed;
};
/**
 * Validate an Expires-At header value (ISO-8601 with explicit zone).
 * @returns the parsed Date, or null when the value is malformed
 */
var validateExpiresAt = (value) => {
  if (!EXPIRES_AT_REGEX.test(value)) {
    return null;
  }
  const date = new Date(value);
  return Number.isNaN(date.getTime()) ? null : date;
};
// Base64-encode a string's UTF-8 bytes. Plain btoa() throws an
// InvalidCharacterError for any code point above U+00FF, so paths containing
// non-Latin-1 characters must be converted to a byte string first. For pure
// ASCII paths the output is identical to btoa(path).
var encodeBase64Utf8 = (value) => {
  const bytes = new TextEncoder().encode(value);
  let binary = "";
  for (const byte of bytes) {
    binary += String.fromCharCode(byte);
  }
  return btoa(binary);
};
// Inverse of encodeBase64Utf8: base64 -> bytes -> UTF-8 string.
// atob() throws on malformed base64; callers catch that.
var decodeBase64Utf8 = (value) => {
  const binary = atob(value);
  const bytes = new Uint8Array(binary.length);
  for (let i = 0; i < binary.length; i++) {
    bytes[i] = binary.charCodeAt(i);
  }
  return new TextDecoder().decode(bytes);
};
/**
 * Build a strong ETag covering `path` and the [start, end) offset range.
 *
 * Fix: previously called btoa(path) directly, which throws for paths with
 * code points above U+00FF; the path's UTF-8 bytes are now encoded instead
 * (backward compatible — ASCII paths yield the same ETag as before).
 */
var generateETag = (path, startOffset, endOffset) => {
  const pathBase64 = encodeBase64Utf8(path);
  return `"${pathBase64}:${startOffset}:${endOffset}"`;
};
/**
 * Parse an ETag produced by generateETag.
 * @returns `{ path, startOffset, endOffset }`, or null when malformed
 */
var parseETag = (etag) => {
  const match = ETAG_REGEX.exec(etag);
  if (!match || match.length < 4) {
    return null;
  }
  const pathBase64 = match[1];
  const startOffset = match[2];
  const endOffset = match[3];
  if (!(pathBase64 && startOffset && endOffset)) {
    return null;
  }
  try {
    return {
      path: decodeBase64Utf8(pathBase64),
      startOffset,
      endOffset
    };
  } catch {
    // Invalid base64 in the path segment.
    return null;
  }
};
/**
 * Append a JSON payload (single value or array of values) to a JSON stream's
 * stored bytes. Stored form is a flat run of `<item>,` chunks so reads can
 * wrap the buffer in brackets cheaply (see formatJsonResponse).
 * @throws InvalidJsonError when the payload is unparseable or an empty array
 */
var processJsonAppend = (existing, newData) => {
  const newStr = new TextDecoder().decode(newData).trim();
  let parsed;
  try {
    parsed = JSON.parse(newStr);
  } catch (e) {
    throw new InvalidJsonError(e instanceof Error ? e.message : "Invalid JSON");
  }
  const items = Array.isArray(parsed) ? parsed : [parsed];
  if (items.length === 0) {
    throw new InvalidJsonError("Empty array not allowed on append");
  }
  const serialized = `${items.map((item) => JSON.stringify(item)).join(",")},`;
  const serializedBytes = new TextEncoder().encode(serialized);
  const result = new Uint8Array(existing.length + serializedBytes.length);
  result.set(existing);
  result.set(serializedBytes, existing.length);
  return result;
};
/**
 * Render stored `<item>,` bytes as a JSON array response: drop the trailing
 * comma and wrap in brackets. Empty data renders as "[]".
 */
var formatJsonResponse = (data) => {
  if (data.length === 0) {
    return new TextEncoder().encode("[]");
  }
  let str = new TextDecoder().decode(data);
  if (str.endsWith(",")) {
    str = str.slice(0, -1);
  }
  return new TextEncoder().encode(`[${str}]`);
};
/**
 * Validate and serialize the initial payload of a JSON stream into stored
 * `<item>,` form. PUT may carry an empty array (empty stream); POST may not.
 * @throws InvalidJsonError when unparseable, or an empty array on POST
 */
var validateJsonCreate = (data, isPut) => {
  const str = new TextDecoder().decode(data).trim();
  let parsed;
  try {
    parsed = JSON.parse(str);
  } catch (e) {
    throw new InvalidJsonError(e instanceof Error ? e.message : "Invalid JSON");
  }
  let items;
  if (Array.isArray(parsed)) {
    if (parsed.length === 0 && !isPut) {
      throw new InvalidJsonError("Empty array not allowed on POST");
    }
    items = parsed;
  } else {
    items = [parsed];
  }
  if (items.length === 0) {
    return new Uint8Array(0);
  }
  const serialized = `${items.map((item) => JSON.stringify(item)).join(",")},`;
  return new TextEncoder().encode(serialized);
};
|
|
271
|
+
|
|
272
|
+
export {
|
|
273
|
+
__export,
|
|
274
|
+
generateCursor,
|
|
275
|
+
parseCursor,
|
|
276
|
+
isValidCursor,
|
|
277
|
+
compareCursors,
|
|
278
|
+
isNewerCursor,
|
|
279
|
+
getNextCursor,
|
|
280
|
+
StreamNotFoundError,
|
|
281
|
+
StreamConflictError,
|
|
282
|
+
SequenceConflictError,
|
|
283
|
+
ContentTypeMismatchError,
|
|
284
|
+
InvalidJsonError,
|
|
285
|
+
InvalidOffsetError,
|
|
286
|
+
PayloadTooLargeError,
|
|
287
|
+
initialOffset,
|
|
288
|
+
isSentinelOffset,
|
|
289
|
+
isValidOffset,
|
|
290
|
+
normalizeOffset,
|
|
291
|
+
parseOffset,
|
|
292
|
+
formatOffset,
|
|
293
|
+
compareOffsets,
|
|
294
|
+
offsetToBytePos,
|
|
295
|
+
advanceOffset,
|
|
296
|
+
incrementSeq,
|
|
297
|
+
normalizeContentType,
|
|
298
|
+
isJsonContentType,
|
|
299
|
+
validateTTL,
|
|
300
|
+
validateExpiresAt,
|
|
301
|
+
generateETag,
|
|
302
|
+
parseETag,
|
|
303
|
+
processJsonAppend,
|
|
304
|
+
formatJsonResponse,
|
|
305
|
+
validateJsonCreate
|
|
306
|
+
};
|
|
@@ -0,0 +1,239 @@
|
|
|
1
|
+
import {
|
|
2
|
+
Deferred_exports,
|
|
3
|
+
Effect_exports
|
|
4
|
+
} from "./chunk-VLDLQ6TP.js";
|
|
5
|
+
import {
|
|
6
|
+
ContentTypeMismatchError,
|
|
7
|
+
SequenceConflictError,
|
|
8
|
+
StreamConflictError,
|
|
9
|
+
StreamNotFoundError,
|
|
10
|
+
formatJsonResponse,
|
|
11
|
+
formatOffset,
|
|
12
|
+
generateCursor,
|
|
13
|
+
generateETag,
|
|
14
|
+
initialOffset,
|
|
15
|
+
isJsonContentType,
|
|
16
|
+
normalizeContentType,
|
|
17
|
+
offsetToBytePos,
|
|
18
|
+
processJsonAppend,
|
|
19
|
+
validateJsonCreate
|
|
20
|
+
} from "./chunk-5N2UNHIW.js";
|
|
21
|
+
|
|
22
|
+
// src/storage/memory.ts

/**
 * Enforce idempotent-create semantics: a PUT against an existing stream must
 * carry the same (normalized) content type, TTL, and expiry as the original
 * create, otherwise it is a conflict.
 * @throws ContentTypeMismatchError when the normalized content types differ
 * @throws StreamConflictError when TTL or Expires-At differ
 */
function validateIdempotentCreate(existing, options) {
  const storedType = normalizeContentType(existing.contentType);
  const requestedType = normalizeContentType(options.contentType);
  if (storedType !== requestedType) {
    throw new ContentTypeMismatchError(storedType, requestedType);
  }
  if (existing.ttlSeconds !== options.ttlSeconds) {
    throw new StreamConflictError("TTL mismatch on idempotent create");
  }
  if (existing.expiresAt !== options.expiresAt) {
    throw new StreamConflictError("Expires-At mismatch on idempotent create");
  }
}
|
|
36
|
+
/**
 * Normalize the optional initial payload of a newly created stream.
 * For JSON streams a non-empty payload is validated and re-serialized into
 * the stored `<item>,` form; a non-empty payload counts as the first append.
 * @returns the stored bytes, the initial append count, and the next offset
 */
function prepareInitialData(options) {
  const raw = options.data ?? new Uint8Array(0);
  const data =
    isJsonContentType(options.contentType) && raw.length > 0
      ? validateJsonCreate(raw, true)
      : raw;
  const appendCount = data.length === 0 ? 0 : 1;
  return {
    data,
    appendCount,
    nextOffset: formatOffset(appendCount, data.length)
  };
}
|
|
46
|
+
/**
 * In-memory StreamStore (intended for Durable Objects, where one instance
 * owns the state). Each stream's bytes are kept as a single contiguous
 * Uint8Array; offsets address absolute byte positions within it. Long-poll
 * readers are parked as Effect Deferred "waiters" and resolved on append.
 *
 * Fix: put() and append() previously let validation errors
 * (ContentTypeMismatchError, StreamConflictError, InvalidJsonError) escape
 * as synchronous throws while every other failure path returned a rejected
 * promise; `.catch()`-style callers missed them. All failures are now
 * reported uniformly as rejections.
 */
var MemoryStore = class {
  // path -> { metadata, data, nextOffset, lastSeq, appendCount, waiters }
  streams = /* @__PURE__ */ new Map();
  /**
   * Create a stream, or validate an idempotent re-create of an existing one.
   * @returns `{ created, nextOffset }`; rejects on conflict or bad JSON
   */
  put(path, options) {
    const existing = this.streams.get(path);
    if (existing) {
      try {
        validateIdempotentCreate(existing.metadata, options);
      } catch (error) {
        // Surface conflicts as rejections, consistent with append()/get().
        return Promise.reject(error);
      }
      return Promise.resolve({
        created: false,
        nextOffset: existing.nextOffset
      });
    }
    let prepared;
    try {
      // validateJsonCreate (inside) throws InvalidJsonError on bad payloads.
      prepared = prepareInitialData(options);
    } catch (error) {
      return Promise.reject(error);
    }
    const { data, appendCount, nextOffset } = prepared;
    const stream = {
      metadata: {
        path,
        contentType: options.contentType,
        ttlSeconds: options.ttlSeconds,
        expiresAt: options.expiresAt,
        createdAt: Date.now()
      },
      data,
      nextOffset,
      lastSeq: void 0,
      appendCount,
      waiters: []
    };
    this.streams.set(path, stream);
    return Promise.resolve({ created: true, nextOffset });
  }
  /**
   * Append bytes (or JSON items) to a stream. Optional `options.seq` gives
   * producer-side dedup: a seq not strictly greater than the last accepted
   * one is rejected. Parked waiters are notified after the data lands.
   * @returns `{ nextOffset }`; rejects on unknown path, type/seq conflict,
   *          or invalid JSON
   */
  append(path, data, options) {
    const stream = this.streams.get(path);
    if (!stream) {
      return Promise.reject(new StreamNotFoundError(path));
    }
    const streamNormalized = normalizeContentType(stream.metadata.contentType);
    if (options?.contentType) {
      const reqNormalized = normalizeContentType(options.contentType);
      if (streamNormalized !== reqNormalized) {
        return Promise.reject(
          new ContentTypeMismatchError(streamNormalized, reqNormalized)
        );
      }
    }
    if (options?.seq !== void 0 && stream.lastSeq !== void 0 && options.seq <= stream.lastSeq) {
      return Promise.reject(
        new SequenceConflictError(`> ${stream.lastSeq}`, options.seq)
      );
    }
    const isJson = isJsonContentType(stream.metadata.contentType);
    let newData;
    if (isJson) {
      try {
        // Throws InvalidJsonError synchronously; convert to a rejection so
        // all append failures surface the same way.
        newData = processJsonAppend(stream.data, data);
      } catch (error) {
        return Promise.reject(error);
      }
    } else {
      newData = new Uint8Array(stream.data.length + data.length);
      newData.set(stream.data);
      newData.set(data, stream.data.length);
    }
    // State is only mutated once every validation has passed.
    stream.appendCount++;
    const nextOffset = formatOffset(stream.appendCount, newData.length);
    if (options?.seq !== void 0) {
      stream.lastSeq = options.seq;
    }
    stream.data = newData;
    stream.nextOffset = nextOffset;
    this.notifyWaiters(stream);
    return Promise.resolve({ nextOffset });
  }
  /**
   * Read everything at/after `options.offset` (default: stream start) as a
   * single message. Always reports upToDate with a fresh cursor and ETag.
   * @returns `{ messages, nextOffset, upToDate, cursor, etag, contentType }`
   */
  get(path, options) {
    const stream = this.streams.get(path);
    if (!stream) {
      return Promise.reject(new StreamNotFoundError(path));
    }
    const startOffset = options?.offset ?? initialOffset();
    const byteOffset = offsetToBytePos(startOffset);
    const messages = [];
    if (byteOffset < stream.data.length) {
      const data = stream.data.slice(byteOffset);
      messages.push({
        offset: startOffset,
        timestamp: Date.now(),
        data
      });
    }
    return Promise.resolve({
      messages,
      nextOffset: stream.nextOffset,
      upToDate: true,
      cursor: generateCursor(),
      etag: generateETag(path, startOffset, stream.nextOffset),
      contentType: stream.metadata.contentType
    });
  }
  /**
   * Metadata-only lookup.
   * @returns head info, or null when the stream does not exist
   */
  head(path) {
    const stream = this.streams.get(path);
    if (!stream) {
      return Promise.resolve(null);
    }
    return Promise.resolve({
      contentType: stream.metadata.contentType,
      nextOffset: stream.nextOffset,
      etag: generateETag(path, initialOffset(), stream.nextOffset)
    });
  }
  /**
   * Delete a stream. Any parked waiters are released immediately with an
   * empty, non-timed-out result so long-polls do not hang on a dead stream.
   */
  delete(path) {
    const stream = this.streams.get(path);
    if (stream) {
      const effect = Effect_exports.forEach(
        stream.waiters,
        (waiter) => Deferred_exports.succeed(waiter.deferred, { messages: [], timedOut: false })
      );
      Effect_exports.runSync(effect);
    }
    this.streams.delete(path);
    return Promise.resolve();
  }
  /** True when a stream exists at `path`. */
  has(path) {
    return this.streams.has(path);
  }
  /**
   * Long-poll for data at/after `offset`. Resolves immediately when data is
   * already available; otherwise parks a waiter that is resolved by the next
   * append (or by delete), racing against a `timeoutMs` timer.
   * @returns `{ messages, timedOut }`; rejects when the stream is unknown
   */
  waitForData(path, offset, timeoutMs) {
    const stream = this.streams.get(path);
    if (!stream) {
      return Promise.reject(new StreamNotFoundError(path));
    }
    const byteOffset = offsetToBytePos(offset);
    if (byteOffset < stream.data.length) {
      const data = stream.data.slice(byteOffset);
      return Promise.resolve({
        messages: [{ offset, timestamp: Date.now(), data }],
        timedOut: false
      });
    }
    const effect = Effect_exports.gen(this, function* () {
      const deferred = yield* Deferred_exports.make();
      const waiter = { deferred, offset };
      stream.waiters.push(waiter);
      // Timer that yields the timed-out result after timeoutMs.
      const timeout = Effect_exports.as(
        Effect_exports.delay(
          Effect_exports.sync(() => {
          }),
          timeoutMs
        ),
        { messages: [], timedOut: true }
      );
      const result = yield* Effect_exports.race(Deferred_exports.await(deferred), timeout);
      // Remove the waiter whether we won via data or via timeout.
      const index = stream.waiters.indexOf(waiter);
      if (index !== -1) {
        stream.waiters.splice(index, 1);
      }
      return result;
    });
    return Effect_exports.runPromise(effect);
  }
  /**
   * Concatenate message payloads into a response body; JSON streams are
   * wrapped into an array via formatJsonResponse. Unknown paths yield an
   * empty body.
   */
  formatResponse(path, messages) {
    const stream = this.streams.get(path);
    if (!stream) {
      return new Uint8Array(0);
    }
    if (messages.length === 0) {
      const isJson2 = isJsonContentType(stream.metadata.contentType);
      return isJson2 ? new TextEncoder().encode("[]") : new Uint8Array(0);
    }
    const combined = new Uint8Array(
      messages.reduce((acc, m) => acc + m.data.length, 0)
    );
    let offset = 0;
    for (const message of messages) {
      combined.set(message.data, offset);
      offset += message.data.length;
    }
    const isJson = isJsonContentType(stream.metadata.contentType);
    return isJson ? formatJsonResponse(combined) : combined;
  }
  /**
   * Resolve every parked waiter whose offset now has data; waiters still
   * ahead of the data are re-parked. Runs synchronously from append().
   */
  notifyWaiters(stream) {
    // Swap out the list first so re-parked waiters are not re-visited.
    const waiters = [...stream.waiters];
    stream.waiters = [];
    const effect = Effect_exports.forEach(waiters, (waiter) => {
      const byteOffset = offsetToBytePos(waiter.offset);
      if (byteOffset < stream.data.length) {
        const data = stream.data.slice(byteOffset);
        return Deferred_exports.succeed(waiter.deferred, {
          messages: [{ offset: waiter.offset, timestamp: Date.now(), data }],
          timedOut: false
        });
      }
      stream.waiters.push(waiter);
      return Effect_exports.void;
    });
    Effect_exports.runSync(effect);
  }
};
|
|
236
|
+
|
|
237
|
+
export {
|
|
238
|
+
MemoryStore
|
|
239
|
+
};
|