durable-cf-streams 0.1.0
- package/README.md +134 -0
- package/dist/chunk-5N2UNHIW.js +306 -0
- package/dist/chunk-AU5YTPHF.js +239 -0
- package/dist/chunk-E7XMLPFW.js +275 -0
- package/dist/chunk-KHGAPSJ7.js +271 -0
- package/dist/chunk-LK4KY5UI.js +285 -0
- package/dist/chunk-VLDLQ6TP.js +15096 -0
- package/dist/index.d.ts +76 -0
- package/dist/index.js +66 -0
- package/dist/interface-DFfkkkVM.d.ts +63 -0
- package/dist/storage/d1.d.ts +21 -0
- package/dist/storage/d1.js +8 -0
- package/dist/storage/index.d.ts +5 -0
- package/dist/storage/index.js +20 -0
- package/dist/storage/kv.d.ts +21 -0
- package/dist/storage/kv.js +8 -0
- package/dist/storage/memory.d.ts +16 -0
- package/dist/storage/memory.js +8 -0
- package/dist/storage/r2.d.ts +23 -0
- package/dist/storage/r2.js +8 -0
- package/package.json +74 -0

package/dist/chunk-E7XMLPFW.js
@@ -0,0 +1,275 @@
+import {
+  Deferred_exports,
+  Effect_exports
+} from "./chunk-VLDLQ6TP.js";
+import {
+  ContentTypeMismatchError,
+  SequenceConflictError,
+  StreamConflictError,
+  StreamNotFoundError,
+  formatJsonResponse,
+  formatOffset,
+  generateCursor,
+  generateETag,
+  initialOffset,
+  isJsonContentType,
+  normalizeContentType,
+  offsetToBytePos,
+  processJsonAppend,
+  validateJsonCreate
+} from "./chunk-5N2UNHIW.js";
+
+// src/storage/d1.ts
+function validateIdempotentCreate(existing, options) {
+  const existingNormalized = normalizeContentType(existing.contentType);
+  const reqNormalized = normalizeContentType(options.contentType);
+  if (existingNormalized !== reqNormalized) {
+    throw new ContentTypeMismatchError(existingNormalized, reqNormalized);
+  }
+  if (options.ttlSeconds !== existing.ttlSeconds) {
+    throw new StreamConflictError("TTL mismatch on idempotent create");
+  }
+  if (options.expiresAt !== existing.expiresAt) {
+    throw new StreamConflictError("Expires-At mismatch on idempotent create");
+  }
+}
+function prepareInitialData(options) {
+  let data = options.data ?? new Uint8Array(0);
+  const isJson = isJsonContentType(options.contentType);
+  if (isJson && data.length > 0) {
+    data = validateJsonCreate(data, true);
+  }
+  const appendCount = data.length > 0 ? 1 : 0;
+  const nextOffset = formatOffset(appendCount, data.length);
+  return { data, appendCount, nextOffset };
+}
+var D1Store = class _D1Store {
+  db;
+  waiters = /* @__PURE__ */ new Map();
+  streamCache = /* @__PURE__ */ new Map();
+  constructor(db) {
+    this.db = db;
+  }
+  static schema = "CREATE TABLE IF NOT EXISTS streams (path TEXT PRIMARY KEY, content_type TEXT NOT NULL, ttl_seconds INTEGER, expires_at TEXT, created_at INTEGER NOT NULL, data BLOB NOT NULL DEFAULT x'', next_offset TEXT NOT NULL, last_seq TEXT, append_count INTEGER NOT NULL DEFAULT 0);";
+  async initialize() {
+    await this.db.exec(_D1Store.schema);
+  }
+  async put(path, options) {
+    const existing = await this.db.prepare(
+      "SELECT content_type, ttl_seconds, expires_at, next_offset FROM streams WHERE path = ?"
+    ).bind(path).first();
+    if (existing) {
+      validateIdempotentCreate(
+        {
+          contentType: existing.content_type,
+          ttlSeconds: existing.ttl_seconds ?? void 0,
+          expiresAt: existing.expires_at ?? void 0
+        },
+        options
+      );
+      return { created: false, nextOffset: existing.next_offset };
+    }
+    const { data, appendCount, nextOffset } = prepareInitialData(options);
+    await this.db.prepare(`
+      INSERT INTO streams (path, content_type, ttl_seconds, expires_at, created_at, data, next_offset, append_count)
+      VALUES (?, ?, ?, ?, ?, ?, ?, ?)
+    `).bind(
+      path,
+      options.contentType,
+      options.ttlSeconds ?? null,
+      options.expiresAt ?? null,
+      Date.now(),
+      data,
+      nextOffset,
+      appendCount
+    ).run();
+    this.streamCache.set(path, { contentType: options.contentType });
+    return { created: true, nextOffset };
+  }
+  async append(path, data, options) {
+    const stream = await this.db.prepare(
+      "SELECT content_type, data, next_offset, last_seq, append_count FROM streams WHERE path = ?"
+    ).bind(path).first();
+    if (!stream) {
+      throw new StreamNotFoundError(path);
+    }
+    const streamNormalized = normalizeContentType(stream.content_type);
+    if (options?.contentType) {
+      const reqNormalized = normalizeContentType(options.contentType);
+      if (streamNormalized !== reqNormalized) {
+        throw new ContentTypeMismatchError(streamNormalized, reqNormalized);
+      }
+    }
+    if (options?.seq !== void 0 && stream.last_seq !== null && options.seq <= stream.last_seq) {
+      throw new SequenceConflictError(`> ${stream.last_seq}`, options.seq);
+    }
+    const existingData = new Uint8Array(stream.data);
+    const isJson = isJsonContentType(stream.content_type);
+    let newData;
+    if (isJson) {
+      newData = processJsonAppend(existingData, data);
+    } else {
+      newData = new Uint8Array(existingData.length + data.length);
+      newData.set(existingData);
+      newData.set(data, existingData.length);
+    }
+    const newAppendCount = stream.append_count + 1;
+    const nextOffset = formatOffset(newAppendCount, newData.length);
+    await this.db.prepare(`
+      UPDATE streams
+      SET data = ?, next_offset = ?, last_seq = ?, append_count = ?
+      WHERE path = ?
+    `).bind(
+      newData,
+      nextOffset,
+      options?.seq ?? stream.last_seq,
+      newAppendCount,
+      path
+    ).run();
+    this.notifyWaiters(path, newData);
+    return { nextOffset };
+  }
+  async get(path, options) {
+    const stream = await this.db.prepare(
+      "SELECT content_type, data, next_offset FROM streams WHERE path = ?"
+    ).bind(path).first();
+    if (!stream) {
+      this.streamCache.delete(path);
+      throw new StreamNotFoundError(path);
+    }
+    this.streamCache.set(path, { contentType: stream.content_type });
+    const startOffset = options?.offset ?? initialOffset();
+    const byteOffset = offsetToBytePos(startOffset);
+    const data = new Uint8Array(stream.data);
+    const messages = [];
+    if (byteOffset < data.length) {
+      messages.push({
+        offset: startOffset,
+        timestamp: Date.now(),
+        data: data.slice(byteOffset)
+      });
+    }
+    return {
+      messages,
+      nextOffset: stream.next_offset,
+      upToDate: true,
+      cursor: generateCursor(),
+      etag: generateETag(path, startOffset, stream.next_offset),
+      contentType: stream.content_type
+    };
+  }
+  async head(path) {
+    const stream = await this.db.prepare("SELECT content_type, next_offset FROM streams WHERE path = ?").bind(path).first();
+    if (!stream) {
+      this.streamCache.delete(path);
+      return null;
+    }
+    this.streamCache.set(path, { contentType: stream.content_type });
+    return {
+      contentType: stream.content_type,
+      nextOffset: stream.next_offset,
+      etag: generateETag(path, initialOffset(), stream.next_offset)
+    };
+  }
+  async delete(path) {
+    const waiters = this.waiters.get(path) ?? [];
+    const effect = Effect_exports.forEach(
+      waiters,
+      (waiter) => Deferred_exports.succeed(waiter.deferred, { messages: [], timedOut: false })
+    );
+    Effect_exports.runSync(effect);
+    this.waiters.delete(path);
+    this.streamCache.delete(path);
+    await this.db.prepare("DELETE FROM streams WHERE path = ?").bind(path).run();
+  }
+  has(path) {
+    return this.streamCache.has(path);
+  }
+  async waitForData(path, offset, timeoutMs) {
+    const stream = await this.db.prepare("SELECT data FROM streams WHERE path = ?").bind(path).first();
+    if (!stream) {
+      throw new StreamNotFoundError(path);
+    }
+    const data = new Uint8Array(stream.data);
+    const byteOffset = offsetToBytePos(offset);
+    if (byteOffset < data.length) {
+      return {
+        messages: [
+          { offset, timestamp: Date.now(), data: data.slice(byteOffset) }
+        ],
+        timedOut: false
+      };
+    }
+    const effect = Effect_exports.gen(this, function* () {
+      const deferred = yield* Deferred_exports.make();
+      const waiter = { deferred, offset };
+      const pathWaiters = this.waiters.get(path) ?? [];
+      pathWaiters.push(waiter);
+      this.waiters.set(path, pathWaiters);
+      const timeout = Effect_exports.as(
+        Effect_exports.delay(
+          Effect_exports.sync(() => {
+          }),
+          timeoutMs
+        ),
+        { messages: [], timedOut: true }
+      );
+      const result = yield* Effect_exports.race(Deferred_exports.await(deferred), timeout);
+      const currentWaiters = this.waiters.get(path) ?? [];
+      const index = currentWaiters.indexOf(waiter);
+      if (index !== -1) {
+        currentWaiters.splice(index, 1);
+      }
+      return result;
+    });
+    return Effect_exports.runPromise(effect);
+  }
+  formatResponse(path, messages) {
+    const cached = this.streamCache.get(path);
+    if (!cached) {
+      return new Uint8Array(0);
+    }
+    if (messages.length === 0) {
+      const isJson2 = isJsonContentType(cached.contentType);
+      return isJson2 ? new TextEncoder().encode("[]") : new Uint8Array(0);
+    }
+    const combined = new Uint8Array(
+      messages.reduce((acc, m) => acc + m.data.length, 0)
+    );
+    let offset = 0;
+    for (const message of messages) {
+      combined.set(message.data, offset);
+      offset += message.data.length;
+    }
+    const isJson = isJsonContentType(cached.contentType);
+    return isJson ? formatJsonResponse(combined) : combined;
+  }
+  notifyWaiters(path, data) {
+    const waiters = this.waiters.get(path) ?? [];
+    this.waiters.set(path, []);
+    const effect = Effect_exports.forEach(waiters, (waiter) => {
+      const byteOffset = offsetToBytePos(waiter.offset);
+      if (byteOffset < data.length) {
+        return Deferred_exports.succeed(waiter.deferred, {
+          messages: [
+            {
+              offset: waiter.offset,
+              timestamp: Date.now(),
+              data: data.slice(byteOffset)
+            }
+          ],
+          timedOut: false
+        });
+      }
+      const remaining = this.waiters.get(path) ?? [];
+      remaining.push(waiter);
+      this.waiters.set(path, remaining);
+      return Effect_exports.void;
+    });
+    Effect_exports.runSync(effect);
+  }
+};
+
+export {
+  D1Store
+};
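
For orientation, here is a minimal usage sketch of the D1Store from the chunk above, wired into a Cloudflare Worker. The DB binding name, the import specifier, and the exact option shapes are assumptions inferred from the dist layout and the compiled code, not documented API.

// Sketch only: binding name and import specifier are assumptions.
import { D1Store } from "durable-cf-streams/storage"; // subpath guessed from dist/storage/*

interface Env {
  DB: D1Database; // Cloudflare D1 binding; type comes from @cloudflare/workers-types
}

export default {
  async fetch(_req: Request, env: Env): Promise<Response> {
    const store = new D1Store(env.DB);
    await store.initialize(); // runs the CREATE TABLE IF NOT EXISTS statement shown above

    // Create is idempotent: a repeat put with the same contentType/TTL returns { created: false }.
    await store.put("/chat/123", { contentType: "application/json" });

    // JSON streams are appended via processJsonAppend; other content types are byte-concatenated.
    const { nextOffset } = await store.append(
      "/chat/123",
      new TextEncoder().encode('{"role":"user","text":"hi"}')
    );

    // Read from the initial offset; formatResponse returns "[]" for an empty JSON stream
    // and otherwise runs the combined bytes through formatJsonResponse.
    const result = await store.get("/chat/123");
    const body = store.formatResponse("/chat/123", result.messages);
    return new Response(body, {
      headers: {
        "Content-Type": result.contentType,
        "ETag": result.etag,
        "X-Next-Offset": nextOffset // illustrative header name
      }
    });
  }
};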

package/dist/chunk-KHGAPSJ7.js
@@ -0,0 +1,271 @@
+import {
+  Deferred_exports,
+  Effect_exports
+} from "./chunk-VLDLQ6TP.js";
+import {
+  ContentTypeMismatchError,
+  SequenceConflictError,
+  StreamConflictError,
+  StreamNotFoundError,
+  formatJsonResponse,
+  formatOffset,
+  generateCursor,
+  generateETag,
+  initialOffset,
+  isJsonContentType,
+  normalizeContentType,
+  offsetToBytePos,
+  processJsonAppend,
+  validateJsonCreate
+} from "./chunk-5N2UNHIW.js";
+
+// src/storage/kv.ts
+function validateIdempotentCreate(existing, options) {
+  const existingNormalized = normalizeContentType(existing.contentType);
+  const reqNormalized = normalizeContentType(options.contentType);
+  if (existingNormalized !== reqNormalized) {
+    throw new ContentTypeMismatchError(existingNormalized, reqNormalized);
+  }
+  if (options.ttlSeconds !== existing.ttlSeconds) {
+    throw new StreamConflictError("TTL mismatch on idempotent create");
+  }
+  if (options.expiresAt !== existing.expiresAt) {
+    throw new StreamConflictError("Expires-At mismatch on idempotent create");
+  }
+}
+function prepareInitialData(options) {
+  let data = options.data ?? new Uint8Array(0);
+  const isJson = isJsonContentType(options.contentType);
+  if (isJson && data.length > 0) {
+    data = validateJsonCreate(data, true);
+  }
+  const appendCount = data.length > 0 ? 1 : 0;
+  const nextOffset = formatOffset(appendCount, data.length);
+  return { data, appendCount, nextOffset };
+}
+var KVStore = class {
+  kv;
+  waiters = /* @__PURE__ */ new Map();
+  streamCache = /* @__PURE__ */ new Map();
+  constructor(kv) {
+    this.kv = kv;
+  }
+  metaKey(path) {
+    return `stream:${path}:meta`;
+  }
+  dataKey(path) {
+    return `stream:${path}:data`;
+  }
+  async put(path, options) {
+    const existingMeta = await this.kv.get(
+      this.metaKey(path),
+      "json"
+    );
+    if (existingMeta) {
+      validateIdempotentCreate(existingMeta, options);
+      return { created: false, nextOffset: existingMeta.nextOffset };
+    }
+    const { data, appendCount, nextOffset } = prepareInitialData(options);
+    const meta = {
+      contentType: options.contentType,
+      ttlSeconds: options.ttlSeconds,
+      expiresAt: options.expiresAt,
+      createdAt: Date.now(),
+      nextOffset,
+      appendCount
+    };
+    await Promise.all([
+      this.kv.put(this.metaKey(path), JSON.stringify(meta)),
+      this.kv.put(this.dataKey(path), data)
+    ]);
+    this.streamCache.set(path, { contentType: options.contentType });
+    return { created: true, nextOffset };
+  }
+  async append(path, data, options) {
+    const meta = await this.kv.get(this.metaKey(path), "json");
+    if (!meta) {
+      throw new StreamNotFoundError(path);
+    }
+    const streamNormalized = normalizeContentType(meta.contentType);
+    if (options?.contentType) {
+      const reqNormalized = normalizeContentType(options.contentType);
+      if (streamNormalized !== reqNormalized) {
+        throw new ContentTypeMismatchError(streamNormalized, reqNormalized);
+      }
+    }
+    if (options?.seq !== void 0 && meta.lastSeq !== void 0 && options.seq <= meta.lastSeq) {
+      throw new SequenceConflictError(`> ${meta.lastSeq}`, options.seq);
+    }
+    const existingRaw = await this.kv.get(this.dataKey(path), "arrayBuffer");
+    const existingData = existingRaw ? new Uint8Array(existingRaw) : new Uint8Array(0);
+    const isJson = isJsonContentType(meta.contentType);
+    let newData;
+    if (isJson) {
+      newData = processJsonAppend(existingData, data);
+    } else {
+      newData = new Uint8Array(existingData.length + data.length);
+      newData.set(existingData);
+      newData.set(data, existingData.length);
+    }
+    const newAppendCount = meta.appendCount + 1;
+    const nextOffset = formatOffset(newAppendCount, newData.length);
+    const updatedMeta = {
+      ...meta,
+      nextOffset,
+      lastSeq: options?.seq ?? meta.lastSeq,
+      appendCount: newAppendCount
+    };
+    await Promise.all([
+      this.kv.put(this.metaKey(path), JSON.stringify(updatedMeta)),
+      this.kv.put(this.dataKey(path), newData)
+    ]);
+    this.notifyWaiters(path, newData);
+    return { nextOffset };
+  }
+  async get(path, options) {
+    const meta = await this.kv.get(this.metaKey(path), "json");
+    if (!meta) {
+      this.streamCache.delete(path);
+      throw new StreamNotFoundError(path);
+    }
+    this.streamCache.set(path, { contentType: meta.contentType });
+    const raw = await this.kv.get(this.dataKey(path), "arrayBuffer");
+    const data = raw ? new Uint8Array(raw) : new Uint8Array(0);
+    const startOffset = options?.offset ?? initialOffset();
+    const byteOffset = offsetToBytePos(startOffset);
+    const messages = [];
+    if (byteOffset < data.length) {
+      messages.push({
+        offset: startOffset,
+        timestamp: Date.now(),
+        data: data.slice(byteOffset)
+      });
+    }
+    return {
+      messages,
+      nextOffset: meta.nextOffset,
+      upToDate: true,
+      cursor: generateCursor(),
+      etag: generateETag(path, startOffset, meta.nextOffset),
+      contentType: meta.contentType
+    };
+  }
+  async head(path) {
+    const meta = await this.kv.get(this.metaKey(path), "json");
+    if (!meta) {
+      this.streamCache.delete(path);
+      return null;
+    }
+    this.streamCache.set(path, { contentType: meta.contentType });
+    return {
+      contentType: meta.contentType,
+      nextOffset: meta.nextOffset,
+      etag: generateETag(path, initialOffset(), meta.nextOffset)
+    };
+  }
+  async delete(path) {
+    const waiters = this.waiters.get(path) ?? [];
+    const effect = Effect_exports.forEach(
+      waiters,
+      (waiter) => Deferred_exports.succeed(waiter.deferred, { messages: [], timedOut: false })
+    );
+    Effect_exports.runSync(effect);
+    this.waiters.delete(path);
+    this.streamCache.delete(path);
+    await Promise.all([
+      this.kv.delete(this.metaKey(path)),
+      this.kv.delete(this.dataKey(path))
+    ]);
+  }
+  has(path) {
+    return this.streamCache.has(path);
+  }
+  async waitForData(path, offset, timeoutMs) {
+    const meta = await this.kv.get(this.metaKey(path), "json");
+    if (!meta) {
+      throw new StreamNotFoundError(path);
+    }
+    const raw = await this.kv.get(this.dataKey(path), "arrayBuffer");
+    const data = raw ? new Uint8Array(raw) : new Uint8Array(0);
+    const byteOffset = offsetToBytePos(offset);
+    if (byteOffset < data.length) {
+      return {
+        messages: [
+          { offset, timestamp: Date.now(), data: data.slice(byteOffset) }
+        ],
+        timedOut: false
+      };
+    }
+    const effect = Effect_exports.gen(this, function* () {
+      const deferred = yield* Deferred_exports.make();
+      const waiter = { deferred, offset };
+      const pathWaiters = this.waiters.get(path) ?? [];
+      pathWaiters.push(waiter);
+      this.waiters.set(path, pathWaiters);
+      const timeout = Effect_exports.as(
+        Effect_exports.delay(
+          Effect_exports.sync(() => {
+          }),
+          timeoutMs
+        ),
+        { messages: [], timedOut: true }
+      );
+      const result = yield* Effect_exports.race(Deferred_exports.await(deferred), timeout);
+      const currentWaiters = this.waiters.get(path) ?? [];
+      const index = currentWaiters.indexOf(waiter);
+      if (index !== -1) {
+        currentWaiters.splice(index, 1);
+      }
+      return result;
+    });
+    return Effect_exports.runPromise(effect);
+  }
+  formatResponse(path, messages) {
+    const cached = this.streamCache.get(path);
+    if (!cached) {
+      return new Uint8Array(0);
+    }
+    if (messages.length === 0) {
+      const isJson2 = isJsonContentType(cached.contentType);
+      return isJson2 ? new TextEncoder().encode("[]") : new Uint8Array(0);
+    }
+    const combined = new Uint8Array(
+      messages.reduce((acc, m) => acc + m.data.length, 0)
+    );
+    let pos = 0;
+    for (const message of messages) {
+      combined.set(message.data, pos);
+      pos += message.data.length;
+    }
+    const isJson = isJsonContentType(cached.contentType);
+    return isJson ? formatJsonResponse(combined) : combined;
+  }
+  notifyWaiters(path, data) {
+    const waiters = this.waiters.get(path) ?? [];
+    this.waiters.set(path, []);
+    const effect = Effect_exports.forEach(waiters, (waiter) => {
+      const byteOffset = offsetToBytePos(waiter.offset);
+      if (byteOffset < data.length) {
+        return Deferred_exports.succeed(waiter.deferred, {
+          messages: [
+            {
+              offset: waiter.offset,
+              timestamp: Date.now(),
+              data: data.slice(byteOffset)
+            }
+          ],
+          timedOut: false
+        });
+      }
+      const remaining = this.waiters.get(path) ?? [];
+      remaining.push(waiter);
+      this.waiters.set(path, remaining);
+      return Effect_exports.void;
+    });
+    Effect_exports.runSync(effect);
+  }
+};
+
+export {
+  KVStore
+};
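
The KVStore above mirrors the D1 adapter but persists a stream:<path>:meta JSON record alongside a stream:<path>:data blob. A comparable sketch, again with an assumed binding name and import specifier:

// Sketch only: the STREAMS_KV binding name and import specifier are assumptions.
import { KVStore } from "durable-cf-streams/storage";

interface Env {
  STREAMS_KV: KVNamespace; // Workers KV binding; type from @cloudflare/workers-types
}

export default {
  async fetch(_req: Request, env: Env): Promise<Response> {
    const store = new KVStore(env.STREAMS_KV);

    // put() writes two keys: stream:<path>:meta (JSON metadata) and stream:<path>:data (raw bytes).
    await store.put("/logs/run-1", { contentType: "text/plain" });
    await store.append("/logs/run-1", new TextEncoder().encode("started\n"));

    const head = await store.head("/logs/run-1");
    if (!head) return new Response("not found", { status: 404 });

    const result = await store.get("/logs/run-1");
    return new Response(store.formatResponse("/logs/run-1", result.messages), {
      headers: { "Content-Type": head.contentType, "ETag": head.etag }
    });
  }
};

Note that in both adapters an append re-reads the full stored blob and writes it back (two KV puts, or a SELECT followed by an UPDATE in D1) without a transaction, so concurrent appends to the same path can race unless something upstream serializes access; the package name suggests a Durable Object front for that, but it is not visible in these two chunks.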