durable-cf-streams 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +134 -0
- package/dist/chunk-5N2UNHIW.js +306 -0
- package/dist/chunk-AU5YTPHF.js +239 -0
- package/dist/chunk-E7XMLPFW.js +275 -0
- package/dist/chunk-KHGAPSJ7.js +271 -0
- package/dist/chunk-LK4KY5UI.js +285 -0
- package/dist/chunk-VLDLQ6TP.js +15096 -0
- package/dist/index.d.ts +76 -0
- package/dist/index.js +66 -0
- package/dist/interface-DFfkkkVM.d.ts +63 -0
- package/dist/storage/d1.d.ts +21 -0
- package/dist/storage/d1.js +8 -0
- package/dist/storage/index.d.ts +5 -0
- package/dist/storage/index.js +20 -0
- package/dist/storage/kv.d.ts +21 -0
- package/dist/storage/kv.js +8 -0
- package/dist/storage/memory.d.ts +16 -0
- package/dist/storage/memory.js +8 -0
- package/dist/storage/r2.d.ts +23 -0
- package/dist/storage/r2.js +8 -0
- package/package.json +74 -0
|
@@ -0,0 +1,285 @@
|
|
|
1
|
+
import {
|
|
2
|
+
Deferred_exports,
|
|
3
|
+
Effect_exports
|
|
4
|
+
} from "./chunk-VLDLQ6TP.js";
|
|
5
|
+
import {
|
|
6
|
+
ContentTypeMismatchError,
|
|
7
|
+
SequenceConflictError,
|
|
8
|
+
StreamConflictError,
|
|
9
|
+
StreamNotFoundError,
|
|
10
|
+
formatJsonResponse,
|
|
11
|
+
formatOffset,
|
|
12
|
+
generateCursor,
|
|
13
|
+
generateETag,
|
|
14
|
+
initialOffset,
|
|
15
|
+
isJsonContentType,
|
|
16
|
+
normalizeContentType,
|
|
17
|
+
offsetToBytePos,
|
|
18
|
+
processJsonAppend,
|
|
19
|
+
validateJsonCreate
|
|
20
|
+
} from "./chunk-5N2UNHIW.js";
|
|
21
|
+
|
|
22
|
+
// src/storage/r2.ts
|
|
23
|
+
// Guard for an idempotent create: when a stream already exists at the target
// path, the incoming create request must agree with the stored metadata.
// Throws ContentTypeMismatchError when the (normalized) content types differ,
// and StreamConflictError when the TTL or absolute expiry differ.
function validateIdempotentCreate(existing, options) {
  const have = normalizeContentType(existing.contentType);
  const want = normalizeContentType(options.contentType);
  if (have !== want) {
    throw new ContentTypeMismatchError(have, want);
  }
  if (existing.ttlSeconds !== options.ttlSeconds) {
    throw new StreamConflictError("TTL mismatch on idempotent create");
  }
  if (existing.expiresAt !== options.expiresAt) {
    throw new StreamConflictError("Expires-At mismatch on idempotent create");
  }
}
|
|
36
|
+
// Builds the initial payload for a newly created stream.
// - Missing body defaults to an empty Uint8Array.
// - Non-empty JSON bodies are validated/normalized via validateJsonCreate.
// - appendCount is 1 when seed data was supplied, else 0, and nextOffset is
//   derived from that count plus the byte length.
// Returns { data, appendCount, nextOffset }.
function prepareInitialData(options) {
  const raw = options.data ?? new Uint8Array(0);
  const jsonMode = isJsonContentType(options.contentType);
  const data = jsonMode && raw.length > 0 ? validateJsonCreate(raw, true) : raw;
  const appendCount = data.length === 0 ? 0 : 1;
  const nextOffset = formatOffset(appendCount, data.length);
  return { data, appendCount, nextOffset };
}
|
|
46
|
+
// Stream store backed by an R2-style bucket (get/put/delete with text()/
// arrayBuffer() bodies — presumably a Cloudflare R2 binding; TODO confirm).
// Each stream is persisted as two objects: a JSON metadata object and a raw
// data object. Long-poll waiters are kept in-memory per instance and resolved
// through the Effect runtime's Deferred primitives.
//
// NOTE(review): reads-then-writes here (put/append) are not atomic across the
// two bucket objects, and waiters are only notified by THIS instance's
// append() — appends made by another instance would not wake them. Confirm
// whether single-writer use is assumed by callers.
var R2Store = class {
  // Bucket binding used for all persistence.
  bucket;
  // path -> array of { deferred, offset } long-poll waiters (in-memory only).
  waiters = /* @__PURE__ */ new Map();
  // path -> { contentType } cache; also backs the synchronous has() check.
  streamCache = /* @__PURE__ */ new Map();
  constructor(bucket) {
    this.bucket = bucket;
  }
  // Object key for a stream's metadata JSON.
  metaKey(path) {
    return `stream/${path}/meta.json`;
  }
  // Object key for a stream's raw data bytes.
  dataKey(path) {
    return `stream/${path}/data`;
  }
  // Reads and parses the metadata object; null when the stream does not exist.
  async getMetadata(path) {
    const obj = await this.bucket.get(this.metaKey(path));
    if (!obj) {
      return null;
    }
    const text = await obj.text();
    return JSON.parse(text);
  }
  // Reads the full data object; an absent object reads as zero bytes.
  async getData(path) {
    const obj = await this.bucket.get(this.dataKey(path));
    if (!obj) {
      return new Uint8Array(0);
    }
    const buffer = await obj.arrayBuffer();
    return new Uint8Array(buffer);
  }
  // Creates a stream (idempotently). If metadata already exists the request
  // is validated against it and { created: false } is returned; otherwise
  // the metadata and (possibly empty) seed data are written in parallel.
  async put(path, options) {
    const existingMeta = await this.getMetadata(path);
    if (existingMeta) {
      validateIdempotentCreate(existingMeta, options);
      return { created: false, nextOffset: existingMeta.nextOffset };
    }
    const { data, appendCount, nextOffset } = prepareInitialData(options);
    const meta = {
      contentType: options.contentType,
      ttlSeconds: options.ttlSeconds,
      expiresAt: options.expiresAt,
      createdAt: Date.now(),
      nextOffset,
      appendCount
    };
    await Promise.all([
      this.bucket.put(this.metaKey(path), JSON.stringify(meta), {
        httpMetadata: { contentType: "application/json" }
      }),
      this.bucket.put(this.dataKey(path), data)
    ]);
    this.streamCache.set(path, { contentType: options.contentType });
    return { created: true, nextOffset };
  }
  // Appends bytes to an existing stream. Enforces content-type agreement and
  // strictly-increasing sequence numbers (when both the request and the
  // stored metadata carry one), then rewrites the whole data object.
  async append(path, data, options) {
    const meta = await this.getMetadata(path);
    if (!meta) {
      throw new StreamNotFoundError(path);
    }
    const streamNormalized = normalizeContentType(meta.contentType);
    if (options?.contentType) {
      const reqNormalized = normalizeContentType(options.contentType);
      if (streamNormalized !== reqNormalized) {
        throw new ContentTypeMismatchError(streamNormalized, reqNormalized);
      }
    }
    // Reject stale/duplicate sequence numbers (seq must exceed lastSeq).
    if (options?.seq !== void 0 && meta.lastSeq !== void 0 && options.seq <= meta.lastSeq) {
      throw new SequenceConflictError(`> ${meta.lastSeq}`, options.seq);
    }
    const existingData = await this.getData(path);
    const isJson = isJsonContentType(meta.contentType);
    let newData;
    if (isJson) {
      // JSON streams delegate merging to the shared helper.
      newData = processJsonAppend(existingData, data);
    } else {
      // Binary streams are plain byte concatenation.
      newData = new Uint8Array(existingData.length + data.length);
      newData.set(existingData);
      newData.set(data, existingData.length);
    }
    const newAppendCount = meta.appendCount + 1;
    const nextOffset = formatOffset(newAppendCount, newData.length);
    const updatedMeta = {
      ...meta,
      nextOffset,
      lastSeq: options?.seq ?? meta.lastSeq,
      appendCount: newAppendCount
    };
    await Promise.all([
      this.bucket.put(this.metaKey(path), JSON.stringify(updatedMeta), {
        httpMetadata: { contentType: "application/json" }
      }),
      this.bucket.put(this.dataKey(path), newData)
    ]);
    // Wake any local long-poll waiters with the new byte range.
    this.notifyWaiters(path, newData);
    return { nextOffset };
  }
  // Reads from the stream starting at options.offset (default: the initial
  // offset). Returns at most one message containing all bytes from that
  // position to the end; upToDate is always true for this store.
  async get(path, options) {
    const meta = await this.getMetadata(path);
    if (!meta) {
      this.streamCache.delete(path);
      throw new StreamNotFoundError(path);
    }
    this.streamCache.set(path, { contentType: meta.contentType });
    const data = await this.getData(path);
    const startOffset = options?.offset ?? initialOffset();
    const byteOffset = offsetToBytePos(startOffset);
    const messages = [];
    if (byteOffset < data.length) {
      messages.push({
        offset: startOffset,
        timestamp: Date.now(),
        data: data.slice(byteOffset)
      });
    }
    return {
      messages,
      nextOffset: meta.nextOffset,
      upToDate: true,
      cursor: generateCursor(),
      etag: generateETag(path, startOffset, meta.nextOffset),
      contentType: meta.contentType
    };
  }
  // Metadata-only lookup; null when the stream does not exist. Also refreshes
  // the local streamCache entry either way.
  async head(path) {
    const meta = await this.getMetadata(path);
    if (!meta) {
      this.streamCache.delete(path);
      return null;
    }
    this.streamCache.set(path, { contentType: meta.contentType });
    return {
      contentType: meta.contentType,
      nextOffset: meta.nextOffset,
      etag: generateETag(path, initialOffset(), meta.nextOffset)
    };
  }
  // Deletes a stream: resolves all pending waiters with an empty, non-timeout
  // result, drops local state, then removes both bucket objects in parallel.
  async delete(path) {
    const waiters = this.waiters.get(path) ?? [];
    const effect = Effect_exports.forEach(
      waiters,
      (waiter) => Deferred_exports.succeed(waiter.deferred, { messages: [], timedOut: false })
    );
    Effect_exports.runSync(effect);
    this.waiters.delete(path);
    this.streamCache.delete(path);
    await Promise.all([
      this.bucket.delete(this.metaKey(path)),
      this.bucket.delete(this.dataKey(path))
    ]);
  }
  // Synchronous existence check against the LOCAL cache only — a stream
  // created by another instance will read as absent until get()/head() runs.
  has(path) {
    return this.streamCache.has(path);
  }
  // Long-poll: returns immediately when data past `offset` already exists,
  // otherwise registers a Deferred waiter and races it against a timeout
  // effect that yields { messages: [], timedOut: true } after timeoutMs.
  async waitForData(path, offset, timeoutMs) {
    const meta = await this.getMetadata(path);
    if (!meta) {
      throw new StreamNotFoundError(path);
    }
    const data = await this.getData(path);
    const byteOffset = offsetToBytePos(offset);
    if (byteOffset < data.length) {
      // Data already available — no need to wait.
      return {
        messages: [
          { offset, timestamp: Date.now(), data: data.slice(byteOffset) }
        ],
        timedOut: false
      };
    }
    const effect = Effect_exports.gen(this, function* () {
      const deferred = yield* Deferred_exports.make();
      const waiter = { deferred, offset };
      const pathWaiters = this.waiters.get(path) ?? [];
      pathWaiters.push(waiter);
      this.waiters.set(path, pathWaiters);
      // A no-op effect delayed by timeoutMs, mapped to the timeout result.
      const timeout = Effect_exports.as(
        Effect_exports.delay(
          Effect_exports.sync(() => {
          }),
          timeoutMs
        ),
        { messages: [], timedOut: true }
      );
      const result = yield* Effect_exports.race(Deferred_exports.await(deferred), timeout);
      // Deregister the waiter whichever side of the race won.
      const currentWaiters = this.waiters.get(path) ?? [];
      const index = currentWaiters.indexOf(waiter);
      if (index !== -1) {
        currentWaiters.splice(index, 1);
      }
      return result;
    });
    return Effect_exports.runPromise(effect);
  }
  // Concatenates message payloads into one byte buffer for the response.
  // Unknown path (no cache entry) yields zero bytes; an empty message list
  // yields "[]" for JSON streams and zero bytes otherwise.
  formatResponse(path, messages) {
    const cached = this.streamCache.get(path);
    if (!cached) {
      return new Uint8Array(0);
    }
    if (messages.length === 0) {
      const isJson2 = isJsonContentType(cached.contentType);
      return isJson2 ? new TextEncoder().encode("[]") : new Uint8Array(0);
    }
    const combined = new Uint8Array(
      messages.reduce((acc, m) => acc + m.data.length, 0)
    );
    let offset = 0;
    for (const message of messages) {
      combined.set(message.data, offset);
      offset += message.data.length;
    }
    const isJson = isJsonContentType(cached.contentType);
    return isJson ? formatJsonResponse(combined) : combined;
  }
  // Resolves every waiter whose offset falls before the end of `data` with
  // the bytes from that offset on; waiters still ahead of the data are
  // re-queued (the list is cleared up front, then survivors re-added).
  notifyWaiters(path, data) {
    const waiters = this.waiters.get(path) ?? [];
    this.waiters.set(path, []);
    const effect = Effect_exports.forEach(waiters, (waiter) => {
      const byteOffset = offsetToBytePos(waiter.offset);
      if (byteOffset < data.length) {
        return Deferred_exports.succeed(waiter.deferred, {
          messages: [
            {
              offset: waiter.offset,
              timestamp: Date.now(),
              data: data.slice(byteOffset)
            }
          ],
          timedOut: false
        });
      }
      // Not enough data yet for this waiter — keep it registered.
      const remaining = this.waiters.get(path) ?? [];
      remaining.push(waiter);
      this.waiters.set(path, remaining);
      return Effect_exports.void;
    });
    Effect_exports.runSync(effect);
  }
};
|
|
282
|
+
|
|
283
|
+
export {
|
|
284
|
+
R2Store
|
|
285
|
+
};
|