@vercel/kv2 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (166)
  1. package/README.md +87 -0
  2. package/SKILL.md +65 -0
  3. package/dist/blob-format.d.ts +35 -0
  4. package/dist/blob-format.d.ts.map +1 -0
  5. package/dist/blob-format.js +91 -0
  6. package/dist/blob-format.js.map +1 -0
  7. package/dist/blob-store.d.ts +11 -0
  8. package/dist/blob-store.d.ts.map +1 -0
  9. package/dist/blob-store.js +32 -0
  10. package/dist/blob-store.js.map +1 -0
  11. package/dist/cache.d.ts +33 -0
  12. package/dist/cache.d.ts.map +1 -0
  13. package/dist/cache.js +146 -0
  14. package/dist/cache.js.map +1 -0
  15. package/dist/cached-kv.d.ts +63 -0
  16. package/dist/cached-kv.d.ts.map +1 -0
  17. package/dist/cached-kv.js +891 -0
  18. package/dist/cached-kv.js.map +1 -0
  19. package/dist/cli.d.ts +3 -0
  20. package/dist/cli.d.ts.map +1 -0
  21. package/dist/cli.js +342 -0
  22. package/dist/cli.js.map +1 -0
  23. package/dist/create-kv.d.ts +86 -0
  24. package/dist/create-kv.d.ts.map +1 -0
  25. package/dist/create-kv.js +125 -0
  26. package/dist/create-kv.js.map +1 -0
  27. package/dist/disk-cache.d.ts.map +1 -0
  28. package/dist/disk-cache.js.map +1 -0
  29. package/dist/index.d.ts +16 -0
  30. package/dist/index.d.ts.map +1 -0
  31. package/dist/index.js +13 -0
  32. package/dist/index.js.map +1 -0
  33. package/dist/indexed-kv.d.ts +44 -0
  34. package/dist/indexed-kv.d.ts.map +1 -0
  35. package/dist/indexed-kv.js +373 -0
  36. package/dist/indexed-kv.js.map +1 -0
  37. package/dist/manifest-log.d.ts +57 -0
  38. package/dist/manifest-log.d.ts.map +1 -0
  39. package/dist/manifest-log.js +128 -0
  40. package/dist/manifest-log.js.map +1 -0
  41. package/dist/memory-cache.d.ts +22 -0
  42. package/dist/memory-cache.d.ts.map +1 -0
  43. package/dist/memory-cache.js +90 -0
  44. package/dist/memory-cache.js.map +1 -0
  45. package/dist/proxy-cache.d.ts +40 -0
  46. package/dist/proxy-cache.d.ts.map +1 -0
  47. package/dist/proxy-cache.js +124 -0
  48. package/dist/proxy-cache.js.map +1 -0
  49. package/dist/readme.test.d.ts +9 -0
  50. package/dist/readme.test.d.ts.map +1 -0
  51. package/dist/readme.test.js +285 -0
  52. package/dist/readme.test.js.map +1 -0
  53. package/dist/schema/define-schema.d.ts +35 -0
  54. package/dist/schema/define-schema.d.ts.map +1 -0
  55. package/dist/schema/define-schema.js +70 -0
  56. package/dist/schema/define-schema.js.map +1 -0
  57. package/dist/schema/index.d.ts +4 -0
  58. package/dist/schema/index.d.ts.map +1 -0
  59. package/dist/schema/index.js +5 -0
  60. package/dist/schema/index.js.map +1 -0
  61. package/dist/schema/key-builders.d.ts +40 -0
  62. package/dist/schema/key-builders.d.ts.map +1 -0
  63. package/dist/schema/key-builders.js +124 -0
  64. package/dist/schema/key-builders.js.map +1 -0
  65. package/dist/schema/schema-kv.d.ts +48 -0
  66. package/dist/schema/schema-kv.d.ts.map +1 -0
  67. package/dist/schema/schema-kv.js +96 -0
  68. package/dist/schema/schema-kv.js.map +1 -0
  69. package/dist/schema/tree.d.ts +14 -0
  70. package/dist/schema/tree.d.ts.map +1 -0
  71. package/dist/schema/tree.js +135 -0
  72. package/dist/schema/tree.js.map +1 -0
  73. package/dist/schema/types.d.ts +135 -0
  74. package/dist/schema/types.d.ts.map +1 -0
  75. package/dist/schema/types.js +2 -0
  76. package/dist/schema/types.js.map +1 -0
  77. package/dist/testing/core-tests.d.ts +30 -0
  78. package/dist/testing/core-tests.d.ts.map +1 -0
  79. package/dist/testing/core-tests.js +383 -0
  80. package/dist/testing/core-tests.js.map +1 -0
  81. package/dist/testing/create-kv-test-setup.d.ts +21 -0
  82. package/dist/testing/create-kv-test-setup.d.ts.map +1 -0
  83. package/dist/testing/create-kv-test-setup.js +25 -0
  84. package/dist/testing/create-kv-test-setup.js.map +1 -0
  85. package/dist/testing/debug-manifest.d.ts +2 -0
  86. package/dist/testing/debug-manifest.d.ts.map +1 -0
  87. package/dist/testing/debug-manifest.js +14 -0
  88. package/dist/testing/debug-manifest.js.map +1 -0
  89. package/dist/testing/fake-blob-store.d.ts +23 -0
  90. package/dist/testing/fake-blob-store.d.ts.map +1 -0
  91. package/dist/testing/fake-blob-store.js +158 -0
  92. package/dist/testing/fake-blob-store.js.map +1 -0
  93. package/dist/testing/fake-cache.d.ts +54 -0
  94. package/dist/testing/fake-cache.d.ts.map +1 -0
  95. package/dist/testing/fake-cache.js +137 -0
  96. package/dist/testing/fake-cache.js.map +1 -0
  97. package/dist/testing/index.d.ts +34 -0
  98. package/dist/testing/index.d.ts.map +1 -0
  99. package/dist/testing/index.js +101 -0
  100. package/dist/testing/index.js.map +1 -0
  101. package/dist/testing/manifest-test-setup.d.ts +22 -0
  102. package/dist/testing/manifest-test-setup.d.ts.map +1 -0
  103. package/dist/testing/manifest-test-setup.js +43 -0
  104. package/dist/testing/manifest-test-setup.js.map +1 -0
  105. package/dist/testing/perf-test.d.ts +13 -0
  106. package/dist/testing/perf-test.d.ts.map +1 -0
  107. package/dist/testing/perf-test.js +101 -0
  108. package/dist/testing/perf-test.js.map +1 -0
  109. package/dist/testing/run-tests.d.ts +2 -0
  110. package/dist/testing/run-tests.d.ts.map +1 -0
  111. package/dist/testing/run-tests.js +141 -0
  112. package/dist/testing/run-tests.js.map +1 -0
  113. package/dist/testing/setup.d.ts +2 -0
  114. package/dist/testing/setup.d.ts.map +1 -0
  115. package/dist/testing/setup.js +3 -0
  116. package/dist/testing/setup.js.map +1 -0
  117. package/dist/testing/test-index.d.ts +28 -0
  118. package/dist/testing/test-index.d.ts.map +1 -0
  119. package/dist/testing/test-index.js +35 -0
  120. package/dist/testing/test-index.js.map +1 -0
  121. package/dist/testing/test-setup.d.ts +32 -0
  122. package/dist/testing/test-setup.d.ts.map +1 -0
  123. package/dist/testing/test-setup.js +72 -0
  124. package/dist/testing/test-setup.js.map +1 -0
  125. package/dist/testing/upstream-kv-test-setup.d.ts +30 -0
  126. package/dist/testing/upstream-kv-test-setup.d.ts.map +1 -0
  127. package/dist/testing/upstream-kv-test-setup.js +66 -0
  128. package/dist/testing/upstream-kv-test-setup.js.map +1 -0
  129. package/dist/testing/vitest-compat.d.ts +92 -0
  130. package/dist/testing/vitest-compat.d.ts.map +1 -0
  131. package/dist/testing/vitest-compat.js +601 -0
  132. package/dist/testing/vitest-compat.js.map +1 -0
  133. package/dist/tracing.d.ts +71 -0
  134. package/dist/tracing.d.ts.map +1 -0
  135. package/dist/tracing.js +232 -0
  136. package/dist/tracing.js.map +1 -0
  137. package/dist/typed-kv.d.ts +120 -0
  138. package/dist/typed-kv.d.ts.map +1 -0
  139. package/dist/typed-kv.js +565 -0
  140. package/dist/typed-kv.js.map +1 -0
  141. package/dist/typed-upstream-kv.d.ts +17 -0
  142. package/dist/typed-upstream-kv.d.ts.map +1 -0
  143. package/dist/typed-upstream-kv.js +38 -0
  144. package/dist/typed-upstream-kv.js.map +1 -0
  145. package/dist/types.d.ts +199 -0
  146. package/dist/types.d.ts.map +1 -0
  147. package/dist/types.js +23 -0
  148. package/dist/types.js.map +1 -0
  149. package/dist/upstream-kv.d.ts +84 -0
  150. package/dist/upstream-kv.d.ts.map +1 -0
  151. package/dist/upstream-kv.js +375 -0
  152. package/dist/upstream-kv.js.map +1 -0
  153. package/docs/api-reference.md +222 -0
  154. package/docs/caching.md +60 -0
  155. package/docs/cli.md +123 -0
  156. package/docs/copy-on-write-branches.md +98 -0
  157. package/docs/getting-started.md +61 -0
  158. package/docs/indexes.md +122 -0
  159. package/docs/iterating-and-pagination.md +93 -0
  160. package/docs/metadata.md +82 -0
  161. package/docs/optimistic-locking.md +72 -0
  162. package/docs/schema-and-trees.md +222 -0
  163. package/docs/streaming.md +61 -0
  164. package/docs/testing-and-tracing.md +141 -0
  165. package/docs/typed-stores.md +68 -0
  166. package/package.json +63 -0
package/dist/cached-kv.js
@@ -0,0 +1,891 @@
+ import { BlobError, BlobPreconditionFailedError } from "@vercel/blob";
+ import { createBlob, isPureJsonFormat, parseBlob } from "./blob-format.js";
+ import { VercelBlobStore } from "./blob-store.js";
+ import { KVCache } from "./cache.js";
+ import { noopTracer } from "./tracing.js";
+ import { TypedKV } from "./typed-kv.js";
+ import { KVVersionConflictError } from "./types.js";
+ const DEFAULT_LARGE_VALUE_THRESHOLD = 1024 * 1024; // 1MB
+ const DEFAULT_CACHE_TTL = 3600; // 1 hour
+ const BLOB_PREFIX = "cached-kv/";
+ const VALUE_SUFFIX = ".value";
+ const HEADER_LENGTH_BYTES = 4;
+ function validatePrefix(prefix, name) {
+     if (prefix.endsWith(VALUE_SUFFIX)) {
+         throw new Error(`${name} cannot end with "${VALUE_SUFFIX}": ${prefix}`);
+     }
+ }
+ export class KV2 {
+     prefix;
+     blobStore;
+     cache;
+     largeValueThreshold;
+     tracer;
+     constructor(options = {}) {
+         const prefix = options.prefix ?? "";
+         if (prefix) {
+             validatePrefix(prefix, "Prefix");
+         }
+         this.prefix = prefix;
+         this.blobStore = options.blobStore ?? new VercelBlobStore(options.token);
+         this.cache = new KVCache({
+             ttl: options.cacheTtl ?? DEFAULT_CACHE_TTL,
+             cache: options.cache,
+         });
+         this.largeValueThreshold =
+             options.largeValueThreshold ?? DEFAULT_LARGE_VALUE_THRESHOLD;
+         this.tracer = options.tracer ?? noopTracer;
+     }
+     getFullPath(key) {
+         return `${BLOB_PREFIX}${this.prefix}${key}.value`;
+     }
+     getListPrefix(keyPrefix) {
+         return `${BLOB_PREFIX}${this.prefix}${keyPrefix}`;
+     }
+     stripPrefix(pathname) {
+         const fullPrefix = `${BLOB_PREFIX}${this.prefix}`;
+         const valueSuffix = ".value";
+         if (pathname.startsWith(fullPrefix) && pathname.endsWith(valueSuffix)) {
+             return pathname.slice(fullPrefix.length, -valueSuffix.length);
+         }
+         return pathname;
+     }
+     async readBlob(path) {
+         const result = await this.blobStore.get(path, { access: "public" });
+         if (!result) {
+             return null;
+         }
+         const chunks = [];
+         const reader = result.stream.getReader();
+         while (true) {
+             const { done, value } = await reader.read();
+             if (done)
+                 break;
+             chunks.push(value);
+         }
+         return Buffer.concat(chunks);
+     }
+     /**
+      * Reads blob header without buffering the entire payload.
+      * For binary format (large values), returns a reader positioned at the payload.
+      * For pure JSON format (small values), returns the complete buffer.
+      */
+     async readBlobStreaming(path) {
+         const result = await this.blobStore.get(path, { access: "public" });
+         if (!result) {
+             return null;
+         }
+         // Capture etag for optimistic locking
+         const etag = result.blob.etag ?? "";
+         const reader = result.stream.getReader();
+         const chunks = [];
+         let totalLength = 0;
+         // Read first chunk to determine format
+         const firstRead = await reader.read();
+         if (firstRead.done || !firstRead.value) {
+             return null;
+         }
+         chunks.push(firstRead.value);
+         totalLength += firstRead.value.length;
+         // Check if pure JSON format (first byte is '{')
+         if (isPureJsonFormat(Buffer.from([firstRead.value[0]]))) {
+             // Pure JSON format - need to read entire blob (value is inline)
+             while (true) {
+                 const { done, value } = await reader.read();
+                 if (done)
+                     break;
+                 chunks.push(value);
+                 totalLength += value.length;
+             }
+             const buffer = Buffer.concat(chunks);
+             const { header } = parseBlob(buffer);
+             return { header, buffer, reader: null, overflow: null, etag };
+         }
+         // Binary format - read just enough for the header
+         // First 4 bytes are header length
+         while (totalLength < HEADER_LENGTH_BYTES) {
+             const { done, value } = await reader.read();
+             if (done) {
+                 throw new Error("Unexpected end of stream reading header length");
+             }
+             chunks.push(value);
+             totalLength += value.length;
+         }
+         // Combine chunks to read header length
+         const combined = Buffer.concat(chunks);
+         const headerLength = combined.readUInt32BE(0);
+         const headerEnd = HEADER_LENGTH_BYTES + headerLength;
+         // Read until we have the complete header
+         while (totalLength < headerEnd) {
+             const { done, value } = await reader.read();
+             if (done) {
+                 throw new Error("Unexpected end of stream reading header");
+             }
+             chunks.push(value);
+             totalLength += value.length;
+         }
+         // Parse header
+         const fullBuffer = Buffer.concat(chunks);
+         const headerJson = fullBuffer
+             .subarray(HEADER_LENGTH_BYTES, headerEnd)
+             .toString("utf-8");
+         const header = JSON.parse(headerJson);
+         // Any bytes past the header are overflow (start of payload)
+         const overflow = totalLength > headerEnd ? fullBuffer.subarray(headerEnd) : null;
+         return { header, buffer: null, reader, overflow, etag };
+     }
+     async readBlobWithConsistencyCheck(path, expectedWriteTime) {
+         const BACKOFF_MS = [50, 100, 200, 400];
+         for (let attempt = 0; attempt <= BACKOFF_MS.length; attempt++) {
+             const buffer = await this.readBlob(path);
+             // No expected writeTime - just return whatever we got
+             if (!expectedWriteTime) {
+                 return buffer;
+             }
+             // Got nothing - might be stale read after a write
+             if (!buffer) {
+                 if (attempt < BACKOFF_MS.length) {
+                     await this.sleep(BACKOFF_MS[attempt]);
+                     continue;
+                 }
+                 return null;
+             }
+             // Check if blob's writeTime is fresh enough
+             const { header } = parseBlob(buffer);
+             if (header.writeTime && header.writeTime >= expectedWriteTime) {
+                 return buffer; // Fresh read
+             }
+             // Stale read - retry with backoff
+             if (attempt < BACKOFF_MS.length) {
+                 await this.sleep(BACKOFF_MS[attempt]);
+                 continue;
+             }
+             // Give up - return what we have (might be stale but better than nothing)
+             return buffer;
+         }
+         return null;
+     }
+     /**
+      * Streaming version of readBlobWithConsistencyCheck.
+      * Returns header and a reader for the payload without buffering.
+      */
+     async readBlobStreamingWithConsistencyCheck(path, expectedWriteTime) {
+         const BACKOFF_MS = [50, 100, 200, 400];
+         for (let attempt = 0; attempt <= BACKOFF_MS.length; attempt++) {
+             const result = await this.readBlobStreaming(path);
+             // No expected writeTime - just return whatever we got
+             if (!expectedWriteTime) {
+                 return result;
+             }
+             // Got nothing - might be stale read after a write
+             if (!result) {
+                 if (attempt < BACKOFF_MS.length) {
+                     await this.sleep(BACKOFF_MS[attempt]);
+                     continue;
+                 }
+                 return null;
+             }
+             // Check if blob's writeTime is fresh enough
+             if (result.header.writeTime &&
+                 result.header.writeTime >= expectedWriteTime) {
+                 return result; // Fresh read
+             }
+             // Stale read - close the reader and retry with backoff
+             if (result.reader) {
+                 await result.reader.cancel();
+             }
+             if (attempt < BACKOFF_MS.length) {
+                 await this.sleep(BACKOFF_MS[attempt]);
+                 continue;
+             }
+             // Give up - return what we have (might be stale but better than nothing)
+             return result;
+         }
+         return null;
+     }
+     sleep(ms) {
+         return new Promise((resolve) => setTimeout(resolve, ms));
+     }
+     serializeValue(value) {
+         if (value instanceof Buffer) {
+             return { data: value, encoding: "base64" };
+         }
+         if (value instanceof Uint8Array) {
+             return { data: Buffer.from(value), encoding: "base64" };
+         }
+         if (value instanceof ArrayBuffer) {
+             return { data: Buffer.from(value), encoding: "base64" };
+         }
+         // JSON.stringify(undefined) returns undefined, not a string
+         // Handle undefined by serializing as null (JSON standard behavior)
+         const jsonStr = JSON.stringify(value);
+         if (jsonStr === undefined) {
+             // undefined becomes null in JSON
+             return { data: Buffer.from("null", "utf-8"), encoding: "json" };
+         }
+         return {
+             data: Buffer.from(jsonStr, "utf-8"),
+             encoding: "json",
+         };
+     }
+     deserializeValue(data, encoding) {
+         if (encoding === "base64" && typeof data === "string") {
+             return Buffer.from(data, "base64");
+         }
+         return data;
+     }
+     async get(key) {
+         const span = this.tracer.startSpan("kv.get", { key });
+         const path = this.getFullPath(key);
+         try {
+             // Try cache first
+             const cached = await this.cache.get(path);
+             // If cache has full value (not just writeTime marker) and etag, return it
+             if (cached && cached.value !== undefined && cached.etag) {
+                 span.setAttributes({ source: "cache", size: cached.size });
+                 span.end();
+                 // Deserialize binary data from base64 if needed
+                 const cachedValue = cached.isBinary && typeof cached.value === "string"
+                     ? Buffer.from(cached.value, "base64")
+                     : cached.value;
+                 const cachedPayload = this.getPayloadBytes(cachedValue, cached.size);
+                 let streamPromise = null;
+                 const cachedEtag = cached.etag;
+                 return {
+                     exists: true,
+                     metadata: cached.metadata,
+                     version: cachedEtag,
+                     get value() {
+                         return Promise.resolve(cachedValue);
+                     },
+                     get stream() {
+                         if (!streamPromise) {
+                             streamPromise = Promise.resolve(new ReadableStream({
+                                 start(controller) {
+                                     controller.enqueue(cachedPayload);
+                                     controller.close();
+                                 },
+                             }));
+                         }
+                         return streamPromise;
+                     },
+                     update: async (value, metadata) => {
+                         const meta = (metadata ?? cached.metadata);
+                         return this.set(key, value, meta, { expectedVersion: cachedEtag });
+                     },
+                 };
+             }
+             // Read from blob store with streaming (with retry for eventual consistency)
+             const streamingResult = await this.readBlobStreamingWithConsistencyCheck(path, cached?.writeTime);
+             if (!streamingResult) {
+                 span.setAttributes({ source: "blob", exists: false });
+                 span.end();
+                 return {
+                     exists: false,
+                     metadata: undefined,
+                     value: undefined,
+                     stream: undefined,
+                 };
+             }
+             const { header, buffer, reader, overflow, etag } = streamingResult;
+             // For pure JSON format (small values), buffer is already complete
+             if (buffer) {
+                 span.setAttributes({
+                     source: "blob",
+                     exists: true,
+                     format: "json",
+                     size: buffer.length,
+                 });
+                 span.end();
+                 const { payload } = parseBlob(buffer);
+                 return this.createResultFromBuffer(key, header, payload, buffer, path, etag);
+             }
+             // For binary format (large values), use streaming
+             // reader is guaranteed non-null when buffer is null (binary format)
+             if (!reader) {
+                 throw new Error("Unexpected state: binary format without reader");
+             }
+             span.setAttributes({ source: "blob", exists: true, format: "streaming" });
+             span.end();
+             return this.createStreamingResult(key, header, reader, overflow, path, etag);
+         }
+         catch (error) {
+             span.setError(error instanceof Error ? error : new Error(String(error)));
+             span.end();
+             throw error;
+         }
+     }
+     /**
+      * Creates a result from a fully-buffered blob (pure JSON format or cached).
+      */
+     createResultFromBuffer(key, header, payload, buffer, path, etag) {
+         // Determine payload bytes for streaming
+         let payloadBytes;
+         if (payload) {
+             payloadBytes = payload;
+         }
+         else if (header.encoding === "base64" &&
+             typeof header.value === "string") {
+             payloadBytes = Buffer.from(header.value, "base64");
+         }
+         else {
+             const jsonStr = JSON.stringify(header.value);
+             payloadBytes = Buffer.from(jsonStr ?? "null", "utf-8");
+         }
+         let valuePromise = null;
+         let streamPromise = null;
+         const self = this;
+         return {
+             exists: true,
+             metadata: header.metadata,
+             version: etag,
+             get value() {
+                 if (!valuePromise) {
+                     valuePromise = self.resolveValue(header, payload, path, buffer, etag);
+                 }
+                 return valuePromise;
+             },
+             get stream() {
+                 if (!streamPromise) {
+                     streamPromise = Promise.resolve(new ReadableStream({
+                         start(controller) {
+                             controller.enqueue(payloadBytes);
+                             controller.close();
+                         },
+                     }));
+                 }
+                 return streamPromise;
+             },
+             update: async (value, metadata) => {
+                 const meta = (metadata ?? header.metadata);
+                 return self.set(key, value, meta, { expectedVersion: etag });
+             },
+         };
+     }
+     /**
+      * Creates a result with true streaming for binary format (large values).
+      * The payload is streamed directly from the blob store without buffering.
+      */
+     createStreamingResult(key, header, reader, overflow, path, etag) {
+         let valuePromise = null;
+         let streamPromise = null;
+         // Shared state for coordinating between value and stream access
+         let payloadBuffer = null;
+         let streamConsumed = false;
+         const self = this;
+         // Helper to buffer the remaining payload
+         const bufferPayload = async () => {
+             if (payloadBuffer) {
+                 return payloadBuffer;
+             }
+             if (streamConsumed) {
+                 throw new Error("Cannot access value after stream has been consumed");
+             }
+             const chunks = [];
+             if (overflow) {
+                 chunks.push(overflow);
+             }
+             while (true) {
+                 const { done, value } = await reader.read();
+                 if (done)
+                     break;
+                 chunks.push(value);
+             }
+             payloadBuffer = Buffer.concat(chunks);
+             return payloadBuffer;
+         };
+         return {
+             exists: true,
+             metadata: header.metadata,
+             version: etag,
+             get value() {
+                 if (!valuePromise) {
+                     valuePromise = (async () => {
+                         const payload = await bufferPayload();
+                         let value;
+                         let size;
+                         let isBinary = false;
+                         if (header.encoding === "raw-json") {
+                             value = JSON.parse(payload.toString("utf-8"));
+                             size = payload.length;
+                         }
+                         else if (header.encoding === "raw-binary") {
+                             value = payload;
+                             size = payload.length;
+                             isBinary = true;
+                         }
+                         else {
+                             // Shouldn't happen for streaming result, but handle gracefully
+                             value = self.deserializeValue(header.value, header.encoding);
+                             size = payload.length;
+                             isBinary = header.encoding === "base64";
+                         }
+                         // Cache the result with etag
+                         const cachedEntry = {
+                             metadata: header.metadata,
+                             value: isBinary && value instanceof Buffer
+                                 ? value.toString("base64")
+                                 : value,
+                             size,
+                             isBinary,
+                             etag,
+                         };
+                         await self.cache.set(path, cachedEntry);
+                         return value;
+                     })();
+                 }
+                 return valuePromise;
+             },
+             get stream() {
+                 if (!streamPromise) {
+                     streamPromise = (async () => {
+                         // If value was already buffered, use that
+                         if (payloadBuffer) {
+                             const bufferedData = payloadBuffer;
+                             return new ReadableStream({
+                                 start(controller) {
+                                     controller.enqueue(bufferedData);
+                                     controller.close();
+                                 },
+                             });
+                         }
+                         // Otherwise, stream directly from the reader
+                         streamConsumed = true;
+                         let overflowSent = false;
+                         return new ReadableStream({
+                             async pull(controller) {
+                                 // Send overflow bytes first
+                                 if (!overflowSent && overflow) {
+                                     controller.enqueue(overflow);
+                                     overflowSent = true;
+                                     return;
+                                 }
+                                 const { done, value } = await reader.read();
+                                 if (done) {
+                                     controller.close();
+                                     return;
+                                 }
+                                 controller.enqueue(value);
+                             },
+                             cancel() {
+                                 reader.cancel();
+                             },
+                         });
+                     })();
+                 }
+                 return streamPromise;
+             },
+             update: async (value, metadata) => {
+                 const meta = (metadata ?? header.metadata);
+                 return self.set(key, value, meta, { expectedVersion: etag });
+             },
+         };
+     }
+     async resolveValue(header, payload, path, buffer, etag) {
+         let value;
+         let size;
+         let isBinary = false;
+         if (header.encoding === "raw-json" && payload) {
+             // Large JSON value stored as raw payload after header
+             value = JSON.parse(payload.toString("utf-8"));
+             size = payload.length;
+         }
+         else if (header.encoding === "raw-binary" && payload) {
+             // Large binary value stored as raw payload after header
+             value = payload;
+             size = payload.length;
+             isBinary = true;
+         }
+         else {
+             // Small value inlined in header
+             value = this.deserializeValue(header.value, header.encoding);
+             size = buffer.length;
+             isBinary = header.encoding === "base64";
+         }
+         // Cache the result with etag - binary data must be serialized as base64 for cache storage
+         const cachedEntry = {
+             metadata: header.metadata,
+             value: isBinary && value instanceof Buffer ? value.toString("base64") : value,
+             size,
+             isBinary,
+             etag,
+         };
+         await this.cache.set(path, cachedEntry);
+         return value;
+     }
+     getPayloadBytes(value, _size) {
+         if (value instanceof Buffer) {
+             return value;
+         }
+         if (value instanceof Uint8Array) {
+             return value;
+         }
+         return Buffer.from(JSON.stringify(value), "utf-8");
+     }
+     async set(key, value, ...[metadata, options]) {
+         const span = this.tracer.startSpan("kv.set", { key });
+         const path = this.getFullPath(key);
+         try {
+             // Handle streaming input - always use large file mode
+             if (value instanceof ReadableStream) {
+                 span.setAttributes({ format: "stream" });
+                 const writeTime = Date.now();
+                 const header = {
+                     metadata: metadata,
+                     encoding: "raw-binary",
+                     writeTime,
+                 };
+                 const headerBuffer = createBlob(header);
+                 // Create a combined stream: header + payload
+                 const headerStream = new ReadableStream({
+                     start(controller) {
+                         controller.enqueue(headerBuffer);
+                         controller.close();
+                     },
+                 });
+                 const combinedStream = this.concatStreams(headerStream, value);
+                 const putResult = await this.blobStore.put(path, combinedStream, {
+                     access: "private",
+                     contentType: "application/octet-stream",
+                     cacheControlMaxAge: 60,
+                     allowOverwrite: options?.override ?? true,
+                     ifMatch: options?.expectedVersion,
+                 });
+                 // Cache writeTime for consistency checking (can't cache value - stream consumed)
+                 await this.cache.set(path, {
+                     metadata: metadata,
+                     value: undefined, // Stream value not available
+                     size: 0,
+                     writeTime,
+                     etag: putResult.etag,
+                 });
+                 span.end();
+                 return { version: putResult.etag ?? "" };
+             }
+             // Non-streaming path
+             const serialized = this.serializeValue(value);
+             const isBinary = serialized.encoding === "base64";
+             let blobData;
+             let cacheValue;
+             if (serialized.data.length > this.largeValueThreshold) {
+                 span.setAttributes({ format: "large", size: serialized.data.length });
+                 // Large value: store as raw payload after header
+                 const header = {
+                     metadata: metadata,
+                     encoding: serialized.encoding === "json" ? "raw-json" : "raw-binary",
+                 };
+                 blobData = createBlob(header, serialized.data);
+                 // Cache the value - binary data as base64 string for cache serialization
+                 cacheValue =
+                     serialized.encoding === "json"
+                         ? JSON.parse(serialized.data.toString("utf-8"))
+                         : serialized.data.toString("base64"); // Binary: store as base64 for cache
+             }
+             else {
+                 span.setAttributes({ format: "inline", size: serialized.data.length });
+                 // Small value: inline in header
+                 const headerValue = serialized.encoding === "json"
+                     ? JSON.parse(serialized.data.toString("utf-8"))
+                     : serialized.data.toString("base64");
+                 const header = {
+                     metadata: metadata,
+                     value: headerValue,
+                     encoding: serialized.encoding,
+                 };
+                 blobData = createBlob(header);
+                 // Cache the value - binary data as base64 string for cache serialization
+                 cacheValue =
+                     serialized.encoding === "json"
+                         ? headerValue
+                         : serialized.data.toString("base64"); // Binary: store as base64 for cache
+             }
+             // Write blob
+             const putResult = await this.blobStore.put(path, blobData, {
+                 access: "private",
+                 contentType: isBinary ? "application/octet-stream" : "application/json",
+                 cacheControlMaxAge: 60,
+                 allowOverwrite: options?.override ?? true,
+                 ifMatch: options?.expectedVersion,
+             });
+             // Write to cache (provides read-your-writes consistency)
+             // Binary values are stored as base64 strings with isBinary flag
+             await this.cache.set(path, {
+                 metadata: metadata,
+                 value: cacheValue,
+                 size: serialized.data.length,
+                 isBinary,
+                 etag: putResult.etag,
+             });
+             span.end();
+             return { version: putResult.etag ?? "" };
+         }
+         catch (error) {
+             if (error instanceof BlobPreconditionFailedError) {
+                 span.setError(error);
+                 span.end();
+                 throw new KVVersionConflictError(key);
+             }
+             // Handle BlobError cases from real @vercel/blob:
+             // - "This blob already exists" for allowOverwrite: false
+             // - "The specified key does not exist" for ifMatch on non-existent key
+             if (error instanceof BlobError) {
+                 const message = error.message ?? "";
+                 if (message.includes("already exists") ||
+                     message.includes("does not exist")) {
+                     span.setError(error);
+                     span.end();
+                     throw new KVVersionConflictError(key);
+                 }
+             }
+             span.setError(error instanceof Error ? error : new Error(String(error)));
+             span.end();
+             throw error;
+         }
+     }
+     concatStreams(first, second) {
+         const reader1 = first.getReader();
+         const reader2 = second.getReader();
+         let readingFirst = true;
+         return new ReadableStream({
+             async pull(controller) {
+                 if (readingFirst) {
+                     const { done, value } = await reader1.read();
+                     if (done) {
+                         readingFirst = false;
+                         return this.pull?.(controller);
+                     }
+                     controller.enqueue(value);
+                 }
+                 else {
+                     const { done, value } = await reader2.read();
+                     if (done) {
+                         controller.close();
+                         return;
+                     }
+                     controller.enqueue(value);
+                 }
+             },
+             cancel() {
+                 reader1.cancel();
+                 reader2.cancel();
+             },
+         });
+     }
+     async delete(key) {
+         const span = this.tracer.startSpan("kv.delete", { key });
+         const path = this.getFullPath(key);
+         try {
+             // Delete blob (single file now, no separate payload)
+             await this.blobStore.del(path);
+             // Invalidate cache
+             await this.cache.invalidate(path);
+             span.end();
+         }
+         catch (error) {
+             span.setError(error instanceof Error ? error : new Error(String(error)));
+             span.end();
+             throw error;
+         }
+     }
+     keys(prefix) {
+         const self = this;
+         const listPrefix = this.getListPrefix(prefix ?? "");
+         return {
+             async *[Symbol.asyncIterator]() {
+                 const span = self.tracer.startSpan("kv.keys", { prefix: prefix ?? "" });
+                 let cursor;
+                 let count = 0;
+                 try {
+                     do {
+                         const result = await self.blobStore.list({
+                             prefix: listPrefix,
+                             cursor,
+                         });
+                         for (const blob of result.blobs) {
+                             if (blob.pathname.endsWith(".value")) {
+                                 count++;
+                                 yield self.stripPrefix(blob.pathname);
+                             }
+                         }
+                         cursor = result.cursor;
+                     } while (cursor);
+                     span.setAttributes({ count });
+                     span.end();
+                 }
+                 catch (error) {
+                     span.setError(error instanceof Error ? error : new Error(String(error)));
+                     span.end();
+                     throw error;
+                 }
+             },
+             async page(limit, cursor) {
+                 const span = self.tracer.startSpan("kv.keys.page", {
+                     prefix: prefix ?? "",
+                     limit,
+                 });
+                 const keys = [];
+                 try {
+                     let currentCursor = cursor;
+                     while (keys.length < limit) {
+                         const result = await self.blobStore.list({
+                             prefix: listPrefix,
+                             cursor: currentCursor,
+                             limit: limit - keys.length,
+                         });
+                         for (const blob of result.blobs) {
+                             if (blob.pathname.endsWith(".value")) {
+                                 keys.push(self.stripPrefix(blob.pathname));
+                                 if (keys.length >= limit)
+                                     break;
+                             }
+                         }
+                         currentCursor = result.cursor;
+                         if (!currentCursor)
+                             break;
+                     }
+                     span.setAttributes({ count: keys.length, hasMore: !!currentCursor });
+                     span.end();
+                     return { keys, cursor: currentCursor };
+                 }
+                 catch (error) {
+                     span.setError(error instanceof Error ? error : new Error(String(error)));
+                     span.end();
+                     throw error;
+                 }
+             },
+         };
+     }
+     /**
+      * Fetch multiple keys concurrently with bounded concurrency.
+      * Returns a Map of key -> entry for all existing keys.
+      *
+      * @param keys - Array of keys to fetch
+      * @param concurrency - Number of concurrent get operations (default: 20)
+      */
+     async getMany(keys, concurrency = 20) {
+         const span = this.tracer.startSpan("kv.getMany", {
+             keyCount: keys.length,
+             concurrency,
+         });
+         const results = new Map();
+         try {
+             // Process keys in batches
+             for (let i = 0; i < keys.length; i += concurrency) {
+                 const batch = keys.slice(i, i + concurrency);
+                 const batchResults = await Promise.all(batch.map(async (key) => {
+                     const result = await this.get(key);
+                     return { key, result };
+                 }));
+                 for (const { key, result } of batchResults) {
+                     if (result.exists) {
+                         results.set(key, result);
+                     }
+                 }
+             }
+             span.setAttributes({ count: results.size });
+             span.end();
+             return results;
+         }
+         catch (error) {
+             span.setError(error instanceof Error ? error : new Error(String(error)));
+             span.end();
+             throw error;
+         }
+     }
+     /**
+      * Iterate over key-value entries with concurrent fetching.
+      * Yields [key, entry] pairs as soon as each fetch completes.
+      *
+      * @param prefix - Optional prefix to filter keys
+      * @param concurrency - Number of concurrent get operations (default: 20)
+      */
+     entries(prefix, concurrency = 20) {
+         const self = this;
+         return {
+             async *[Symbol.asyncIterator]() {
+                 const span = self.tracer.startSpan("kv.entries", {
+                     prefix: prefix ?? "",
+                     concurrency,
+                 });
+                 let count = 0;
+                 try {
+                     const keyIterator = self.keys(prefix)[Symbol.asyncIterator]();
+                     // Pool of in-flight fetches
+                     const inFlight = new Map();
+                     let keysDone = false;
+                     // Start initial batch of fetches
+                     while (inFlight.size < concurrency && !keysDone) {
+                         const { done, value: key } = await keyIterator.next();
+                         if (done) {
+                             keysDone = true;
+                             break;
+                         }
+                         inFlight.set(key, self.get(key).then((result) => ({ key, result })));
+                     }
+                     // Process results as they complete, refilling the pool
+                     while (inFlight.size > 0) {
+                         const { key, result } = await Promise.race(inFlight.values());
+                         inFlight.delete(key);
+                         if (result.exists) {
+                             count++;
+                             yield [key, result];
+                         }
+                         if (!keysDone) {
+                             const { done, value: nextKey } = await keyIterator.next();
+                             if (done) {
+                                 keysDone = true;
+                             }
+                             else {
+                                 inFlight.set(nextKey, self
+                                     .get(nextKey)
+                                     .then((r) => ({ key: nextKey, result: r })));
+                             }
+                         }
+                     }
+                     span.setAttributes({ count });
+                     span.end();
+                 }
+                 catch (error) {
+                     span.setError(error instanceof Error ? error : new Error(String(error)));
+                     span.end();
+                     throw error;
+                 }
+             },
+             async page(limit, cursor) {
+                 const span = self.tracer.startSpan("kv.entries.page", {
+                     prefix: prefix ?? "",
+                     limit,
+                     concurrency,
+                 });
+                 try {
+                     // Get a page of keys first
+                     const { keys, cursor: nextCursor } = await self
+                         .keys(prefix)
+                         .page(limit, cursor);
+                     // Fetch all values concurrently
+                     const entriesMap = await self.getMany(keys, concurrency);
+                     // Build entries array in key order
+                     const entries = [];
+                     for (const key of keys) {
+                         const entry = entriesMap.get(key);
+                         if (entry) {
+                             entries.push([key, entry]);
+                         }
+                     }
+                     span.setAttributes({ count: entries.length, hasMore: !!nextCursor });
+                     span.end();
+                     return { entries, cursor: nextCursor };
+                 }
+                 catch (error) {
+                     span.setError(error instanceof Error ? error : new Error(String(error)));
+                     span.end();
+                     throw error;
+                 }
+             },
+         };
+     }
+     getStore(subPrefix, indexes) {
+         return new TypedKV(this, subPrefix, indexes);
+     }
+ }
+ //# sourceMappingURL=cached-kv.js.map
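
The class above is the package's core store. As a quick orientation, here is a minimal usage sketch in plain JavaScript, inferred from the constructor options and the get/set/delete signatures in this file; the root import path, sample keys, values, and metadata are assumptions, not documented API.

// Assumes the package root re-exports KV2 and KVVersionConflictError (see dist/index.js above).
import { KV2, KVVersionConflictError } from "@vercel/kv2";

const kv = new KV2({
    prefix: "app/", // namespaces every key; validatePrefix rejects prefixes ending in ".value"
    cacheTtl: 600, // seconds; DEFAULT_CACHE_TTL above is 3600
    largeValueThreshold: 1024 * 1024, // bytes; larger values are written as raw payloads
});

// set(key, value, metadata?, options?) resolves to { version } (the blob etag)
const { version } = await kv.set("user:1", { name: "Ada" }, { team: "research" });

// get() returns an entry whose value getter resolves lazily
const entry = await kv.get("user:1");
if (entry.exists) {
    console.log(await entry.value); // { name: "Ada" }
    console.log(entry.metadata); // { team: "research" }
}

// Optimistic locking: condition a write on the version read earlier.
// A concurrent write makes the put fail its ifMatch precondition,
// which set() surfaces as KVVersionConflictError.
try {
    await kv.set("user:1", { name: "Ada Lovelace" }, { team: "research" }, {
        expectedVersion: version,
    });
} catch (err) {
    if (!(err instanceof KVVersionConflictError)) {
        throw err;
    }
    // conflict: re-read the key and retry the write
}

await kv.delete("user:1");

Note that entry.update(value, metadata?) in the code above is shorthand for the same conditional write, with expectedVersion pre-filled from the etag that was read.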
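
For large values, set() treats any ReadableStream input as a raw-binary payload streamed behind the length-prefixed header, and get() exposes the payload through a lazy stream getter. A sketch under the same assumptions as above; the URL is hypothetical:

// Streaming writes cache only the writeTime marker (the stream is consumed),
// so the next get() falls through to the blob store with a consistency check.
const res = await fetch("https://example.com/archive.bin");
await kv.set("blob:archive", res.body);

const big = await kv.get("blob:archive");
if (big.exists) {
    const stream = await big.stream; // ReadableStream over the payload, no buffering
    // Per createStreamingResult above, touching big.value after consuming
    // the stream throws "Cannot access value after stream has been consumed".
}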
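
Finally, keys() and entries() each return an object that is both an async iterable and pageable via page(limit, cursor), and getMany() batches point reads. A sketch of the three access styles, again inferred from the signatures in this file:

// Lazy iteration; entries() keeps up to `concurrency` gets in flight (default 20)
for await (const [key, entry] of kv.entries("user:")) {
    console.log(key, entry.metadata);
}

// Cursor-based pagination over keys
let cursor;
do {
    const page = await kv.keys("user:").page(100, cursor);
    for (const key of page.keys) {
        console.log(key);
    }
    cursor = page.cursor;
} while (cursor);

// Bounded-concurrency batch fetch; resolves to a Map of key -> entry for existing keys
const found = await kv.getMany(["user:1", "user:2"], 10);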