@hot-updater/cloudflare 0.18.0 → 0.18.2

This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between versions exactly as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -282,7 +282,7 @@ var require_lib = __commonJS({ "../../node_modules/.pnpm/pg-minify@1.6.5/node_mo
 
  //#endregion
  //#region src/d1Database.ts
- var import_lib = __toESM(require_lib(), 1);
+ var import_lib = __toESM(require_lib());
  async function resolvePage(singlePage) {
  const results = [];
  for await (const page of singlePage.iterPages()) {
@@ -291,8 +291,70 @@ async function resolvePage(singlePage) {
  }
  return results;
  }
+ function buildWhereClause(conditions) {
+ const clauses = [];
+ const params = [];
+ if (conditions.channel) {
+ clauses.push("channel = ?");
+ params.push(conditions.channel);
+ }
+ if (conditions.platform) {
+ clauses.push("platform = ?");
+ params.push(conditions.platform);
+ }
+ const whereClause = clauses.length > 0 ? ` WHERE ${clauses.join(" AND ")}` : "";
+ return {
+ sql: whereClause,
+ params
+ };
+ }
+ function transformRowToBundle(row) {
+ return {
+ id: row.id,
+ channel: row.channel,
+ enabled: Boolean(row.enabled),
+ shouldForceUpdate: Boolean(row.should_force_update),
+ fileHash: row.file_hash,
+ gitCommitHash: row.git_commit_hash,
+ message: row.message,
+ platform: row.platform,
+ targetAppVersion: row.target_app_version,
+ storageUri: row.storage_uri,
+ fingerprintHash: row.fingerprint_hash,
+ metadata: row?.metadata ? JSON.parse(row?.metadata) : {}
+ };
+ }
  const d1Database = (config, hooks) => {
  let bundles = [];
+ async function getTotalCount(context, conditions) {
+ const { sql: whereClause, params } = buildWhereClause(conditions);
+ const countSql = (0, import_lib.default)(`SELECT COUNT(*) as total FROM bundles${whereClause}`);
+ const countResult = await context.cf.d1.database.query(config.databaseId, {
+ account_id: config.accountId,
+ sql: countSql,
+ params
+ });
+ const rows = await resolvePage(countResult);
+ return rows[0]?.total || 0;
+ }
+ async function getPaginatedBundles(context, conditions, limit, offset) {
+ const { sql: whereClause, params } = buildWhereClause(conditions);
+ const sql = (0, import_lib.default)(`
+ SELECT * FROM bundles
+ ${whereClause}
+ ORDER BY id DESC
+ LIMIT ?
+ OFFSET ?
+ `);
+ params.push(limit, offset);
+ const result = await context.cf.d1.database.query(config.databaseId, {
+ account_id: config.accountId,
+ sql,
+ params
+ });
+ const rows = await resolvePage(result);
+ return rows.map(transformRowToBundle);
+ }
  return (0, __hot_updater_plugin_core.createDatabasePlugin)("d1Database", {
  getContext: () => ({ cf: new cloudflare.default({ apiToken: config.cloudflareApiToken }) }),
  async getBundleById(context, bundleId) {
@@ -307,67 +369,21 @@ const d1Database = (config, hooks) => {
  });
  const rows = await resolvePage(singlePage);
  if (rows.length === 0) return null;
- const row = rows[0];
- return {
- channel: row.channel,
- enabled: Boolean(row.enabled),
- shouldForceUpdate: Boolean(row.should_force_update),
- fileHash: row.file_hash,
- gitCommitHash: row.git_commit_hash,
- id: row.id,
- message: row.message,
- platform: row.platform,
- targetAppVersion: row.target_app_version,
- storageUri: row.storage_uri,
- fingerprintHash: row.fingerprint_hash,
- metadata: row?.metadata ? JSON.parse(row?.metadata) : {}
- };
+ return transformRowToBundle(rows[0]);
  },
  async getBundles(context, options) {
- const { where, limit, offset = 0 } = options ?? {};
- let sql = "SELECT * FROM bundles";
- const params = [];
- const conditions = [];
- if (where?.channel) {
- conditions.push("channel = ?");
- params.push(where.channel);
- }
- if (where?.platform) {
- conditions.push("platform = ?");
- params.push(where.platform);
- }
- if (conditions.length > 0) sql += ` WHERE ${conditions.join(" AND ")}`;
- sql += " ORDER BY id DESC";
- if (limit) {
- sql += " LIMIT ?";
- params.push(limit);
- }
- if (offset) {
- sql += " OFFSET ?";
- params.push(offset);
- }
- const singlePage = await context.cf.d1.database.query(config.databaseId, {
- account_id: config.accountId,
- sql: (0, import_lib.default)(sql),
- params
- });
- const rows = await resolvePage(singlePage);
- if (rows.length === 0) bundles = [];
- else bundles = rows.map((row) => ({
- id: row.id,
- channel: row.channel,
- enabled: Boolean(row.enabled),
- shouldForceUpdate: Boolean(row.should_force_update),
- fileHash: row.file_hash,
- gitCommitHash: row.git_commit_hash,
- message: row.message,
- platform: row.platform,
- targetAppVersion: row.target_app_version,
- storageUri: row.storage_uri,
- fingerprintHash: row.fingerprint_hash,
- metadata: row?.metadata ? JSON.parse(row?.metadata) : {}
- }));
- return bundles;
+ const { where = {}, limit, offset } = options;
+ const totalCount = await getTotalCount(context, where);
+ bundles = await getPaginatedBundles(context, where, limit, offset);
+ const paginationOptions = {
+ limit,
+ offset
+ };
+ const pagination = (0, __hot_updater_plugin_core.calculatePagination)(totalCount, paginationOptions);
+ return {
+ data: bundles,
+ pagination
+ };
  },
  async getChannels(context) {
  const sql = (0, import_lib.default)(`
@@ -1134,7 +1150,7 @@ const getVerboseObject = ({ type, result, verboseInfo: { escapedCommand, command
  type,
  escapedCommand,
  commandId: `${commandId}`,
- timestamp: new Date(),
+ timestamp: /* @__PURE__ */ new Date(),
  piped,
  result,
  options
@@ -2255,7 +2271,7 @@ const getFromStream = (source, from = "stdout") => {
  if (sourceStream === null || sourceStream === void 0) throw new TypeError(getInvalidStdioOptionMessage(fdNumber, from, options, isWritable));
  return sourceStream;
  };
- const SUBPROCESS_OPTIONS = new WeakMap();
+ const SUBPROCESS_OPTIONS = /* @__PURE__ */ new WeakMap();
  const getFdNumber = (fileDescriptors, fdName, isWritable) => {
  const fdNumber = parseFdNumber(fdName, isWritable);
  validateFdNumber(fdNumber, fdName, isWritable, fileDescriptors);
@@ -2384,7 +2400,7 @@ const onDisconnect = async ({ anyProcess, channel, isSubprocess, ipcEmitter, bou
  ipcEmitter.connected = false;
  ipcEmitter.emit("disconnect");
  };
- const INCOMING_MESSAGES = new WeakMap();
+ const INCOMING_MESSAGES = /* @__PURE__ */ new WeakMap();
 
  //#endregion
  //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/ipc/forward.js
@@ -2401,7 +2417,7 @@ const getIpcEmitter = (anyProcess, channel, isSubprocess) => {
  });
  return ipcEmitter;
  };
- const IPC_EMITTERS = new WeakMap();
+ const IPC_EMITTERS = /* @__PURE__ */ new WeakMap();
  const forwardEvents = ({ ipcEmitter, anyProcess, channel, isSubprocess }) => {
  const boundOnMessage = onMessage.bind(void 0, {
  anyProcess,
@@ -2500,7 +2516,7 @@ const RESPONSE_TYPE = "execa:ipc:response";
  //#endregion
  //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/ipc/outgoing.js
  const startSendMessage = (anyProcess, wrappedMessage, strict) => {
- if (!OUTGOING_MESSAGES.has(anyProcess)) OUTGOING_MESSAGES.set(anyProcess, new Set());
+ if (!OUTGOING_MESSAGES.has(anyProcess)) OUTGOING_MESSAGES.set(anyProcess, /* @__PURE__ */ new Set());
  const outgoingMessages = OUTGOING_MESSAGES.get(anyProcess);
  const onMessageSent = createDeferred();
  const id = strict ? wrappedMessage.id : void 0;
@@ -2525,7 +2541,7 @@ const waitForOutgoingMessages = async (anyProcess, ipcEmitter, wrappedMessage) =
  await Promise.all(outgoingMessages.map(({ onMessageSent }) => onMessageSent));
  }
  };
- const OUTGOING_MESSAGES = new WeakMap();
+ const OUTGOING_MESSAGES = /* @__PURE__ */ new WeakMap();
  const hasMessageListeners = (anyProcess, ipcEmitter) => ipcEmitter.listenerCount("message") > getMinListenerCount(anyProcess);
  const getMinListenerCount = (anyProcess) => SUBPROCESS_OPTIONS.has(anyProcess) && !getFdSpecificValue(SUBPROCESS_OPTIONS.get(anyProcess).options.buffer, "ipc") ? 1 : 0;
 
@@ -2597,7 +2613,7 @@ const getSendMethod = (anyProcess) => {
  PROCESS_SEND_METHODS.set(anyProcess, sendMethod);
  return sendMethod;
  };
- const PROCESS_SEND_METHODS = new WeakMap();
+ const PROCESS_SEND_METHODS = /* @__PURE__ */ new WeakMap();
 
  //#endregion
  //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/ipc/graceful.js
@@ -2914,14 +2930,14 @@ const CR_BINARY = CR.codePointAt(0);
  function isStream(stream, { checkOpen = true } = {}) {
  return stream !== null && typeof stream === "object" && (stream.writable || stream.readable || !checkOpen || stream.writable === void 0 && stream.readable === void 0) && typeof stream.pipe === "function";
  }
- function isWritableStream$1(stream, { checkOpen = true } = {}) {
+ function isWritableStream(stream, { checkOpen = true } = {}) {
  return isStream(stream, { checkOpen }) && (stream.writable || !checkOpen) && typeof stream.write === "function" && typeof stream.end === "function" && typeof stream.writable === "boolean" && typeof stream.writableObjectMode === "boolean" && typeof stream.destroy === "function" && typeof stream.destroyed === "boolean";
  }
- function isReadableStream$1(stream, { checkOpen = true } = {}) {
+ function isReadableStream(stream, { checkOpen = true } = {}) {
  return isStream(stream, { checkOpen }) && (stream.readable || !checkOpen) && typeof stream.read === "function" && typeof stream.readable === "boolean" && typeof stream.readableObjectMode === "boolean" && typeof stream.destroy === "function" && typeof stream.destroyed === "boolean";
  }
  function isDuplexStream(stream, options) {
- return isWritableStream$1(stream, options) && isReadableStream$1(stream, options);
+ return isWritableStream(stream, options) && isReadableStream(stream, options);
  }
 
  //#endregion
@@ -3008,7 +3024,7 @@ function h({ preventCancel: r = !1 } = {}) {
  //#endregion
  //#region ../../node_modules/.pnpm/get-stream@9.0.1/node_modules/get-stream/source/stream.js
  const getAsyncIterable = (stream) => {
- if (isReadableStream$1(stream, { checkOpen: false }) && nodeImports.on !== void 0) return getStreamIterable(stream);
+ if (isReadableStream(stream, { checkOpen: false }) && nodeImports.on !== void 0) return getStreamIterable(stream);
  if (typeof stream?.[Symbol.asyncIterator] === "function") return stream;
  if (toString.call(stream) === "[object ReadableStream]") return h.call(stream);
  throw new TypeError("The first argument must be a Readable, a ReadableStream, or an async iterable.");
@@ -3707,10 +3723,10 @@ const KNOWN_STDIO_STRINGS = new Set([
  "overlapped",
  "pipe"
  ]);
- const isReadableStream = (value) => Object.prototype.toString.call(value) === "[object ReadableStream]";
- const isWritableStream = (value) => Object.prototype.toString.call(value) === "[object WritableStream]";
- const isWebStream = (value) => isReadableStream(value) || isWritableStream(value);
- const isTransformStream = (value) => isReadableStream(value?.readable) && isWritableStream(value?.writable);
+ const isReadableStream$1 = (value) => Object.prototype.toString.call(value) === "[object ReadableStream]";
+ const isWritableStream$1 = (value) => Object.prototype.toString.call(value) === "[object WritableStream]";
+ const isWebStream = (value) => isReadableStream$1(value) || isWritableStream$1(value);
+ const isTransformStream = (value) => isReadableStream$1(value?.readable) && isWritableStream$1(value?.writable);
  const isAsyncIterableObject = (value) => isObject(value) && typeof value[Symbol.asyncIterator] === "function";
  const isIterableObject = (value) => isObject(value) && typeof value[Symbol.iterator] === "function";
  const isObject = (value) => typeof value === "object" && value !== null;
@@ -3874,10 +3890,10 @@ const guessStreamDirection = {
  iterable: alwaysInput,
  asyncIterable: alwaysInput,
  uint8Array: alwaysInput,
- webStream: (value) => isWritableStream(value) ? "output" : "input",
+ webStream: (value) => isWritableStream$1(value) ? "output" : "input",
  nodeStream(value) {
- if (!isReadableStream$1(value, { checkOpen: false })) return "output";
- return isWritableStream$1(value, { checkOpen: false }) ? void 0 : "input";
+ if (!isReadableStream(value, { checkOpen: false })) return "output";
+ return isWritableStream(value, { checkOpen: false }) ? void 0 : "input";
  },
  webTransform: anyDirection,
  duplex: anyDirection,
@@ -4007,7 +4023,7 @@ const handleInputOption = (input) => input === void 0 ? [] : [{
  optionName: "input"
  }];
  const getInputType = (input) => {
- if (isReadableStream$1(input, { checkOpen: false })) return "nodeStream";
+ if (isReadableStream(input, { checkOpen: false })) return "nodeStream";
  if (typeof input === "string") return "string";
  if (isUint8Array(input)) return "uint8Array";
  throw new Error("The `input` option must be a string, a Uint8Array or a Node.js Readable stream.");
@@ -5145,7 +5161,7 @@ var MergedStream = class extends node_stream.PassThrough {
  #aborted = new Set([]);
  #onFinished;
  #unpipeEvent = Symbol("unpipe");
- #streamPromises = new WeakMap();
+ #streamPromises = /* @__PURE__ */ new WeakMap();
  add(stream) {
  validateStream(stream);
  if (this.#streams.has(stream)) return;
@@ -5325,7 +5341,7 @@ const abortSourceStream = (source) => {
  //#endregion
  //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/io/output-async.js
  const pipeOutputAsync = (subprocess, fileDescriptors, controller) => {
- const pipeGroups = new Map();
+ const pipeGroups = /* @__PURE__ */ new Map();
  for (const [fdNumber, { stdioItems, direction }] of Object.entries(fileDescriptors)) {
  for (const { stream } of stdioItems.filter(({ type }) => TRANSFORM_TYPES.has(type))) pipeTransform(subprocess, stream, direction, fdNumber);
  for (const { stream } of stdioItems.filter(({ type }) => !TRANSFORM_TYPES.has(type))) pipeStdioItem({
@@ -5731,7 +5747,7 @@ const cleanupMergedStreamsMap = async (destinationStream) => {
  } catch {}
  MERGED_STREAMS.delete(destinationStream);
  };
- const MERGED_STREAMS = new WeakMap();
+ const MERGED_STREAMS = /* @__PURE__ */ new WeakMap();
  const SOURCE_LISTENERS_PER_PIPE = 2;
  const DESTINATION_LISTENERS_PER_PIPE = 1;
 
@@ -6217,9 +6233,9 @@ const throwOnSubprocessError = async (subprocess, { signal }) => {
  //#endregion
  //#region ../../node_modules/.pnpm/execa@9.5.2/node_modules/execa/lib/convert/concurrent.js
  const initializeConcurrentStreams = () => ({
- readableDestroy: new WeakMap(),
- writableFinal: new WeakMap(),
- writableDestroy: new WeakMap()
+ readableDestroy: /* @__PURE__ */ new WeakMap(),
+ writableFinal: /* @__PURE__ */ new WeakMap(),
+ writableDestroy: /* @__PURE__ */ new WeakMap()
  });
  const addConcurrentStream = (concurrentStreams, stream, waitName) => {
  const weakMap = concurrentStreams[waitName];
@@ -8114,16 +8130,16 @@ var __classPrivateFieldGet = void 0 && (void 0).__classPrivateFieldGet || functi
  var _Mime_extensionToType, _Mime_typeToExtension, _Mime_typeToExtensions;
  var Mime = class {
  constructor(...args) {
- _Mime_extensionToType.set(this, new Map());
- _Mime_typeToExtension.set(this, new Map());
- _Mime_typeToExtensions.set(this, new Map());
+ _Mime_extensionToType.set(this, /* @__PURE__ */ new Map());
+ _Mime_typeToExtension.set(this, /* @__PURE__ */ new Map());
+ _Mime_typeToExtensions.set(this, /* @__PURE__ */ new Map());
  for (const arg of args) this.define(arg);
  }
  define(typeMap, force = false) {
  for (let [type, extensions] of Object.entries(typeMap)) {
  type = type.toLowerCase();
  extensions = extensions.map((ext) => ext.toLowerCase());
- if (!__classPrivateFieldGet(this, _Mime_typeToExtensions, "f").has(type)) __classPrivateFieldGet(this, _Mime_typeToExtensions, "f").set(type, new Set());
+ if (!__classPrivateFieldGet(this, _Mime_typeToExtensions, "f").has(type)) __classPrivateFieldGet(this, _Mime_typeToExtensions, "f").set(type, /* @__PURE__ */ new Set());
  const allExtensions = __classPrivateFieldGet(this, _Mime_typeToExtensions, "f").get(type);
  let first = true;
  for (let extension of extensions) {
@@ -8173,7 +8189,7 @@ var Mime = class {
  };
  }
  };
- _Mime_extensionToType = new WeakMap(), _Mime_typeToExtension = new WeakMap(), _Mime_typeToExtensions = new WeakMap();
+ _Mime_extensionToType = /* @__PURE__ */ new WeakMap(), _Mime_typeToExtension = /* @__PURE__ */ new WeakMap(), _Mime_typeToExtensions = /* @__PURE__ */ new WeakMap();
  var Mime_default = Mime;
 
  //#endregion
@@ -8200,8 +8216,8 @@ const r2Storage = (config, hooks) => (_) => {
  const filename = path.default.basename(bundlePath);
  const Key = [bundleId, filename].join("/");
  try {
- const { stderr } = await wrangler("r2", "object", "put", [bucketName, Key].join("/"), "--file", bundlePath, ...contentType ? ["--content-type", contentType] : [], "--remote");
- if (stderr) throw new Error(stderr);
+ const { stderr, exitCode } = await wrangler("r2", "object", "put", [bucketName, Key].join("/"), "--file", bundlePath, ...contentType ? ["--content-type", contentType] : [], "--remote");
+ if (exitCode !== 0 && stderr) throw new Error(stderr);
  } catch (error$1) {
  if (error$1 instanceof ExecaError) throw new Error(error$1.stderr || error$1.stdout);
  throw error$1;
package/dist/index.d.cts CHANGED
@@ -8,7 +8,6 @@ interface D1DatabaseConfig {
  cloudflareApiToken: string;
  }
  declare const d1Database: (config: D1DatabaseConfig, hooks?: DatabasePluginHooks) => (options: _hot_updater_plugin_core0.BasePluginArgs) => _hot_updater_plugin_core0.DatabasePlugin;
-
  //#endregion
  //#region src/r2Storage.d.ts
  interface R2StorageConfig {
@@ -17,6 +16,5 @@ interface R2StorageConfig {
  bucketName: string;
  }
  declare const r2Storage: (config: R2StorageConfig, hooks?: StoragePluginHooks) => (_: BasePluginArgs) => StoragePlugin;
-
  //#endregion
  export { D1DatabaseConfig, R2StorageConfig, d1Database, r2Storage };
package/dist/index.d.ts CHANGED
@@ -8,7 +8,6 @@ interface D1DatabaseConfig {
  cloudflareApiToken: string;
  }
  declare const d1Database: (config: D1DatabaseConfig, hooks?: DatabasePluginHooks) => (options: _hot_updater_plugin_core0.BasePluginArgs) => _hot_updater_plugin_core0.DatabasePlugin;
-
  //#endregion
  //#region src/r2Storage.d.ts
  interface R2StorageConfig {
@@ -17,6 +16,5 @@ interface R2StorageConfig {
  bucketName: string;
  }
  declare const r2Storage: (config: R2StorageConfig, hooks?: StoragePluginHooks) => (_: BasePluginArgs) => StoragePlugin;
-
  //#endregion
  export { D1DatabaseConfig, R2StorageConfig, d1Database, r2Storage };