wrangler 2.1.14 → 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75) hide show
  1. package/miniflare-dist/index.mjs +3 -1
  2. package/package.json +2 -1
  3. package/src/__tests__/access.test.ts +25 -0
  4. package/src/__tests__/api-dev.test.ts +1 -1
  5. package/src/__tests__/api-devregistry.test.js +2 -2
  6. package/src/__tests__/configuration.test.ts +119 -2
  7. package/src/__tests__/d1.test.ts +2 -0
  8. package/src/__tests__/deployments.test.ts +22 -22
  9. package/src/__tests__/dev.test.tsx +167 -15
  10. package/src/__tests__/helpers/msw/handlers/access.ts +13 -0
  11. package/src/__tests__/helpers/msw/handlers/deployments.ts +22 -43
  12. package/src/__tests__/helpers/msw/handlers/zones.ts +22 -0
  13. package/src/__tests__/helpers/msw/index.ts +4 -0
  14. package/src/__tests__/index.test.ts +42 -33
  15. package/src/__tests__/init.test.ts +88 -4
  16. package/src/__tests__/jest.setup.ts +11 -0
  17. package/src/__tests__/kv.test.ts +400 -400
  18. package/src/__tests__/pages.test.ts +140 -28
  19. package/src/__tests__/publish.test.ts +1161 -647
  20. package/src/__tests__/pubsub.test.ts +3 -0
  21. package/src/__tests__/queues.test.ts +371 -0
  22. package/src/__tests__/r2.test.ts +57 -52
  23. package/src/__tests__/worker-namespace.test.ts +15 -10
  24. package/src/bundle-reporter.tsx +41 -2
  25. package/src/bundle.ts +59 -30
  26. package/src/cli.ts +0 -1
  27. package/src/config/environment.ts +50 -0
  28. package/src/config/index.ts +41 -0
  29. package/src/config/validation.ts +173 -0
  30. package/src/create-worker-preview.ts +10 -3
  31. package/src/create-worker-upload-form.ts +12 -0
  32. package/src/d1/backups.tsx +11 -5
  33. package/src/d1/execute.tsx +52 -47
  34. package/src/d1/index.ts +2 -1
  35. package/src/delete.ts +7 -10
  36. package/src/deployments.ts +73 -0
  37. package/src/deprecated/index.ts +9 -24
  38. package/src/dev/dev-vars.ts +11 -8
  39. package/src/dev/dev.tsx +12 -0
  40. package/src/dev/local.tsx +26 -0
  41. package/src/dev/remote.tsx +2 -0
  42. package/src/dev/start-server.ts +7 -0
  43. package/src/dev/use-esbuild.ts +12 -5
  44. package/src/dev.tsx +12 -9
  45. package/src/dispatch-namespace.ts +4 -3
  46. package/src/index.tsx +61 -45
  47. package/src/init.ts +4 -4
  48. package/src/inspect.ts +21 -1
  49. package/src/is-interactive.ts +4 -0
  50. package/src/kv/index.ts +5 -54
  51. package/src/logger.ts +12 -0
  52. package/src/pages/constants.ts +2 -0
  53. package/src/pages/upload.tsx +42 -15
  54. package/src/proxy.ts +38 -6
  55. package/src/publish/index.ts +11 -8
  56. package/src/publish/publish.ts +151 -30
  57. package/src/pubsub/pubsub-commands.tsx +3 -2
  58. package/src/queues/cli/commands/consumer/add.ts +71 -0
  59. package/src/queues/cli/commands/consumer/index.ts +22 -0
  60. package/src/queues/cli/commands/consumer/remove.ts +38 -0
  61. package/src/queues/cli/commands/create.ts +25 -0
  62. package/src/queues/cli/commands/delete.ts +26 -0
  63. package/src/queues/cli/commands/index.ts +33 -0
  64. package/src/queues/cli/commands/list.ts +25 -0
  65. package/src/queues/client.ts +135 -0
  66. package/src/secret/index.ts +14 -39
  67. package/src/tail/index.ts +5 -8
  68. package/src/user/access.ts +69 -0
  69. package/src/worker.ts +7 -0
  70. package/src/yargs-types.ts +15 -2
  71. package/src/zones.ts +31 -5
  72. package/templates/pages-template-plugin.ts +4 -0
  73. package/templates/pages-template-worker.ts +21 -4
  74. package/wrangler-dist/cli.d.ts +42 -0
  75. package/wrangler-dist/cli.js +4559 -3228
package/src/bundle.ts CHANGED
@@ -15,6 +15,7 @@ import type { CfModule } from "./worker";
15
15
 
16
16
  type BundleResult = {
17
17
  modules: CfModule[];
18
+ dependencies: esbuild.Metafile["outputs"][string]["inputs"];
18
19
  resolvedEntryPointPath: string;
19
20
  bundleType: "esm" | "commonjs";
20
21
  stop: (() => void) | undefined;
@@ -28,31 +29,48 @@ type StaticAssetsConfig =
28
29
  | undefined;
29
30
 
30
31
  /**
31
- * Searches for any uses of node's builtin modules, and throws an error if it
32
- * finds anything. This plugin is only used when nodeCompat is not enabled.
33
- * Supports both regular node builtins, and the new "node:<MODULE>" format.
32
+ * RegExp matching against esbuild's error text when it is unable to resolve
33
+ * a Node built-in module. If we detect this when node_compat is disabled,
34
+ * we'll rewrite the error to suggest enabling it.
34
35
  */
35
- const checkForNodeBuiltinsPlugin = {
36
- name: "checkForNodeBuiltins",
37
- setup(build: esbuild.PluginBuild) {
38
- build.onResolve(
39
- {
40
- filter: new RegExp(
41
- "^(" +
42
- builtinModules.join("|") +
43
- "|" +
44
- builtinModules.map((module) => "node:" + module).join("|") +
45
- ")$"
46
- ),
47
- },
48
- () => {
49
- throw new Error(
50
- `Detected a Node builtin module import while Node compatibility is disabled.\nAdd node_compat = true to your wrangler.toml file to enable Node compatibility.`
51
- );
52
- }
53
- );
54
- },
55
- };
36
+ const nodeBuiltinResolveErrorText = new RegExp(
37
+ '^Could not resolve "(' +
38
+ builtinModules.join("|") +
39
+ "|" +
40
+ builtinModules.map((module) => "node:" + module).join("|") +
41
+ ')"$'
42
+ );
43
+
44
+ /**
45
+ * Returns true if the passed value looks like an esbuild BuildFailure object
46
+ */
47
+ export function isBuildFailure(err: unknown): err is esbuild.BuildFailure {
48
+ return (
49
+ typeof err === "object" &&
50
+ err !== null &&
51
+ "errors" in err &&
52
+ "warnings" in err
53
+ );
54
+ }
55
+
56
+ /**
57
+ * Rewrites esbuild BuildFailures for failing to resolve Node built-in modules
58
+ * to suggest enabling Node compat as opposed to `platform: "node"`.
59
+ */
60
+ export function rewriteNodeCompatBuildFailure(err: esbuild.BuildFailure) {
61
+ for (const error of err.errors) {
62
+ const match = nodeBuiltinResolveErrorText.exec(error.text);
63
+ if (match !== null) {
64
+ error.notes = [
65
+ {
66
+ location: null,
67
+ text: `The package "${match[1]}" wasn't found on the file system but is built into node.
68
+ Add "node_compat = true" to your wrangler.toml file to enable Node compatibility.`,
69
+ },
70
+ ];
71
+ }
72
+ }
73
+ }
56
74
 
57
75
  /**
58
76
  * Generate a bundle for the worker identified by the arguments passed in.
@@ -243,7 +261,7 @@ export async function bundleWorker(
243
261
  );
244
262
  }
245
263
 
246
- const result = await esbuild.build({
264
+ const buildOptions: esbuild.BuildOptions & { metafile: true } = {
247
265
  entryPoints: [inputEntry.file],
248
266
  bundle: true,
249
267
  absWorkingDir: entry.directory,
@@ -282,15 +300,24 @@ export async function bundleWorker(
282
300
  : []),
283
301
  ...(nodeCompat
284
302
  ? [NodeGlobalsPolyfills({ buffer: true }), NodeModulesPolyfills()]
285
- : // we use checkForNodeBuiltinsPlugin to throw a nicer error
286
- // if we find node builtins when nodeCompat isn't turned on
287
- [checkForNodeBuiltinsPlugin]),
303
+ : []),
288
304
  ],
289
305
  ...(jsxFactory && { jsxFactory }),
290
306
  ...(jsxFragment && { jsxFragment }),
291
307
  ...(tsconfig && { tsconfig }),
292
308
  watch,
293
- });
309
+ // The default logLevel is "warning". So that we can rewrite errors before
310
+ // logging, we disable esbuild's default logging, and log build failures
311
+ // ourselves.
312
+ logLevel: "silent",
313
+ };
314
+ let result;
315
+ try {
316
+ result = await esbuild.build(buildOptions);
317
+ } catch (e) {
318
+ if (!nodeCompat && isBuildFailure(e)) rewriteNodeCompatBuildFailure(e);
319
+ throw e;
320
+ }
294
321
 
295
322
  const entryPointOutputs = Object.entries(result.metafile.outputs).filter(
296
323
  ([_path, output]) => output.entryPoint !== undefined
@@ -306,7 +333,8 @@ export async function bundleWorker(
306
333
  listEntryPoints(entryPointOutputs)
307
334
  );
308
335
 
309
- const entryPointExports = entryPointOutputs[0][1].exports;
336
+ const { exports: entryPointExports, inputs: dependencies } =
337
+ entryPointOutputs[0][1];
310
338
  const bundleType = entryPointExports.length > 0 ? "esm" : "commonjs";
311
339
 
312
340
  const sourceMapPath = Object.keys(result.metafile.outputs).filter((_path) =>
@@ -315,6 +343,7 @@ export async function bundleWorker(
315
343
 
316
344
  return {
317
345
  modules: moduleCollector.modules,
346
+ dependencies,
318
347
  resolvedEntryPointPath: path.resolve(
319
348
  entry.directory,
320
349
  entryPointOutputs[0][0]
package/src/cli.ts CHANGED
@@ -1,4 +1,3 @@
1
- import "dotenv/config"; // Grab locally specified env params from a `.env` file.
2
1
  import process from "process";
3
2
  import { hideBin } from "yargs/helpers";
4
3
  import { unstable_dev } from "./api";
@@ -247,6 +247,18 @@ interface EnvironmentInheritable {
247
247
  destination: string;
248
248
  }[];
249
249
  };
250
+
251
+ /**
252
+ * Send Trace Events from this worker to Workers Logpush.
253
+ *
254
+ * This will not configure a corresponding Logpush job automatically.
255
+ *
256
+ * For more information about Workers Logpush, see:
257
+ * https://blog.cloudflare.com/logpush-for-workers/
258
+ *
259
+ * @inheritable
260
+ */
261
+ logpush: boolean | undefined;
250
262
  }
251
263
 
252
264
  /**
@@ -325,6 +337,44 @@ interface EnvironmentNonInheritable {
325
337
  preview_id?: string;
326
338
  }[];
327
339
 
340
+ /**
341
+ * Specifies Queues that are bound to this Worker environment.
342
+ *
343
+ * NOTE: This field is not automatically inherited from the top level environment,
344
+ * and so must be specified in every named environment.
345
+ *
346
+ * @default `{}`
347
+ * @nonInheritable
348
+ */
349
+ queues: {
350
+ /** Producer bindings */
351
+ producers?: {
352
+ /** The binding name used to refer to the Queue in the worker. */
353
+ binding: string;
354
+
355
+ /** The name of this Queue. */
356
+ queue: string;
357
+ }[];
358
+
359
+ /** Consumer configuration */
360
+ consumers?: {
361
+ /** The name of the queue from which this script should consume. */
362
+ queue: string;
363
+
364
+ /** The maximum number of messages per batch */
365
+ max_batch_size?: number;
366
+
367
+ /** The maximum number of seconds to wait to fill a batch with messages. */
368
+ max_batch_timeout?: number;
369
+
370
+ /** The maximum number of retries for each message. */
371
+ max_retries?: number;
372
+
373
+ /** The queue to send messages that failed to be consumed. */
374
+ dead_letter_queue?: string;
375
+ }[];
376
+ };
377
+
328
378
  /**
329
379
  * Specifies R2 buckets that are bound to this Worker environment.
330
380
  *
@@ -1,3 +1,5 @@
1
+ import fs from "node:fs";
2
+ import dotenv from "dotenv";
1
3
  import { findUpSync } from "find-up";
2
4
  import { logger } from "../logger";
3
5
  import { parseTOML, readFileSync } from "../parse";
@@ -86,6 +88,7 @@ export function printBindings(bindings: CfWorkerInit["bindings"]) {
86
88
  data_blobs,
87
89
  durable_objects,
88
90
  kv_namespaces,
91
+ queues,
89
92
  d1_databases,
90
93
  r2_buckets,
91
94
  logfwdr,
@@ -141,6 +144,18 @@ export function printBindings(bindings: CfWorkerInit["bindings"]) {
141
144
  });
142
145
  }
143
146
 
147
+ if (queues !== undefined && queues.length > 0) {
148
+ output.push({
149
+ type: "Queues",
150
+ entries: queues.map(({ binding, queue_name }) => {
151
+ return {
152
+ key: binding,
153
+ value: queue_name,
154
+ };
155
+ }),
156
+ });
157
+ }
158
+
144
159
  if (d1_databases !== undefined && d1_databases.length > 0) {
145
160
  output.push({
146
161
  type: "D1 Databases",
@@ -291,3 +306,29 @@ export function withConfig<T extends { config?: string }>(
291
306
  return handler({ ...rest, config: readConfig(configPath, rest) });
292
307
  };
293
308
  }
309
+
310
+ export interface DotEnv {
311
+ path: string;
312
+ parsed: dotenv.DotenvParseOutput;
313
+ }
314
+
315
+ function tryLoadDotEnv(path: string): DotEnv | undefined {
316
+ try {
317
+ const parsed = dotenv.parse(fs.readFileSync(path));
318
+ return { path, parsed };
319
+ } catch (e) {
320
+ logger.debug(`Failed to load .env file "${path}":`, e);
321
+ }
322
+ }
323
+
324
+ /**
325
+ * Loads a dotenv file from <path>, preferring to read <path>.<environment> if
326
+ * <environment> is defined and that file exists.
327
+ */
328
+ export function loadDotEnv(path: string, env?: string): DotEnv | undefined {
329
+ if (env === undefined) {
330
+ return tryLoadDotEnv(path);
331
+ } else {
332
+ return tryLoadDotEnv(`${path}.${env}`) ?? tryLoadDotEnv(path);
333
+ }
334
+ }
@@ -1075,6 +1075,16 @@ function normalizeAndValidateEnvironment(
1075
1075
  validateBindingArray(envName, validateKVBinding),
1076
1076
  []
1077
1077
  ),
1078
+ queues: notInheritable(
1079
+ diagnostics,
1080
+ topLevelEnv,
1081
+ rawConfig,
1082
+ rawEnv,
1083
+ envName,
1084
+ "queues",
1085
+ validateQueues(envName),
1086
+ { producers: [], consumers: [] }
1087
+ ),
1078
1088
  r2_buckets: notInheritable(
1079
1089
  diagnostics,
1080
1090
  topLevelEnv,
@@ -1171,6 +1181,14 @@ function normalizeAndValidateEnvironment(
1171
1181
  isBoolean,
1172
1182
  undefined
1173
1183
  ),
1184
+ logpush: inheritable(
1185
+ diagnostics,
1186
+ topLevelEnv,
1187
+ rawEnv,
1188
+ "logpush",
1189
+ isBoolean,
1190
+ undefined
1191
+ ),
1174
1192
  };
1175
1193
 
1176
1194
  return environment;
@@ -1673,6 +1691,47 @@ const validateKVBinding: ValidatorFn = (diagnostics, field, value) => {
1673
1691
  return isValid;
1674
1692
  };
1675
1693
 
1694
+ const validateQueueBinding: ValidatorFn = (diagnostics, field, value) => {
1695
+ if (typeof value !== "object" || value === null) {
1696
+ diagnostics.errors.push(
1697
+ `"queue" bindings should be objects, but got ${JSON.stringify(value)}`
1698
+ );
1699
+ return false;
1700
+ }
1701
+
1702
+ if (
1703
+ !validateAdditionalProperties(diagnostics, field, Object.keys(value), [
1704
+ "binding",
1705
+ "queue",
1706
+ ])
1707
+ ) {
1708
+ return false;
1709
+ }
1710
+
1711
+ // Queue bindings must have a binding and queue.
1712
+ let isValid = true;
1713
+ if (!isRequiredProperty(value, "binding", "string")) {
1714
+ diagnostics.errors.push(
1715
+ `"${field}" bindings should have a string "binding" field but got ${JSON.stringify(
1716
+ value
1717
+ )}.`
1718
+ );
1719
+ isValid = false;
1720
+ }
1721
+ if (
1722
+ !isRequiredProperty(value, "queue", "string") ||
1723
+ (value as { queue: string }).queue.length === 0
1724
+ ) {
1725
+ diagnostics.errors.push(
1726
+ `"${field}" bindings should have a string "queue" field but got ${JSON.stringify(
1727
+ value
1728
+ )}.`
1729
+ );
1730
+ isValid = false;
1731
+ }
1732
+ return isValid;
1733
+ };
1734
+
1676
1735
  const validateR2Binding: ValidatorFn = (diagnostics, field, value) => {
1677
1736
  if (typeof value !== "object" || value === null) {
1678
1737
  diagnostics.errors.push(
@@ -1928,3 +1987,117 @@ const validateWorkerNamespaceBinding: ValidatorFn = (
1928
1987
  }
1929
1988
  return isValid;
1930
1989
  };
1990
+
1991
+ function validateQueues(envName: string): ValidatorFn {
1992
+ return (diagnostics, field, value, config) => {
1993
+ const fieldPath =
1994
+ config === undefined ? `${field}` : `env.${envName}.${field}`;
1995
+
1996
+ if (typeof value !== "object" || Array.isArray(value) || value === null) {
1997
+ diagnostics.errors.push(
1998
+ `The field "${fieldPath}" should be an object but got ${JSON.stringify(
1999
+ value
2000
+ )}.`
2001
+ );
2002
+ return false;
2003
+ }
2004
+
2005
+ let isValid = true;
2006
+ if (
2007
+ !validateAdditionalProperties(
2008
+ diagnostics,
2009
+ fieldPath,
2010
+ Object.keys(value),
2011
+ ["consumers", "producers"]
2012
+ )
2013
+ ) {
2014
+ isValid = false;
2015
+ }
2016
+
2017
+ if (hasProperty(value, "consumers")) {
2018
+ const consumers = value.consumers;
2019
+ if (!Array.isArray(consumers)) {
2020
+ diagnostics.errors.push(
2021
+ `The field "${fieldPath}.consumers" should be an array but got ${JSON.stringify(
2022
+ consumers
2023
+ )}.`
2024
+ );
2025
+ isValid = false;
2026
+ }
2027
+
2028
+ for (let i = 0; i < consumers.length; i++) {
2029
+ const consumer = consumers[i];
2030
+ const consumerPath = `${fieldPath}.consumers[${i}]`;
2031
+ if (!validateConsumer(diagnostics, consumerPath, consumer, config)) {
2032
+ isValid = false;
2033
+ }
2034
+ }
2035
+ }
2036
+
2037
+ if (hasProperty(value, "producers")) {
2038
+ if (
2039
+ !validateBindingArray(envName, validateQueueBinding)(
2040
+ diagnostics,
2041
+ `${field}.producers`,
2042
+ value.producers,
2043
+ config
2044
+ )
2045
+ ) {
2046
+ isValid = false;
2047
+ }
2048
+ }
2049
+ return isValid;
2050
+ };
2051
+ }
2052
+
2053
+ const validateConsumer: ValidatorFn = (diagnostics, field, value, _config) => {
2054
+ if (typeof value !== "object" || value === null) {
2055
+ diagnostics.errors.push(
2056
+ `"${field}" should be a objects, but got ${JSON.stringify(value)}`
2057
+ );
2058
+ return false;
2059
+ }
2060
+
2061
+ let isValid = true;
2062
+ if (
2063
+ !validateAdditionalProperties(diagnostics, field, Object.keys(value), [
2064
+ "queue",
2065
+ "max_batch_size",
2066
+ "max_batch_timeout",
2067
+ "max_retries",
2068
+ "dead_letter_queue",
2069
+ ])
2070
+ ) {
2071
+ isValid = false;
2072
+ }
2073
+
2074
+ if (!isRequiredProperty(value, "queue", "string")) {
2075
+ diagnostics.errors.push(
2076
+ `"${field}" should have a string "queue" field but got ${JSON.stringify(
2077
+ value
2078
+ )}.`
2079
+ );
2080
+ }
2081
+
2082
+ const options: {
2083
+ key: string;
2084
+ type: "number" | "string";
2085
+ }[] = [
2086
+ { key: "max_batch_size", type: "number" },
2087
+ { key: "max_batch_timeout", type: "number" },
2088
+ { key: "max_retries", type: "number" },
2089
+ { key: "dead_letter_queue", type: "string" },
2090
+ ];
2091
+ for (const optionalOpt of options) {
2092
+ if (!isOptionalProperty(value, optionalOpt.key, optionalOpt.type)) {
2093
+ diagnostics.errors.push(
2094
+ `"${field}" should, optionally, have a ${optionalOpt.type} "${
2095
+ optionalOpt.key
2096
+ }" field but got ${JSON.stringify(value)}.`
2097
+ );
2098
+ isValid = false;
2099
+ }
2100
+ }
2101
+
2102
+ return isValid;
2103
+ };
@@ -4,7 +4,9 @@ import { fetchResult } from "./cfetch";
4
4
  import { createWorkerUploadForm } from "./create-worker-upload-form";
5
5
  import { logger } from "./logger";
6
6
  import { parseJSON } from "./parse";
7
+ import { getAccessToken } from "./user/access";
7
8
  import type { CfAccount, CfWorkerContext, CfWorkerInit } from "./worker";
9
+ import type { HeadersInit } from "undici";
8
10
 
9
11
  /**
10
12
  * A Preview Session on the edge
@@ -251,13 +253,18 @@ export async function createWorkerPreview(
251
253
  session,
252
254
  abortSignal
253
255
  );
256
+ const accessToken = await getAccessToken(token.prewarmUrl.hostname);
257
+
258
+ const headers: HeadersInit = { "cf-workers-preview-token": token.value };
259
+ if (accessToken) {
260
+ headers.cookie = `CF_Authorization=${accessToken}`;
261
+ }
262
+
254
263
  // fire and forget the prewarm call
255
264
  fetch(token.prewarmUrl.href, {
256
265
  method: "POST",
257
266
  signal: abortSignal,
258
- headers: {
259
- "cf-workers-preview-token": token.value,
260
- },
267
+ headers,
261
268
  }).then(
262
269
  (response) => {
263
270
  if (!response.ok) {
@@ -39,6 +39,7 @@ type WorkerMetadataBinding =
39
39
  script_name?: string;
40
40
  environment?: string;
41
41
  }
42
+ | { type: "queue"; name: string; queue_name: string }
42
43
  | { type: "r2_bucket"; name: string; bucket_name: string }
43
44
  | { type: "d1"; name: string; id: string }
44
45
  | { type: "service"; name: string; service: string; environment?: string }
@@ -61,6 +62,7 @@ export interface WorkerMetadata {
61
62
  capnp_schema?: string;
62
63
  bindings: WorkerMetadataBinding[];
63
64
  keep_bindings?: WorkerMetadataBinding["type"][];
65
+ logpush?: boolean;
64
66
  }
65
67
 
66
68
  /**
@@ -76,6 +78,7 @@ export function createWorkerUploadForm(worker: CfWorkerInit): FormData {
76
78
  compatibility_date,
77
79
  compatibility_flags,
78
80
  keepVars,
81
+ logpush,
79
82
  } = worker;
80
83
 
81
84
  let { modules } = worker;
@@ -110,6 +113,14 @@ export function createWorkerUploadForm(worker: CfWorkerInit): FormData {
110
113
  }
111
114
  );
112
115
 
116
+ bindings.queues?.forEach(({ binding, queue_name }) => {
117
+ metadataBindings.push({
118
+ type: "queue",
119
+ name: binding,
120
+ queue_name,
121
+ });
122
+ });
123
+
113
124
  bindings.r2_buckets?.forEach(({ binding, bucket_name }) => {
114
125
  metadataBindings.push({
115
126
  name: binding,
@@ -268,6 +279,7 @@ export function createWorkerUploadForm(worker: CfWorkerInit): FormData {
268
279
  ...(migrations && { migrations }),
269
280
  capnp_schema: bindings.logfwdr?.schema,
270
281
  ...(keepVars && { keep_bindings: ["plain_text", "json"] }),
282
+ ...(logpush !== undefined && { logpush }),
271
283
  };
272
284
 
273
285
  formData.set("metadata", JSON.stringify(metadata));
@@ -1,4 +1,5 @@
1
1
  import fs from "node:fs/promises";
2
+ import * as path from "path";
2
3
  import { render } from "ink";
3
4
  import Table from "ink-table";
4
5
  import React from "react";
@@ -189,15 +190,20 @@ export const DownloadHandler = withConfig<BackupDownloadArgs>(
189
190
  accountId,
190
191
  name
191
192
  );
192
- const filename = output || `./${name}.${backupId.slice(0, 8)}.sqlite3`;
193
-
194
- console.log(`Downloading backup ${backupId} of ${name} to: ${filename}`);
193
+ const filename =
194
+ output ||
195
+ path.join(
196
+ process.env.INIT_CWD as string,
197
+ `${name}.${backupId.slice(0, 8)}.sqlite3`
198
+ );
199
+
200
+ logger.log(`🌀 Downloading backup ${backupId} from '${name}'`);
195
201
  const response = await getBackupResponse(accountId, db.uuid, backupId);
196
- console.log(`Got file. Saving...`);
202
+ logger.log(`🌀 Saving to ${filename}`);
197
203
  // TODO: stream this once we upgrade to Node18 and can use Writable.fromWeb
198
204
  const buffer = await response.arrayBuffer();
199
205
  await fs.writeFile(filename, new Buffer(buffer));
200
- console.log(`Done! Wrote ${filename} (${formatBytes(buffer.byteLength)})`);
206
+ logger.log(`🌀 Done!`);
201
207
  }
202
208
  );
203
209