@storm-software/cloudflare-tools 0.71.28 → 0.71.30

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,20 @@

  # Changelog for Storm Ops - Cloudflare Tools

+ ## [0.71.29](https://github.com/storm-software/storm-ops/releases/tag/cloudflare-tools%400.71.29) (12/18/2025)
+
+ ### Bug Fixes
+
+ - **cloudflare-tools:** Ensure data is encoded in base64 prior to upload
+   ([cb7de3b06](https://github.com/storm-software/storm-ops/commit/cb7de3b06))
+
+ ## [0.71.28](https://github.com/storm-software/storm-ops/releases/tag/cloudflare-tools%400.71.28) (12/17/2025)
+
+ ### Bug Fixes
+
+ - **cloudflare-tools:** Resolve issue uploading image files to R2 bucket
+   ([640840262](https://github.com/storm-software/storm-ops/commit/640840262))
+
  ## [0.71.27](https://github.com/storm-software/storm-ops/releases/tag/cloudflare-tools%400.71.27) (12/17/2025)

  ### Bug Fixes
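The two fixes above target the same upload path: binary assets (such as images) were being read into a JavaScript string and uploaded as-is, which corrupted them; they are now base64-encoded first. A minimal sketch of the corrected behavior, using a hypothetical standalone helper rather than the package's actual API:

```ts
import { readFile } from "node:fs/promises";

// Sketch of the 0.71.28/0.71.29 fixes: text-like content is uploaded as
// UTF-8, while binary content is base64-encoded and wrapped in a data URI
// so the bytes survive the trip to the R2 bucket intact.
async function toUploadPayload(file: string, type: string): Promise<string> {
  if (type === "application/json" || type.includes("text")) {
    return readFile(file, "utf8");
  }
  const buffer = await readFile(file); // raw bytes, no lossy string round-trip
  return `data:${type};base64,${buffer.toString("base64")}`;
}
```

Note that, as the hunks below show, the stored object is the data-URI string itself (`data:<type>;base64,...`) rather than raw bytes, so consumers of the bucket objects are expected to decode it.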
@@ -1,6 +1,6 @@
  import {
    generator_default
- } from "./chunk-E64DDPVR.mjs";
+ } from "./chunk-FPJU3YOH.mjs";
  import {
    findWorkspaceRoot,
    getConfig
@@ -193,10 +193,10 @@ async function runExecutor(options, context) {
    bucketPath,
    name,
    version,
-   type === "application/json" || type.includes("text") ? await _promises.readFile.call(void 0, file, "utf8") : await _promises.readFile.call(void 0, file, "binary"),
-   // Buffer.from(await readFile(file, "binary"), "binary").toString(
-   //   "base64"
-   // ),
+   type === "application/json" || type.includes("text") ? await _promises.readFile.call(void 0, file, "utf8") : `data:${type};base64,${Buffer.from(
+     await _promises.readFile.call(void 0, file, "binary"),
+     "binary"
+   ).toString("base64")}`,
    type,
    isDryRun
  );
@@ -7,7 +7,7 @@ var require_package = __commonJS({
  "package.json"(exports, module) {
    module.exports = {
      name: "@storm-software/cloudflare-tools",
-     version: "0.71.27",
+     version: "0.71.29",
      description: "A Nx plugin package that contains various executors, generators, and utilities that assist in managing Cloudflare services.",
      repository: {
        type: "github",
@@ -7,7 +7,7 @@ var require_package = _chunkMCKGQKYUjs.__commonJS.call(void 0, {
  "package.json"(exports, module) {
    module.exports = {
      name: "@storm-software/cloudflare-tools",
-     version: "0.71.27",
+     version: "0.71.29",
      description: "A Nx plugin package that contains various executors, generators, and utilities that assist in managing Cloudflare services.",
      repository: {
        type: "github",
@@ -1,10 +1,3 @@
- import {
-   createHttpHandler
- } from "./chunk-SWYYMID7.mjs";
- import {
-   getInternalDependencies,
-   uploadFile
- } from "./chunk-UI2F3MMU.mjs";
  import {
    createCliOptions,
    getPackageInfo
@@ -13,6 +6,13 @@ import {
    findWorkspaceRoot,
    getConfig
  } from "./chunk-G4ZCI2MN.mjs";
+ import {
+   createHttpHandler
+ } from "./chunk-SWYYMID7.mjs";
+ import {
+   getInternalDependencies,
+   uploadFile
+ } from "./chunk-UI2F3MMU.mjs";
  import {
    correctPaths,
    joinPaths,
@@ -193,10 +193,10 @@ async function runExecutor(options, context) {
    bucketPath,
    name,
    version,
-   type === "application/json" || type.includes("text") ? await readFile(file, "utf8") : await readFile(file, "binary"),
-   // Buffer.from(await readFile(file, "binary"), "binary").toString(
-   //   "base64"
-   // ),
+   type === "application/json" || type.includes("text") ? await readFile(file, "utf8") : `data:${type};base64,${Buffer.from(
+     await readFile(file, "binary"),
+     "binary"
+   ).toString("base64")}`,
    type,
    isDryRun
  );
@@ -1,6 +1,6 @@
  "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }

- var _chunkM34YY63Mjs = require('./chunk-M34YY63M.js');
+ var _chunkOZ44M6HXjs = require('./chunk-OZ44M6HX.js');



@@ -66,7 +66,7 @@ ${Object.keys(process.env).map((key) => ` - ${key}=${JSON.stringify(process.env[
  const options = await normalizeOptions(tree, schema, config);
  const tasks = [];
  tasks.push(
-   await _chunkM34YY63Mjs.generator_default.call(void 0, tree, {
+   await _chunkOZ44M6HXjs.generator_default.call(void 0, tree, {
      ...options,
      skipFormat: true
    })
@@ -0,0 +1,225 @@
+ import {
+   createHttpHandler
+ } from "./chunk-SWYYMID7.mjs";
+ import {
+   getInternalDependencies,
+   uploadFile
+ } from "./chunk-UI2F3MMU.mjs";
+ import {
+   createCliOptions,
+   getPackageInfo
+ } from "./chunk-3MAI3FU2.mjs";
+ import {
+   findWorkspaceRoot,
+   getConfig
+ } from "./chunk-G4ZCI2MN.mjs";
+ import {
+   correctPaths,
+   joinPaths,
+   writeDebug,
+   writeSuccess,
+   writeTrace,
+   writeWarning
+ } from "./chunk-TPNHSNNZ.mjs";
+
+ // src/executors/r2-upload-publish/executor.ts
+ import { S3 } from "@aws-sdk/client-s3";
+ import {
+   createProjectGraphAsync,
+   readCachedProjectGraph
+ } from "@nx/devkit";
+ import { glob } from "glob";
+ import mime from "mime-types";
+ import { execSync } from "node:child_process";
+ import { statSync } from "node:fs";
+ import { readFile } from "node:fs/promises";
+ async function runExecutor(options, context) {
+   const isDryRun = process.env.NX_DRY_RUN === "true" || options.dryRun || false;
+   if (!context.projectName) {
+     throw new Error("The executor requires a projectName.");
+   }
+   if (!options.path) {
+     throw new Error("The executor requires the `path` option to upload.");
+   }
+   console.info(
+     `\u{1F680} Running Storm Cloudflare Publish executor on the ${context.projectName} worker`
+   );
+   if (!context.projectName || !context.projectsConfigurations?.projects || !context.projectsConfigurations.projects[context.projectName] || !context.projectsConfigurations.projects[context.projectName]?.root) {
+     throw new Error("The executor requires projectsConfigurations.");
+   }
+   try {
+     const workspaceRoot = findWorkspaceRoot();
+     const config = await getConfig(workspaceRoot);
+     const projectName = context.projectsConfigurations.projects[context.projectName]?.name ?? context.projectName;
+     const projectDetails = getPackageInfo(
+       context.projectsConfigurations.projects[context.projectName]
+     );
+     const bucketId = options.bucketId;
+     const bucketPath = options.bucketPath || "/";
+     if (!bucketId) {
+       throw new Error("The executor requires a bucketId.");
+     }
+     const args = createCliOptions({ ...options });
+     if (isDryRun) {
+       args.push("--dry-run");
+     }
+     const cloudflareAccountId = process.env.CLOUDFLARE_ACCOUNT_ID || process.env.STORM_BOT_CLOUDFLARE_ACCOUNT;
+     if (!options?.registry && !cloudflareAccountId) {
+       throw new Error(
+         "The registry option and `CLOUDFLARE_ACCOUNT_ID` (or `STORM_BOT_CLOUDFLARE_ACCOUNT`) environment variable are not set. Please set one of these values to upload to the Cloudflare R2 bucket."
+       );
+     }
+     if (!process.env.STORM_BOT_ACCESS_KEY_ID && !process.env.ACCESS_KEY_ID && !process.env.CLOUDFLARE_ACCESS_KEY_ID && !process.env.AWS_ACCESS_KEY_ID || !process.env.STORM_BOT_SECRET_ACCESS_KEY && !process.env.CLOUDFLARE_SECRET_ACCESS_KEY && !process.env.SECRET_ACCESS_KEY && !process.env.AWS_SECRET_ACCESS_KEY) {
+       throw new Error(
+         "The `ACCESS_KEY_ID` (or `STORM_BOT_ACCESS_KEY_ID`) and `SECRET_ACCESS_KEY` (or `STORM_BOT_SECRET_ACCESS_KEY`) environment variables are not set. Please set these environment variables to upload to the Cloudflare R2 bucket."
+       );
+     }
+     const registry = options?.registry ? options.registry : `https://${cloudflareAccountId}.r2.cloudflarestorage.com`;
+     let projectGraph;
+     try {
+       projectGraph = readCachedProjectGraph();
+     } catch {
+       await createProjectGraphAsync();
+       projectGraph = readCachedProjectGraph();
+     }
+     if (!projectGraph) {
+       throw new Error(
+         "The executor failed because the project graph is not available. Please run the build command again."
+       );
+     }
+     writeDebug(
+       `Publishing ${context.projectName} to the ${bucketId} R2 Bucket (at ${registry})`
+     );
+     const client = new S3({
+       region: "auto",
+       endpoint: registry,
+       credentials: {
+         // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+         accessKeyId: process.env.STORM_BOT_ACCESS_KEY_ID || process.env.CLOUDFLARE_ACCESS_KEY_ID || process.env.AWS_ACCESS_KEY_ID || process.env.ACCESS_KEY_ID,
+         // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+         secretAccessKey: process.env.STORM_BOT_SECRET_ACCESS_KEY || process.env.CLOUDFLARE_SECRET_ACCESS_KEY || process.env.AWS_SECRET_ACCESS_KEY || process.env.SECRET_ACCESS_KEY
+       },
+       requestHandler: createHttpHandler()
+     });
+     const version = projectDetails?.content?.version;
+     if (version) {
+       writeDebug(`Starting upload version ${version}`);
+     }
+     const basePath = options.path;
+     const files = await glob(joinPaths(basePath, "**/*"), {
+       ignore: "**/{*.stories.tsx,*.stories.ts,*.spec.tsx,*.spec.ts}"
+     });
+     const internalDependencies = await getInternalDependencies(
+       context.projectName,
+       projectGraph
+     );
+     const dependencies = internalDependencies.filter(
+       (projectNode) => !projectNode.data.tags || projectNode.data.tags.every((tag) => tag.toLowerCase() !== "component")
+     ).reduce((ret, dep) => {
+       if (!ret[dep.name]) {
+         ret[dep.name] = "latest";
+       }
+       return ret;
+     }, projectDetails?.content.dependencies ?? {});
+     const release = options.tag ?? execSync("npm config get tag").toString().trim();
+     if (options.clean === true) {
+       writeDebug(`Clearing out existing items in ${bucketPath}`);
+       if (!isDryRun) {
+         const response = await client.listObjects({
+           Bucket: bucketId,
+           Prefix: !bucketPath || bucketPath === "/" ? void 0 : bucketPath
+         });
+         if (response?.Contents && response.Contents.length > 0) {
+           writeTrace(
+             `Deleting the following existing items from the R2 bucket path ${bucketPath}: ${response.Contents.map((item) => item.Key).join(", ")}`
+           );
+           await client.deleteObjects({
+             Bucket: bucketId,
+             Delete: {
+               Objects: response.Contents.map((item) => ({
+                 Key: item.Key
+               })),
+               Quiet: false
+             }
+           });
+         } else {
+           writeDebug(
+             `No existing items to delete in the R2 bucket path ${bucketPath}`
+           );
+         }
+       } else {
+         writeWarning("[Dry run]: Skipping R2 bucket clean.");
+       }
+     }
+     if (options.writeMetaJson === true) {
+       const meta = {
+         name: context.projectName,
+         version,
+         release,
+         description: projectDetails?.content?.description,
+         tags: projectDetails?.content?.keywords,
+         dependencies,
+         devDependencies: null,
+         internalDependencies: internalDependencies.filter(
+           (projectNode) => projectNode.data.tags && projectNode.data.tags.some(
+             (tag) => tag.toLowerCase() === "component"
+           )
+         ).map((dep) => dep.name)
+       };
+       if (projectDetails?.type === "package.json") {
+         meta.devDependencies = projectDetails?.content?.devDependencies;
+       }
+       await uploadFile(
+         client,
+         bucketId,
+         bucketPath,
+         "meta.json",
+         version,
+         JSON.stringify(meta),
+         "application/json",
+         isDryRun
+       );
+     }
+     await Promise.all(
+       files.map(async (file) => {
+         if (statSync(file, {
+           throwIfNoEntry: false
+         })?.isFile()) {
+           const name = correctPaths(file).replace(correctPaths(basePath), "");
+           const type = mime.lookup(name) || "application/octet-stream";
+           await uploadFile(
+             client,
+             bucketId,
+             bucketPath,
+             name,
+             version,
+             type === "application/json" || type.includes("text") ? await readFile(file, "utf8") : `data:${type};base64,${Buffer.from(
+               await readFile(file, "binary"),
+               "binary"
+             ).toString("base64")}`,
+             type,
+             isDryRun
+           );
+         }
+       })
+     );
+     writeSuccess(
+       `Successfully uploaded the ${projectName} project to the Cloudflare R2 bucket.`,
+       config
+     );
+     return {
+       success: true
+     };
+   } catch (error) {
+     console.error("Failed to publish to Cloudflare R2 bucket");
+     console.error(error);
+     console.log("");
+     return {
+       success: false
+     };
+   }
+ }
+
+ export {
+   runExecutor
+ };
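This new chunk contains the rebuilt r2-upload-publish executor. For orientation, its endpoint and credential resolution condenses to roughly the sketch below; the helper name is illustrative, while the environment-variable fallbacks and the R2 endpoint format are taken verbatim from the chunk above:

```ts
import { S3 } from "@aws-sdk/client-s3";

// Hypothetical condensation of the executor's S3/R2 client setup.
function createR2Client(registry?: string): S3 {
  const accountId =
    process.env.CLOUDFLARE_ACCOUNT_ID || process.env.STORM_BOT_CLOUDFLARE_ACCOUNT;
  // An explicit registry option wins; otherwise derive the R2 endpoint.
  const endpoint = registry ?? `https://${accountId}.r2.cloudflarestorage.com`;
  return new S3({
    region: "auto", // R2 endpoints use the "auto" region
    endpoint,
    credentials: {
      accessKeyId:
        process.env.STORM_BOT_ACCESS_KEY_ID ||
        process.env.CLOUDFLARE_ACCESS_KEY_ID ||
        process.env.AWS_ACCESS_KEY_ID ||
        process.env.ACCESS_KEY_ID!,
      secretAccessKey:
        process.env.STORM_BOT_SECRET_ACCESS_KEY ||
        process.env.CLOUDFLARE_SECRET_ACCESS_KEY ||
        process.env.AWS_SECRET_ACCESS_KEY ||
        process.env.SECRET_ACCESS_KEY!
    }
  });
}
```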
@@ -0,0 +1,225 @@
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }
+
+
+ var _chunkN7FW365Qjs = require('./chunk-N7FW365Q.js');
+
+
+
+ var _chunk5GILNZWSjs = require('./chunk-5GILNZWS.js');
+
+
+ var _chunkKUGEZPUOjs = require('./chunk-KUGEZPUO.js');
+
+
+
+ var _chunkZWHJ35F5js = require('./chunk-ZWHJ35F5.js');
+
+
+
+
+
+
+
+ var _chunkZ2WQB55Rjs = require('./chunk-Z2WQB55R.js');
+
+ // src/executors/r2-upload-publish/executor.ts
+ var _clients3 = require('@aws-sdk/client-s3');
+
+
+
+ var _devkit = require('@nx/devkit');
+ var _glob = require('glob');
+ var _mimetypes = require('mime-types'); var _mimetypes2 = _interopRequireDefault(_mimetypes);
+ var _child_process = require('child_process');
+ var _fs = require('fs');
+ var _promises = require('fs/promises');
+ async function runExecutor(options, context) {
+   const isDryRun = process.env.NX_DRY_RUN === "true" || options.dryRun || false;
+   if (!context.projectName) {
+     throw new Error("The executor requires a projectName.");
+   }
+   if (!options.path) {
+     throw new Error("The executor requires the `path` option to upload.");
+   }
+   console.info(
+     `\u{1F680} Running Storm Cloudflare Publish executor on the ${context.projectName} worker`
+   );
+   if (!context.projectName || !_optionalChain([context, 'access', _ => _.projectsConfigurations, 'optionalAccess', _2 => _2.projects]) || !context.projectsConfigurations.projects[context.projectName] || !_optionalChain([context, 'access', _3 => _3.projectsConfigurations, 'access', _4 => _4.projects, 'access', _5 => _5[context.projectName], 'optionalAccess', _6 => _6.root])) {
+     throw new Error("The executor requires projectsConfigurations.");
+   }
+   try {
+     const workspaceRoot = _chunk5GILNZWSjs.findWorkspaceRoot.call(void 0, );
+     const config = await _chunk5GILNZWSjs.getConfig.call(void 0, workspaceRoot);
+     const projectName = _nullishCoalesce(_optionalChain([context, 'access', _7 => _7.projectsConfigurations, 'access', _8 => _8.projects, 'access', _9 => _9[context.projectName], 'optionalAccess', _10 => _10.name]), () => ( context.projectName));
+     const projectDetails = _chunkN7FW365Qjs.getPackageInfo.call(void 0,
+       context.projectsConfigurations.projects[context.projectName]
+     );
+     const bucketId = options.bucketId;
+     const bucketPath = options.bucketPath || "/";
+     if (!bucketId) {
+       throw new Error("The executor requires a bucketId.");
+     }
+     const args = _chunkN7FW365Qjs.createCliOptions.call(void 0, { ...options });
+     if (isDryRun) {
+       args.push("--dry-run");
+     }
+     const cloudflareAccountId = process.env.CLOUDFLARE_ACCOUNT_ID || process.env.STORM_BOT_CLOUDFLARE_ACCOUNT;
+     if (!_optionalChain([options, 'optionalAccess', _11 => _11.registry]) && !cloudflareAccountId) {
+       throw new Error(
+         "The registry option and `CLOUDFLARE_ACCOUNT_ID` (or `STORM_BOT_CLOUDFLARE_ACCOUNT`) environment variable are not set. Please set one of these values to upload to the Cloudflare R2 bucket."
+       );
+     }
+     if (!process.env.STORM_BOT_ACCESS_KEY_ID && !process.env.ACCESS_KEY_ID && !process.env.CLOUDFLARE_ACCESS_KEY_ID && !process.env.AWS_ACCESS_KEY_ID || !process.env.STORM_BOT_SECRET_ACCESS_KEY && !process.env.CLOUDFLARE_SECRET_ACCESS_KEY && !process.env.SECRET_ACCESS_KEY && !process.env.AWS_SECRET_ACCESS_KEY) {
+       throw new Error(
+         "The `ACCESS_KEY_ID` (or `STORM_BOT_ACCESS_KEY_ID`) and `SECRET_ACCESS_KEY` (or `STORM_BOT_SECRET_ACCESS_KEY`) environment variables are not set. Please set these environment variables to upload to the Cloudflare R2 bucket."
+       );
+     }
+     const registry = _optionalChain([options, 'optionalAccess', _12 => _12.registry]) ? options.registry : `https://${cloudflareAccountId}.r2.cloudflarestorage.com`;
+     let projectGraph;
+     try {
+       projectGraph = _devkit.readCachedProjectGraph.call(void 0, );
+     } catch (e) {
+       await _devkit.createProjectGraphAsync.call(void 0, );
+       projectGraph = _devkit.readCachedProjectGraph.call(void 0, );
+     }
+     if (!projectGraph) {
+       throw new Error(
+         "The executor failed because the project graph is not available. Please run the build command again."
+       );
+     }
+     _chunkZ2WQB55Rjs.writeDebug.call(void 0,
+       `Publishing ${context.projectName} to the ${bucketId} R2 Bucket (at ${registry})`
+     );
+     const client = new (0, _clients3.S3)({
+       region: "auto",
+       endpoint: registry,
+       credentials: {
+         // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+         accessKeyId: process.env.STORM_BOT_ACCESS_KEY_ID || process.env.CLOUDFLARE_ACCESS_KEY_ID || process.env.AWS_ACCESS_KEY_ID || process.env.ACCESS_KEY_ID,
+         // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+         secretAccessKey: process.env.STORM_BOT_SECRET_ACCESS_KEY || process.env.CLOUDFLARE_SECRET_ACCESS_KEY || process.env.AWS_SECRET_ACCESS_KEY || process.env.SECRET_ACCESS_KEY
+       },
+       requestHandler: _chunkKUGEZPUOjs.createHttpHandler.call(void 0, )
+     });
+     const version = _optionalChain([projectDetails, 'optionalAccess', _13 => _13.content, 'optionalAccess', _14 => _14.version]);
+     if (version) {
+       _chunkZ2WQB55Rjs.writeDebug.call(void 0, `Starting upload version ${version}`);
+     }
+     const basePath = options.path;
+     const files = await _glob.glob.call(void 0, _chunkZ2WQB55Rjs.joinPaths.call(void 0, basePath, "**/*"), {
+       ignore: "**/{*.stories.tsx,*.stories.ts,*.spec.tsx,*.spec.ts}"
+     });
+     const internalDependencies = await _chunkZWHJ35F5js.getInternalDependencies.call(void 0,
+       context.projectName,
+       projectGraph
+     );
+     const dependencies = internalDependencies.filter(
+       (projectNode) => !projectNode.data.tags || projectNode.data.tags.every((tag) => tag.toLowerCase() !== "component")
+     ).reduce((ret, dep) => {
+       if (!ret[dep.name]) {
+         ret[dep.name] = "latest";
+       }
+       return ret;
+     }, _nullishCoalesce(_optionalChain([projectDetails, 'optionalAccess', _15 => _15.content, 'access', _16 => _16.dependencies]), () => ( {})));
+     const release = _nullishCoalesce(options.tag, () => ( _child_process.execSync.call(void 0, "npm config get tag").toString().trim()));
+     if (options.clean === true) {
+       _chunkZ2WQB55Rjs.writeDebug.call(void 0, `Clearing out existing items in ${bucketPath}`);
+       if (!isDryRun) {
+         const response = await client.listObjects({
+           Bucket: bucketId,
+           Prefix: !bucketPath || bucketPath === "/" ? void 0 : bucketPath
+         });
+         if (_optionalChain([response, 'optionalAccess', _17 => _17.Contents]) && response.Contents.length > 0) {
+           _chunkZ2WQB55Rjs.writeTrace.call(void 0,
+             `Deleting the following existing items from the R2 bucket path ${bucketPath}: ${response.Contents.map((item) => item.Key).join(", ")}`
+           );
+           await client.deleteObjects({
+             Bucket: bucketId,
+             Delete: {
+               Objects: response.Contents.map((item) => ({
+                 Key: item.Key
+               })),
+               Quiet: false
+             }
+           });
+         } else {
+           _chunkZ2WQB55Rjs.writeDebug.call(void 0,
+             `No existing items to delete in the R2 bucket path ${bucketPath}`
+           );
+         }
+       } else {
+         _chunkZ2WQB55Rjs.writeWarning.call(void 0, "[Dry run]: Skipping R2 bucket clean.");
+       }
+     }
+     if (options.writeMetaJson === true) {
+       const meta = {
+         name: context.projectName,
+         version,
+         release,
+         description: _optionalChain([projectDetails, 'optionalAccess', _18 => _18.content, 'optionalAccess', _19 => _19.description]),
+         tags: _optionalChain([projectDetails, 'optionalAccess', _20 => _20.content, 'optionalAccess', _21 => _21.keywords]),
+         dependencies,
+         devDependencies: null,
+         internalDependencies: internalDependencies.filter(
+           (projectNode) => projectNode.data.tags && projectNode.data.tags.some(
+             (tag) => tag.toLowerCase() === "component"
+           )
+         ).map((dep) => dep.name)
+       };
+       if (_optionalChain([projectDetails, 'optionalAccess', _22 => _22.type]) === "package.json") {
+         meta.devDependencies = _optionalChain([projectDetails, 'optionalAccess', _23 => _23.content, 'optionalAccess', _24 => _24.devDependencies]);
+       }
+       await _chunkZWHJ35F5js.uploadFile.call(void 0,
+         client,
+         bucketId,
+         bucketPath,
+         "meta.json",
+         version,
+         JSON.stringify(meta),
+         "application/json",
+         isDryRun
+       );
+     }
+     await Promise.all(
+       files.map(async (file) => {
+         if (_optionalChain([_fs.statSync.call(void 0, file, {
+           throwIfNoEntry: false
+         }), 'optionalAccess', _25 => _25.isFile, 'call', _26 => _26()])) {
+           const name = _chunkZ2WQB55Rjs.correctPaths.call(void 0, file).replace(_chunkZ2WQB55Rjs.correctPaths.call(void 0, basePath), "");
+           const type = _mimetypes2.default.lookup(name) || "application/octet-stream";
+           await _chunkZWHJ35F5js.uploadFile.call(void 0,
+             client,
+             bucketId,
+             bucketPath,
+             name,
+             version,
+             type === "application/json" || type.includes("text") ? await _promises.readFile.call(void 0, file, "utf8") : `data:${type};base64,${Buffer.from(
+               await _promises.readFile.call(void 0, file, "binary"),
+               "binary"
+             ).toString("base64")}`,
+             type,
+             isDryRun
+           );
+         }
+       })
+     );
+     _chunkZ2WQB55Rjs.writeSuccess.call(void 0,
+       `Successfully uploaded the ${projectName} project to the Cloudflare R2 bucket.`,
+       config
+     );
+     return {
+       success: true
+     };
+   } catch (error) {
+     console.error("Failed to publish to Cloudflare R2 bucket");
+     console.error(error);
+     console.log("");
+     return {
+       success: false
+     };
+   }
+ }
+
+
+
+ exports.runExecutor = runExecutor;
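This CommonJS twin of the chunk above appears to be emitted by sucrase, whose `_optionalChain` and `_nullishCoalesce` helpers replace optional chaining and `??` from the source. For reference, a guard like the one near the top of the chunk corresponds to plain optional chaining; the types here are illustrative only:

```ts
// Illustration of what the transform encodes; not part of the package.
interface MinimalContext {
  projectsConfigurations?: { projects?: Record<string, { root?: string }> };
}

function hasProjects(context: MinimalContext): boolean {
  // The CJS build writes this as:
  //   _optionalChain([context, 'access', _ => _.projectsConfigurations,
  //                   'optionalAccess', _2 => _2.projects])
  return context.projectsConfigurations?.projects != null;
}
```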
package/dist/executors.js CHANGED
@@ -1,5 +1,5 @@
  "use strict";require('./chunk-XO66D74Z.js');
- require('./chunk-TOVKKH6U.js');
+ require('./chunk-DUHFLDZ4.js');
  require('./chunk-KUGEZPUO.js');
  require('./chunk-ZWHJ35F5.js');
  require('./chunk-QBD2OGUY.js');
@@ -1,5 +1,5 @@
  import "./chunk-YSCEY447.mjs";
- import "./chunk-B3QXFGFP.mjs";
+ import "./chunk-YQGPNO3W.mjs";
  import "./chunk-SWYYMID7.mjs";
  import "./chunk-UI2F3MMU.mjs";
  import "./chunk-WFPKBGV3.mjs";
@@ -2,11 +2,11 @@



- var _chunkGNNGK2FBjs = require('./chunk-GNNGK2FB.js');
+ var _chunkYPRUPZL3js = require('./chunk-YPRUPZL3.js');



- var _chunkM34YY63Mjs = require('./chunk-M34YY63M.js');
+ var _chunkOZ44M6HXjs = require('./chunk-OZ44M6HX.js');
  require('./chunk-5GILNZWS.js');
  require('./chunk-Z2WQB55R.js');
  require('./chunk-MCKGQKYU.js');
@@ -15,4 +15,4 @@ require('./chunk-MCKGQKYU.js');



- exports.applicationGenerator = _chunkGNNGK2FBjs.applicationGenerator; exports.applicationSchematic = _chunkGNNGK2FBjs.applicationSchematic; exports.initGenerator = _chunkM34YY63Mjs.initGenerator; exports.initSchematic = _chunkM34YY63Mjs.initSchematic;
+ exports.applicationGenerator = _chunkYPRUPZL3js.applicationGenerator; exports.applicationSchematic = _chunkYPRUPZL3js.applicationSchematic; exports.initGenerator = _chunkOZ44M6HXjs.initGenerator; exports.initSchematic = _chunkOZ44M6HXjs.initSchematic;
@@ -2,11 +2,11 @@ import "./chunk-3J7KBHMJ.mjs";
  import {
    applicationGenerator,
    applicationSchematic
- } from "./chunk-3IXNFAVK.mjs";
+ } from "./chunk-DEX6LTPV.mjs";
  import {
    initGenerator,
    initSchematic
- } from "./chunk-E64DDPVR.mjs";
+ } from "./chunk-FPJU3YOH.mjs";
  import "./chunk-G4ZCI2MN.mjs";
  import "./chunk-TPNHSNNZ.mjs";
  import "./chunk-PALWHFOL.mjs";
package/dist/index.js CHANGED
@@ -1,14 +1,14 @@
  "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }require('./chunk-XO66D74Z.js');
- require('./chunk-TOVKKH6U.js');
+ require('./chunk-DUHFLDZ4.js');
  require('./chunk-DHBG5ASJ.js');



- var _chunkGNNGK2FBjs = require('./chunk-GNNGK2FB.js');
+ var _chunkYPRUPZL3js = require('./chunk-YPRUPZL3.js');



- var _chunkM34YY63Mjs = require('./chunk-M34YY63M.js');
+ var _chunkOZ44M6HXjs = require('./chunk-OZ44M6HX.js');
  require('./chunk-KUGEZPUO.js');
  require('./chunk-CVGPWUNP.js');

@@ -156,4 +156,4 @@ function createPackageJson(projectJsonPath, workspaceRoot) {



- exports.applicationGenerator = _chunkGNNGK2FBjs.applicationGenerator; exports.applicationSchematic = _chunkGNNGK2FBjs.applicationSchematic; exports.createNodesV2 = createNodesV2; exports.getInternalDependencies = _chunkZWHJ35F5js.getInternalDependencies; exports.initGenerator = _chunkM34YY63Mjs.initGenerator; exports.initSchematic = _chunkM34YY63Mjs.initSchematic; exports.name = name; exports.uploadFile = _chunkZWHJ35F5js.uploadFile;
+ exports.applicationGenerator = _chunkYPRUPZL3js.applicationGenerator; exports.applicationSchematic = _chunkYPRUPZL3js.applicationSchematic; exports.createNodesV2 = createNodesV2; exports.getInternalDependencies = _chunkZWHJ35F5js.getInternalDependencies; exports.initGenerator = _chunkOZ44M6HXjs.initGenerator; exports.initSchematic = _chunkOZ44M6HXjs.initSchematic; exports.name = name; exports.uploadFile = _chunkZWHJ35F5js.uploadFile;
package/dist/index.mjs CHANGED
@@ -1,14 +1,14 @@
  import "./chunk-YSCEY447.mjs";
- import "./chunk-B3QXFGFP.mjs";
+ import "./chunk-YQGPNO3W.mjs";
  import "./chunk-3J7KBHMJ.mjs";
  import {
    applicationGenerator,
    applicationSchematic
- } from "./chunk-3IXNFAVK.mjs";
+ } from "./chunk-DEX6LTPV.mjs";
  import {
    initGenerator,
    initSchematic
- } from "./chunk-E64DDPVR.mjs";
+ } from "./chunk-FPJU3YOH.mjs";
  import "./chunk-SWYYMID7.mjs";
  import "./chunk-7Z5PILRU.mjs";
  import {
@@ -1,6 +1,6 @@
  "use strict";Object.defineProperty(exports, "__esModule", {value: true});

- var _chunkTOVKKH6Ujs = require('../../../chunk-TOVKKH6U.js');
+ var _chunkDUHFLDZ4js = require('../../../chunk-DUHFLDZ4.js');
  require('../../../chunk-KUGEZPUO.js');
  require('../../../chunk-ZWHJ35F5.js');
  require('../../../chunk-N7FW365Q.js');
@@ -9,4 +9,4 @@ require('../../../chunk-Z2WQB55R.js');
  require('../../../chunk-MCKGQKYU.js');


- exports.default = _chunkTOVKKH6Ujs.runExecutor;
+ exports.default = _chunkDUHFLDZ4js.runExecutor;
@@ -1,6 +1,6 @@
  import {
    runExecutor
- } from "../../../chunk-B3QXFGFP.mjs";
+ } from "../../../chunk-YQGPNO3W.mjs";
  import "../../../chunk-SWYYMID7.mjs";
  import "../../../chunk-UI2F3MMU.mjs";
  import "../../../chunk-3MAI3FU2.mjs";
@@ -2,10 +2,10 @@



- var _chunkM34YY63Mjs = require('../../../chunk-M34YY63M.js');
+ var _chunkOZ44M6HXjs = require('../../../chunk-OZ44M6HX.js');
  require('../../../chunk-MCKGQKYU.js');




- exports.default = _chunkM34YY63Mjs.generator_default; exports.initGenerator = _chunkM34YY63Mjs.initGenerator; exports.initSchematic = _chunkM34YY63Mjs.initSchematic;
+ exports.default = _chunkOZ44M6HXjs.generator_default; exports.initGenerator = _chunkOZ44M6HXjs.initGenerator; exports.initSchematic = _chunkOZ44M6HXjs.initSchematic;
@@ -2,7 +2,7 @@ import {
    generator_default,
    initGenerator,
    initSchematic
- } from "../../../chunk-E64DDPVR.mjs";
+ } from "../../../chunk-FPJU3YOH.mjs";
  import "../../../chunk-PALWHFOL.mjs";
  export {
    generator_default as default,
@@ -2,8 +2,8 @@



- var _chunkGNNGK2FBjs = require('../../../chunk-GNNGK2FB.js');
- require('../../../chunk-M34YY63M.js');
+ var _chunkYPRUPZL3js = require('../../../chunk-YPRUPZL3.js');
+ require('../../../chunk-OZ44M6HX.js');
  require('../../../chunk-5GILNZWS.js');
  require('../../../chunk-Z2WQB55R.js');
  require('../../../chunk-MCKGQKYU.js');
@@ -11,4 +11,4 @@ require('../../../chunk-MCKGQKYU.js');



- exports.applicationGenerator = _chunkGNNGK2FBjs.applicationGenerator; exports.applicationSchematic = _chunkGNNGK2FBjs.applicationSchematic; exports.default = _chunkGNNGK2FBjs.generator_default;
+ exports.applicationGenerator = _chunkYPRUPZL3js.applicationGenerator; exports.applicationSchematic = _chunkYPRUPZL3js.applicationSchematic; exports.default = _chunkYPRUPZL3js.generator_default;
@@ -2,8 +2,8 @@ import {
    applicationGenerator,
    applicationSchematic,
    generator_default
- } from "../../../chunk-3IXNFAVK.mjs";
- import "../../../chunk-E64DDPVR.mjs";
+ } from "../../../chunk-DEX6LTPV.mjs";
+ import "../../../chunk-FPJU3YOH.mjs";
  import "../../../chunk-G4ZCI2MN.mjs";
  import "../../../chunk-TPNHSNNZ.mjs";
  import "../../../chunk-PALWHFOL.mjs";
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@storm-software/cloudflare-tools",
-   "version": "0.71.28",
+   "version": "0.71.30",
    "description": "A Nx plugin package that contains various executors, generators, and utilities that assist in managing Cloudflare services.",
    "repository": {
      "type": "github",
@@ -153,5 +153,5 @@
    "publishConfig": { "access": "public" },
    "executors": "./executors.json",
    "generators": "./generators.json",
-   "gitHead": "557681dbdcec099af1f775e87d2bf55f0c0a2ea4"
+   "gitHead": "f1a6c01c03133af11e541f3817836f0e231de26d"
  }