@storm-software/cloudflare-tools 0.71.30 → 0.71.31

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. package/CHANGELOG.md +7 -0
  2. package/dist/{chunk-OZ44M6HX.js → chunk-DBH3ARKO.js} +4 -2
  3. package/dist/chunk-GBGETEC6.mjs +60 -0
  4. package/dist/{chunk-DUHFLDZ4.js → chunk-JFMSM7WW.js} +27 -22
  5. package/dist/chunk-MQLXYAAZ.js +60 -0
  6. package/dist/{chunk-YQGPNO3W.mjs → chunk-Q6BDPVT4.mjs} +25 -20
  7. package/dist/{chunk-DEX6LTPV.mjs → chunk-TWWKKHQ6.mjs} +1 -1
  8. package/dist/{chunk-YPRUPZL3.js → chunk-V4DY7BGL.js} +2 -2
  9. package/dist/{chunk-FPJU3YOH.mjs → chunk-WSU7RB2N.mjs} +4 -2
  10. package/dist/executors.js +2 -2
  11. package/dist/executors.mjs +2 -2
  12. package/dist/generators.js +3 -3
  13. package/dist/generators.mjs +2 -2
  14. package/dist/index.js +5 -5
  15. package/dist/index.mjs +4 -4
  16. package/dist/src/executors/r2-upload-publish/executor.js +3 -3
  17. package/dist/src/executors/r2-upload-publish/executor.mjs +2 -2
  18. package/dist/src/generators/init/generator.js +2 -2
  19. package/dist/src/generators/init/generator.mjs +1 -1
  20. package/dist/src/generators/worker/generator.js +3 -3
  21. package/dist/src/generators/worker/generator.mjs +2 -2
  22. package/dist/src/utils/index.js +2 -2
  23. package/dist/src/utils/index.mjs +1 -1
  24. package/dist/src/utils/r2-bucket-helpers.d.mts +2 -2
  25. package/dist/src/utils/r2-bucket-helpers.d.ts +2 -2
  26. package/dist/src/utils/r2-bucket-helpers.js +2 -2
  27. package/dist/src/utils/r2-bucket-helpers.mjs +1 -1
  28. package/package.json +5 -3
  29. package/dist/chunk-RGRCKWGN.mjs +0 -225
  30. package/dist/chunk-UI2F3MMU.mjs +0 -52
  31. package/dist/chunk-ZBNASCRJ.js +0 -225
  32. package/dist/chunk-ZWHJ35F5.js +0 -52
package/CHANGELOG.md CHANGED
@@ -2,6 +2,13 @@
 
  # Changelog for Storm Ops - Cloudflare Tools
 
+ ## [0.71.30](https://github.com/storm-software/storm-ops/releases/tag/cloudflare-tools%400.71.30) (12/18/2025)
+
+ ### Bug Fixes
+
+ - **cloudflare-tools:** Ensure data is uploaded as a data URL
+ ([b992fc806](https://github.com/storm-software/storm-ops/commit/b992fc806))
+
  ## [0.71.29](https://github.com/storm-software/storm-ops/releases/tag/cloudflare-tools%400.71.29) (12/18/2025)
 
  ### Bug Fixes
package/dist/{chunk-OZ44M6HX.js → chunk-DBH3ARKO.js} RENAMED
@@ -7,7 +7,7 @@ var require_package = _chunkMCKGQKYUjs.__commonJS.call(void 0, {
  "package.json"(exports, module) {
  module.exports = {
  name: "@storm-software/cloudflare-tools",
- version: "0.71.29",
+ version: "0.71.30",
  description: "A Nx plugin package that contains various executors, generators, and utilities that assist in managing Cloudflare services.",
  repository: {
  type: "github",
@@ -165,10 +165,12 @@ var require_package = _chunkMCKGQKYUjs.__commonJS.call(void 0, {
  },
  dependencies: {
  "@aws-sdk/client-s3": "^3.948.0",
+ "@aws-sdk/lib-storage": "^3.954.0",
  "@smithy/node-http-handler": "^4.4.5",
  defu: "catalog:",
  glob: "catalog:",
- "mime-types": "^3.0.2"
+ "mime-types": "^3.0.2",
+ "pretty-bytes": "^7.1.0"
  },
  devDependencies: {
  "@nx/devkit": "catalog:",
package/dist/chunk-GBGETEC6.mjs ADDED
@@ -0,0 +1,60 @@
+ import {
+ joinPaths,
+ writeDebug,
+ writeError,
+ writeWarning
+ } from "./chunk-TPNHSNNZ.mjs";
+
+ // src/utils/r2-bucket-helpers.ts
+ import { Upload } from "@aws-sdk/lib-storage";
+ import { createHash } from "node:crypto";
+ import prettyBytes from "pretty-bytes";
+ async function uploadFile(client, bucketName, bucketPath, fileName, version, fileContent, contentType = "application/octet-stream", isDryRun = false) {
+ const key = (!bucketPath?.trim() || bucketPath?.trim() === "/" ? fileName : joinPaths(bucketPath.trim(), fileName))?.replace(/^\/+/g, "") || "";
+ writeDebug(
+ `Uploading ${key} (content-type: ${contentType}, size: ${prettyBytes(
+ Buffer.byteLength(fileContent, "utf8")
+ )}) to the ${bucketName} R2 bucket`
+ );
+ try {
+ if (!isDryRun) {
+ const upload = new Upload({
+ client,
+ params: {
+ Bucket: bucketName,
+ Key: key,
+ Body: Buffer.from(fileContent, "utf8"),
+ ContentType: contentType,
+ Metadata: {
+ version,
+ checksum: createHash("sha256").update(fileContent).digest("base64")
+ }
+ }
+ });
+ await upload.done();
+ } else {
+ writeWarning("[Dry run]: Skipping upload to the R2 bucket.");
+ }
+ } catch (error) {
+ writeError(`Failed to upload ${key} to the ${bucketName} R2 bucket.`);
+ throw error;
+ }
+ }
+ function getInternalDependencies(projectName, graph) {
+ const allDeps = graph.dependencies[projectName] ?? [];
+ return Array.from(
+ allDeps.reduce(
+ (acc, node) => {
+ const found = graph.nodes[node.target];
+ if (found) acc.push(found);
+ return acc;
+ },
+ []
+ )
+ );
+ }
+
+ export {
+ uploadFile,
+ getInternalDependencies
+ };
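Note on the new helper above: `uploadFile` now hands the body to the multipart-capable `Upload` class from `@aws-sdk/lib-storage` instead of making a single `putObject` call, and it logs a human-readable payload size via `pretty-bytes`. A minimal sketch of how such a helper is typically driven against R2 — the endpoint placeholder, bucket name, and env-var names here are illustrative assumptions, not values taken from this package:

```ts
import { S3Client } from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";

async function main() {
  // Hypothetical R2-style client; "<account-id>" is a placeholder.
  const client = new S3Client({
    region: "auto",
    endpoint: "https://<account-id>.r2.cloudflarestorage.com",
    credentials: {
      accessKeyId: process.env.ACCESS_KEY_ID ?? "",
      secretAccessKey: process.env.SECRET_ACCESS_KEY ?? ""
    }
  });

  // Upload streams the body, switching to multipart uploads for large
  // payloads, so no long per-request timeout is needed.
  const upload = new Upload({
    client,
    params: {
      Bucket: "my-bucket", // placeholder bucket name
      Key: "meta.json",
      Body: Buffer.from(JSON.stringify({ name: "example" }), "utf8"),
      ContentType: "application/json"
    }
  });
  await upload.done();
}

main().catch(console.error);
```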
package/dist/{chunk-DUHFLDZ4.js → chunk-JFMSM7WW.js} RENAMED
@@ -4,7 +4,7 @@ var _chunkKUGEZPUOjs = require('./chunk-KUGEZPUO.js');
 
 
 
- var _chunkZWHJ35F5js = require('./chunk-ZWHJ35F5.js');
+ var _chunkMQLXYAAZjs = require('./chunk-MQLXYAAZ.js');
 
 
 
@@ -23,6 +23,10 @@ var _chunk5GILNZWSjs = require('./chunk-5GILNZWS.js');
  var _chunkZ2WQB55Rjs = require('./chunk-Z2WQB55R.js');
 
  // src/executors/r2-upload-publish/executor.ts
+
+
+
+
  var _clients3 = require('@aws-sdk/client-s3');
 
 
@@ -90,7 +94,7 @@ async function runExecutor(options, context) {
  _chunkZ2WQB55Rjs.writeDebug.call(void 0,
  `Publishing ${context.projectName} to the ${bucketId} R2 Bucket (at ${registry})`
  );
- const client = new (0, _clients3.S3)({
+ const client = new (0, _clients3.S3Client)({
  region: "auto",
  endpoint: registry,
  credentials: {
@@ -109,7 +113,7 @@ async function runExecutor(options, context) {
  const files = await _glob.glob.call(void 0, _chunkZ2WQB55Rjs.joinPaths.call(void 0, basePath, "**/*"), {
  ignore: "**/{*.stories.tsx,*.stories.ts,*.spec.tsx,*.spec.ts}"
  });
- const internalDependencies = await _chunkZWHJ35F5js.getInternalDependencies.call(void 0,
+ const internalDependencies = await _chunkMQLXYAAZjs.getInternalDependencies.call(void 0,
  context.projectName,
  projectGraph
  );
@@ -125,23 +129,27 @@ async function runExecutor(options, context) {
  if (options.clean === true) {
  _chunkZ2WQB55Rjs.writeDebug.call(void 0, `Clearing out existing items in ${bucketPath}`);
  if (!isDryRun) {
- const response = await client.listObjects({
- Bucket: bucketId,
- Prefix: !bucketPath || bucketPath === "/" ? void 0 : bucketPath
- });
+ const response = await client.send(
+ new (0, _clients3.ListObjectsCommand)({
+ Bucket: bucketId,
+ Prefix: !bucketPath || bucketPath === "/" ? void 0 : bucketPath
+ })
+ );
  if (_optionalChain([response, 'optionalAccess', _17 => _17.Contents]) && response.Contents.length > 0) {
  _chunkZ2WQB55Rjs.writeTrace.call(void 0,
  `Deleting the following existing items from the R2 bucket path ${bucketPath}: ${response.Contents.map((item) => item.Key).join(", ")}`
  );
- await client.deleteObjects({
- Bucket: bucketId,
- Delete: {
- Objects: response.Contents.map((item) => ({
- Key: item.Key
- })),
- Quiet: false
- }
- });
+ await client.send(
+ new (0, _clients3.DeleteObjectsCommand)({
+ Bucket: bucketId,
+ Delete: {
+ Objects: response.Contents.map((item) => ({
+ Key: item.Key
+ })),
+ Quiet: false
+ }
+ })
+ );
  } else {
  _chunkZ2WQB55Rjs.writeDebug.call(void 0,
  `No existing items to delete in the R2 bucket path ${bucketPath}`
@@ -169,7 +177,7 @@ async function runExecutor(options, context) {
  if (_optionalChain([projectDetails, 'optionalAccess', _22 => _22.type]) === "package.json") {
  meta.devDependencies = _optionalChain([projectDetails, 'optionalAccess', _23 => _23.content, 'optionalAccess', _24 => _24.devDependencies]);
  }
- await _chunkZWHJ35F5js.uploadFile.call(void 0,
+ await _chunkMQLXYAAZjs.uploadFile.call(void 0,
  client,
  bucketId,
  bucketPath,
@@ -187,16 +195,13 @@ async function runExecutor(options, context) {
  }), 'optionalAccess', _25 => _25.isFile, 'call', _26 => _26()])) {
  const name = _chunkZ2WQB55Rjs.correctPaths.call(void 0, file).replace(_chunkZ2WQB55Rjs.correctPaths.call(void 0, basePath), "");
  const type = _mimetypes2.default.lookup(name) || "application/octet-stream";
- await _chunkZWHJ35F5js.uploadFile.call(void 0,
+ await _chunkMQLXYAAZjs.uploadFile.call(void 0,
  client,
  bucketId,
  bucketPath,
  name,
  version,
- type === "application/json" || type.includes("text") ? await _promises.readFile.call(void 0, file, "utf8") : `data:${type};base64,${Buffer.from(
- await _promises.readFile.call(void 0, file, "binary"),
- "binary"
- ).toString("base64")}`,
+ await _promises.readFile.call(void 0, file, "utf8"),
  type,
  isDryRun
  );
package/dist/chunk-MQLXYAAZ.js ADDED
@@ -0,0 +1,60 @@
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }
+
+
+
+
+ var _chunkZ2WQB55Rjs = require('./chunk-Z2WQB55R.js');
+
+ // src/utils/r2-bucket-helpers.ts
+ var _libstorage = require('@aws-sdk/lib-storage');
+ var _crypto = require('crypto');
+ var _prettybytes = require('pretty-bytes'); var _prettybytes2 = _interopRequireDefault(_prettybytes);
+ async function uploadFile(client, bucketName, bucketPath, fileName, version, fileContent, contentType = "application/octet-stream", isDryRun = false) {
+ const key = _optionalChain([(!_optionalChain([bucketPath, 'optionalAccess', _ => _.trim, 'call', _2 => _2()]) || _optionalChain([bucketPath, 'optionalAccess', _3 => _3.trim, 'call', _4 => _4()]) === "/" ? fileName : _chunkZ2WQB55Rjs.joinPaths.call(void 0, bucketPath.trim(), fileName)), 'optionalAccess', _5 => _5.replace, 'call', _6 => _6(/^\/+/g, "")]) || "";
+ _chunkZ2WQB55Rjs.writeDebug.call(void 0,
+ `Uploading ${key} (content-type: ${contentType}, size: ${_prettybytes2.default.call(void 0,
+ Buffer.byteLength(fileContent, "utf8")
+ )}) to the ${bucketName} R2 bucket`
+ );
+ try {
+ if (!isDryRun) {
+ const upload = new (0, _libstorage.Upload)({
+ client,
+ params: {
+ Bucket: bucketName,
+ Key: key,
+ Body: Buffer.from(fileContent, "utf8"),
+ ContentType: contentType,
+ Metadata: {
+ version,
+ checksum: _crypto.createHash.call(void 0, "sha256").update(fileContent).digest("base64")
+ }
+ }
+ });
+ await upload.done();
+ } else {
+ _chunkZ2WQB55Rjs.writeWarning.call(void 0, "[Dry run]: Skipping upload to the R2 bucket.");
+ }
+ } catch (error) {
+ _chunkZ2WQB55Rjs.writeError.call(void 0, `Failed to upload ${key} to the ${bucketName} R2 bucket.`);
+ throw error;
+ }
+ }
+ function getInternalDependencies(projectName, graph) {
+ const allDeps = _nullishCoalesce(graph.dependencies[projectName], () => ( []));
+ return Array.from(
+ allDeps.reduce(
+ (acc, node) => {
+ const found = graph.nodes[node.target];
+ if (found) acc.push(found);
+ return acc;
+ },
+ []
+ )
+ );
+ }
+
+
+
+
+ exports.uploadFile = uploadFile; exports.getInternalDependencies = getInternalDependencies;
package/dist/{chunk-YQGPNO3W.mjs → chunk-Q6BDPVT4.mjs} RENAMED
@@ -4,7 +4,7 @@ import {
  import {
  getInternalDependencies,
  uploadFile
- } from "./chunk-UI2F3MMU.mjs";
+ } from "./chunk-GBGETEC6.mjs";
  import {
  createCliOptions,
  getPackageInfo
@@ -23,7 +23,11 @@ import {
  } from "./chunk-TPNHSNNZ.mjs";
 
  // src/executors/r2-upload-publish/executor.ts
- import { S3 } from "@aws-sdk/client-s3";
+ import {
+ DeleteObjectsCommand,
+ ListObjectsCommand,
+ S3Client
+ } from "@aws-sdk/client-s3";
  import {
  createProjectGraphAsync,
  readCachedProjectGraph
@@ -90,7 +94,7 @@ async function runExecutor(options, context) {
  writeDebug(
  `Publishing ${context.projectName} to the ${bucketId} R2 Bucket (at ${registry})`
  );
- const client = new S3({
+ const client = new S3Client({
  region: "auto",
  endpoint: registry,
  credentials: {
@@ -125,23 +129,27 @@ async function runExecutor(options, context) {
  if (options.clean === true) {
  writeDebug(`Clearing out existing items in ${bucketPath}`);
  if (!isDryRun) {
- const response = await client.listObjects({
- Bucket: bucketId,
- Prefix: !bucketPath || bucketPath === "/" ? void 0 : bucketPath
- });
+ const response = await client.send(
+ new ListObjectsCommand({
+ Bucket: bucketId,
+ Prefix: !bucketPath || bucketPath === "/" ? void 0 : bucketPath
+ })
+ );
  if (response?.Contents && response.Contents.length > 0) {
  writeTrace(
  `Deleting the following existing items from the R2 bucket path ${bucketPath}: ${response.Contents.map((item) => item.Key).join(", ")}`
  );
- await client.deleteObjects({
- Bucket: bucketId,
- Delete: {
- Objects: response.Contents.map((item) => ({
- Key: item.Key
- })),
- Quiet: false
- }
- });
+ await client.send(
+ new DeleteObjectsCommand({
+ Bucket: bucketId,
+ Delete: {
+ Objects: response.Contents.map((item) => ({
+ Key: item.Key
+ })),
+ Quiet: false
+ }
+ })
+ );
  } else {
  writeDebug(
  `No existing items to delete in the R2 bucket path ${bucketPath}`
@@ -193,10 +201,7 @@ async function runExecutor(options, context) {
  bucketPath,
  name,
  version,
- type === "application/json" || type.includes("text") ? await readFile(file, "utf8") : `data:${type};base64,${Buffer.from(
- await readFile(file, "binary"),
- "binary"
- ).toString("base64")}`,
+ await readFile(file, "utf8"),
  type,
  isDryRun
  );
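Aside on the hunk above: the executor moves from the aggregated `S3` client (one method per operation) to the modular `S3Client`, where each operation is an explicit command object passed to `client.send(...)` — the tree-shakeable style of AWS SDK for JavaScript v3. A rough standalone equivalent of the list-then-delete cleanup, with placeholder bucket and prefix values:

```ts
import {
  DeleteObjectsCommand,
  ListObjectsCommand,
  S3Client
} from "@aws-sdk/client-s3";

// Deletes every object under a prefix; bucket/prefix are placeholders.
async function cleanPrefix(client: S3Client, bucket: string, prefix?: string) {
  // Each operation is an explicit command passed to client.send(...)
  const listed = await client.send(
    new ListObjectsCommand({ Bucket: bucket, Prefix: prefix })
  );
  if (listed.Contents && listed.Contents.length > 0) {
    await client.send(
      new DeleteObjectsCommand({
        Bucket: bucket,
        Delete: {
          Objects: listed.Contents.map((item) => ({ Key: item.Key })),
          Quiet: false
        }
      })
    );
  }
}
```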
package/dist/{chunk-DEX6LTPV.mjs → chunk-TWWKKHQ6.mjs} RENAMED
@@ -1,6 +1,6 @@
  import {
  generator_default
- } from "./chunk-FPJU3YOH.mjs";
+ } from "./chunk-WSU7RB2N.mjs";
  import {
  findWorkspaceRoot,
  getConfig
package/dist/{chunk-YPRUPZL3.js → chunk-V4DY7BGL.js} RENAMED
@@ -1,6 +1,6 @@
  "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }
 
- var _chunkOZ44M6HXjs = require('./chunk-OZ44M6HX.js');
+ var _chunkDBH3ARKOjs = require('./chunk-DBH3ARKO.js');
 
 
 
@@ -66,7 +66,7 @@ ${Object.keys(process.env).map((key) => ` - ${key}=${JSON.stringify(process.env[
  const options = await normalizeOptions(tree, schema, config);
  const tasks = [];
  tasks.push(
- await _chunkOZ44M6HXjs.generator_default.call(void 0, tree, {
+ await _chunkDBH3ARKOjs.generator_default.call(void 0, tree, {
  ...options,
  skipFormat: true
  })
package/dist/{chunk-FPJU3YOH.mjs → chunk-WSU7RB2N.mjs} RENAMED
@@ -7,7 +7,7 @@ var require_package = __commonJS({
  "package.json"(exports, module) {
  module.exports = {
  name: "@storm-software/cloudflare-tools",
- version: "0.71.29",
+ version: "0.71.30",
  description: "A Nx plugin package that contains various executors, generators, and utilities that assist in managing Cloudflare services.",
  repository: {
  type: "github",
@@ -165,10 +165,12 @@ var require_package = __commonJS({
  },
  dependencies: {
  "@aws-sdk/client-s3": "^3.948.0",
+ "@aws-sdk/lib-storage": "^3.954.0",
  "@smithy/node-http-handler": "^4.4.5",
  defu: "catalog:",
  glob: "catalog:",
- "mime-types": "^3.0.2"
+ "mime-types": "^3.0.2",
+ "pretty-bytes": "^7.1.0"
  },
  devDependencies: {
  "@nx/devkit": "catalog:",
package/dist/executors.js CHANGED
@@ -1,7 +1,7 @@
  "use strict";require('./chunk-XO66D74Z.js');
- require('./chunk-DUHFLDZ4.js');
+ require('./chunk-JFMSM7WW.js');
  require('./chunk-KUGEZPUO.js');
- require('./chunk-ZWHJ35F5.js');
+ require('./chunk-MQLXYAAZ.js');
  require('./chunk-QBD2OGUY.js');
  require('./chunk-IVVRVG36.js');
  require('./chunk-N7FW365Q.js');
package/dist/executors.mjs CHANGED
@@ -1,7 +1,7 @@
  import "./chunk-YSCEY447.mjs";
- import "./chunk-YQGPNO3W.mjs";
+ import "./chunk-Q6BDPVT4.mjs";
  import "./chunk-SWYYMID7.mjs";
- import "./chunk-UI2F3MMU.mjs";
+ import "./chunk-GBGETEC6.mjs";
  import "./chunk-WFPKBGV3.mjs";
  import "./chunk-NIOOY3TB.mjs";
  import "./chunk-3MAI3FU2.mjs";
package/dist/generators.js CHANGED
@@ -2,11 +2,11 @@
 
 
 
- var _chunkYPRUPZL3js = require('./chunk-YPRUPZL3.js');
+ var _chunkV4DY7BGLjs = require('./chunk-V4DY7BGL.js');
 
 
 
- var _chunkOZ44M6HXjs = require('./chunk-OZ44M6HX.js');
+ var _chunkDBH3ARKOjs = require('./chunk-DBH3ARKO.js');
  require('./chunk-5GILNZWS.js');
  require('./chunk-Z2WQB55R.js');
  require('./chunk-MCKGQKYU.js');
@@ -15,4 +15,4 @@ require('./chunk-MCKGQKYU.js');
 
 
 
- exports.applicationGenerator = _chunkYPRUPZL3js.applicationGenerator; exports.applicationSchematic = _chunkYPRUPZL3js.applicationSchematic; exports.initGenerator = _chunkOZ44M6HXjs.initGenerator; exports.initSchematic = _chunkOZ44M6HXjs.initSchematic;
+ exports.applicationGenerator = _chunkV4DY7BGLjs.applicationGenerator; exports.applicationSchematic = _chunkV4DY7BGLjs.applicationSchematic; exports.initGenerator = _chunkDBH3ARKOjs.initGenerator; exports.initSchematic = _chunkDBH3ARKOjs.initSchematic;
@@ -2,11 +2,11 @@ import "./chunk-3J7KBHMJ.mjs";
2
2
  import {
3
3
  applicationGenerator,
4
4
  applicationSchematic
5
- } from "./chunk-DEX6LTPV.mjs";
5
+ } from "./chunk-TWWKKHQ6.mjs";
6
6
  import {
7
7
  initGenerator,
8
8
  initSchematic
9
- } from "./chunk-FPJU3YOH.mjs";
9
+ } from "./chunk-WSU7RB2N.mjs";
10
10
  import "./chunk-G4ZCI2MN.mjs";
11
11
  import "./chunk-TPNHSNNZ.mjs";
12
12
  import "./chunk-PALWHFOL.mjs";
package/dist/index.js CHANGED
@@ -1,20 +1,20 @@
  "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }require('./chunk-XO66D74Z.js');
- require('./chunk-DUHFLDZ4.js');
+ require('./chunk-JFMSM7WW.js');
  require('./chunk-DHBG5ASJ.js');
 
 
 
- var _chunkYPRUPZL3js = require('./chunk-YPRUPZL3.js');
+ var _chunkV4DY7BGLjs = require('./chunk-V4DY7BGL.js');
 
 
 
- var _chunkOZ44M6HXjs = require('./chunk-OZ44M6HX.js');
+ var _chunkDBH3ARKOjs = require('./chunk-DBH3ARKO.js');
  require('./chunk-KUGEZPUO.js');
  require('./chunk-CVGPWUNP.js');
 
 
 
- var _chunkZWHJ35F5js = require('./chunk-ZWHJ35F5.js');
+ var _chunkMQLXYAAZjs = require('./chunk-MQLXYAAZ.js');
  require('./chunk-QBD2OGUY.js');
  require('./chunk-IVVRVG36.js');
 
@@ -156,4 +156,4 @@ function createPackageJson(projectJsonPath, workspaceRoot) {
 
 
 
- exports.applicationGenerator = _chunkYPRUPZL3js.applicationGenerator; exports.applicationSchematic = _chunkYPRUPZL3js.applicationSchematic; exports.createNodesV2 = createNodesV2; exports.getInternalDependencies = _chunkZWHJ35F5js.getInternalDependencies; exports.initGenerator = _chunkOZ44M6HXjs.initGenerator; exports.initSchematic = _chunkOZ44M6HXjs.initSchematic; exports.name = name; exports.uploadFile = _chunkZWHJ35F5js.uploadFile;
+ exports.applicationGenerator = _chunkV4DY7BGLjs.applicationGenerator; exports.applicationSchematic = _chunkV4DY7BGLjs.applicationSchematic; exports.createNodesV2 = createNodesV2; exports.getInternalDependencies = _chunkMQLXYAAZjs.getInternalDependencies; exports.initGenerator = _chunkDBH3ARKOjs.initGenerator; exports.initSchematic = _chunkDBH3ARKOjs.initSchematic; exports.name = name; exports.uploadFile = _chunkMQLXYAAZjs.uploadFile;
package/dist/index.mjs CHANGED
@@ -1,20 +1,20 @@
  import "./chunk-YSCEY447.mjs";
- import "./chunk-YQGPNO3W.mjs";
+ import "./chunk-Q6BDPVT4.mjs";
  import "./chunk-3J7KBHMJ.mjs";
  import {
  applicationGenerator,
  applicationSchematic
- } from "./chunk-DEX6LTPV.mjs";
+ } from "./chunk-TWWKKHQ6.mjs";
  import {
  initGenerator,
  initSchematic
- } from "./chunk-FPJU3YOH.mjs";
+ } from "./chunk-WSU7RB2N.mjs";
  import "./chunk-SWYYMID7.mjs";
  import "./chunk-7Z5PILRU.mjs";
  import {
  getInternalDependencies,
  uploadFile
- } from "./chunk-UI2F3MMU.mjs";
+ } from "./chunk-GBGETEC6.mjs";
  import "./chunk-WFPKBGV3.mjs";
  import "./chunk-NIOOY3TB.mjs";
  import {
package/dist/src/executors/r2-upload-publish/executor.js CHANGED
@@ -1,12 +1,12 @@
  "use strict";Object.defineProperty(exports, "__esModule", {value: true});
 
- var _chunkDUHFLDZ4js = require('../../../chunk-DUHFLDZ4.js');
+ var _chunkJFMSM7WWjs = require('../../../chunk-JFMSM7WW.js');
  require('../../../chunk-KUGEZPUO.js');
- require('../../../chunk-ZWHJ35F5.js');
+ require('../../../chunk-MQLXYAAZ.js');
  require('../../../chunk-N7FW365Q.js');
  require('../../../chunk-5GILNZWS.js');
  require('../../../chunk-Z2WQB55R.js');
  require('../../../chunk-MCKGQKYU.js');
 
 
- exports.default = _chunkDUHFLDZ4js.runExecutor;
+ exports.default = _chunkJFMSM7WWjs.runExecutor;
package/dist/src/executors/r2-upload-publish/executor.mjs CHANGED
@@ -1,8 +1,8 @@
  import {
  runExecutor
- } from "../../../chunk-YQGPNO3W.mjs";
+ } from "../../../chunk-Q6BDPVT4.mjs";
  import "../../../chunk-SWYYMID7.mjs";
- import "../../../chunk-UI2F3MMU.mjs";
+ import "../../../chunk-GBGETEC6.mjs";
  import "../../../chunk-3MAI3FU2.mjs";
  import "../../../chunk-G4ZCI2MN.mjs";
  import "../../../chunk-TPNHSNNZ.mjs";
package/dist/src/generators/init/generator.js CHANGED
@@ -2,10 +2,10 @@
 
 
 
- var _chunkOZ44M6HXjs = require('../../../chunk-OZ44M6HX.js');
+ var _chunkDBH3ARKOjs = require('../../../chunk-DBH3ARKO.js');
  require('../../../chunk-MCKGQKYU.js');
 
 
 
 
- exports.default = _chunkOZ44M6HXjs.generator_default; exports.initGenerator = _chunkOZ44M6HXjs.initGenerator; exports.initSchematic = _chunkOZ44M6HXjs.initSchematic;
+ exports.default = _chunkDBH3ARKOjs.generator_default; exports.initGenerator = _chunkDBH3ARKOjs.initGenerator; exports.initSchematic = _chunkDBH3ARKOjs.initSchematic;
package/dist/src/generators/init/generator.mjs CHANGED
@@ -2,7 +2,7 @@ import {
  generator_default,
  initGenerator,
  initSchematic
- } from "../../../chunk-FPJU3YOH.mjs";
+ } from "../../../chunk-WSU7RB2N.mjs";
  import "../../../chunk-PALWHFOL.mjs";
  export {
  generator_default as default,
package/dist/src/generators/worker/generator.js CHANGED
@@ -2,8 +2,8 @@
 
 
 
- var _chunkYPRUPZL3js = require('../../../chunk-YPRUPZL3.js');
- require('../../../chunk-OZ44M6HX.js');
+ var _chunkV4DY7BGLjs = require('../../../chunk-V4DY7BGL.js');
+ require('../../../chunk-DBH3ARKO.js');
  require('../../../chunk-5GILNZWS.js');
  require('../../../chunk-Z2WQB55R.js');
  require('../../../chunk-MCKGQKYU.js');
@@ -11,4 +11,4 @@ require('../../../chunk-MCKGQKYU.js');
 
 
 
- exports.applicationGenerator = _chunkYPRUPZL3js.applicationGenerator; exports.applicationSchematic = _chunkYPRUPZL3js.applicationSchematic; exports.default = _chunkYPRUPZL3js.generator_default;
+ exports.applicationGenerator = _chunkV4DY7BGLjs.applicationGenerator; exports.applicationSchematic = _chunkV4DY7BGLjs.applicationSchematic; exports.default = _chunkV4DY7BGLjs.generator_default;
package/dist/src/generators/worker/generator.mjs CHANGED
@@ -2,8 +2,8 @@ import {
  applicationGenerator,
  applicationSchematic,
  generator_default
- } from "../../../chunk-DEX6LTPV.mjs";
- import "../../../chunk-FPJU3YOH.mjs";
+ } from "../../../chunk-TWWKKHQ6.mjs";
+ import "../../../chunk-WSU7RB2N.mjs";
  import "../../../chunk-G4ZCI2MN.mjs";
  import "../../../chunk-TPNHSNNZ.mjs";
  import "../../../chunk-PALWHFOL.mjs";
package/dist/src/utils/index.js CHANGED
@@ -2,10 +2,10 @@
 
 
 
- var _chunkZWHJ35F5js = require('../../chunk-ZWHJ35F5.js');
+ var _chunkMQLXYAAZjs = require('../../chunk-MQLXYAAZ.js');
  require('../../chunk-Z2WQB55R.js');
  require('../../chunk-MCKGQKYU.js');
 
 
 
- exports.getInternalDependencies = _chunkZWHJ35F5js.getInternalDependencies; exports.uploadFile = _chunkZWHJ35F5js.uploadFile;
+ exports.getInternalDependencies = _chunkMQLXYAAZjs.getInternalDependencies; exports.uploadFile = _chunkMQLXYAAZjs.uploadFile;
@@ -2,7 +2,7 @@ import "../../chunk-7Z5PILRU.mjs";
2
2
  import {
3
3
  getInternalDependencies,
4
4
  uploadFile
5
- } from "../../chunk-UI2F3MMU.mjs";
5
+ } from "../../chunk-GBGETEC6.mjs";
6
6
  import "../../chunk-TPNHSNNZ.mjs";
7
7
  import "../../chunk-PALWHFOL.mjs";
8
8
  export {
package/dist/src/utils/r2-bucket-helpers.d.mts CHANGED
@@ -1,4 +1,4 @@
- import { S3 } from '@aws-sdk/client-s3';
+ import { S3Client } from '@aws-sdk/client-s3';
  import { ProjectGraph, ProjectGraphProjectNode } from '@nx/devkit';
 
  /**
@@ -13,7 +13,7 @@ import { ProjectGraph, ProjectGraphProjectNode } from '@nx/devkit';
  * @param contentType - The MIME type of the file content
  * @param isDryRun - Whether to perform a dry run without actual upload
  */
- declare function uploadFile(client: S3, bucketName: string, bucketPath: string | undefined, fileName: string, version: string, fileContent: string, contentType?: string, isDryRun?: boolean): Promise<void>;
+ declare function uploadFile(client: S3Client, bucketName: string, bucketPath: string | undefined, fileName: string, version: string, fileContent: string, contentType?: string, isDryRun?: boolean): Promise<void>;
  /**
  * Get internal dependencies of a project from the project graph
  *
package/dist/src/utils/r2-bucket-helpers.d.ts CHANGED
@@ -1,4 +1,4 @@
- import { S3 } from '@aws-sdk/client-s3';
+ import { S3Client } from '@aws-sdk/client-s3';
  import { ProjectGraph, ProjectGraphProjectNode } from '@nx/devkit';
 
  /**
@@ -13,7 +13,7 @@ import { ProjectGraph, ProjectGraphProjectNode } from '@nx/devkit';
  * @param contentType - The MIME type of the file content
  * @param isDryRun - Whether to perform a dry run without actual upload
  */
- declare function uploadFile(client: S3, bucketName: string, bucketPath: string | undefined, fileName: string, version: string, fileContent: string, contentType?: string, isDryRun?: boolean): Promise<void>;
+ declare function uploadFile(client: S3Client, bucketName: string, bucketPath: string | undefined, fileName: string, version: string, fileContent: string, contentType?: string, isDryRun?: boolean): Promise<void>;
  /**
  * Get internal dependencies of a project from the project graph
  *
package/dist/src/utils/r2-bucket-helpers.js CHANGED
@@ -1,10 +1,10 @@
  "use strict";Object.defineProperty(exports, "__esModule", {value: true});
 
 
- var _chunkZWHJ35F5js = require('../../chunk-ZWHJ35F5.js');
+ var _chunkMQLXYAAZjs = require('../../chunk-MQLXYAAZ.js');
  require('../../chunk-Z2WQB55R.js');
  require('../../chunk-MCKGQKYU.js');
 
 
 
- exports.getInternalDependencies = _chunkZWHJ35F5js.getInternalDependencies; exports.uploadFile = _chunkZWHJ35F5js.uploadFile;
+ exports.getInternalDependencies = _chunkMQLXYAAZjs.getInternalDependencies; exports.uploadFile = _chunkMQLXYAAZjs.uploadFile;
package/dist/src/utils/r2-bucket-helpers.mjs CHANGED
@@ -1,7 +1,7 @@
  import {
  getInternalDependencies,
  uploadFile
- } from "../../chunk-UI2F3MMU.mjs";
+ } from "../../chunk-GBGETEC6.mjs";
  import "../../chunk-TPNHSNNZ.mjs";
  import "../../chunk-PALWHFOL.mjs";
  export {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@storm-software/cloudflare-tools",
- "version": "0.71.30",
+ "version": "0.71.31",
  "description": "A Nx plugin package that contains various executors, generators, and utilities that assist in managing Cloudflare services.",
  "repository": {
  "type": "github",
@@ -134,10 +134,12 @@
  },
  "dependencies": {
  "@aws-sdk/client-s3": "^3.948.0",
+ "@aws-sdk/lib-storage": "^3.954.0",
  "@smithy/node-http-handler": "^4.4.5",
  "defu": "6.1.4",
  "glob": "^11.1.0",
- "mime-types": "^3.0.2"
+ "mime-types": "^3.0.2",
+ "pretty-bytes": "^7.1.0"
  },
  "devDependencies": {
  "@nx/devkit": "^22.2.3",
@@ -153,5 +155,5 @@
  "publishConfig": { "access": "public" },
  "executors": "./executors.json",
  "generators": "./generators.json",
- "gitHead": "f1a6c01c03133af11e541f3817836f0e231de26d"
+ "gitHead": "7f759c57f99e6cf104b110848fdf4d319e5886c1"
  }
package/dist/chunk-RGRCKWGN.mjs DELETED
@@ -1,225 +0,0 @@
- import {
- createCliOptions,
- getPackageInfo
- } from "./chunk-3MAI3FU2.mjs";
- import {
- findWorkspaceRoot,
- getConfig
- } from "./chunk-G4ZCI2MN.mjs";
- import {
- createHttpHandler
- } from "./chunk-SWYYMID7.mjs";
- import {
- getInternalDependencies,
- uploadFile
- } from "./chunk-UI2F3MMU.mjs";
- import {
- correctPaths,
- joinPaths,
- writeDebug,
- writeSuccess,
- writeTrace,
- writeWarning
- } from "./chunk-TPNHSNNZ.mjs";
-
- // src/executors/r2-upload-publish/executor.ts
- import { S3 } from "@aws-sdk/client-s3";
- import {
- createProjectGraphAsync,
- readCachedProjectGraph
- } from "@nx/devkit";
- import { glob } from "glob";
- import mime from "mime-types";
- import { execSync } from "node:child_process";
- import { statSync } from "node:fs";
- import { readFile } from "node:fs/promises";
- async function runExecutor(options, context) {
- const isDryRun = process.env.NX_DRY_RUN === "true" || options.dryRun || false;
- if (!context.projectName) {
- throw new Error("The executor requires a projectName.");
- }
- if (!options.path) {
- throw new Error("The executor requires the `path` option to upload.");
- }
- console.info(
- `\u{1F680} Running Storm Cloudflare Publish executor on the ${context.projectName} worker`
- );
- if (!context.projectName || !context.projectsConfigurations?.projects || !context.projectsConfigurations.projects[context.projectName] || !context.projectsConfigurations.projects[context.projectName]?.root) {
- throw new Error("The executor requires projectsConfigurations.");
- }
- try {
- const workspaceRoot = findWorkspaceRoot();
- const config = await getConfig(workspaceRoot);
- const projectName = context.projectsConfigurations.projects[context.projectName]?.name ?? context.projectName;
- const projectDetails = getPackageInfo(
- context.projectsConfigurations.projects[context.projectName]
- );
- const bucketId = options.bucketId;
- const bucketPath = options.bucketPath || "/";
- if (!bucketId) {
- throw new Error("The executor requires a bucketId.");
- }
- const args = createCliOptions({ ...options });
- if (isDryRun) {
- args.push("--dry-run");
- }
- const cloudflareAccountId = process.env.CLOUDFLARE_ACCOUNT_ID || process.env.STORM_BOT_CLOUDFLARE_ACCOUNT;
- if (!options?.registry && !cloudflareAccountId) {
- throw new Error(
- "The registry option and `CLOUDFLARE_ACCOUNT_ID` (or `STORM_BOT_CLOUDFLARE_ACCOUNT`) environment variable are not set. Please set one of these values to upload to the Cloudflare R2 bucket."
- );
- }
- if (!process.env.STORM_BOT_ACCESS_KEY_ID && !process.env.ACCESS_KEY_ID && !process.env.CLOUDFLARE_ACCESS_KEY_ID && !process.env.AWS_ACCESS_KEY_ID || !process.env.STORM_BOT_SECRET_ACCESS_KEY && !process.env.CLOUDFLARE_SECRET_ACCESS_KEY && !process.env.SECRET_ACCESS_KEY && !process.env.AWS_SECRET_ACCESS_KEY) {
- throw new Error(
- "The `ACCESS_KEY_ID` (or `STORM_BOT_ACCESS_KEY_ID`) and `SECRET_ACCESS_KEY` (or `STORM_BOT_SECRET_ACCESS_KEY`) environment variables are not set. Please set these environment variables to upload to the Cloudflare R2 bucket."
- );
- }
- const registry = options?.registry ? options.registry : `https://${cloudflareAccountId}.r2.cloudflarestorage.com`;
- let projectGraph;
- try {
- projectGraph = readCachedProjectGraph();
- } catch {
- await createProjectGraphAsync();
- projectGraph = readCachedProjectGraph();
- }
- if (!projectGraph) {
- throw new Error(
- "The executor failed because the project graph is not available. Please run the build command again."
- );
- }
- writeDebug(
- `Publishing ${context.projectName} to the ${bucketId} R2 Bucket (at ${registry})`
- );
- const client = new S3({
- region: "auto",
- endpoint: registry,
- credentials: {
- // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
- accessKeyId: process.env.STORM_BOT_ACCESS_KEY_ID || process.env.CLOUDFLARE_ACCESS_KEY_ID || process.env.AWS_ACCESS_KEY_ID || process.env.ACCESS_KEY_ID,
- // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
- secretAccessKey: process.env.STORM_BOT_SECRET_ACCESS_KEY || process.env.CLOUDFLARE_SECRET_ACCESS_KEY || process.env.AWS_SECRET_ACCESS_KEY || process.env.SECRET_ACCESS_KEY
- },
- requestHandler: createHttpHandler()
- });
- const version = projectDetails?.content?.version;
- if (version) {
- writeDebug(`Starting upload version ${version}`);
- }
- const basePath = options.path;
- const files = await glob(joinPaths(basePath, "**/*"), {
- ignore: "**/{*.stories.tsx,*.stories.ts,*.spec.tsx,*.spec.ts}"
- });
- const internalDependencies = await getInternalDependencies(
- context.projectName,
- projectGraph
- );
- const dependencies = internalDependencies.filter(
- (projectNode) => !projectNode.data.tags || projectNode.data.tags.every((tag) => tag.toLowerCase() !== "component")
- ).reduce((ret, dep) => {
- if (!ret[dep.name]) {
- ret[dep.name] = "latest";
- }
- return ret;
- }, projectDetails?.content.dependencies ?? {});
- const release = options.tag ?? execSync("npm config get tag").toString().trim();
- if (options.clean === true) {
- writeDebug(`Clearing out existing items in ${bucketPath}`);
- if (!isDryRun) {
- const response = await client.listObjects({
- Bucket: bucketId,
- Prefix: !bucketPath || bucketPath === "/" ? void 0 : bucketPath
- });
- if (response?.Contents && response.Contents.length > 0) {
- writeTrace(
- `Deleting the following existing items from the R2 bucket path ${bucketPath}: ${response.Contents.map((item) => item.Key).join(", ")}`
- );
- await client.deleteObjects({
- Bucket: bucketId,
- Delete: {
- Objects: response.Contents.map((item) => ({
- Key: item.Key
- })),
- Quiet: false
- }
- });
- } else {
- writeDebug(
- `No existing items to delete in the R2 bucket path ${bucketPath}`
- );
- }
- } else {
- writeWarning("[Dry run]: Skipping R2 bucket clean.");
- }
- }
- if (options.writeMetaJson === true) {
- const meta = {
- name: context.projectName,
- version,
- release,
- description: projectDetails?.content?.description,
- tags: projectDetails?.content?.keywords,
- dependencies,
- devDependencies: null,
- internalDependencies: internalDependencies.filter(
- (projectNode) => projectNode.data.tags && projectNode.data.tags.some(
- (tag) => tag.toLowerCase() === "component"
- )
- ).map((dep) => dep.name)
- };
- if (projectDetails?.type === "package.json") {
- meta.devDependencies = projectDetails?.content?.devDependencies;
- }
- await uploadFile(
- client,
- bucketId,
- bucketPath,
- "meta.json",
- version,
- JSON.stringify(meta),
- "application/json",
- isDryRun
- );
- }
- await Promise.all(
- files.map(async (file) => {
- if (statSync(file, {
- throwIfNoEntry: false
- })?.isFile()) {
- const name = correctPaths(file).replace(correctPaths(basePath), "");
- const type = mime.lookup(name) || "application/octet-stream";
- await uploadFile(
- client,
- bucketId,
- bucketPath,
- name,
- version,
- type === "application/json" || type.includes("text") ? await readFile(file, "utf8") : `data:${type};base64,${Buffer.from(
- await readFile(file, "binary"),
- "binary"
- ).toString("base64")}`,
- type,
- isDryRun
- );
- }
- })
- );
- writeSuccess(
- `Successfully uploaded the ${projectName} project to the Cloudflare R2 bucket.`,
- config
- );
- return {
- success: true
- };
- } catch (error) {
- console.error("Failed to publish to Cloudflare R2 bucket");
- console.error(error);
- console.log("");
- return {
- success: false
- };
- }
- }
-
- export {
- runExecutor
- };
package/dist/chunk-UI2F3MMU.mjs DELETED
@@ -1,52 +0,0 @@
- import {
- joinPaths,
- writeDebug,
- writeWarning
- } from "./chunk-TPNHSNNZ.mjs";
-
- // src/utils/r2-bucket-helpers.ts
- import { createHash } from "node:crypto";
- async function uploadFile(client, bucketName, bucketPath, fileName, version, fileContent, contentType = "application/octet-stream", isDryRun = false) {
- const key = (!bucketPath?.trim() || bucketPath?.trim() === "/" ? fileName : joinPaths(bucketPath.trim(), fileName))?.replace(/^\/+/g, "") || "";
- writeDebug(
- `Uploading ${key} (content-type: ${contentType}) to the ${bucketName} R2 bucket`
- );
- if (!isDryRun) {
- await client.putObject(
- {
- Bucket: bucketName,
- Key: key,
- Body: fileContent,
- ContentType: contentType,
- Metadata: {
- version,
- checksum: createHash("sha256").update(fileContent).digest("base64")
- }
- },
- {
- requestTimeout: 15 * 60 * 1e3
- // 15 minutes
- }
- );
- } else {
- writeWarning("[Dry run]: Skipping upload to the R2 bucket.");
- }
- }
- function getInternalDependencies(projectName, graph) {
- const allDeps = graph.dependencies[projectName] ?? [];
- return Array.from(
- allDeps.reduce(
- (acc, node) => {
- const found = graph.nodes[node.target];
- if (found) acc.push(found);
- return acc;
- },
- []
- )
- );
- }
-
- export {
- uploadFile,
- getInternalDependencies
- };
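For contrast with the new chunk-GBGETEC6.mjs/chunk-MQLXYAAZ.js helpers earlier in this diff: the deleted helper above sent the whole body in one `putObject` call guarded by a 15-minute `requestTimeout`, whereas the replacement delegates to `Upload` from `@aws-sdk/lib-storage`. A side-by-side sketch of the two styles, where the bucket and key names are illustrative only:

```ts
import { PutObjectCommand, S3Client } from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";

// Old style (removed): a single PutObject request; large bodies are
// bounded by one long per-request timeout.
async function putOnce(client: S3Client, body: Buffer) {
  await client.send(
    new PutObjectCommand({ Bucket: "my-bucket", Key: "big.bin", Body: body })
  );
}

// New style: Upload can split large bodies into multipart uploads, so
// no extended per-request timeout is required.
async function putMultipart(client: S3Client, body: Buffer) {
  await new Upload({
    client,
    params: { Bucket: "my-bucket", Key: "big.bin", Body: body }
  }).done();
}
```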
package/dist/chunk-ZBNASCRJ.js DELETED
@@ -1,225 +0,0 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }
-
-
- var _chunkN7FW365Qjs = require('./chunk-N7FW365Q.js');
-
-
-
- var _chunk5GILNZWSjs = require('./chunk-5GILNZWS.js');
-
-
- var _chunkKUGEZPUOjs = require('./chunk-KUGEZPUO.js');
-
-
-
- var _chunkZWHJ35F5js = require('./chunk-ZWHJ35F5.js');
-
-
-
-
-
-
-
- var _chunkZ2WQB55Rjs = require('./chunk-Z2WQB55R.js');
-
- // src/executors/r2-upload-publish/executor.ts
- var _clients3 = require('@aws-sdk/client-s3');
-
-
-
- var _devkit = require('@nx/devkit');
- var _glob = require('glob');
- var _mimetypes = require('mime-types'); var _mimetypes2 = _interopRequireDefault(_mimetypes);
- var _child_process = require('child_process');
- var _fs = require('fs');
- var _promises = require('fs/promises');
- async function runExecutor(options, context) {
- const isDryRun = process.env.NX_DRY_RUN === "true" || options.dryRun || false;
- if (!context.projectName) {
- throw new Error("The executor requires a projectName.");
- }
- if (!options.path) {
- throw new Error("The executor requires the `path` option to upload.");
- }
- console.info(
- `\u{1F680} Running Storm Cloudflare Publish executor on the ${context.projectName} worker`
- );
- if (!context.projectName || !_optionalChain([context, 'access', _ => _.projectsConfigurations, 'optionalAccess', _2 => _2.projects]) || !context.projectsConfigurations.projects[context.projectName] || !_optionalChain([context, 'access', _3 => _3.projectsConfigurations, 'access', _4 => _4.projects, 'access', _5 => _5[context.projectName], 'optionalAccess', _6 => _6.root])) {
- throw new Error("The executor requires projectsConfigurations.");
- }
- try {
- const workspaceRoot = _chunk5GILNZWSjs.findWorkspaceRoot.call(void 0, );
- const config = await _chunk5GILNZWSjs.getConfig.call(void 0, workspaceRoot);
- const projectName = _nullishCoalesce(_optionalChain([context, 'access', _7 => _7.projectsConfigurations, 'access', _8 => _8.projects, 'access', _9 => _9[context.projectName], 'optionalAccess', _10 => _10.name]), () => ( context.projectName));
- const projectDetails = _chunkN7FW365Qjs.getPackageInfo.call(void 0,
- context.projectsConfigurations.projects[context.projectName]
- );
- const bucketId = options.bucketId;
- const bucketPath = options.bucketPath || "/";
- if (!bucketId) {
- throw new Error("The executor requires a bucketId.");
- }
- const args = _chunkN7FW365Qjs.createCliOptions.call(void 0, { ...options });
- if (isDryRun) {
- args.push("--dry-run");
- }
- const cloudflareAccountId = process.env.CLOUDFLARE_ACCOUNT_ID || process.env.STORM_BOT_CLOUDFLARE_ACCOUNT;
- if (!_optionalChain([options, 'optionalAccess', _11 => _11.registry]) && !cloudflareAccountId) {
- throw new Error(
- "The registry option and `CLOUDFLARE_ACCOUNT_ID` (or `STORM_BOT_CLOUDFLARE_ACCOUNT`) environment variable are not set. Please set one of these values to upload to the Cloudflare R2 bucket."
- );
- }
- if (!process.env.STORM_BOT_ACCESS_KEY_ID && !process.env.ACCESS_KEY_ID && !process.env.CLOUDFLARE_ACCESS_KEY_ID && !process.env.AWS_ACCESS_KEY_ID || !process.env.STORM_BOT_SECRET_ACCESS_KEY && !process.env.CLOUDFLARE_SECRET_ACCESS_KEY && !process.env.SECRET_ACCESS_KEY && !process.env.AWS_SECRET_ACCESS_KEY) {
- throw new Error(
- "The `ACCESS_KEY_ID` (or `STORM_BOT_ACCESS_KEY_ID`) and `SECRET_ACCESS_KEY` (or `STORM_BOT_SECRET_ACCESS_KEY`) environment variables are not set. Please set these environment variables to upload to the Cloudflare R2 bucket."
- );
- }
- const registry = _optionalChain([options, 'optionalAccess', _12 => _12.registry]) ? options.registry : `https://${cloudflareAccountId}.r2.cloudflarestorage.com`;
- let projectGraph;
- try {
- projectGraph = _devkit.readCachedProjectGraph.call(void 0, );
- } catch (e) {
- await _devkit.createProjectGraphAsync.call(void 0, );
- projectGraph = _devkit.readCachedProjectGraph.call(void 0, );
- }
- if (!projectGraph) {
- throw new Error(
- "The executor failed because the project graph is not available. Please run the build command again."
- );
- }
- _chunkZ2WQB55Rjs.writeDebug.call(void 0,
- `Publishing ${context.projectName} to the ${bucketId} R2 Bucket (at ${registry})`
- );
- const client = new (0, _clients3.S3)({
- region: "auto",
- endpoint: registry,
- credentials: {
- // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
- accessKeyId: process.env.STORM_BOT_ACCESS_KEY_ID || process.env.CLOUDFLARE_ACCESS_KEY_ID || process.env.AWS_ACCESS_KEY_ID || process.env.ACCESS_KEY_ID,
- // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
- secretAccessKey: process.env.STORM_BOT_SECRET_ACCESS_KEY || process.env.CLOUDFLARE_SECRET_ACCESS_KEY || process.env.AWS_SECRET_ACCESS_KEY || process.env.SECRET_ACCESS_KEY
- },
- requestHandler: _chunkKUGEZPUOjs.createHttpHandler.call(void 0, )
- });
- const version = _optionalChain([projectDetails, 'optionalAccess', _13 => _13.content, 'optionalAccess', _14 => _14.version]);
- if (version) {
- _chunkZ2WQB55Rjs.writeDebug.call(void 0, `Starting upload version ${version}`);
- }
- const basePath = options.path;
- const files = await _glob.glob.call(void 0, _chunkZ2WQB55Rjs.joinPaths.call(void 0, basePath, "**/*"), {
- ignore: "**/{*.stories.tsx,*.stories.ts,*.spec.tsx,*.spec.ts}"
- });
- const internalDependencies = await _chunkZWHJ35F5js.getInternalDependencies.call(void 0,
- context.projectName,
- projectGraph
- );
- const dependencies = internalDependencies.filter(
- (projectNode) => !projectNode.data.tags || projectNode.data.tags.every((tag) => tag.toLowerCase() !== "component")
- ).reduce((ret, dep) => {
- if (!ret[dep.name]) {
- ret[dep.name] = "latest";
- }
- return ret;
- }, _nullishCoalesce(_optionalChain([projectDetails, 'optionalAccess', _15 => _15.content, 'access', _16 => _16.dependencies]), () => ( {})));
- const release = _nullishCoalesce(options.tag, () => ( _child_process.execSync.call(void 0, "npm config get tag").toString().trim()));
- if (options.clean === true) {
- _chunkZ2WQB55Rjs.writeDebug.call(void 0, `Clearing out existing items in ${bucketPath}`);
- if (!isDryRun) {
- const response = await client.listObjects({
- Bucket: bucketId,
- Prefix: !bucketPath || bucketPath === "/" ? void 0 : bucketPath
- });
- if (_optionalChain([response, 'optionalAccess', _17 => _17.Contents]) && response.Contents.length > 0) {
- _chunkZ2WQB55Rjs.writeTrace.call(void 0,
- `Deleting the following existing items from the R2 bucket path ${bucketPath}: ${response.Contents.map((item) => item.Key).join(", ")}`
- );
- await client.deleteObjects({
- Bucket: bucketId,
- Delete: {
- Objects: response.Contents.map((item) => ({
- Key: item.Key
- })),
- Quiet: false
- }
- });
- } else {
- _chunkZ2WQB55Rjs.writeDebug.call(void 0,
- `No existing items to delete in the R2 bucket path ${bucketPath}`
- );
- }
- } else {
- _chunkZ2WQB55Rjs.writeWarning.call(void 0, "[Dry run]: Skipping R2 bucket clean.");
- }
- }
- if (options.writeMetaJson === true) {
- const meta = {
- name: context.projectName,
- version,
- release,
- description: _optionalChain([projectDetails, 'optionalAccess', _18 => _18.content, 'optionalAccess', _19 => _19.description]),
- tags: _optionalChain([projectDetails, 'optionalAccess', _20 => _20.content, 'optionalAccess', _21 => _21.keywords]),
- dependencies,
- devDependencies: null,
- internalDependencies: internalDependencies.filter(
- (projectNode) => projectNode.data.tags && projectNode.data.tags.some(
- (tag) => tag.toLowerCase() === "component"
- )
- ).map((dep) => dep.name)
- };
- if (_optionalChain([projectDetails, 'optionalAccess', _22 => _22.type]) === "package.json") {
- meta.devDependencies = _optionalChain([projectDetails, 'optionalAccess', _23 => _23.content, 'optionalAccess', _24 => _24.devDependencies]);
- }
- await _chunkZWHJ35F5js.uploadFile.call(void 0,
- client,
- bucketId,
- bucketPath,
- "meta.json",
- version,
- JSON.stringify(meta),
- "application/json",
- isDryRun
- );
- }
- await Promise.all(
- files.map(async (file) => {
- if (_optionalChain([_fs.statSync.call(void 0, file, {
- throwIfNoEntry: false
- }), 'optionalAccess', _25 => _25.isFile, 'call', _26 => _26()])) {
- const name = _chunkZ2WQB55Rjs.correctPaths.call(void 0, file).replace(_chunkZ2WQB55Rjs.correctPaths.call(void 0, basePath), "");
- const type = _mimetypes2.default.lookup(name) || "application/octet-stream";
- await _chunkZWHJ35F5js.uploadFile.call(void 0,
- client,
- bucketId,
- bucketPath,
- name,
- version,
- type === "application/json" || type.includes("text") ? await _promises.readFile.call(void 0, file, "utf8") : `data:${type};base64,${Buffer.from(
- await _promises.readFile.call(void 0, file, "binary"),
- "binary"
- ).toString("base64")}`,
- type,
- isDryRun
- );
- }
- })
- );
- _chunkZ2WQB55Rjs.writeSuccess.call(void 0,
- `Successfully uploaded the ${projectName} project to the Cloudflare R2 bucket.`,
- config
- );
- return {
- success: true
- };
- } catch (error) {
- console.error("Failed to publish to Cloudflare R2 bucket");
- console.error(error);
- console.log("");
- return {
- success: false
- };
- }
- }
-
-
-
- exports.runExecutor = runExecutor;
package/dist/chunk-ZWHJ35F5.js DELETED
@@ -1,52 +0,0 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }
-
-
-
- var _chunkZ2WQB55Rjs = require('./chunk-Z2WQB55R.js');
-
- // src/utils/r2-bucket-helpers.ts
- var _crypto = require('crypto');
- async function uploadFile(client, bucketName, bucketPath, fileName, version, fileContent, contentType = "application/octet-stream", isDryRun = false) {
- const key = _optionalChain([(!_optionalChain([bucketPath, 'optionalAccess', _ => _.trim, 'call', _2 => _2()]) || _optionalChain([bucketPath, 'optionalAccess', _3 => _3.trim, 'call', _4 => _4()]) === "/" ? fileName : _chunkZ2WQB55Rjs.joinPaths.call(void 0, bucketPath.trim(), fileName)), 'optionalAccess', _5 => _5.replace, 'call', _6 => _6(/^\/+/g, "")]) || "";
- _chunkZ2WQB55Rjs.writeDebug.call(void 0,
- `Uploading ${key} (content-type: ${contentType}) to the ${bucketName} R2 bucket`
- );
- if (!isDryRun) {
- await client.putObject(
- {
- Bucket: bucketName,
- Key: key,
- Body: fileContent,
- ContentType: contentType,
- Metadata: {
- version,
- checksum: _crypto.createHash.call(void 0, "sha256").update(fileContent).digest("base64")
- }
- },
- {
- requestTimeout: 15 * 60 * 1e3
- // 15 minutes
- }
- );
- } else {
- _chunkZ2WQB55Rjs.writeWarning.call(void 0, "[Dry run]: Skipping upload to the R2 bucket.");
- }
- }
- function getInternalDependencies(projectName, graph) {
- const allDeps = _nullishCoalesce(graph.dependencies[projectName], () => ( []));
- return Array.from(
- allDeps.reduce(
- (acc, node) => {
- const found = graph.nodes[node.target];
- if (found) acc.push(found);
- return acc;
- },
- []
- )
- );
- }
-
-
-
-
- exports.uploadFile = uploadFile; exports.getInternalDependencies = getInternalDependencies;