@storm-software/cloudflare-tools 0.71.29 → 0.71.31
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/dist/{chunk-KKU7RDA6.js → chunk-DBH3ARKO.js} +4 -2
- package/dist/chunk-GBGETEC6.mjs +60 -0
- package/dist/{chunk-YXUE6D4W.js → chunk-JFMSM7WW.js} +27 -21
- package/dist/chunk-MQLXYAAZ.js +60 -0
- package/dist/{chunk-ODJ23C2Z.mjs → chunk-Q6BDPVT4.mjs} +25 -19
- package/dist/{chunk-HF46WK23.mjs → chunk-TWWKKHQ6.mjs} +1 -1
- package/dist/{chunk-TYB3YAPJ.js → chunk-V4DY7BGL.js} +2 -2
- package/dist/{chunk-EBBMMF2H.mjs → chunk-WSU7RB2N.mjs} +4 -2
- package/dist/executors.js +2 -2
- package/dist/executors.mjs +2 -2
- package/dist/generators.js +3 -3
- package/dist/generators.mjs +2 -2
- package/dist/index.js +5 -5
- package/dist/index.mjs +4 -4
- package/dist/src/executors/r2-upload-publish/executor.js +3 -3
- package/dist/src/executors/r2-upload-publish/executor.mjs +2 -2
- package/dist/src/generators/init/generator.js +2 -2
- package/dist/src/generators/init/generator.mjs +1 -1
- package/dist/src/generators/worker/generator.js +3 -3
- package/dist/src/generators/worker/generator.mjs +2 -2
- package/dist/src/utils/index.js +2 -2
- package/dist/src/utils/index.mjs +1 -1
- package/dist/src/utils/r2-bucket-helpers.d.mts +2 -2
- package/dist/src/utils/r2-bucket-helpers.d.ts +2 -2
- package/dist/src/utils/r2-bucket-helpers.js +2 -2
- package/dist/src/utils/r2-bucket-helpers.mjs +1 -1
- package/package.json +5 -3
- package/dist/chunk-UI2F3MMU.mjs +0 -52
- package/dist/chunk-ZWHJ35F5.js +0 -52
package/CHANGELOG.md
CHANGED
@@ -2,6 +2,20 @@
 
 # Changelog for Storm Ops - Cloudflare Tools
 
+## [0.71.30](https://github.com/storm-software/storm-ops/releases/tag/cloudflare-tools%400.71.30) (12/18/2025)
+
+### Bug Fixes
+
+- **cloudflare-tools:** Ensure data is uploaded as a data URL
+  ([b992fc806](https://github.com/storm-software/storm-ops/commit/b992fc806))
+
+## [0.71.29](https://github.com/storm-software/storm-ops/releases/tag/cloudflare-tools%400.71.29) (12/18/2025)
+
+### Bug Fixes
+
+- **cloudflare-tools:** Ensure data is encoded in base64 prior to upload
+  ([cb7de3b06](https://github.com/storm-software/storm-ops/commit/cb7de3b06))
+
 ## [0.71.28](https://github.com/storm-software/storm-ops/releases/tag/cloudflare-tools%400.71.28) (12/17/2025)
 
 ### Bug Fixes
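The two new entries describe how file contents are encoded on their way to R2: 0.71.29 base64-encodes the payload before upload, and 0.71.30 uploads it as a data URL. A rough illustration of those two encodings in Node (illustrative values only, not code from this package; note that the 0.71.31 dist shown below passes the raw UTF-8 buffer instead):

```ts
// Illustrative sketch of the encodings named in the changelog entries.
const raw = Buffer.from("console.log('hi');", "utf8");

// 0.71.29: "Ensure data is encoded in base64 prior to upload"
const base64 = raw.toString("base64");

// 0.71.30: "Ensure data is uploaded as a data URL"
const dataUrl = `data:application/javascript;base64,${base64}`;
```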
package/dist/{chunk-KKU7RDA6.js → chunk-DBH3ARKO.js}
CHANGED

@@ -7,7 +7,7 @@ var require_package = _chunkMCKGQKYUjs.__commonJS.call(void 0, {
   "package.json"(exports, module) {
     module.exports = {
       name: "@storm-software/cloudflare-tools",
-      version: "0.71.
+      version: "0.71.30",
       description: "A Nx plugin package that contains various executors, generators, and utilities that assist in managing Cloudflare services.",
       repository: {
         type: "github",
@@ -165,10 +165,12 @@ var require_package = _chunkMCKGQKYUjs.__commonJS.call(void 0, {
       },
       dependencies: {
         "@aws-sdk/client-s3": "^3.948.0",
+        "@aws-sdk/lib-storage": "^3.954.0",
         "@smithy/node-http-handler": "^4.4.5",
         defu: "catalog:",
         glob: "catalog:",
-        "mime-types": "^3.0.2"
+        "mime-types": "^3.0.2",
+        "pretty-bytes": "^7.1.0"
       },
       devDependencies: {
         "@nx/devkit": "catalog:",
package/dist/chunk-GBGETEC6.mjs
ADDED

@@ -0,0 +1,60 @@
+import {
+  joinPaths,
+  writeDebug,
+  writeError,
+  writeWarning
+} from "./chunk-TPNHSNNZ.mjs";
+
+// src/utils/r2-bucket-helpers.ts
+import { Upload } from "@aws-sdk/lib-storage";
+import { createHash } from "node:crypto";
+import prettyBytes from "pretty-bytes";
+async function uploadFile(client, bucketName, bucketPath, fileName, version, fileContent, contentType = "application/octet-stream", isDryRun = false) {
+  const key = (!bucketPath?.trim() || bucketPath?.trim() === "/" ? fileName : joinPaths(bucketPath.trim(), fileName))?.replace(/^\/+/g, "") || "";
+  writeDebug(
+    `Uploading ${key} (content-type: ${contentType}, size: ${prettyBytes(
+      Buffer.byteLength(fileContent, "utf8")
+    )}) to the ${bucketName} R2 bucket`
+  );
+  try {
+    if (!isDryRun) {
+      const upload = new Upload({
+        client,
+        params: {
+          Bucket: bucketName,
+          Key: key,
+          Body: Buffer.from(fileContent, "utf8"),
+          ContentType: contentType,
+          Metadata: {
+            version,
+            checksum: createHash("sha256").update(fileContent).digest("base64")
+          }
+        }
+      });
+      await upload.done();
+    } else {
+      writeWarning("[Dry run]: Skipping upload to the R2 bucket.");
+    }
+  } catch (error) {
+    writeError(`Failed to upload ${key} to the ${bucketName} R2 bucket.`);
+    throw error;
+  }
+}
+function getInternalDependencies(projectName, graph) {
+  const allDeps = graph.dependencies[projectName] ?? [];
+  return Array.from(
+    allDeps.reduce(
+      (acc, node) => {
+        const found = graph.nodes[node.target];
+        if (found) acc.push(found);
+        return acc;
+      },
+      []
+    )
+  );
+}
+
+export {
+  uploadFile,
+  getInternalDependencies
+};
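The replacement helper delegates to `Upload` from `@aws-sdk/lib-storage`, which manages multipart uploads for large bodies, and records the package version plus a base64 SHA-256 checksum as object metadata. A minimal usage sketch against R2's S3-compatible endpoint (the bucket name, account ID, and environment variable names are placeholders, not values from this package):

```ts
import { S3Client } from "@aws-sdk/client-s3";
import { uploadFile } from "@storm-software/cloudflare-tools";

// R2 speaks the S3 API; "auto" is the region value R2 expects.
const client = new S3Client({
  region: "auto",
  endpoint: "https://<ACCOUNT_ID>.r2.cloudflarestorage.com", // placeholder
  credentials: {
    accessKeyId: process.env.R2_ACCESS_KEY_ID ?? "", // placeholder
    secretAccessKey: process.env.R2_SECRET_ACCESS_KEY ?? "" // placeholder
  }
});

// Signature per the updated r2-bucket-helpers.d.ts below:
// uploadFile(client, bucketName, bucketPath, fileName, version,
//            fileContent, contentType?, isDryRun?)
await uploadFile(
  client,
  "my-bucket",          // placeholder bucket
  "packages/example",   // key prefix inside the bucket
  "index.js",
  "1.0.0",              // stored as `version` metadata
  "console.log('hi');", // UTF-8 string body
  "application/javascript"
);
```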
package/dist/{chunk-YXUE6D4W.js → chunk-JFMSM7WW.js}
CHANGED

@@ -4,7 +4,7 @@ var _chunkKUGEZPUOjs = require('./chunk-KUGEZPUO.js');
 
 
 
-var
+var _chunkMQLXYAAZjs = require('./chunk-MQLXYAAZ.js');
 
 
 
@@ -23,6 +23,10 @@ var _chunk5GILNZWSjs = require('./chunk-5GILNZWS.js');
 var _chunkZ2WQB55Rjs = require('./chunk-Z2WQB55R.js');
 
 // src/executors/r2-upload-publish/executor.ts
+
+
+
+
 var _clients3 = require('@aws-sdk/client-s3');
 
 
@@ -90,7 +94,7 @@ async function runExecutor(options, context) {
   _chunkZ2WQB55Rjs.writeDebug.call(void 0,
     `Publishing ${context.projectName} to the ${bucketId} R2 Bucket (at ${registry})`
   );
-  const client = new (0, _clients3.
+  const client = new (0, _clients3.S3Client)({
     region: "auto",
     endpoint: registry,
     credentials: {
@@ -109,7 +113,7 @@ async function runExecutor(options, context) {
   const files = await _glob.glob.call(void 0, _chunkZ2WQB55Rjs.joinPaths.call(void 0, basePath, "**/*"), {
     ignore: "**/{*.stories.tsx,*.stories.ts,*.spec.tsx,*.spec.ts}"
   });
-  const internalDependencies = await
+  const internalDependencies = await _chunkMQLXYAAZjs.getInternalDependencies.call(void 0,
     context.projectName,
     projectGraph
   );
@@ -125,23 +129,27 @@ async function runExecutor(options, context) {
   if (options.clean === true) {
     _chunkZ2WQB55Rjs.writeDebug.call(void 0, `Clearing out existing items in ${bucketPath}`);
     if (!isDryRun) {
-      const response = await client.
-
-
-
+      const response = await client.send(
+        new (0, _clients3.ListObjectsCommand)({
+          Bucket: bucketId,
+          Prefix: !bucketPath || bucketPath === "/" ? void 0 : bucketPath
+        })
+      );
       if (_optionalChain([response, 'optionalAccess', _17 => _17.Contents]) && response.Contents.length > 0) {
         _chunkZ2WQB55Rjs.writeTrace.call(void 0,
           `Deleting the following existing items from the R2 bucket path ${bucketPath}: ${response.Contents.map((item) => item.Key).join(", ")}`
         );
-        await client.
-
-
-
-
-
-
-
+        await client.send(
+          new (0, _clients3.DeleteObjectsCommand)({
+            Bucket: bucketId,
+            Delete: {
+              Objects: response.Contents.map((item) => ({
+                Key: item.Key
+              })),
+              Quiet: false
+            }
+          })
+        );
       } else {
         _chunkZ2WQB55Rjs.writeDebug.call(void 0,
           `No existing items to delete in the R2 bucket path ${bucketPath}`
@@ -169,7 +177,7 @@ async function runExecutor(options, context) {
   if (_optionalChain([projectDetails, 'optionalAccess', _22 => _22.type]) === "package.json") {
     meta.devDependencies = _optionalChain([projectDetails, 'optionalAccess', _23 => _23.content, 'optionalAccess', _24 => _24.devDependencies]);
   }
-  await
+  await _chunkMQLXYAAZjs.uploadFile.call(void 0,
     client,
     bucketId,
     bucketPath,
@@ -187,15 +195,13 @@ async function runExecutor(options, context) {
   }), 'optionalAccess', _25 => _25.isFile, 'call', _26 => _26()])) {
     const name = _chunkZ2WQB55Rjs.correctPaths.call(void 0, file).replace(_chunkZ2WQB55Rjs.correctPaths.call(void 0, basePath), "");
     const type = _mimetypes2.default.lookup(name) || "application/octet-stream";
-    await
+    await _chunkMQLXYAAZjs.uploadFile.call(void 0,
       client,
       bucketId,
       bucketPath,
       name,
       version,
-
-      "base64"
-      ),
+      await _promises.readFile.call(void 0, file, "utf8"),
       type,
       isDryRun
     );
package/dist/chunk-MQLXYAAZ.js
ADDED

@@ -0,0 +1,60 @@
+"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }
+
+
+
+
+var _chunkZ2WQB55Rjs = require('./chunk-Z2WQB55R.js');
+
+// src/utils/r2-bucket-helpers.ts
+var _libstorage = require('@aws-sdk/lib-storage');
+var _crypto = require('crypto');
+var _prettybytes = require('pretty-bytes'); var _prettybytes2 = _interopRequireDefault(_prettybytes);
+async function uploadFile(client, bucketName, bucketPath, fileName, version, fileContent, contentType = "application/octet-stream", isDryRun = false) {
+  const key = _optionalChain([(!_optionalChain([bucketPath, 'optionalAccess', _ => _.trim, 'call', _2 => _2()]) || _optionalChain([bucketPath, 'optionalAccess', _3 => _3.trim, 'call', _4 => _4()]) === "/" ? fileName : _chunkZ2WQB55Rjs.joinPaths.call(void 0, bucketPath.trim(), fileName)), 'optionalAccess', _5 => _5.replace, 'call', _6 => _6(/^\/+/g, "")]) || "";
+  _chunkZ2WQB55Rjs.writeDebug.call(void 0,
+    `Uploading ${key} (content-type: ${contentType}, size: ${_prettybytes2.default.call(void 0,
+      Buffer.byteLength(fileContent, "utf8")
+    )}) to the ${bucketName} R2 bucket`
+  );
+  try {
+    if (!isDryRun) {
+      const upload = new (0, _libstorage.Upload)({
+        client,
+        params: {
+          Bucket: bucketName,
+          Key: key,
+          Body: Buffer.from(fileContent, "utf8"),
+          ContentType: contentType,
+          Metadata: {
+            version,
+            checksum: _crypto.createHash.call(void 0, "sha256").update(fileContent).digest("base64")
+          }
+        }
+      });
+      await upload.done();
+    } else {
+      _chunkZ2WQB55Rjs.writeWarning.call(void 0, "[Dry run]: Skipping upload to the R2 bucket.");
+    }
+  } catch (error) {
+    _chunkZ2WQB55Rjs.writeError.call(void 0, `Failed to upload ${key} to the ${bucketName} R2 bucket.`);
+    throw error;
+  }
+}
+function getInternalDependencies(projectName, graph) {
+  const allDeps = _nullishCoalesce(graph.dependencies[projectName], () => ( []));
+  return Array.from(
+    allDeps.reduce(
+      (acc, node) => {
+        const found = graph.nodes[node.target];
+        if (found) acc.push(found);
+        return acc;
+      },
+      []
+    )
+  );
+}
+
+
+
+
+exports.uploadFile = uploadFile; exports.getInternalDependencies = getInternalDependencies;
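Both variants of the new chunk log the upload size through pretty-bytes, whose default export formats a byte count as a human-readable string:

```ts
import prettyBytes from "pretty-bytes";

prettyBytes(1337); // => "1.34 kB"
prettyBytes(42);   // => "42 B"
```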
package/dist/{chunk-ODJ23C2Z.mjs → chunk-Q6BDPVT4.mjs}
CHANGED

@@ -4,7 +4,7 @@ import {
 import {
   getInternalDependencies,
   uploadFile
-} from "./chunk-
+} from "./chunk-GBGETEC6.mjs";
 import {
   createCliOptions,
   getPackageInfo
@@ -23,7 +23,11 @@ import {
 } from "./chunk-TPNHSNNZ.mjs";
 
 // src/executors/r2-upload-publish/executor.ts
-import {
+import {
+  DeleteObjectsCommand,
+  ListObjectsCommand,
+  S3Client
+} from "@aws-sdk/client-s3";
 import {
   createProjectGraphAsync,
   readCachedProjectGraph
@@ -90,7 +94,7 @@ async function runExecutor(options, context) {
   writeDebug(
     `Publishing ${context.projectName} to the ${bucketId} R2 Bucket (at ${registry})`
   );
-  const client = new
+  const client = new S3Client({
     region: "auto",
     endpoint: registry,
     credentials: {
@@ -125,23 +129,27 @@ async function runExecutor(options, context) {
   if (options.clean === true) {
     writeDebug(`Clearing out existing items in ${bucketPath}`);
     if (!isDryRun) {
-      const response = await client.
-
-
-
+      const response = await client.send(
+        new ListObjectsCommand({
+          Bucket: bucketId,
+          Prefix: !bucketPath || bucketPath === "/" ? void 0 : bucketPath
+        })
+      );
       if (response?.Contents && response.Contents.length > 0) {
         writeTrace(
           `Deleting the following existing items from the R2 bucket path ${bucketPath}: ${response.Contents.map((item) => item.Key).join(", ")}`
         );
-        await client.
-
-
-
-
-
-
-
+        await client.send(
+          new DeleteObjectsCommand({
+            Bucket: bucketId,
+            Delete: {
+              Objects: response.Contents.map((item) => ({
+                Key: item.Key
+              })),
+              Quiet: false
+            }
+          })
+        );
       } else {
         writeDebug(
           `No existing items to delete in the R2 bucket path ${bucketPath}`
@@ -193,9 +201,7 @@ async function runExecutor(options, context) {
     bucketPath,
     name,
     version,
-
-    "base64"
-    ),
+    await readFile(file, "utf8"),
     type,
     isDryRun
   );
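Both executor bundles gain the same `clean` flow: list whatever already exists under the bucket prefix, then delete it in a single batch before publishing. Condensed into plain TypeScript using the commands the diff imports (the bucket and prefix here are placeholders):

```ts
import {
  DeleteObjectsCommand,
  ListObjectsCommand,
  S3Client
} from "@aws-sdk/client-s3";

async function cleanPrefix(client: S3Client, bucket: string, prefix?: string) {
  // List the objects currently stored under the prefix.
  const response = await client.send(
    new ListObjectsCommand({ Bucket: bucket, Prefix: prefix })
  );
  if (response.Contents && response.Contents.length > 0) {
    // Remove every listed object in one batch request.
    await client.send(
      new DeleteObjectsCommand({
        Bucket: bucket,
        Delete: {
          Objects: response.Contents.map((item) => ({ Key: item.Key })),
          Quiet: false
        }
      })
    );
  }
}
```

Note that ListObjectsCommand returns at most 1,000 keys per call; a prefix holding more objects would need pagination, which the generated code does not do.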
package/dist/{chunk-TYB3YAPJ.js → chunk-V4DY7BGL.js}
CHANGED

@@ -1,6 +1,6 @@
 "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }
 
-var
+var _chunkDBH3ARKOjs = require('./chunk-DBH3ARKO.js');
 
 
 
@@ -66,7 +66,7 @@ ${Object.keys(process.env).map((key) => ` - ${key}=${JSON.stringify(process.env[
   const options = await normalizeOptions(tree, schema, config);
   const tasks = [];
   tasks.push(
-    await
+    await _chunkDBH3ARKOjs.generator_default.call(void 0, tree, {
       ...options,
       skipFormat: true
     })
package/dist/{chunk-EBBMMF2H.mjs → chunk-WSU7RB2N.mjs}
CHANGED

@@ -7,7 +7,7 @@ var require_package = __commonJS({
   "package.json"(exports, module) {
     module.exports = {
       name: "@storm-software/cloudflare-tools",
-      version: "0.71.
+      version: "0.71.30",
       description: "A Nx plugin package that contains various executors, generators, and utilities that assist in managing Cloudflare services.",
       repository: {
         type: "github",
@@ -165,10 +165,12 @@ var require_package = __commonJS({
       },
       dependencies: {
         "@aws-sdk/client-s3": "^3.948.0",
+        "@aws-sdk/lib-storage": "^3.954.0",
         "@smithy/node-http-handler": "^4.4.5",
         defu: "catalog:",
         glob: "catalog:",
-        "mime-types": "^3.0.2"
+        "mime-types": "^3.0.2",
+        "pretty-bytes": "^7.1.0"
       },
       devDependencies: {
         "@nx/devkit": "catalog:",
package/dist/executors.js
CHANGED
@@ -1,7 +1,7 @@
 "use strict";require('./chunk-XO66D74Z.js');
-require('./chunk-
+require('./chunk-JFMSM7WW.js');
 require('./chunk-KUGEZPUO.js');
-require('./chunk-
+require('./chunk-MQLXYAAZ.js');
 require('./chunk-QBD2OGUY.js');
 require('./chunk-IVVRVG36.js');
 require('./chunk-N7FW365Q.js');
package/dist/executors.mjs
CHANGED
@@ -1,7 +1,7 @@
 import "./chunk-YSCEY447.mjs";
-import "./chunk-
+import "./chunk-Q6BDPVT4.mjs";
 import "./chunk-SWYYMID7.mjs";
-import "./chunk-
+import "./chunk-GBGETEC6.mjs";
 import "./chunk-WFPKBGV3.mjs";
 import "./chunk-NIOOY3TB.mjs";
 import "./chunk-3MAI3FU2.mjs";
package/dist/generators.js
CHANGED
@@ -2,11 +2,11 @@
 
 
 
-var
+var _chunkV4DY7BGLjs = require('./chunk-V4DY7BGL.js');
 
 
 
-var
+var _chunkDBH3ARKOjs = require('./chunk-DBH3ARKO.js');
 require('./chunk-5GILNZWS.js');
 require('./chunk-Z2WQB55R.js');
 require('./chunk-MCKGQKYU.js');
@@ -15,4 +15,4 @@ require('./chunk-MCKGQKYU.js');
 
 
 
-exports.applicationGenerator =
+exports.applicationGenerator = _chunkV4DY7BGLjs.applicationGenerator; exports.applicationSchematic = _chunkV4DY7BGLjs.applicationSchematic; exports.initGenerator = _chunkDBH3ARKOjs.initGenerator; exports.initSchematic = _chunkDBH3ARKOjs.initSchematic;
package/dist/generators.mjs
CHANGED
@@ -2,11 +2,11 @@ import "./chunk-3J7KBHMJ.mjs";
 import {
   applicationGenerator,
   applicationSchematic
-} from "./chunk-
+} from "./chunk-TWWKKHQ6.mjs";
 import {
   initGenerator,
   initSchematic
-} from "./chunk-
+} from "./chunk-WSU7RB2N.mjs";
 import "./chunk-G4ZCI2MN.mjs";
 import "./chunk-TPNHSNNZ.mjs";
 import "./chunk-PALWHFOL.mjs";
package/dist/index.js
CHANGED
@@ -1,20 +1,20 @@
 "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }require('./chunk-XO66D74Z.js');
-require('./chunk-
+require('./chunk-JFMSM7WW.js');
 require('./chunk-DHBG5ASJ.js');
 
 
 
-var
+var _chunkV4DY7BGLjs = require('./chunk-V4DY7BGL.js');
 
 
 
-var
+var _chunkDBH3ARKOjs = require('./chunk-DBH3ARKO.js');
 require('./chunk-KUGEZPUO.js');
 require('./chunk-CVGPWUNP.js');
 
 
 
-var
+var _chunkMQLXYAAZjs = require('./chunk-MQLXYAAZ.js');
 require('./chunk-QBD2OGUY.js');
 require('./chunk-IVVRVG36.js');
 
@@ -156,4 +156,4 @@ function createPackageJson(projectJsonPath, workspaceRoot) {
 
 
 
-exports.applicationGenerator =
+exports.applicationGenerator = _chunkV4DY7BGLjs.applicationGenerator; exports.applicationSchematic = _chunkV4DY7BGLjs.applicationSchematic; exports.createNodesV2 = createNodesV2; exports.getInternalDependencies = _chunkMQLXYAAZjs.getInternalDependencies; exports.initGenerator = _chunkDBH3ARKOjs.initGenerator; exports.initSchematic = _chunkDBH3ARKOjs.initSchematic; exports.name = name; exports.uploadFile = _chunkMQLXYAAZjs.uploadFile;
package/dist/index.mjs
CHANGED
@@ -1,20 +1,20 @@
 import "./chunk-YSCEY447.mjs";
-import "./chunk-
+import "./chunk-Q6BDPVT4.mjs";
 import "./chunk-3J7KBHMJ.mjs";
 import {
   applicationGenerator,
   applicationSchematic
-} from "./chunk-
+} from "./chunk-TWWKKHQ6.mjs";
 import {
   initGenerator,
   initSchematic
-} from "./chunk-
+} from "./chunk-WSU7RB2N.mjs";
 import "./chunk-SWYYMID7.mjs";
 import "./chunk-7Z5PILRU.mjs";
 import {
   getInternalDependencies,
   uploadFile
-} from "./chunk-
+} from "./chunk-GBGETEC6.mjs";
 import "./chunk-WFPKBGV3.mjs";
 import "./chunk-NIOOY3TB.mjs";
 import {
package/dist/src/executors/r2-upload-publish/executor.js
CHANGED

@@ -1,12 +1,12 @@
 "use strict";Object.defineProperty(exports, "__esModule", {value: true});
 
-var
+var _chunkJFMSM7WWjs = require('../../../chunk-JFMSM7WW.js');
 require('../../../chunk-KUGEZPUO.js');
-require('../../../chunk-
+require('../../../chunk-MQLXYAAZ.js');
 require('../../../chunk-N7FW365Q.js');
 require('../../../chunk-5GILNZWS.js');
 require('../../../chunk-Z2WQB55R.js');
 require('../../../chunk-MCKGQKYU.js');
 
 
-exports.default =
+exports.default = _chunkJFMSM7WWjs.runExecutor;
package/dist/src/executors/r2-upload-publish/executor.mjs
CHANGED

@@ -1,8 +1,8 @@
 import {
   runExecutor
-} from "../../../chunk-
+} from "../../../chunk-Q6BDPVT4.mjs";
 import "../../../chunk-SWYYMID7.mjs";
-import "../../../chunk-
+import "../../../chunk-GBGETEC6.mjs";
 import "../../../chunk-3MAI3FU2.mjs";
 import "../../../chunk-G4ZCI2MN.mjs";
 import "../../../chunk-TPNHSNNZ.mjs";
package/dist/src/generators/init/generator.js
CHANGED

@@ -2,10 +2,10 @@
 
 
 
-var
+var _chunkDBH3ARKOjs = require('../../../chunk-DBH3ARKO.js');
 require('../../../chunk-MCKGQKYU.js');
 
 
 
 
-exports.default =
+exports.default = _chunkDBH3ARKOjs.generator_default; exports.initGenerator = _chunkDBH3ARKOjs.initGenerator; exports.initSchematic = _chunkDBH3ARKOjs.initSchematic;
package/dist/src/generators/worker/generator.js
CHANGED

@@ -2,8 +2,8 @@
 
 
 
-var
-require('../../../chunk-
+var _chunkV4DY7BGLjs = require('../../../chunk-V4DY7BGL.js');
+require('../../../chunk-DBH3ARKO.js');
 require('../../../chunk-5GILNZWS.js');
 require('../../../chunk-Z2WQB55R.js');
 require('../../../chunk-MCKGQKYU.js');
@@ -11,4 +11,4 @@ require('../../../chunk-MCKGQKYU.js');
 
 
 
-exports.applicationGenerator =
+exports.applicationGenerator = _chunkV4DY7BGLjs.applicationGenerator; exports.applicationSchematic = _chunkV4DY7BGLjs.applicationSchematic; exports.default = _chunkV4DY7BGLjs.generator_default;
package/dist/src/generators/worker/generator.mjs
CHANGED

@@ -2,8 +2,8 @@ import {
   applicationGenerator,
   applicationSchematic,
   generator_default
-} from "../../../chunk-
-import "../../../chunk-
+} from "../../../chunk-TWWKKHQ6.mjs";
+import "../../../chunk-WSU7RB2N.mjs";
 import "../../../chunk-G4ZCI2MN.mjs";
 import "../../../chunk-TPNHSNNZ.mjs";
 import "../../../chunk-PALWHFOL.mjs";
package/dist/src/utils/index.js
CHANGED
@@ -2,10 +2,10 @@
 
 
 
-var
+var _chunkMQLXYAAZjs = require('../../chunk-MQLXYAAZ.js');
 require('../../chunk-Z2WQB55R.js');
 require('../../chunk-MCKGQKYU.js');
 
 
 
-exports.getInternalDependencies =
+exports.getInternalDependencies = _chunkMQLXYAAZjs.getInternalDependencies; exports.uploadFile = _chunkMQLXYAAZjs.uploadFile;
package/dist/src/utils/r2-bucket-helpers.d.mts
CHANGED

@@ -1,4 +1,4 @@
-import {
+import { S3Client } from '@aws-sdk/client-s3';
 import { ProjectGraph, ProjectGraphProjectNode } from '@nx/devkit';
 
 /**
@@ -13,7 +13,7 @@ import { ProjectGraph, ProjectGraphProjectNode } from '@nx/devkit';
  * @param contentType - The MIME type of the file content
  * @param isDryRun - Whether to perform a dry run without actual upload
  */
-declare function uploadFile(client:
+declare function uploadFile(client: S3Client, bucketName: string, bucketPath: string | undefined, fileName: string, version: string, fileContent: string, contentType?: string, isDryRun?: boolean): Promise<void>;
 /**
  * Get internal dependencies of a project from the project graph
  *
package/dist/src/utils/r2-bucket-helpers.d.ts
CHANGED

@@ -1,4 +1,4 @@
-import {
+import { S3Client } from '@aws-sdk/client-s3';
 import { ProjectGraph, ProjectGraphProjectNode } from '@nx/devkit';
 
 /**
@@ -13,7 +13,7 @@ import { ProjectGraph, ProjectGraphProjectNode } from '@nx/devkit';
  * @param contentType - The MIME type of the file content
  * @param isDryRun - Whether to perform a dry run without actual upload
  */
-declare function uploadFile(client:
+declare function uploadFile(client: S3Client, bucketName: string, bucketPath: string | undefined, fileName: string, version: string, fileContent: string, contentType?: string, isDryRun?: boolean): Promise<void>;
 /**
  * Get internal dependencies of a project from the project graph
  *
package/dist/src/utils/r2-bucket-helpers.js
CHANGED

@@ -1,10 +1,10 @@
 "use strict";Object.defineProperty(exports, "__esModule", {value: true});
 
 
-var
+var _chunkMQLXYAAZjs = require('../../chunk-MQLXYAAZ.js');
 require('../../chunk-Z2WQB55R.js');
 require('../../chunk-MCKGQKYU.js');
 
 
 
-exports.getInternalDependencies =
+exports.getInternalDependencies = _chunkMQLXYAAZjs.getInternalDependencies; exports.uploadFile = _chunkMQLXYAAZjs.uploadFile;
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@storm-software/cloudflare-tools",
-  "version": "0.71.
+  "version": "0.71.31",
   "description": "A Nx plugin package that contains various executors, generators, and utilities that assist in managing Cloudflare services.",
   "repository": {
     "type": "github",
@@ -134,10 +134,12 @@
   },
   "dependencies": {
     "@aws-sdk/client-s3": "^3.948.0",
+    "@aws-sdk/lib-storage": "^3.954.0",
     "@smithy/node-http-handler": "^4.4.5",
     "defu": "6.1.4",
     "glob": "^11.1.0",
-    "mime-types": "^3.0.2"
+    "mime-types": "^3.0.2",
+    "pretty-bytes": "^7.1.0"
   },
   "devDependencies": {
     "@nx/devkit": "^22.2.3",
@@ -153,5 +155,5 @@
   "publishConfig": { "access": "public" },
   "executors": "./executors.json",
   "generators": "./generators.json",
-  "gitHead": "
+  "gitHead": "7f759c57f99e6cf104b110848fdf4d319e5886c1"
 }
package/dist/chunk-UI2F3MMU.mjs
DELETED
|
@@ -1,52 +0,0 @@
-import {
-  joinPaths,
-  writeDebug,
-  writeWarning
-} from "./chunk-TPNHSNNZ.mjs";
-
-// src/utils/r2-bucket-helpers.ts
-import { createHash } from "node:crypto";
-async function uploadFile(client, bucketName, bucketPath, fileName, version, fileContent, contentType = "application/octet-stream", isDryRun = false) {
-  const key = (!bucketPath?.trim() || bucketPath?.trim() === "/" ? fileName : joinPaths(bucketPath.trim(), fileName))?.replace(/^\/+/g, "") || "";
-  writeDebug(
-    `Uploading ${key} (content-type: ${contentType}) to the ${bucketName} R2 bucket`
-  );
-  if (!isDryRun) {
-    await client.putObject(
-      {
-        Bucket: bucketName,
-        Key: key,
-        Body: fileContent,
-        ContentType: contentType,
-        Metadata: {
-          version,
-          checksum: createHash("sha256").update(fileContent).digest("base64")
-        }
-      },
-      {
-        requestTimeout: 15 * 60 * 1e3
-        // 15 minutes
-      }
-    );
-  } else {
-    writeWarning("[Dry run]: Skipping upload to the R2 bucket.");
-  }
-}
-function getInternalDependencies(projectName, graph) {
-  const allDeps = graph.dependencies[projectName] ?? [];
-  return Array.from(
-    allDeps.reduce(
-      (acc, node) => {
-        const found = graph.nodes[node.target];
-        if (found) acc.push(found);
-        return acc;
-      },
-      []
-    )
-  );
-}
-
-export {
-  uploadFile,
-  getInternalDependencies
-};
package/dist/chunk-ZWHJ35F5.js
DELETED
|
@@ -1,52 +0,0 @@
-"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }
-
-
-
-var _chunkZ2WQB55Rjs = require('./chunk-Z2WQB55R.js');
-
-// src/utils/r2-bucket-helpers.ts
-var _crypto = require('crypto');
-async function uploadFile(client, bucketName, bucketPath, fileName, version, fileContent, contentType = "application/octet-stream", isDryRun = false) {
-  const key = _optionalChain([(!_optionalChain([bucketPath, 'optionalAccess', _ => _.trim, 'call', _2 => _2()]) || _optionalChain([bucketPath, 'optionalAccess', _3 => _3.trim, 'call', _4 => _4()]) === "/" ? fileName : _chunkZ2WQB55Rjs.joinPaths.call(void 0, bucketPath.trim(), fileName)), 'optionalAccess', _5 => _5.replace, 'call', _6 => _6(/^\/+/g, "")]) || "";
-  _chunkZ2WQB55Rjs.writeDebug.call(void 0,
-    `Uploading ${key} (content-type: ${contentType}) to the ${bucketName} R2 bucket`
-  );
-  if (!isDryRun) {
-    await client.putObject(
-      {
-        Bucket: bucketName,
-        Key: key,
-        Body: fileContent,
-        ContentType: contentType,
-        Metadata: {
-          version,
-          checksum: _crypto.createHash.call(void 0, "sha256").update(fileContent).digest("base64")
-        }
-      },
-      {
-        requestTimeout: 15 * 60 * 1e3
-        // 15 minutes
-      }
-    );
-  } else {
-    _chunkZ2WQB55Rjs.writeWarning.call(void 0, "[Dry run]: Skipping upload to the R2 bucket.");
-  }
-}
-function getInternalDependencies(projectName, graph) {
-  const allDeps = _nullishCoalesce(graph.dependencies[projectName], () => ( []));
-  return Array.from(
-    allDeps.reduce(
-      (acc, node) => {
-        const found = graph.nodes[node.target];
-        if (found) acc.push(found);
-        return acc;
-      },
-      []
-    )
-  );
-}
-
-
-
-
-exports.uploadFile = uploadFile; exports.getInternalDependencies = getInternalDependencies;
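Taken together, the deleted chunks show what this release replaces: the previous uploadFile passed the raw string body to a putObject call on the client with a 15-minute request timeout, while the new helper (chunk-GBGETEC6.mjs / chunk-MQLXYAAZ.js above) buffers the content as UTF-8, hands it to Upload from @aws-sdk/lib-storage, logs the payload size via pretty-bytes, and wraps the transfer in a try/catch that logs through writeError before rethrowing.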