@storm-software/cloudflare-tools 0.71.30 → 0.71.32
This diff shows the changes between publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- package/CHANGELOG.md +21 -0
- package/dist/{chunk-FPJU3YOH.mjs → chunk-3WBFV3XF.mjs} +7 -5
- package/dist/{chunk-YQGPNO3W.mjs → chunk-4WRUZBE7.mjs} +26 -20
- package/dist/{chunk-OZ44M6HX.js → chunk-AOAVYQNP.js} +7 -5
- package/dist/{chunk-YPRUPZL3.js → chunk-QDAQB4EP.js} +2 -2
- package/dist/{chunk-DUHFLDZ4.js → chunk-UQBLSDWU.js} +28 -22
- package/dist/chunk-VZQDXTIP.js +68 -0
- package/dist/chunk-Z2D7AKMD.mjs +68 -0
- package/dist/{chunk-DEX6LTPV.mjs → chunk-ZWYDXLNA.mjs} +1 -1
- package/dist/executors.js +2 -2
- package/dist/executors.mjs +2 -2
- package/dist/generators.js +3 -3
- package/dist/generators.mjs +2 -2
- package/dist/index.d.mts +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +9 -5
- package/dist/index.mjs +8 -4
- package/dist/src/executors/r2-upload-publish/executor.js +3 -3
- package/dist/src/executors/r2-upload-publish/executor.mjs +2 -2
- package/dist/src/generators/init/generator.js +2 -2
- package/dist/src/generators/init/generator.mjs +1 -1
- package/dist/src/generators/worker/generator.js +3 -3
- package/dist/src/generators/worker/generator.mjs +2 -2
- package/dist/src/utils/index.d.mts +1 -1
- package/dist/src/utils/index.d.ts +1 -1
- package/dist/src/utils/index.js +6 -2
- package/dist/src/utils/index.mjs +5 -1
- package/dist/src/utils/r2-bucket-helpers.d.mts +17 -3
- package/dist/src/utils/r2-bucket-helpers.d.ts +17 -3
- package/dist/src/utils/r2-bucket-helpers.js +6 -2
- package/dist/src/utils/r2-bucket-helpers.mjs +5 -1
- package/package.json +8 -6
- package/dist/chunk-RGRCKWGN.mjs +0 -225
- package/dist/chunk-UI2F3MMU.mjs +0 -52
- package/dist/chunk-ZBNASCRJ.js +0 -225
- package/dist/chunk-ZWHJ35F5.js +0 -52
@@ -2,8 +2,8 @@
 
 
 
-var …
-require('../../../chunk-…
+var _chunkQDAQB4EPjs = require('../../../chunk-QDAQB4EP.js');
+require('../../../chunk-AOAVYQNP.js');
 require('../../../chunk-5GILNZWS.js');
 require('../../../chunk-Z2WQB55R.js');
 require('../../../chunk-MCKGQKYU.js');
@@ -11,4 +11,4 @@ require('../../../chunk-MCKGQKYU.js');
 
 
 
-exports.applicationGenerator = …
+exports.applicationGenerator = _chunkQDAQB4EPjs.applicationGenerator; exports.applicationSchematic = _chunkQDAQB4EPjs.applicationSchematic; exports.default = _chunkQDAQB4EPjs.generator_default;
@@ -2,8 +2,8 @@ import {
   applicationGenerator,
   applicationSchematic,
   generator_default
-} from "../../../chunk-…
-import "../../../chunk-…
+} from "../../../chunk-ZWYDXLNA.mjs";
+import "../../../chunk-3WBFV3XF.mjs";
 import "../../../chunk-G4ZCI2MN.mjs";
 import "../../../chunk-TPNHSNNZ.mjs";
 import "../../../chunk-PALWHFOL.mjs";
package/dist/src/utils/index.js
CHANGED
@@ -2,10 +2,14 @@
 
 
 
-…
+
+
+var _chunkVZQDXTIPjs = require('../../chunk-VZQDXTIP.js');
 require('../../chunk-Z2WQB55R.js');
 require('../../chunk-MCKGQKYU.js');
 
 
 
-…
+
+
+exports.getEncoding = _chunkVZQDXTIPjs.getEncoding; exports.getInternalDependencies = _chunkVZQDXTIPjs.getInternalDependencies; exports.isTextFile = _chunkVZQDXTIPjs.isTextFile; exports.uploadFile = _chunkVZQDXTIPjs.uploadFile;
package/dist/src/utils/index.mjs
CHANGED
@@ -1,11 +1,15 @@
 import "../../chunk-7Z5PILRU.mjs";
 import {
+  getEncoding,
   getInternalDependencies,
+  isTextFile,
   uploadFile
-} from "../../chunk-…
+} from "../../chunk-Z2D7AKMD.mjs";
 import "../../chunk-TPNHSNNZ.mjs";
 import "../../chunk-PALWHFOL.mjs";
 export {
+  getEncoding,
   getInternalDependencies,
+  isTextFile,
   uploadFile
 };
@@ -1,4 +1,4 @@
-import {…
+import { S3Client } from '@aws-sdk/client-s3';
 import { ProjectGraph, ProjectGraphProjectNode } from '@nx/devkit';
 
 /**
@@ -13,7 +13,7 @@ import { ProjectGraph, ProjectGraphProjectNode } from '@nx/devkit';
  * @param contentType - The MIME type of the file content
  * @param isDryRun - Whether to perform a dry run without actual upload
  */
-declare function uploadFile(client:…
+declare function uploadFile(client: S3Client, bucketName: string, bucketPath: string | undefined, fileName: string, version: string, fileContent: string, contentType?: string, isDryRun?: boolean): Promise<void>;
 /**
  * Get internal dependencies of a project from the project graph
  *
@@ -22,5 +22,19 @@ declare function uploadFile(client: S3, bucketName: string, bucketPath: string |
  * @returns An array of internal project nodes that are dependencies of the specified project
  */
 declare function getInternalDependencies(projectName: string, graph: ProjectGraph): ProjectGraphProjectNode[];
+/**
+ * Determine if a MIME type represents a text file
+ *
+ * @param mimeType - The MIME type to check
+ * @returns True if the MIME type represents a text file, false otherwise
+ */
+declare function isTextFile(mimeType: string): boolean;
+/**
+ * Get the appropriate encoding for a given MIME type
+ *
+ * @param mimeType - The MIME type to evaluate
+ * @returns The encoding string ("utf8" for text files, "binary" for others)
+ */
+declare function getEncoding(mimeType: string): BufferEncoding;
 
-export { getInternalDependencies, uploadFile };
+export { getEncoding, getInternalDependencies, isTextFile, uploadFile };
@@ -1,4 +1,4 @@
-import {…
+import { S3Client } from '@aws-sdk/client-s3';
 import { ProjectGraph, ProjectGraphProjectNode } from '@nx/devkit';
 
 /**
@@ -13,7 +13,7 @@ import { ProjectGraph, ProjectGraphProjectNode } from '@nx/devkit';
  * @param contentType - The MIME type of the file content
  * @param isDryRun - Whether to perform a dry run without actual upload
 */
-declare function uploadFile(client:…
+declare function uploadFile(client: S3Client, bucketName: string, bucketPath: string | undefined, fileName: string, version: string, fileContent: string, contentType?: string, isDryRun?: boolean): Promise<void>;
 /**
  * Get internal dependencies of a project from the project graph
  *
@@ -22,5 +22,19 @@ declare function uploadFile(client: S3, bucketName: string, bucketPath: string |
  * @returns An array of internal project nodes that are dependencies of the specified project
  */
 declare function getInternalDependencies(projectName: string, graph: ProjectGraph): ProjectGraphProjectNode[];
+/**
+ * Determine if a MIME type represents a text file
+ *
+ * @param mimeType - The MIME type to check
+ * @returns True if the MIME type represents a text file, false otherwise
+ */
+declare function isTextFile(mimeType: string): boolean;
+/**
+ * Get the appropriate encoding for a given MIME type
+ *
+ * @param mimeType - The MIME type to evaluate
+ * @returns The encoding string ("utf8" for text files, "binary" for others)
+ */
+declare function getEncoding(mimeType: string): BufferEncoding;
 
-export { getInternalDependencies, uploadFile };
+export { getEncoding, getInternalDependencies, isTextFile, uploadFile };
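The two declaration hunks above line up with the +17 -3 changes listed for package/dist/src/utils/r2-bucket-helpers.d.mts and r2-bucket-helpers.d.ts: they add isTextFile and getEncoding to the utils API. The new chunk-VZQDXTIP.js / chunk-Z2D7AKMD.mjs bundles that back these declarations are not expanded in this diff, so the following is only a sketch consistent with the declared signatures and doc comments ("utf8" for text files, "binary" for others); the specific MIME checks are assumptions, not the package's actual logic.

```ts
// Illustrative sketch only — matches the declarations above, not the published implementation.
export function isTextFile(mimeType: string): boolean {
  // Treat the text/* family plus common structured-text types as text.
  return (
    mimeType.startsWith("text/") ||
    mimeType === "application/json" ||
    mimeType.endsWith("+json") ||
    mimeType.endsWith("+xml")
  );
}

export function getEncoding(mimeType: string): BufferEncoding {
  // Per the doc comment: "utf8" for text files, "binary" for everything else.
  return isTextFile(mimeType) ? "utf8" : "binary";
}
```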
@@ -1,10 +1,14 @@
 "use strict";Object.defineProperty(exports, "__esModule", {value: true});
 
 
-…
+
+
+var _chunkVZQDXTIPjs = require('../../chunk-VZQDXTIP.js');
 require('../../chunk-Z2WQB55R.js');
 require('../../chunk-MCKGQKYU.js');
 
 
 
-…
+
+
+exports.getEncoding = _chunkVZQDXTIPjs.getEncoding; exports.getInternalDependencies = _chunkVZQDXTIPjs.getInternalDependencies; exports.isTextFile = _chunkVZQDXTIPjs.isTextFile; exports.uploadFile = _chunkVZQDXTIPjs.uploadFile;
@@ -1,10 +1,14 @@
 import {
+  getEncoding,
   getInternalDependencies,
+  isTextFile,
   uploadFile
-} from "../../chunk-…
+} from "../../chunk-Z2D7AKMD.mjs";
 import "../../chunk-TPNHSNNZ.mjs";
 import "../../chunk-PALWHFOL.mjs";
 export {
+  getEncoding,
   getInternalDependencies,
+  isTextFile,
   uploadFile
 };
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@storm-software/cloudflare-tools",
-  "version": "0.71.…
+  "version": "0.71.32",
   "description": "A Nx plugin package that contains various executors, generators, and utilities that assist in managing Cloudflare services.",
   "repository": {
     "type": "github",
@@ -117,9 +117,9 @@
     "@nx/js": "^22.2.3",
     "@nx/node": "^22.2.3",
     "@nx/web": "^22.2.3",
-    "@storm-software/config": "1.134.…
-    "@storm-software/config-tools": "1.188.…
-    "@storm-software/workspace-tools": "1.294.…
+    "@storm-software/config": "1.134.71",
+    "@storm-software/config-tools": "1.188.71",
+    "@storm-software/workspace-tools": "1.294.17",
     "nx": "^22.2.3",
     "tsup": "8.4.0",
     "wrangler": ">=3.58.0"
@@ -134,10 +134,12 @@
   },
   "dependencies": {
     "@aws-sdk/client-s3": "^3.948.0",
+    "@aws-sdk/lib-storage": "^3.954.0",
     "@smithy/node-http-handler": "^4.4.5",
     "defu": "6.1.4",
     "glob": "^11.1.0",
-    "mime-types": "^3.0.2"
+    "mime-types": "^3.0.2",
+    "pretty-bytes": "^7.1.0"
   },
   "devDependencies": {
     "@nx/devkit": "^22.2.3",
@@ -153,5 +155,5 @@
   "publishConfig": { "access": "public" },
   "executors": "./executors.json",
   "generators": "./generators.json",
-  "gitHead": "…
+  "gitHead": "97a4855b1100e6e4481e5191ffcac01acf3f6017"
 }
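The dependency changes add two runtime packages: @aws-sdk/lib-storage, which layers managed (multipart-capable) uploads on top of the @aws-sdk/client-s3 client already in the dependency list, and pretty-bytes, which formats byte counts for log output. How the new dist chunks use them is not expanded in this diff, so the snippet below only sketches their typical usage, with placeholder endpoint, bucket, and key values.

```ts
import { S3Client } from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";
import prettyBytes from "pretty-bytes";

// Placeholder endpoint and omitted credentials — a real R2 setup supplies an account ID and keys.
const client = new S3Client({
  region: "auto",
  endpoint: "https://<account-id>.r2.cloudflarestorage.com"
});

const body = Buffer.from("example payload");
console.log(`Uploading ${prettyBytes(body.byteLength)}`); // e.g. "Uploading 15 B"

// Upload streams the body and switches to multipart for large payloads.
const upload = new Upload({
  client,
  params: { Bucket: "example-bucket", Key: "example/meta.json", Body: body }
});
await upload.done();
```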
package/dist/chunk-RGRCKWGN.mjs
DELETED
@@ -1,225 +0,0 @@
-import {
-  createCliOptions,
-  getPackageInfo
-} from "./chunk-3MAI3FU2.mjs";
-import {
-  findWorkspaceRoot,
-  getConfig
-} from "./chunk-G4ZCI2MN.mjs";
-import {
-  createHttpHandler
-} from "./chunk-SWYYMID7.mjs";
-import {
-  getInternalDependencies,
-  uploadFile
-} from "./chunk-UI2F3MMU.mjs";
-import {
-  correctPaths,
-  joinPaths,
-  writeDebug,
-  writeSuccess,
-  writeTrace,
-  writeWarning
-} from "./chunk-TPNHSNNZ.mjs";
-
-// src/executors/r2-upload-publish/executor.ts
-import { S3 } from "@aws-sdk/client-s3";
-import {
-  createProjectGraphAsync,
-  readCachedProjectGraph
-} from "@nx/devkit";
-import { glob } from "glob";
-import mime from "mime-types";
-import { execSync } from "node:child_process";
-import { statSync } from "node:fs";
-import { readFile } from "node:fs/promises";
-async function runExecutor(options, context) {
-  const isDryRun = process.env.NX_DRY_RUN === "true" || options.dryRun || false;
-  if (!context.projectName) {
-    throw new Error("The executor requires a projectName.");
-  }
-  if (!options.path) {
-    throw new Error("The executor requires the `path` option to upload.");
-  }
-  console.info(
-    `\u{1F680} Running Storm Cloudflare Publish executor on the ${context.projectName} worker`
-  );
-  if (!context.projectName || !context.projectsConfigurations?.projects || !context.projectsConfigurations.projects[context.projectName] || !context.projectsConfigurations.projects[context.projectName]?.root) {
-    throw new Error("The executor requires projectsConfigurations.");
-  }
-  try {
-    const workspaceRoot = findWorkspaceRoot();
-    const config = await getConfig(workspaceRoot);
-    const projectName = context.projectsConfigurations.projects[context.projectName]?.name ?? context.projectName;
-    const projectDetails = getPackageInfo(
-      context.projectsConfigurations.projects[context.projectName]
-    );
-    const bucketId = options.bucketId;
-    const bucketPath = options.bucketPath || "/";
-    if (!bucketId) {
-      throw new Error("The executor requires a bucketId.");
-    }
-    const args = createCliOptions({ ...options });
-    if (isDryRun) {
-      args.push("--dry-run");
-    }
-    const cloudflareAccountId = process.env.CLOUDFLARE_ACCOUNT_ID || process.env.STORM_BOT_CLOUDFLARE_ACCOUNT;
-    if (!options?.registry && !cloudflareAccountId) {
-      throw new Error(
-        "The registry option and `CLOUDFLARE_ACCOUNT_ID` (or `STORM_BOT_CLOUDFLARE_ACCOUNT`) environment variable are not set. Please set one of these values to upload to the Cloudflare R2 bucket."
-      );
-    }
-    if (!process.env.STORM_BOT_ACCESS_KEY_ID && !process.env.ACCESS_KEY_ID && !process.env.CLOUDFLARE_ACCESS_KEY_ID && !process.env.AWS_ACCESS_KEY_ID || !process.env.STORM_BOT_SECRET_ACCESS_KEY && !process.env.CLOUDFLARE_SECRET_ACCESS_KEY && !process.env.SECRET_ACCESS_KEY && !process.env.AWS_SECRET_ACCESS_KEY) {
-      throw new Error(
-        "The `ACCESS_KEY_ID` (or `STORM_BOT_ACCESS_KEY_ID`) and `SECRET_ACCESS_KEY` (or `STORM_BOT_SECRET_ACCESS_KEY`) environment variables are not set. Please set these environment variables to upload to the Cloudflare R2 bucket."
-      );
-    }
-    const registry = options?.registry ? options.registry : `https://${cloudflareAccountId}.r2.cloudflarestorage.com`;
-    let projectGraph;
-    try {
-      projectGraph = readCachedProjectGraph();
-    } catch {
-      await createProjectGraphAsync();
-      projectGraph = readCachedProjectGraph();
-    }
-    if (!projectGraph) {
-      throw new Error(
-        "The executor failed because the project graph is not available. Please run the build command again."
-      );
-    }
-    writeDebug(
-      `Publishing ${context.projectName} to the ${bucketId} R2 Bucket (at ${registry})`
-    );
-    const client = new S3({
-      region: "auto",
-      endpoint: registry,
-      credentials: {
-        // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
-        accessKeyId: process.env.STORM_BOT_ACCESS_KEY_ID || process.env.CLOUDFLARE_ACCESS_KEY_ID || process.env.AWS_ACCESS_KEY_ID || process.env.ACCESS_KEY_ID,
-        // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
-        secretAccessKey: process.env.STORM_BOT_SECRET_ACCESS_KEY || process.env.CLOUDFLARE_SECRET_ACCESS_KEY || process.env.AWS_SECRET_ACCESS_KEY || process.env.SECRET_ACCESS_KEY
-      },
-      requestHandler: createHttpHandler()
-    });
-    const version = projectDetails?.content?.version;
-    if (version) {
-      writeDebug(`Starting upload version ${version}`);
-    }
-    const basePath = options.path;
-    const files = await glob(joinPaths(basePath, "**/*"), {
-      ignore: "**/{*.stories.tsx,*.stories.ts,*.spec.tsx,*.spec.ts}"
-    });
-    const internalDependencies = await getInternalDependencies(
-      context.projectName,
-      projectGraph
-    );
-    const dependencies = internalDependencies.filter(
-      (projectNode) => !projectNode.data.tags || projectNode.data.tags.every((tag) => tag.toLowerCase() !== "component")
-    ).reduce((ret, dep) => {
-      if (!ret[dep.name]) {
-        ret[dep.name] = "latest";
-      }
-      return ret;
-    }, projectDetails?.content.dependencies ?? {});
-    const release = options.tag ?? execSync("npm config get tag").toString().trim();
-    if (options.clean === true) {
-      writeDebug(`Clearing out existing items in ${bucketPath}`);
-      if (!isDryRun) {
-        const response = await client.listObjects({
-          Bucket: bucketId,
-          Prefix: !bucketPath || bucketPath === "/" ? void 0 : bucketPath
-        });
-        if (response?.Contents && response.Contents.length > 0) {
-          writeTrace(
-            `Deleting the following existing items from the R2 bucket path ${bucketPath}: ${response.Contents.map((item) => item.Key).join(", ")}`
-          );
-          await client.deleteObjects({
-            Bucket: bucketId,
-            Delete: {
-              Objects: response.Contents.map((item) => ({
-                Key: item.Key
-              })),
-              Quiet: false
-            }
-          });
-        } else {
-          writeDebug(
-            `No existing items to delete in the R2 bucket path ${bucketPath}`
-          );
-        }
-      } else {
-        writeWarning("[Dry run]: Skipping R2 bucket clean.");
-      }
-    }
-    if (options.writeMetaJson === true) {
-      const meta = {
-        name: context.projectName,
-        version,
-        release,
-        description: projectDetails?.content?.description,
-        tags: projectDetails?.content?.keywords,
-        dependencies,
-        devDependencies: null,
-        internalDependencies: internalDependencies.filter(
-          (projectNode) => projectNode.data.tags && projectNode.data.tags.some(
-            (tag) => tag.toLowerCase() === "component"
-          )
-        ).map((dep) => dep.name)
-      };
-      if (projectDetails?.type === "package.json") {
-        meta.devDependencies = projectDetails?.content?.devDependencies;
-      }
-      await uploadFile(
-        client,
-        bucketId,
-        bucketPath,
-        "meta.json",
-        version,
-        JSON.stringify(meta),
-        "application/json",
-        isDryRun
-      );
-    }
-    await Promise.all(
-      files.map(async (file) => {
-        if (statSync(file, {
-          throwIfNoEntry: false
-        })?.isFile()) {
-          const name = correctPaths(file).replace(correctPaths(basePath), "");
-          const type = mime.lookup(name) || "application/octet-stream";
-          await uploadFile(
-            client,
-            bucketId,
-            bucketPath,
-            name,
-            version,
-            type === "application/json" || type.includes("text") ? await readFile(file, "utf8") : `data:${type};base64,${Buffer.from(
-              await readFile(file, "binary"),
-              "binary"
-            ).toString("base64")}`,
-            type,
-            isDryRun
-          );
-        }
-      })
-    );
-    writeSuccess(
-      `Successfully uploaded the ${projectName} project to the Cloudflare R2 bucket.`,
-      config
-    );
-    return {
-      success: true
-    };
-  } catch (error) {
-    console.error("Failed to publish to Cloudflare R2 bucket");
-    console.error(error);
-    console.log("");
-    return {
-      success: false
-    };
-  }
-}
-
-export {
-  runExecutor
-};
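The executor chunk deleted above decided inline whether to read each file as UTF-8 text or as a base64 data URI (the `type === "application/json" || type.includes("text")` check). The newly declared isTextFile/getEncoding helpers cover the same decision, but the replacement chunks are not expanded in this diff, so the wiring below is only a hedged sketch of how getEncoding could take over that read step.

```ts
import { readFile } from "node:fs/promises";
import mime from "mime-types";

// Stand-in for the exported getEncoding helper declared in r2-bucket-helpers.d.ts
// (see the sketch after the declaration hunks above); the published implementation
// lives in the new dist chunks, which this diff does not expand.
const getEncoding = (mimeType: string): BufferEncoding =>
  mimeType.startsWith("text/") || mimeType === "application/json" ? "utf8" : "binary";

// How that helper could slot into the read step the removed executor chunk did inline.
async function readForUpload(file: string): Promise<string> {
  const type = mime.lookup(file) || "application/octet-stream";
  const encoding = getEncoding(type); // "utf8" for text, "binary" otherwise
  const content = await readFile(file, encoding);
  // Mirror the removed chunk: raw text for text files, a base64 data URI for everything else.
  return encoding === "utf8"
    ? content
    : `data:${type};base64,${Buffer.from(content, "binary").toString("base64")}`;
}
```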
package/dist/chunk-UI2F3MMU.mjs
DELETED
@@ -1,52 +0,0 @@
-import {
-  joinPaths,
-  writeDebug,
-  writeWarning
-} from "./chunk-TPNHSNNZ.mjs";
-
-// src/utils/r2-bucket-helpers.ts
-import { createHash } from "node:crypto";
-async function uploadFile(client, bucketName, bucketPath, fileName, version, fileContent, contentType = "application/octet-stream", isDryRun = false) {
-  const key = (!bucketPath?.trim() || bucketPath?.trim() === "/" ? fileName : joinPaths(bucketPath.trim(), fileName))?.replace(/^\/+/g, "") || "";
-  writeDebug(
-    `Uploading ${key} (content-type: ${contentType}) to the ${bucketName} R2 bucket`
-  );
-  if (!isDryRun) {
-    await client.putObject(
-      {
-        Bucket: bucketName,
-        Key: key,
-        Body: fileContent,
-        ContentType: contentType,
-        Metadata: {
-          version,
-          checksum: createHash("sha256").update(fileContent).digest("base64")
-        }
-      },
-      {
-        requestTimeout: 15 * 60 * 1e3
-        // 15 minutes
-      }
-    );
-  } else {
-    writeWarning("[Dry run]: Skipping upload to the R2 bucket.");
-  }
-}
-function getInternalDependencies(projectName, graph) {
-  const allDeps = graph.dependencies[projectName] ?? [];
-  return Array.from(
-    allDeps.reduce(
-      (acc, node) => {
-        const found = graph.nodes[node.target];
-        if (found) acc.push(found);
-        return acc;
-      },
-      []
-    )
-  );
-}
-
-export {
-  uploadFile,
-  getInternalDependencies
-};