@backstage/cli 0.9.1 → 0.10.3
This diff shows the changes between publicly released versions of the package as they appear in the public registry, and is provided for informational purposes only.
- package/CHANGELOG.md +53 -0
- package/config/jest.js +5 -3
- package/dist/cjs/{Lockfile-80f0eec4.cjs.js → Lockfile-35661afa.cjs.js} +18 -18
- package/dist/cjs/{build-33ad5323.cjs.js → build-aa750983.cjs.js} +8 -7
- package/dist/cjs/{build-7302d21d.cjs.js → build-b73c146e.cjs.js} +8 -7
- package/dist/cjs/{build-75ee78ab.cjs.js → build-be0f943d.cjs.js} +25 -22
- package/dist/cjs/{build-e21fe681.cjs.js → build-e5c1f9cd.cjs.js} +9 -9
- package/dist/cjs/{buildWorkspace-fa590fad.cjs.js → buildWorkspace-8c1d5134.cjs.js} +6 -6
- package/dist/cjs/{bump-5e4ffccb.cjs.js → bump-d6b32625.cjs.js} +36 -36
- package/dist/cjs/{bundle-4390c300.cjs.js → bundle-a35469cc.cjs.js} +15 -15
- package/dist/cjs/{clean-9279505e.cjs.js → clean-2294315a.cjs.js} +6 -6
- package/dist/cjs/{config-6ab0fc63.cjs.js → config-956d86a6.cjs.js} +7 -8
- package/dist/cjs/{create-4ffadaca.cjs.js → create-cf52e54d.cjs.js} +41 -41
- package/dist/cjs/{createPlugin-eb7251ea.cjs.js → createPlugin-b87a78f0.cjs.js} +31 -31
- package/dist/cjs/{dev-02640e59.cjs.js → dev-0a2ead14.cjs.js} +8 -8
- package/dist/cjs/{diff-56188f93.cjs.js → diff-4789ad9c.cjs.js} +39 -32
- package/dist/cjs/{docs-8039cb55.cjs.js → docs-12b2616e.cjs.js} +6 -6
- package/dist/cjs/{index-e84c4ef7.cjs.js → index-451d4133.cjs.js} +14 -14
- package/dist/cjs/{index-a18a4cd9.cjs.js → index-a18da53d.cjs.js} +15 -15
- package/dist/cjs/{index-994e77d9.cjs.js → index-dc8e5d2a.cjs.js} +68 -72
- package/dist/cjs/{info-2a0746e3.cjs.js → info-c8361946.cjs.js} +6 -6
- package/dist/cjs/{install-807a9039.cjs.js → install-68bbffa5.cjs.js} +60 -33
- package/dist/cjs/{lint-03faccc3.cjs.js → lint-4ec38c59.cjs.js} +5 -5
- package/dist/cjs/{lint-50ae8c8b.cjs.js → lint-a525caa3.cjs.js} +7 -7
- package/dist/cjs/{pack-7d39c708.cjs.js → pack-5af390ec.cjs.js} +6 -6
- package/dist/cjs/{packager-d328c8db.cjs.js → packager-47e5dbe2.cjs.js} +37 -35
- package/dist/cjs/{packages-24e76f27.cjs.js → packages-5bd09b6a.cjs.js} +5 -5
- package/dist/cjs/{paths-eecbbe83.cjs.js → paths-136b374a.cjs.js} +62 -46
- package/dist/cjs/{print-fec91280.cjs.js → print-8d26381a.cjs.js} +5 -5
- package/dist/cjs/{removePlugin-595ea65d.cjs.js → removePlugin-272a3d37.cjs.js} +30 -29
- package/dist/cjs/{run-40072d67.cjs.js → run-330d1527.cjs.js} +5 -5
- package/dist/cjs/{schema-00648d3a.cjs.js → schema-ea8ebcbf.cjs.js} +5 -5
- package/dist/cjs/{serve-e57a7acf.cjs.js → serve-50b65188.cjs.js} +10 -10
- package/dist/cjs/{serve-199d043b.cjs.js → serve-9b01661e.cjs.js} +15 -15
- package/dist/cjs/{server-c7c24602.cjs.js → server-15a54ef2.cjs.js} +6 -6
- package/dist/cjs/{svgrTemplate-2d0d15cf.cjs.js → svgrTemplate-f19e974c.cjs.js} +3 -3
- package/dist/cjs/{tasks-0f3092d3.cjs.js → tasks-6e261e37.cjs.js} +29 -29
- package/dist/cjs/{testCommand-a5c5fec6.cjs.js → testCommand-a9f0692f.cjs.js} +4 -4
- package/dist/cjs/{validate-011c509c.cjs.js → validate-0a0893b6.cjs.js} +4 -4
- package/dist/index.cjs.js +1 -1
- package/package.json +20 -23
- package/templates/default-plugin/package.json.hbs +3 -2
- package/dist/cjs/buildImage-477aa186.cjs.js +0 -77
package/dist/cjs/{diff-56188f93.cjs.js → diff-4789ad9c.cjs.js}
@@ -7,7 +7,7 @@ var path = require('path');
 var inquirer = require('inquirer');
 var handlebars = require('handlebars');
 var recursive = require('recursive-readdir');
-var index = require('./index-
+var index = require('./index-dc8e5d2a.cjs.js');
 require('commander');
 require('semver');
 require('@backstage/cli-common');
@@ -38,7 +38,7 @@ class PackageJsonHandler {
 this.targetPkg = targetPkg;
 this.variant = variant;
 }
-static async handler({path, write, missing, targetContents, templateContents}, prompt, variant) {
+static async handler({ path, write, missing, targetContents, templateContents }, prompt, variant) {
 console.log("Checking package.json");
 if (missing) {
 throw new Error(`${path} doesn't exist`);
@@ -61,18 +61,20 @@
 await this.syncScripts();
 await this.syncPublishConfig();
 await this.syncDependencies("dependencies");
+await this.syncDependencies("peerDependencies", true);
 await this.syncDependencies("devDependencies");
+await this.syncReactDeps();
 }
 async syncField(fieldName, obj = this.pkg, targetObj = this.targetPkg, prefix, sort, optional) {
-const fullFieldName = chalk__default[
+const fullFieldName = chalk__default["default"].cyan(prefix ? `${prefix}[${fieldName}]` : fieldName);
 const newValue = obj[fieldName];
-const coloredNewValue = chalk__default[
+const coloredNewValue = chalk__default["default"].cyan(JSON.stringify(newValue));
 if (fieldName in targetObj) {
 const oldValue = targetObj[fieldName];
 if (JSON.stringify(oldValue) === JSON.stringify(newValue)) {
 return;
 }
-const coloredOldValue = chalk__default[
+const coloredOldValue = chalk__default["default"].cyan(JSON.stringify(oldValue));
 const msg = `package.json has mismatched field, ${fullFieldName}, change from ${coloredOldValue} to ${coloredNewValue}?`;
 if (await this.prompt(msg)) {
 targetObj[fieldName] = newValue;
@@ -92,7 +94,7 @@ class PackageJsonHandler {
 }
 }
 async syncFiles() {
-const {configSchema} = this.targetPkg;
+const { configSchema } = this.targetPkg;
 const hasSchemaFile = typeof configSchema === "string";
 if (!this.targetPkg.files) {
 const expected = hasSchemaFile ? ["dist", configSchema] : ["dist"];
@@ -145,10 +147,10 @@
 }
 }
 }
-async syncDependencies(fieldName) {
+async syncDependencies(fieldName, required = false) {
 const pkgDeps = this.pkg[fieldName];
 const targetDeps = this.targetPkg[fieldName] = this.targetPkg[fieldName] || {};
-if (!pkgDeps) {
+if (!pkgDeps && !required) {
 return;
 }
 await this.syncField("@backstage/core", {}, targetDeps, fieldName, true);
@@ -157,17 +159,22 @@
 if (this.variant === "app" && key.startsWith("plugin-")) {
 continue;
 }
-await this.syncField(key, pkgDeps, targetDeps, fieldName, true,
+await this.syncField(key, pkgDeps, targetDeps, fieldName, true, !required);
 }
 }
+async syncReactDeps() {
+const targetDeps = this.targetPkg.dependencies = this.targetPkg.dependencies || {};
+await this.syncField("react", {}, targetDeps, "dependencies");
+await this.syncField("react-dom", {}, targetDeps, "dependencies");
+}
 async write() {
 await this.writeFunc(`${JSON.stringify(this.targetPkg, null, 2)}
 `);
 }
 }
-async function exactMatchHandler({path, write, missing, targetContents, templateContents}, prompt) {
+async function exactMatchHandler({ path, write, missing, targetContents, templateContents }, prompt) {
 console.log(`Checking ${path}`);
-const coloredPath = chalk__default[
+const coloredPath = chalk__default["default"].cyan(path);
 if (missing) {
 if (await prompt(`Missing ${coloredPath}, do you want to add it?`)) {
 await write(templateContents);
@@ -180,9 +187,9 @@ async function exactMatchHandler({path, write, missing, targetContents, template
 const diffs = diff$1.diffLines(targetContents, templateContents);
 for (const diff of diffs) {
 if (diff.added) {
-process.stdout.write(chalk__default[
+process.stdout.write(chalk__default["default"].green(`+${diff.value}`));
 } else if (diff.removed) {
-process.stdout.write(chalk__default[
+process.stdout.write(chalk__default["default"].red(`-${diff.value}`));
 } else {
 process.stdout.write(` ${diff.value}`);
 }
@@ -191,9 +198,9 @@ async function exactMatchHandler({path, write, missing, targetContents, template
 await write(templateContents);
 }
 }
-async function existsHandler({path, write, missing, templateContents}, prompt) {
+async function existsHandler({ path, write, missing, templateContents }, prompt) {
 console.log(`Making sure ${path} exists`);
-const coloredPath = chalk__default[
+const coloredPath = chalk__default["default"].cyan(path);
 if (missing) {
 if (await prompt(`Missing ${coloredPath}, do you want to add it?`)) {
 await write(templateContents);
@@ -201,7 +208,7 @@ async function existsHandler({path, write, missing, templateContents}, prompt) {
 return;
 }
 }
-async function skipHandler({path}) {
+async function skipHandler({ path }) {
 console.log(`Skipping ${path}`);
 }
 const handlers = {
@@ -224,10 +231,10 @@ async function handleAllFiles(fileHandlers, files, promptFunc) {
 }

 const inquirerPromptFunc = async (msg) => {
-const {result} = await inquirer__default[
+const { result } = await inquirer__default["default"].prompt({
 type: "confirm",
 name: "result",
-message: chalk__default[
+message: chalk__default["default"].blue(msg)
 });
 return result;
 };
@@ -235,7 +242,7 @@ const makeCheckPromptFunc = () => {
 let failed = false;
 const promptFunc = async (msg) => {
 failed = true;
-console.log(chalk__default[
+console.log(chalk__default["default"].red(`[Check Failed] ${msg}`));
 return false;
 };
 const finalize = () => {
@@ -251,12 +258,12 @@ const yesPromptFunc = async (msg) => {
 };

 async function readTemplateFile(templateFile, templateVars) {
-const contents = await fs__default[
+const contents = await fs__default["default"].readFile(templateFile, "utf8");
 if (!templateFile.endsWith(".hbs")) {
 return contents;
 }
 const packageVersionProvider = index.createPackageVersionProvider(void 0);
-return handlebars__default[
+return handlebars__default["default"].compile(contents)(templateVars, {
 helpers: {
 versionQuery(name, hint) {
 return packageVersionProvider(name, typeof hint === "string" ? hint : void 0);
@@ -265,28 +272,28 @@ async function readTemplateFile(templateFile, templateVars) {
 });
 }
 async function readTemplate(templateDir, templateVars) {
-const templateFilePaths = await recursive__default[
+const templateFilePaths = await recursive__default["default"](templateDir).catch((error) => {
 throw new Error(`Failed to read template directory: ${error.message}`);
 });
 const templatedFiles = new Array();
 for (const templateFile of templateFilePaths) {
 const path$1 = path.relative(templateDir, templateFile).replace(/\.hbs$/, "");
 const contents = await readTemplateFile(templateFile, templateVars);
-templatedFiles.push({path: path$1, contents});
+templatedFiles.push({ path: path$1, contents });
 }
 return templatedFiles;
 }
 async function diffTemplatedFiles(targetDir, templatedFiles) {
 const fileDiffs = new Array();
-for (const {path: path$1, contents: templateContents} of templatedFiles) {
+for (const { path: path$1, contents: templateContents } of templatedFiles) {
 const targetPath = path.resolve(targetDir, path$1);
-const targetExists = await fs__default[
+const targetExists = await fs__default["default"].pathExists(targetPath);
 const write = async (contents) => {
-await fs__default[
-await fs__default[
+await fs__default["default"].ensureDir(path.dirname(targetPath));
+await fs__default["default"].writeFile(targetPath, contents, "utf8");
 };
 if (targetExists) {
-const targetContents = await fs__default[
+const targetContents = await fs__default["default"].readFile(targetPath, "utf8");
 fileDiffs.push({
 path: path$1,
 write,
@@ -364,14 +371,14 @@ async function readPluginData() {
 } catch (error) {
 throw new Error(`Failed to read target package, ${error}`);
 }
-const pluginTsContents = await fs__default[
+const pluginTsContents = await fs__default["default"].readFile(index.paths.resolveTarget("src/plugin.ts"), "utf8");
 const pluginIdMatch = pluginTsContents.match(/id: ['"`](.+?)['"`]/);
 if (!pluginIdMatch) {
 throw new Error(`Failed to parse plugin.ts, no plugin ID found`);
 }
 const id = pluginIdMatch[1];
-return {id, name, privatePackage, pluginVersion, npmRegistry};
+return { id, name, privatePackage, pluginVersion, npmRegistry };
 }

-exports
-//# sourceMappingURL=diff-
+exports["default"] = diff;
+//# sourceMappingURL=diff-4789ad9c.cjs.js.map
package/dist/cjs/{docs-8039cb55.cjs.js → docs-12b2616e.cjs.js}
@@ -2,9 +2,9 @@

 var configLoader = require('@backstage/config-loader');
 var openBrowser = require('react-dev-utils/openBrowser');
-var config = require('./config-
+var config = require('./config-956d86a6.cjs.js');
 require('@backstage/config');
-require('./index-
+require('./index-dc8e5d2a.cjs.js');
 require('commander');
 require('chalk');
 require('fs-extra');
@@ -19,14 +19,14 @@ var openBrowser__default = /*#__PURE__*/_interopDefaultLegacy(openBrowser);

 const DOCS_URL = "https://config.backstage.io";
 var docs = async (cmd) => {
-const {schema: appSchemas} = await config.loadCliConfig({
+const { schema: appSchemas } = await config.loadCliConfig({
 args: [],
 fromPackage: cmd.package,
 mockEnv: true
 });
 const schema = configLoader.mergeConfigSchemas(appSchemas.serialize().schemas.map((_) => _.value));
-openBrowser__default[
+openBrowser__default["default"](`${DOCS_URL}#schema=${JSON.stringify(schema)}`);
 };

-exports
-//# sourceMappingURL=docs-
+exports["default"] = docs;
+//# sourceMappingURL=docs-12b2616e.cjs.js.map
package/dist/cjs/{index-e84c4ef7.cjs.js → index-451d4133.cjs.js}
@@ -3,7 +3,7 @@
 var fs = require('fs-extra');
 var chalk = require('chalk');
 var yaml = require('yaml');
-var index$1 = require('./index-
+var index$1 = require('./index-dc8e5d2a.cjs.js');
 var crypto = require('crypto');
 var openBrowser = require('react-dev-utils/openBrowser');
 var request = require('@octokit/request');
@@ -42,7 +42,7 @@ const FORM_PAGE = `
 </form>
 <script>
 document.getElementById("form").submit()
-</script>
+<\/script>
 </body>
 </html>
 `;
@@ -69,21 +69,21 @@ class GithubCreateAppServer {
 res.setHeader("content-type", "text/html");
 res.send(body);
 };
-const webhookId = crypto__default[
+const webhookId = crypto__default["default"].randomBytes(15).toString("base64").replace(/[\+\/]/g, "");
 this.webhookUrl = `https://smee.io/${webhookId}`;
 }
-static async run({org}) {
+static async run({ org }) {
 const encodedOrg = encodeURIComponent(org);
 const actionUrl = `https://github.com/organizations/${encodedOrg}/settings/apps/new`;
 const server = new GithubCreateAppServer(actionUrl);
 return server.start();
 }
 async start() {
-const app = express__default[
+const app = express__default["default"]();
 app.get("/", this.formHandler);
 const callPromise = new Promise((resolve, reject) => {
 app.get("/callback", (req, res) => {
-request.request(`POST /app-manifests/${encodeURIComponent(req.query.code)}/conversions`).then(({data}) => {
+request.request(`POST /app-manifests/${encodeURIComponent(req.query.code)}/conversions`).then(({ data }) => {
 resolve({
 name: data.name,
 slug: data.slug,
@@ -99,7 +99,7 @@ class GithubCreateAppServer {
 });
 });
 this.baseUrl = await this.listen(app);
-openBrowser__default[
+openBrowser__default["default"](this.baseUrl);
 return callPromise;
 }
 async listen(app) {
@@ -110,7 +110,7 @@ class GithubCreateAppServer {
 reject(new Error(`Unexpected listener info '${info}'`));
 return;
 }
-const {port} = info;
+const { port } = info;
 resolve(`http://localhost:${port}`);
 });
 });
@@ -118,14 +118,14 @@
 }

 var index = async (org) => {
-const {slug, name, ...config} = await GithubCreateAppServer.run({org});
+const { slug, name, ...config } = await GithubCreateAppServer.run({ org });
 const fileName = `github-app-${slug}-credentials.yaml`;
 const content = `# Name: ${name}
 ${yaml.stringify(config)}`;
-await fs__default[
-console.log(`GitHub App configuration written to ${chalk__default[
-console.log(chalk__default[
+await fs__default["default"].writeFile(index$1.paths.resolveTargetRoot(fileName), content);
+console.log(`GitHub App configuration written to ${chalk__default["default"].cyan(fileName)}`);
+console.log(chalk__default["default"].yellow("This file contains sensitive credentials, it should not be committed to version control and handled with care!"));
 };

-exports
-//# sourceMappingURL=index-
+exports["default"] = index;
+//# sourceMappingURL=index-451d4133.cjs.js.map
package/dist/cjs/{index-a18a4cd9.cjs.js → index-a18da53d.cjs.js}
@@ -4,8 +4,8 @@ var fs = require('fs-extra');
 var path = require('path');
 var os = require('os');
 var tar = require('tar');
-var index = require('./index-
-var run = require('./run-
+var index = require('./index-dc8e5d2a.cjs.js');
+var run = require('./run-330d1527.cjs.js');

 function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }

@@ -18,7 +18,7 @@ const UNSAFE_PACKAGES = [
 ];
 async function createDistWorkspace(packageNames, options = {}) {
 var _a, _b, _c;
-const targetDir = (_a = options.targetDir) != null ? _a : await fs__default[
+const targetDir = (_a = options.targetDir) != null ? _a : await fs__default["default"].mkdtemp(path.resolve(os.tmpdir(), "dist-workspace"));
 const targets = await findTargetPackages(packageNames);
 if (options.buildDependencies) {
 const exclude = (_b = options.buildExcludes) != null ? _b : [];
@@ -36,14 +36,14 @@ async function createDistWorkspace(packageNames, options = {}) {
 for (const file of files) {
 const src = typeof file === "string" ? file : file.src;
 const dest = typeof file === "string" ? file : file.dest;
-await fs__default[
+await fs__default["default"].copy(index.paths.resolveTargetRoot(src), path.resolve(targetDir, dest));
 }
 if (options.skeleton) {
 const skeletonFiles = targets.map((target) => {
 const dir = path.relative(index.paths.targetRoot, target.location);
 return path.join(dir, "package.json");
 });
-await tar__default[
+await tar__default["default"].create({
 file: path.resolve(targetDir, options.skeleton),
 cwd: targetDir,
 portable: true,
@@ -61,24 +61,24 @@ async function moveToDistWorkspace(workspaceDir, localPackages) {
 cwd: target.location
 });
 if (target.scripts.postpack) {
-await run.run("yarn", ["postpack"], {cwd: target.location});
+await run.run("yarn", ["postpack"], { cwd: target.location });
 }
 const outputDir = path.relative(index.paths.targetRoot, target.location);
 const absoluteOutputPath = path.resolve(workspaceDir, outputDir);
-await fs__default[
-await tar__default[
+await fs__default["default"].ensureDir(absoluteOutputPath);
+await tar__default["default"].extract({
 file: archivePath,
 cwd: absoluteOutputPath,
 strip: 1
 });
-await fs__default[
+await fs__default["default"].remove(archivePath);
 if (target.get("bundled")) {
-const pkgJson = await fs__default[
+const pkgJson = await fs__default["default"].readJson(path.resolve(absoluteOutputPath, "package.json"));
 delete pkgJson.dependencies;
 delete pkgJson.devDependencies;
 delete pkgJson.peerDependencies;
 delete pkgJson.optionalDependencies;
-await fs__default[
+await fs__default["default"].writeJson(path.resolve(absoluteOutputPath, "package.json"), pkgJson, {
 spaces: 2
 });
 }
@@ -92,12 +92,12 @@ async function moveToDistWorkspace(workspaceDir, localPackages) {
 }
 async function findTargetPackages(pkgNames) {
 var _a;
-const {Project} = require("@lerna/project");
-const {PackageGraph} = require("@lerna/package-graph");
+const { Project } = require("@lerna/project");
+const { PackageGraph } = require("@lerna/package-graph");
 const project = new Project(index.paths.targetDir);
 const packages = await project.getPackages();
 const graph = new PackageGraph(packages);
-const targets = new Map();
+const targets = /* @__PURE__ */ new Map();
 const searchNames = pkgNames.slice();
 while (searchNames.length) {
 const name = searchNames.pop();
@@ -120,4 +120,4 @@ async function findTargetPackages(pkgNames) {
 }

 exports.createDistWorkspace = createDistWorkspace;
-//# sourceMappingURL=index-
+//# sourceMappingURL=index-a18da53d.cjs.js.map