@platforma-sdk/block-tools 2.1.5 → 2.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +1 -756
- package/dist/cli.js.map +1 -1
- package/dist/cli.mjs +187 -0
- package/dist/cli.mjs.map +1 -0
- package/dist/cmd/build-meta.d.ts +10 -0
- package/dist/cmd/build-meta.d.ts.map +1 -0
- package/dist/cmd/build-model.d.ts +11 -0
- package/dist/cmd/build-model.d.ts.map +1 -0
- package/dist/cmd/index.d.ts +11 -0
- package/dist/cmd/index.d.ts.map +1 -0
- package/dist/cmd/pack-block.d.ts +10 -0
- package/dist/cmd/pack-block.d.ts.map +1 -0
- package/dist/cmd/upload-package-v1.d.ts +15 -0
- package/dist/cmd/upload-package-v1.d.ts.map +1 -0
- package/dist/common_types.d.ts +3 -0
- package/dist/common_types.d.ts.map +1 -0
- package/dist/config-BJognM_j.mjs +536 -0
- package/dist/config-BJognM_j.mjs.map +1 -0
- package/dist/config-CfA0Dj6h.js +3 -0
- package/dist/config-CfA0Dj6h.js.map +1 -0
- package/dist/index.js +2 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +43 -0
- package/dist/index.mjs.map +1 -0
- package/dist/lib/storage.d.ts +29 -0
- package/dist/lib/storage.d.ts.map +1 -0
- package/dist/lib.d.ts +3 -2637
- package/dist/lib.d.ts.map +1 -0
- package/dist/registry_v1/config.d.ts +12 -0
- package/dist/registry_v1/config.d.ts.map +1 -0
- package/dist/registry_v1/config_schema.d.ts +94 -0
- package/dist/registry_v1/config_schema.d.ts.map +1 -0
- package/dist/registry_v1/flags.d.ts +9 -0
- package/dist/registry_v1/flags.d.ts.map +1 -0
- package/dist/registry_v1/index.d.ts +4 -0
- package/dist/registry_v1/index.d.ts.map +1 -0
- package/dist/registry_v1/registry.d.ts +46 -0
- package/dist/registry_v1/registry.d.ts.map +1 -0
- package/dist/registry_v1/v1_repo_schema.d.ts +25 -0
- package/dist/registry_v1/v1_repo_schema.d.ts.map +1 -0
- package/dist/util.d.ts +4 -0
- package/dist/util.d.ts.map +1 -0
- package/dist/v2/build_dist.d.ts +3 -0
- package/dist/v2/build_dist.d.ts.map +1 -0
- package/dist/v2/index.d.ts +4 -0
- package/dist/v2/index.d.ts.map +1 -0
- package/dist/v2/model/block_components.d.ts +384 -0
- package/dist/v2/model/block_components.d.ts.map +1 -0
- package/dist/v2/model/common.d.ts +3 -0
- package/dist/v2/model/common.d.ts.map +1 -0
- package/dist/v2/model/content_conversion.d.ts +35 -0
- package/dist/v2/model/content_conversion.d.ts.map +1 -0
- package/dist/v2/model/content_types.d.ts +478 -0
- package/dist/v2/model/content_types.d.ts.map +1 -0
- package/dist/{lib.d.cts → v2/model/index.d.ts} +449 -1005
- package/dist/v2/model/index.d.ts.map +1 -0
- package/dist/v2/model/meta.d.ts +805 -0
- package/dist/v2/model/meta.d.ts.map +1 -0
- package/dist/v2/registry/schema.d.ts +15 -0
- package/dist/v2/registry/schema.d.ts.map +1 -0
- package/dist/v2/source_package.d.ts +8 -0
- package/dist/v2/source_package.d.ts.map +1 -0
- package/package.json +24 -17
- package/src/cmd/build-meta.ts +38 -0
- package/src/cmd/build-model.ts +76 -0
- package/src/cmd/index.ts +12 -0
- package/src/cmd/pack-block.ts +32 -0
- package/src/cmd/upload-package-v1.ts +105 -0
- package/src/common_types.ts +3 -0
- package/src/lib/storage.test.ts +91 -0
- package/src/lib/storage.ts +140 -0
- package/src/lib.ts +2 -0
- package/src/registry_v1/config.ts +90 -0
- package/src/registry_v1/config_schema.ts +30 -0
- package/src/registry_v1/flags.ts +23 -0
- package/src/registry_v1/index.ts +3 -0
- package/src/registry_v1/registry.test.ts +122 -0
- package/src/registry_v1/registry.ts +253 -0
- package/src/registry_v1/v1_repo_schema.ts +42 -0
- package/src/util.ts +25 -0
- package/src/v2/build_dist.test.ts +16 -0
- package/src/v2/build_dist.ts +29 -0
- package/src/v2/index.ts +3 -0
- package/src/v2/model/block_components.ts +32 -0
- package/src/v2/model/common.ts +2 -0
- package/src/v2/model/content_conversion.ts +178 -0
- package/src/v2/model/content_types.ts +233 -0
- package/src/v2/model/index.ts +46 -0
- package/src/v2/model/meta.ts +36 -0
- package/src/v2/registry/schema.ts +29 -0
- package/src/v2/source_package.test.ts +27 -0
- package/src/v2/source_package.ts +82 -0
- package/dist/cli.cjs +0 -786
- package/dist/cli.cjs.map +0 -1
- package/dist/cli.d.cts +0 -58
- package/dist/cli.d.ts +0 -58
- package/dist/lib.cjs +0 -629
- package/dist/lib.cjs.map +0 -1
- package/dist/lib.js +0 -577
- package/dist/lib.js.map +0 -1
package/dist/cli.js
CHANGED
|
@@ -1,757 +1,2 @@
|
|
|
1
|
-
|
|
2
|
-
import path from 'path';
|
|
3
|
-
import fs2 from 'fs';
|
|
4
|
-
import { z } from 'zod';
|
|
5
|
-
import fsp from 'node:fs/promises';
|
|
6
|
-
import * as mime from 'mime-types';
|
|
7
|
-
import * as tar from 'tar';
|
|
8
|
-
import { BlockPackMeta, ContentAbsoluteTextLocal, ContentAbsoluteBinaryLocal, BlockPackMetaEmbeddedContent, CreateBlockPackDescriptionSchema, BlockComponentsManifest, BlockPackMetaManifest, SemVer, DescriptionContentText, DescriptionContentBinary, BlockPackDescriptionFromPackageJsonRaw, BlockComponents, ContentAbsoluteFolder } from '@milaboratories/pl-model-middle-layer';
|
|
9
|
-
import path7 from 'node:path';
|
|
10
|
-
import { notEmpty } from '@milaboratories/ts-helpers';
|
|
11
|
-
import YAML2 from 'yaml';
|
|
12
|
-
import * as os from 'node:os';
|
|
13
|
-
import { randomUUID } from 'node:crypto';
|
|
14
|
-
import semver from 'semver/preload';
|
|
15
|
-
import pathPosix from 'node:path/posix';
|
|
16
|
-
import { S3, paginateListObjectsV2 } from '@aws-sdk/client-s3';
|
|
17
|
-
import * as fs3 from 'node:fs';
|
|
18
|
-
import fs3__default from 'node:fs';
|
|
19
|
-
import { OclifLoggerAdapter } from '@milaboratories/ts-helpers-oclif';
|
|
20
|
-
|
|
21
|
-
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
|
|
22
|
-
get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
|
|
23
|
-
}) : x)(function(x) {
|
|
24
|
-
if (typeof require !== "undefined") return require.apply(this, arguments);
|
|
25
|
-
throw Error('Dynamic require of "' + x + '" is not supported');
|
|
26
|
-
});
|
|
27
|
-
function tryResolve(root, request) {
|
|
28
|
-
try {
|
|
29
|
-
return __require.resolve(request, {
|
|
30
|
-
paths: [root]
|
|
31
|
-
});
|
|
32
|
-
} catch (err) {
|
|
33
|
-
if (err.code !== "MODULE_NOT_FOUND") throw err;
|
|
34
|
-
}
|
|
35
|
-
return void 0;
|
|
36
|
-
}
|
|
37
|
-
function ResolvedModuleFile(moduleRoot) {
|
|
38
|
-
return z.string().transform((request, ctx) => {
|
|
39
|
-
const result = tryResolve(moduleRoot, request);
|
|
40
|
-
if (result === void 0) {
|
|
41
|
-
ctx.addIssue({
|
|
42
|
-
code: z.ZodIssueCode.custom,
|
|
43
|
-
message: `Can't resolve ${request} against ${moduleRoot}`
|
|
44
|
-
});
|
|
45
|
-
return z.NEVER;
|
|
46
|
-
}
|
|
47
|
-
return {
|
|
48
|
-
type: "absolute-file",
|
|
49
|
-
file: result
|
|
50
|
-
};
|
|
51
|
-
});
|
|
52
|
-
}
|
|
53
|
-
function ResolvedModuleFolder(moduleRoot, ...indexFilesToLookFor) {
|
|
54
|
-
return z.string().transform((request, ctx) => {
|
|
55
|
-
const requestWithSlash = request.endsWith("/") ? request : `${request}/`;
|
|
56
|
-
for (const idxFile of indexFilesToLookFor) {
|
|
57
|
-
const result = tryResolve(moduleRoot, requestWithSlash + idxFile);
|
|
58
|
-
if (result !== void 0) {
|
|
59
|
-
if (!result.endsWith(idxFile))
|
|
60
|
-
throw new Error(`Unexpected resolve result ${result} with index file ${idxFile}`);
|
|
61
|
-
return {
|
|
62
|
-
type: "absolute-folder",
|
|
63
|
-
folder: result.slice(0, result.length - idxFile.length)
|
|
64
|
-
};
|
|
65
|
-
}
|
|
66
|
-
}
|
|
67
|
-
ctx.addIssue({
|
|
68
|
-
code: z.ZodIssueCode.custom,
|
|
69
|
-
message: `Can't resolve ${request} folder against ${moduleRoot}, no index file found (${indexFilesToLookFor.join(", ")})`
|
|
70
|
-
});
|
|
71
|
-
return z.NEVER;
|
|
72
|
-
});
|
|
73
|
-
}
|
|
74
|
-
function mapLocalToAbsolute(root) {
|
|
75
|
-
return (value) => value.type === "relative" ? { type: "absolute-file", file: path.resolve(root, value.path) } : value;
|
|
76
|
-
}
|
|
77
|
-
function absoluteToString() {
|
|
78
|
-
return async (value) => {
|
|
79
|
-
if (value.type === "absolute-file")
|
|
80
|
-
return await fsp.readFile(value.file, { encoding: "utf-8" });
|
|
81
|
-
else return value.content;
|
|
82
|
-
};
|
|
83
|
-
}
|
|
84
|
-
function absoluteToBase64() {
|
|
85
|
-
return async (value) => {
|
|
86
|
-
if (value.type === "absolute-file") {
|
|
87
|
-
const mimeType = mime.lookup(value.file);
|
|
88
|
-
if (!mimeType) throw new Error(`Can't recognize mime type of the file: ${value.file}.`);
|
|
89
|
-
return {
|
|
90
|
-
type: "explicit-base64",
|
|
91
|
-
mimeType,
|
|
92
|
-
content: await fsp.readFile(value.file, { encoding: "base64" })
|
|
93
|
-
};
|
|
94
|
-
} else return value;
|
|
95
|
-
};
|
|
96
|
-
}
|
|
97
|
-
function cpAbsoluteToRelative(dstFolder, fileAccumulator) {
|
|
98
|
-
return async (value) => {
|
|
99
|
-
if (value.type === "absolute-file") {
|
|
100
|
-
const fileName = path.basename(value.file);
|
|
101
|
-
const dst = path.resolve(dstFolder, fileName);
|
|
102
|
-
fileAccumulator?.push(fileName);
|
|
103
|
-
await fsp.cp(value.file, dst);
|
|
104
|
-
return { type: "relative", path: fileName };
|
|
105
|
-
} else return value;
|
|
106
|
-
};
|
|
107
|
-
}
|
|
108
|
-
function packFolderToRelativeTgz(dstFolder, tgzName, fileAccumulator) {
|
|
109
|
-
if (!tgzName.endsWith(".tgz")) throw new Error(`Unexpected tgz file name: ${tgzName}`);
|
|
110
|
-
return async (value) => {
|
|
111
|
-
const dst = path.resolve(dstFolder, tgzName);
|
|
112
|
-
await tar.create(
|
|
113
|
-
{
|
|
114
|
-
gzip: true,
|
|
115
|
-
file: dst,
|
|
116
|
-
cwd: value.folder
|
|
117
|
-
},
|
|
118
|
-
[value.folder]
|
|
119
|
-
);
|
|
120
|
-
fileAccumulator?.push(tgzName);
|
|
121
|
-
return { type: "relative", path: tgzName };
|
|
122
|
-
};
|
|
123
|
-
}
|
|
124
|
-
function BlockComponentsDescription(moduleRoot) {
|
|
125
|
-
return BlockComponents(
|
|
126
|
-
ResolvedModuleFile(moduleRoot),
|
|
127
|
-
ResolvedModuleFolder(moduleRoot, "index.html")
|
|
128
|
-
);
|
|
129
|
-
}
|
|
130
|
-
function BlockComponentsConsolidate(dstFolder, fileAccumulator) {
|
|
131
|
-
return BlockComponents(
|
|
132
|
-
ContentAbsoluteBinaryLocal.transform(cpAbsoluteToRelative(dstFolder, fileAccumulator)),
|
|
133
|
-
ContentAbsoluteFolder.transform(packFolderToRelativeTgz(dstFolder, "ui.tgz", fileAccumulator))
|
|
134
|
-
).pipe(BlockComponentsManifest);
|
|
135
|
-
}
|
|
136
|
-
function BlockPackMetaDescription(root) {
|
|
137
|
-
return BlockPackMeta(
|
|
138
|
-
DescriptionContentText.transform(mapLocalToAbsolute(root)),
|
|
139
|
-
DescriptionContentBinary.transform(mapLocalToAbsolute(root))
|
|
140
|
-
);
|
|
141
|
-
}
|
|
142
|
-
function BlockPackMetaConsolidate(dstFolder, fileAccumulator) {
|
|
143
|
-
return BlockPackMeta(
|
|
144
|
-
ContentAbsoluteTextLocal.transform(cpAbsoluteToRelative(dstFolder, fileAccumulator)),
|
|
145
|
-
ContentAbsoluteBinaryLocal.transform(cpAbsoluteToRelative(dstFolder, fileAccumulator))
|
|
146
|
-
);
|
|
147
|
-
}
|
|
148
|
-
var BlockPackMetaEmbed = BlockPackMeta(
|
|
149
|
-
ContentAbsoluteTextLocal.transform(absoluteToString()),
|
|
150
|
-
ContentAbsoluteBinaryLocal.transform(absoluteToBase64())
|
|
151
|
-
).pipe(BlockPackMetaEmbeddedContent);
|
|
152
|
-
|
|
153
|
-
// src/v2/model/index.ts
|
|
154
|
-
function ResolvedBlockPackDescriptionFromPackageJson(root) {
|
|
155
|
-
return CreateBlockPackDescriptionSchema(
|
|
156
|
-
BlockComponentsDescription(root),
|
|
157
|
-
BlockPackMetaDescription(root)
|
|
158
|
-
);
|
|
159
|
-
}
|
|
160
|
-
function BlockPackDescriptionConsolidateToFolder(dstFolder, fileAccumulator) {
|
|
161
|
-
return CreateBlockPackDescriptionSchema(
|
|
162
|
-
BlockComponentsConsolidate(dstFolder, fileAccumulator),
|
|
163
|
-
//BlockPackMetaToExplicit
|
|
164
|
-
BlockPackMetaConsolidate(dstFolder, fileAccumulator)
|
|
165
|
-
).pipe(BlockPackDescriptionManifest);
|
|
166
|
-
}
|
|
167
|
-
var BlockPackDescriptionManifest = CreateBlockPackDescriptionSchema(
|
|
168
|
-
BlockComponentsManifest,
|
|
169
|
-
BlockPackMetaManifest
|
|
170
|
-
);
|
|
171
|
-
var BlockPackManifest = BlockPackDescriptionManifest.extend({
|
|
172
|
-
schema: z.literal("v1"),
|
|
173
|
-
files: z.array(z.string())
|
|
174
|
-
});
|
|
175
|
-
var BlockPackManifestFile = "manifest.json";
|
|
176
|
-
async function buildBlockPackDist(description, dst) {
|
|
177
|
-
await fsp.mkdir(dst, { recursive: true });
|
|
178
|
-
const files = [];
|
|
179
|
-
const descriptionRelative = await BlockPackDescriptionConsolidateToFolder(dst, files).parseAsync(
|
|
180
|
-
description
|
|
181
|
-
);
|
|
182
|
-
const manifest = BlockPackManifest.parse({
|
|
183
|
-
schema: "v1",
|
|
184
|
-
...descriptionRelative,
|
|
185
|
-
files
|
|
186
|
-
});
|
|
187
|
-
await fsp.writeFile(path7.resolve(dst, BlockPackManifestFile), JSON.stringify(manifest));
|
|
188
|
-
return manifest;
|
|
189
|
-
}
|
|
190
|
-
async function tryLoadFile(file, map) {
|
|
191
|
-
try {
|
|
192
|
-
return map(await fsp.readFile(file));
|
|
193
|
-
} catch (err) {
|
|
194
|
-
if (err.code == "ENOENT") return void 0;
|
|
195
|
-
else throw new Error("", { cause: err });
|
|
196
|
-
}
|
|
197
|
-
}
|
|
198
|
-
var BlockDescriptionPackageJsonField = "block";
|
|
199
|
-
var ConventionPackageNamePattern = /(?:@[a-zA-Z0-9-.]+\/)?(?<organization>[a-zA-Z0-9-]+)\.(?<name>[a-zA-Z0-9-]+)/;
|
|
200
|
-
function parsePackageName(packageName) {
|
|
201
|
-
const match = packageName.match(ConventionPackageNamePattern);
|
|
202
|
-
if (!match)
|
|
203
|
-
throw new Error(
|
|
204
|
-
`Malformed package name (${packageName}), can't infer organization and block pack name.`
|
|
205
|
-
);
|
|
206
|
-
const { name, organization } = match.groups;
|
|
207
|
-
return { name, organization };
|
|
208
|
-
}
|
|
209
|
-
async function loadPackDescriptionRaw(moduleRoot) {
|
|
210
|
-
const fullPackageJsonPath = path.resolve(moduleRoot, "package.json");
|
|
211
|
-
const packageJson = JSON.parse(await fsp.readFile(fullPackageJsonPath, { encoding: "utf-8" }));
|
|
212
|
-
const descriptionNotParsed = packageJson[BlockDescriptionPackageJsonField];
|
|
213
|
-
if (descriptionNotParsed === void 0)
|
|
214
|
-
throw new Error(
|
|
215
|
-
`Block description (field ${BlockDescriptionPackageJsonField}) not found in ${fullPackageJsonPath}.`
|
|
216
|
-
);
|
|
217
|
-
return {
|
|
218
|
-
...BlockPackDescriptionFromPackageJsonRaw.parse(descriptionNotParsed),
|
|
219
|
-
id: {
|
|
220
|
-
...parsePackageName(
|
|
221
|
-
notEmpty(packageJson["name"], `"name" not found in ${fullPackageJsonPath}`)
|
|
222
|
-
),
|
|
223
|
-
version: SemVer.parse(packageJson["version"])
|
|
224
|
-
}
|
|
225
|
-
};
|
|
226
|
-
}
|
|
227
|
-
async function loadPackDescription(moduleRoot) {
|
|
228
|
-
const descriptionRaw = await loadPackDescriptionRaw(moduleRoot);
|
|
229
|
-
return await ResolvedBlockPackDescriptionFromPackageJson(moduleRoot).parseAsync(descriptionRaw);
|
|
230
|
-
}
|
|
231
|
-
|
|
232
|
-
// src/cmd/build-meta.ts
|
|
233
|
-
var BuildMeta = class _BuildMeta extends Command {
|
|
234
|
-
static description = "Extracts meta information from blocks package.json and outputs meta.json with embedded binary and textual information linked from the meta section.";
|
|
235
|
-
static flags = {
|
|
236
|
-
modulePath: Flags.string({
|
|
237
|
-
char: "i",
|
|
238
|
-
summary: "input module path",
|
|
239
|
-
helpValue: "<path>",
|
|
240
|
-
default: "."
|
|
241
|
-
}),
|
|
242
|
-
destination: Flags.string({
|
|
243
|
-
char: "o",
|
|
244
|
-
summary: "output meta.json file",
|
|
245
|
-
helpValue: "<path>",
|
|
246
|
-
required: true
|
|
247
|
-
})
|
|
248
|
-
};
|
|
249
|
-
async run() {
|
|
250
|
-
const { flags } = await this.parse(_BuildMeta);
|
|
251
|
-
const modulePath = path.resolve(flags.modulePath);
|
|
252
|
-
const descriptionRaw = await loadPackDescriptionRaw(modulePath);
|
|
253
|
-
const metaEmbedded = await BlockPackMetaEmbed.parseAsync(
|
|
254
|
-
BlockPackMetaDescription(modulePath).parse(descriptionRaw.meta)
|
|
255
|
-
);
|
|
256
|
-
await fs2.promises.writeFile(path.resolve(flags.destination), JSON.stringify(metaEmbedded));
|
|
257
|
-
}
|
|
258
|
-
};
|
|
259
|
-
async function getFileContent(path9) {
|
|
260
|
-
try {
|
|
261
|
-
return await fs2.promises.readFile(path9, "utf8");
|
|
262
|
-
} catch (error) {
|
|
263
|
-
if (error.code === "ENOENT") {
|
|
264
|
-
return void 0;
|
|
265
|
-
}
|
|
266
|
-
throw error;
|
|
267
|
-
}
|
|
268
|
-
}
|
|
269
|
-
var BuildModel = class _BuildModel extends Command {
|
|
270
|
-
static description = "Extracts and outputs block model JSON from pre-built block model module";
|
|
271
|
-
static flags = {
|
|
272
|
-
modulePath: Flags.string({
|
|
273
|
-
char: "i",
|
|
274
|
-
summary: "input module path",
|
|
275
|
-
helpValue: "<path>",
|
|
276
|
-
default: "."
|
|
277
|
-
}),
|
|
278
|
-
sourceBundle: Flags.string({
|
|
279
|
-
char: "b",
|
|
280
|
-
summary: "bundled model code to embed into the model for callback-based rendering to work",
|
|
281
|
-
helpValue: "<path>",
|
|
282
|
-
default: "./dist/bundle.js"
|
|
283
|
-
}),
|
|
284
|
-
destination: Flags.string({
|
|
285
|
-
char: "o",
|
|
286
|
-
summary: "output model file",
|
|
287
|
-
helpValue: "<path>",
|
|
288
|
-
default: "./dist/model.json"
|
|
289
|
-
})
|
|
290
|
-
};
|
|
291
|
-
async run() {
|
|
292
|
-
const { flags } = await this.parse(_BuildModel);
|
|
293
|
-
const modulePath = path.resolve(flags.modulePath);
|
|
294
|
-
let { model, platforma } = __require(modulePath);
|
|
295
|
-
if (!model) model = platforma;
|
|
296
|
-
if (!model) throw new Error('"model" export not found');
|
|
297
|
-
const { config } = model;
|
|
298
|
-
if (!config)
|
|
299
|
-
throw new Error(
|
|
300
|
-
'Malformed "model" object, check it is created with "BlockModel" and ".done()" is executed as the call in the chain.'
|
|
301
|
-
);
|
|
302
|
-
if (!("canRun" in config || "inputsValid" in config) || !("outputs" in config) || !("sections" in config))
|
|
303
|
-
throw new Error('"config" has unexpected structure');
|
|
304
|
-
const code = await getFileContent(flags.sourceBundle);
|
|
305
|
-
if (code !== void 0) {
|
|
306
|
-
config.code = {
|
|
307
|
-
type: "plain",
|
|
308
|
-
content: code
|
|
309
|
-
};
|
|
310
|
-
}
|
|
311
|
-
await fs2.promises.writeFile(path.resolve(flags.destination), JSON.stringify(config));
|
|
312
|
-
}
|
|
313
|
-
};
|
|
314
|
-
var PackBlock = class _PackBlock extends Command {
|
|
315
|
-
static description = "Builds block pack and outputs a block pack manifest consolidating all references assets into a single folder";
|
|
316
|
-
static flags = {
|
|
317
|
-
modulePath: Flags.string({
|
|
318
|
-
char: "i",
|
|
319
|
-
summary: "input module path",
|
|
320
|
-
helpValue: "<path>",
|
|
321
|
-
default: "."
|
|
322
|
-
}),
|
|
323
|
-
destinationPath: Flags.string({
|
|
324
|
-
char: "o",
|
|
325
|
-
summary: "output folder",
|
|
326
|
-
helpValue: "<path>",
|
|
327
|
-
default: "./block-pack"
|
|
328
|
-
})
|
|
329
|
-
};
|
|
330
|
-
async run() {
|
|
331
|
-
const { flags } = await this.parse(_PackBlock);
|
|
332
|
-
const description = await loadPackDescription(path.resolve(flags.modulePath));
|
|
333
|
-
await buildBlockPackDist(description, path.resolve(flags.destinationPath));
|
|
334
|
-
}
|
|
335
|
-
};
|
|
336
|
-
var PlRegAddress = z.string().regex(/^(?:s3:|file:)/);
|
|
337
|
-
var PlPackageConfigData = z.object({
|
|
338
|
-
organization: z.string(),
|
|
339
|
-
package: z.string(),
|
|
340
|
-
version: SemVer.optional(),
|
|
341
|
-
files: z.record(z.string().regex(/^[^\/]+$/), z.string()).default({}),
|
|
342
|
-
meta: z.object({}).passthrough()
|
|
343
|
-
});
|
|
344
|
-
var PlRegCommonConfigData = z.object({
|
|
345
|
-
registries: z.record(z.string(), PlRegAddress).default({}),
|
|
346
|
-
registry: z.string().optional()
|
|
347
|
-
});
|
|
348
|
-
var PlRegFullPackageConfigData = PlRegCommonConfigData.merge(PlPackageConfigData).required(
|
|
349
|
-
{ registry: true, version: true }
|
|
350
|
-
);
|
|
351
|
-
var PlRegPackageConfigDataShard = PlRegFullPackageConfigData.partial().required({
|
|
352
|
-
registries: true,
|
|
353
|
-
files: true
|
|
354
|
-
});
|
|
355
|
-
var PlPackageJsonConfigFile = "pl.package.json";
|
|
356
|
-
var PlPackageYamlConfigFile = "pl.package.yaml";
|
|
357
|
-
|
|
358
|
-
// src/registry_v1/v1_repo_schema.ts
|
|
359
|
-
var MainPrefix = "v1/";
|
|
360
|
-
function payloadFilePath(bp, file) {
|
|
361
|
-
return `${MainPrefix}${bp.organization}/${bp.package}/${bp.version}/${file}`;
|
|
362
|
-
}
|
|
363
|
-
function packageOverviewPath(bp) {
|
|
364
|
-
return `${MainPrefix}${bp.organization}/${bp.package}/overview.json`;
|
|
365
|
-
}
|
|
366
|
-
var GlobalOverviewPath = `${MainPrefix}overview.json`;
|
|
367
|
-
var MetaFile = "meta.json";
|
|
368
|
-
|
|
369
|
-
// src/registry_v1/registry.ts
|
|
370
|
-
var VersionUpdatesPrefix = "_updates_v1/per_package_version/";
|
|
371
|
-
function packageUpdatePath(bp, seed) {
|
|
372
|
-
return `${VersionUpdatesPrefix}${bp.organization}/${bp.package}/${bp.version}/${seed}`;
|
|
373
|
-
}
|
|
374
|
-
var PackageUpdatePattern = /(?<packageKeyWithoutVersion>(?<organization>[^\/]+)\/(?<pkg>[^\/]+))\/(?<version>[^\/]+)\/(?<seed>[^\/]+)$/;
|
|
375
|
-
var GlobalUpdateSeedInFile = "_updates_v1/_global_update_in";
|
|
376
|
-
var GlobalUpdateSeedOutFile = "_updates_v1/_global_update_out";
|
|
377
|
-
var BlockRegistry = class {
|
|
378
|
-
constructor(storage, logger) {
|
|
379
|
-
this.storage = storage;
|
|
380
|
-
this.logger = logger;
|
|
381
|
-
}
|
|
382
|
-
constructNewPackage(pack) {
|
|
383
|
-
return new BlockRegistryPackConstructor(this.storage, pack);
|
|
384
|
-
}
|
|
385
|
-
async updateRegistry() {
|
|
386
|
-
this.logger?.info("Initiating registry refresh...");
|
|
387
|
-
const packagesToUpdate = /* @__PURE__ */ new Map();
|
|
388
|
-
const seedPaths = [];
|
|
389
|
-
const rawSeedPaths = await this.storage.listFiles(VersionUpdatesPrefix);
|
|
390
|
-
this.logger?.info("Packages to be updated:");
|
|
391
|
-
for (const seedPath of rawSeedPaths) {
|
|
392
|
-
const match = seedPath.match(PackageUpdatePattern);
|
|
393
|
-
if (!match) continue;
|
|
394
|
-
seedPaths.push(seedPath);
|
|
395
|
-
const { packageKeyWithoutVersion, organization, pkg, version, seed } = match.groups;
|
|
396
|
-
let update = packagesToUpdate.get(packageKeyWithoutVersion);
|
|
397
|
-
if (!update) {
|
|
398
|
-
packagesToUpdate.set(packageKeyWithoutVersion, {
|
|
399
|
-
package: { organization, package: pkg },
|
|
400
|
-
versions: /* @__PURE__ */ new Set([version])
|
|
401
|
-
});
|
|
402
|
-
} else if (!update.versions.has(version)) {
|
|
403
|
-
update.versions.add(version);
|
|
404
|
-
}
|
|
405
|
-
this.logger?.info(` - ${organization}:${pkg}:${version}`);
|
|
406
|
-
}
|
|
407
|
-
const overviewContent = await this.storage.getFile(GlobalOverviewPath);
|
|
408
|
-
let overview = overviewContent === void 0 ? [] : JSON.parse(overviewContent.toString());
|
|
409
|
-
this.logger?.info(`Global overview loaded, ${overview.length} records`);
|
|
410
|
-
for (const [, packageInfo] of packagesToUpdate.entries()) {
|
|
411
|
-
const overviewFile = packageOverviewPath(packageInfo.package);
|
|
412
|
-
const pOverviewContent = await this.storage.getFile(overviewFile);
|
|
413
|
-
let packageOverview = pOverviewContent === void 0 ? [] : JSON.parse(pOverviewContent.toString());
|
|
414
|
-
this.logger?.info(
|
|
415
|
-
`Updating ${packageInfo.package.organization}:${packageInfo.package.package} overview, ${packageOverview.length} records`
|
|
416
|
-
);
|
|
417
|
-
packageOverview = packageOverview.filter((e) => !packageInfo.versions.has(e.version));
|
|
418
|
-
for (const [v] of packageInfo.versions.entries()) {
|
|
419
|
-
const version = v.toString();
|
|
420
|
-
const metaContent = await this.storage.getFile(
|
|
421
|
-
payloadFilePath(
|
|
422
|
-
{
|
|
423
|
-
...packageInfo.package,
|
|
424
|
-
version
|
|
425
|
-
},
|
|
426
|
-
MetaFile
|
|
427
|
-
)
|
|
428
|
-
);
|
|
429
|
-
if (!metaContent) continue;
|
|
430
|
-
packageOverview.push({ version, meta: JSON.parse(metaContent.toString()) });
|
|
431
|
-
}
|
|
432
|
-
packageOverview.sort((e1, e2) => semver.compare(e2.version, e1.version));
|
|
433
|
-
await this.storage.putFile(overviewFile, Buffer.from(JSON.stringify(packageOverview)));
|
|
434
|
-
this.logger?.info(`Done (${packageOverview.length} records)`);
|
|
435
|
-
overview = overview.filter(
|
|
436
|
-
(e) => e.organization !== packageInfo.package.organization || e.package !== packageInfo.package.package
|
|
437
|
-
);
|
|
438
|
-
overview.push({
|
|
439
|
-
organization: packageInfo.package.organization,
|
|
440
|
-
package: packageInfo.package.package,
|
|
441
|
-
allVersions: packageOverview.map((e) => e.version).reverse(),
|
|
442
|
-
latestVersion: packageOverview[0].version,
|
|
443
|
-
latestMeta: packageOverview[0].meta
|
|
444
|
-
});
|
|
445
|
-
}
|
|
446
|
-
await this.storage.putFile(GlobalOverviewPath, Buffer.from(JSON.stringify(overview)));
|
|
447
|
-
this.logger?.info(`Global overview updated (${overview.length} records)`);
|
|
448
|
-
await this.storage.deleteFiles(...seedPaths.map((sp) => `${VersionUpdatesPrefix}${sp}`));
|
|
449
|
-
this.logger?.info(`Version update requests cleared`);
|
|
450
|
-
}
|
|
451
|
-
async updateIfNeeded(force = false) {
|
|
452
|
-
this.logger?.info(`Checking if registry requires refresh...`);
|
|
453
|
-
const updateRequestSeed = await this.storage.getFile(GlobalUpdateSeedInFile);
|
|
454
|
-
const currentUpdatedSeed = await this.storage.getFile(GlobalUpdateSeedOutFile);
|
|
455
|
-
if (!force && updateRequestSeed === void 0 && currentUpdatedSeed === void 0) return;
|
|
456
|
-
if (!force && updateRequestSeed !== void 0 && currentUpdatedSeed !== void 0 && updateRequestSeed.equals(currentUpdatedSeed))
|
|
457
|
-
return;
|
|
458
|
-
await this.updateRegistry();
|
|
459
|
-
if (updateRequestSeed) {
|
|
460
|
-
await this.storage.putFile(GlobalUpdateSeedOutFile, updateRequestSeed);
|
|
461
|
-
this.logger?.info(`Refresh finished`);
|
|
462
|
-
}
|
|
463
|
-
}
|
|
464
|
-
async getPackageOverview(name) {
|
|
465
|
-
const content = await this.storage.getFile(packageOverviewPath(name));
|
|
466
|
-
if (content === void 0) return void 0;
|
|
467
|
-
return JSON.parse(content.toString());
|
|
468
|
-
}
|
|
469
|
-
async getGlobalOverview() {
|
|
470
|
-
const content = await this.storage.getFile(GlobalOverviewPath);
|
|
471
|
-
if (content === void 0) return void 0;
|
|
472
|
-
return JSON.parse(content.toString());
|
|
473
|
-
}
|
|
474
|
-
};
|
|
475
|
-
var BlockRegistryPackConstructor = class {
|
|
476
|
-
constructor(storage, name) {
|
|
477
|
-
this.storage = storage;
|
|
478
|
-
this.name = name;
|
|
479
|
-
}
|
|
480
|
-
metaAdded = false;
|
|
481
|
-
seed = randomUUID();
|
|
482
|
-
async addFile(file, content) {
|
|
483
|
-
await this.storage.putFile(payloadFilePath(this.name, file), content);
|
|
484
|
-
}
|
|
485
|
-
async writeMeta(meta) {
|
|
486
|
-
await this.addFile(MetaFile, Buffer.from(JSON.stringify(meta)));
|
|
487
|
-
this.metaAdded = true;
|
|
488
|
-
}
|
|
489
|
-
async finish() {
|
|
490
|
-
if (!this.metaAdded) throw new Error("meta not added");
|
|
491
|
-
await this.storage.putFile(packageUpdatePath(this.name, this.seed), Buffer.of(0));
|
|
492
|
-
await this.storage.putFile(GlobalUpdateSeedInFile, Buffer.from(this.seed));
|
|
493
|
-
}
|
|
494
|
-
};
|
|
495
|
-
var S3Storage = class {
|
|
496
|
-
constructor(client, bucket, root) {
|
|
497
|
-
this.client = client;
|
|
498
|
-
this.bucket = bucket;
|
|
499
|
-
this.root = root;
|
|
500
|
-
}
|
|
501
|
-
async getFile(file) {
|
|
502
|
-
try {
|
|
503
|
-
return Buffer.from(
|
|
504
|
-
await (await this.client.getObject({
|
|
505
|
-
Bucket: this.bucket,
|
|
506
|
-
Key: pathPosix.join(this.root, file)
|
|
507
|
-
})).Body.transformToByteArray()
|
|
508
|
-
);
|
|
509
|
-
} catch (e) {
|
|
510
|
-
if (e.name === "NoSuchKey") return void 0;
|
|
511
|
-
else throw e;
|
|
512
|
-
}
|
|
513
|
-
}
|
|
514
|
-
async listFiles(prefix) {
|
|
515
|
-
const listRoot = pathPosix.join(this.root, prefix);
|
|
516
|
-
const paginator = paginateListObjectsV2(
|
|
517
|
-
{ client: this.client },
|
|
518
|
-
{
|
|
519
|
-
Bucket: this.bucket,
|
|
520
|
-
Prefix: listRoot
|
|
521
|
-
}
|
|
522
|
-
);
|
|
523
|
-
const result = [];
|
|
524
|
-
for await (const page of paginator)
|
|
525
|
-
result.push(...page.Contents.map((e) => pathPosix.relative(listRoot, e.Key)));
|
|
526
|
-
return result;
|
|
527
|
-
}
|
|
528
|
-
async putFile(file, buffer) {
|
|
529
|
-
await this.client.putObject({
|
|
530
|
-
Bucket: this.bucket,
|
|
531
|
-
Key: pathPosix.join(this.root, file),
|
|
532
|
-
Body: buffer
|
|
533
|
-
});
|
|
534
|
-
}
|
|
535
|
-
async deleteFiles(...files) {
|
|
536
|
-
const results = await this.client.deleteObjects({
|
|
537
|
-
Bucket: this.bucket,
|
|
538
|
-
Delete: {
|
|
539
|
-
Objects: files.map((file) => ({
|
|
540
|
-
Key: pathPosix.join(this.root, file)
|
|
541
|
-
}))
|
|
542
|
-
}
|
|
543
|
-
});
|
|
544
|
-
if (results.Errors !== void 0 && results.Errors.length > 0)
|
|
545
|
-
throw new Error(`Errors encountered while deleting files: ${results.Errors.join("\n")}`);
|
|
546
|
-
}
|
|
547
|
-
};
|
|
548
|
-
var FSStorage = class {
|
|
549
|
-
/** Absolute path */
|
|
550
|
-
root;
|
|
551
|
-
constructor(_root) {
|
|
552
|
-
this.root = path7.resolve(_root);
|
|
553
|
-
}
|
|
554
|
-
toAbsolutePath(localPath) {
|
|
555
|
-
if (pathPosix.isAbsolute(localPath)) throw new Error("absolute path");
|
|
556
|
-
return path7.resolve(this.root, localPath.split(pathPosix.sep).join(path7.sep));
|
|
557
|
-
}
|
|
558
|
-
async getFile(address) {
|
|
559
|
-
try {
|
|
560
|
-
return await fs3.promises.readFile(this.toAbsolutePath(address));
|
|
561
|
-
} catch (err) {
|
|
562
|
-
if (err.code == "ENOENT") return void 0;
|
|
563
|
-
else throw new Error("", { cause: err });
|
|
564
|
-
}
|
|
565
|
-
}
|
|
566
|
-
async listFiles(prefix) {
|
|
567
|
-
try {
|
|
568
|
-
const listRoot = this.toAbsolutePath(prefix);
|
|
569
|
-
return (await fs3.promises.readdir(listRoot, { recursive: true, withFileTypes: true })).filter((e) => e.isFile()).map(
|
|
570
|
-
(e) => path7.relative(listRoot, path7.resolve(e.path, e.name)).split(path7.sep).join(pathPosix.sep)
|
|
571
|
-
);
|
|
572
|
-
} catch (err) {
|
|
573
|
-
if (err.code == "ENOENT") return [];
|
|
574
|
-
else throw new Error("", { cause: err });
|
|
575
|
-
}
|
|
576
|
-
}
|
|
577
|
-
async putFile(address, buffer) {
|
|
578
|
-
const absoluteAddress = this.toAbsolutePath(address);
|
|
579
|
-
await fs3.promises.mkdir(path7.dirname(absoluteAddress), { recursive: true });
|
|
580
|
-
await fs3.promises.writeFile(absoluteAddress, buffer);
|
|
581
|
-
}
|
|
582
|
-
async deleteFiles(...files) {
|
|
583
|
-
for (const file of files) await fs3.promises.rm(this.toAbsolutePath(file));
|
|
584
|
-
}
|
|
585
|
-
};
|
|
586
|
-
function storageByUrl(address) {
|
|
587
|
-
const url = new URL(address, `file:${path7.resolve(".").split(path7.sep).join(pathPosix.sep)}/`);
|
|
588
|
-
switch (url.protocol) {
|
|
589
|
-
case "file:":
|
|
590
|
-
const root = path7.resolve(url.pathname);
|
|
591
|
-
return new FSStorage(root);
|
|
592
|
-
case "s3:":
|
|
593
|
-
const options = {};
|
|
594
|
-
const region = url.searchParams.get("region");
|
|
595
|
-
if (region) options.region = region;
|
|
596
|
-
const bucket = url.hostname;
|
|
597
|
-
return new S3Storage(new S3(options), bucket, url.pathname.replace(/^\//, ""));
|
|
598
|
-
default:
|
|
599
|
-
throw new Error(`Unknown protocol: ${url.protocol}`);
|
|
600
|
-
}
|
|
601
|
-
}
|
|
602
|
-
|
|
603
|
-
// src/registry_v1/config.ts
|
|
604
|
-
/**
 * Shallow-merges two config shards; c2 wins on conflicting top-level keys,
 * except that the `registries` and `files` maps are merged key-by-key.
 * When c2 is undefined, c1 is returned unchanged.
 */
function mergeConfigs(c1, c2) {
  if (c2 === void 0) return c1;
  const combined = { ...c1, ...c2 };
  combined.registries = { ...c1.registries, ...c2.registries };
  combined.files = { ...c1.files, ...c2.files };
  return combined;
}
|
|
613
|
-
/**
 * Loads a JSON config-shard file and validates it against the shard schema.
 * Delegates existence handling to tryLoadFile.
 */
async function tryLoadJsonConfigFromFile(file) {
  const parseShard = (buf) => PlRegPackageConfigDataShard.parse(JSON.parse(buf.toString()));
  return tryLoadFile(file, parseShard);
}
|
|
616
|
-
/**
 * Loads a YAML config-shard file and validates it against the shard schema.
 * Delegates existence handling to tryLoadFile.
 */
async function tryLoadYamlConfigFromFile(file) {
  const parseShard = (buf) => PlRegPackageConfigDataShard.parse(YAML2.parse(buf.toString()));
  return tryLoadFile(file, parseShard);
}
|
|
619
|
-
/**
 * Loads config shards from all well-known locations and merges them.
 * Precedence (lowest to highest): ./.pl.reg.json, ./.pl.reg.yaml,
 * ~/.pl.reg.json, ~/.pl.reg.yaml, then the package-level config files.
 * The independent file reads run in parallel; merge order is preserved.
 */
async function loadConfigShard() {
  const home = os.homedir();
  const shards = await Promise.all([
    tryLoadJsonConfigFromFile("./.pl.reg.json"),
    tryLoadYamlConfigFromFile("./.pl.reg.yaml"),
    tryLoadJsonConfigFromFile(`${home}/.pl.reg.json`),
    tryLoadYamlConfigFromFile(`${home}/.pl.reg.yaml`),
    tryLoadJsonConfigFromFile(PlPackageJsonConfigFile),
    tryLoadYamlConfigFromFile(PlPackageYamlConfigFile)
  ]);
  // Start from an empty validated shard; later shards override earlier ones.
  return shards.reduce(mergeConfigs, PlRegPackageConfigDataShard.parse({}));
}
|
|
629
|
-
/**
 * Wraps a fully-validated registry package configuration and exposes
 * convenience accessors for registry construction and package naming.
 */
var PlRegPackageConfig = class {
  constructor(conf) {
    this.conf = conf;
  }
  /**
   * Resolves the configured registry (direct `file:`/`s3:` address or an
   * alias from the `registries` map) and builds a BlockRegistry for it.
   * @throws {Error} when an alias is not present in the registries map.
   */
  createRegistry(logger) {
    const { registry, registries } = this.conf;
    const isDirect = registry.startsWith("file:") || registry.startsWith("s3:");
    const resolved = isDirect ? registry : registries[registry];
    if (!resolved) throw new Error(`Registry with alias "${registry}" not found`);
    return new BlockRegistry(storageByUrl(resolved), logger);
  }
  /** The fully-qualified package coordinates from the configuration. */
  get fullPackageName() {
    const { organization, package: pkg, version } = this.conf;
    return { organization, package: pkg, version };
  }
};
|
|
650
|
-
/**
 * Assembles the effective configuration: shards loaded from config files
 * first, then the caller-supplied final shard layered on top, validated
 * against the full-config schema.
 */
async function getConfig(finalShard) {
  const fileShard = await loadConfigShard();
  const merged = mergeConfigs(fileShard, finalShard);
  const validated = PlRegFullPackageConfigData.parse(merged);
  return new PlRegPackageConfig(validated);
}
|
|
656
|
-
/**
 * Parses a "-f" flag value. "name=path" uploads `path` under `name`;
 * a bare path is uploaded under its own basename.
 */
function parseTargetFile(arg) {
  const m = arg.match(/(?<destName>[^\/\\]+)=(?<src>.*)/);
  if (!m) {
    return { src: arg, destName: path7.basename(arg) };
  }
  return { src: m.groups.src, destName: m.groups.destName };
}
|
|
665
|
-
// oclif flag factory for "-f" package-file arguments; each value is parsed
// into a { src, destName } pair by parseTargetFile.
var targetFile = Flags.custom({
  summary: "target files to upload",
  helpValue: "file_path | package_name=file_path",
  parse: async (arg) => parseTargetFile(arg)
});
// Config fields that may be supplied directly via CLI flags (or their env vars).
var BasicConfigFields = ["registry", "organization", "package", "version"];
|
|
671
|
-
/**
 * oclif command: uploads a V1 package (files + meta) to the configured
 * registry and optionally refreshes the registry index afterwards.
 */
var UploadPackageV1 = class _UploadPackageV1 extends Command {
  static description = "Uploads V1 package and refreshes the registry";
  static flags = {
    registry: Flags.string({
      char: "r",
      summary: "full address of the registry or alias from .pl.reg",
      helpValue: "<address|alias>",
      env: "PL_REGISTRY"
    }),
    organization: Flags.string({
      char: "o",
      summary: "target organisation",
      env: "PL_PACKAGE_ORGANIZATION"
    }),
    package: Flags.string({
      char: "p",
      summary: "target package",
      env: "PL_PACKAGE_NAME"
    }),
    version: Flags.string({
      char: "v",
      summary: "target version",
      env: "PL_PACKAGE_VERSION"
    }),
    meta: Flags.file({
      char: "m",
      // Fixed typo: "tha package" -> "the package".
      summary: "json file containing meta information to associate with the package",
      exists: true
    }),
    file: targetFile({
      char: "f",
      summary: "package files",
      multiple: true,
      default: []
    }),
    refresh: Flags.boolean({
      summary: "refresh repository after adding the package",
      default: true,
      allowNo: true,
      env: "PL_REGISTRY_REFRESH"
    })
  };
  /**
   * Reads and parses a meta file by extension (.json, .yaml, or .yml).
   * Returns undefined for unrecognized extensions without touching the file.
   */
  static async readMetaFile(file) {
    const isJson = file.endsWith(".json");
    const isYaml = file.endsWith(".yaml") || file.endsWith(".yml");
    if (!isJson && !isYaml) return void 0;
    const text = await fs3__default.promises.readFile(file, { encoding: "utf-8" });
    return isJson ? JSON.parse(text) : YAML2.parse(text);
  }
  async run() {
    const { flags } = await this.parse(_UploadPackageV1);
    // Flags layer on top of config-file shards; copy only explicitly-set fields.
    const configFromFlags = PlRegPackageConfigDataShard.parse({});
    for (const field of BasicConfigFields) if (flags[field]) configFromFlags[field] = flags[field];
    if (flags.meta) {
      const meta = await _UploadPackageV1.readMetaFile(flags.meta);
      if (meta !== void 0) configFromFlags.meta = meta;
    }
    for (const targetFile2 of flags.file) {
      configFromFlags.files[targetFile2.destName] = targetFile2.src;
    }
    const conf = await getConfig(configFromFlags);
    this.log(YAML2.stringify(conf.conf));
    const registry = conf.createRegistry(new OclifLoggerAdapter(this));
    const name = conf.fullPackageName;
    const builder = registry.constructNewPackage(name);
    // Upload each configured file into the package being built.
    for (const [dst, src] of Object.entries(conf.conf.files)) {
      this.log(`Uploading ${src} -> ${dst} ...`);
      const content = await fs3__default.promises.readFile(src);
      await builder.addFile(dst, content);
    }
    this.log(`Uploading meta information...`);
    await builder.writeMeta(conf.conf.meta);
    await builder.finish();
    if (flags.refresh) await registry.updateIfNeeded();
  }
};
|
|
746
|
-
|
|
747
|
-
// src/cmd/index.ts
|
|
748
|
-
// Registry of oclif command classes exposed by this CLI, keyed by command name.
var COMMANDS = {
  "upload-package-v1": UploadPackageV1,
  "pack": PackBlock,
  "build-model": BuildModel,
  "build-meta": BuildMeta
};

export { COMMANDS };
|
|
1
|
+
"use strict";var v=Object.defineProperty;var E=(i,e,t)=>e in i?v(i,e,{enumerable:!0,configurable:!0,writable:!0,value:t}):i[e]=t;var n=(i,e,t)=>E(i,typeof e!="symbol"?e+"":e,t);Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const a=require("@oclif/core"),c=require("node:path"),u=require("node:fs"),l=require("./config-CfA0Dj6h.js"),P=require("yaml"),A=require("@milaboratories/ts-helpers-oclif"),f=class f extends a.Command{async run(){const{flags:e}=await this.parse(f),t=c.resolve(e.modulePath),s=await l.loadPackDescriptionRaw(t),d=await l.BlockPackMetaEmbed.parseAsync(l.BlockPackMetaDescription(t).parse(s.meta));await u.promises.writeFile(c.resolve(e.destination),JSON.stringify(d))}};n(f,"description","Extracts meta information from blocks package.json and outputs meta.json with embedded binary and textual information linked from the meta section."),n(f,"flags",{modulePath:a.Flags.string({char:"i",summary:"input module path",helpValue:"<path>",default:"."}),destination:a.Flags.string({char:"o",summary:"output meta.json file",helpValue:"<path>",required:!0})});let w=f;async function O(i){try{return await u.promises.readFile(i,"utf8")}catch(e){if(e.code==="ENOENT")return;throw e}}const p=class p extends a.Command{async run(){const{flags:e}=await this.parse(p),t=c.resolve(e.modulePath);let{model:s,platforma:d}=require(t);if(s||(s=d),!s)throw new Error('"model" export not found');const{config:r}=s;if(!r)throw new Error('Malformed "model" object, check it is created with "BlockModel" and ".done()" is executed as the call in the chain.');if(!("canRun"in r||"inputsValid"in r)||!("outputs"in r)||!("sections"in r))throw new Error('"config" has unexpected structure');const m=await O(e.sourceBundle);m!==void 0&&(r.code={type:"plain",content:m}),await u.promises.writeFile(c.resolve(e.destination),JSON.stringify(r))}};n(p,"description","Extracts and outputs block model JSON from pre-built block model 
module"),n(p,"flags",{modulePath:a.Flags.string({char:"i",summary:"input module path",helpValue:"<path>",default:"."}),sourceBundle:a.Flags.string({char:"b",summary:"bundled model code to embed into the model for callback-based rendering to work",helpValue:"<path>",default:"./dist/bundle.js"}),destination:a.Flags.string({char:"o",summary:"output model file",helpValue:"<path>",default:"./dist/model.json"})});let y=p;const g=class g extends a.Command{async run(){const{flags:e}=await this.parse(g),t=await l.loadPackDescription(c.resolve(e.modulePath));await l.buildBlockPackDist(t,c.resolve(e.destinationPath))}};n(g,"description","Builds block pack and outputs a block pack manifest consolidating all references assets into a single folder"),n(g,"flags",{modulePath:a.Flags.string({char:"i",summary:"input module path",helpValue:"<path>",default:"."}),destinationPath:a.Flags.string({char:"o",summary:"output folder",helpValue:"<path>",default:"./block-pack"})});let F=g;function C(i){const e=i.match(/(?<destName>[^\/\\]+)=(?<src>.*)/);if(e){const{src:t,destName:s}=e.groups;return{src:t,destName:s}}else return{src:i,destName:c.basename(i)}}const R=a.Flags.custom({summary:"target files to upload",helpValue:"file_path | package_name=file_path",parse:async i=>C(i)}),S=["registry","organization","package","version"],h=class h extends a.Command{async run(){const{flags:e}=await this.parse(h),t=l.PlRegPackageConfigDataShard.parse({});for(const o of S)e[o]&&(t[o]=e[o]);e.meta&&(e.meta.endsWith(".json")?t.meta=JSON.parse(await u.promises.readFile(e.meta,{encoding:"utf-8"})):e.meta.endsWith(".yaml")&&(t.meta=P.parse(await u.promises.readFile(e.meta,{encoding:"utf-8"}))));for(const o of e.file)t.files[o.destName]=o.src;const s=await l.getConfig(t);this.log(P.stringify(s.conf));const d=s.createRegistry(new A.OclifLoggerAdapter(this)),r=s.fullPackageName,m=d.constructNewPackage(r);for(const[o,k]of Object.entries(s.conf.files)){this.log(`Uploading ${k} -> ${o} ...`);const N=await 
u.promises.readFile(k);await m.addFile(o,N)}this.log("Uploading meta information..."),await m.writeMeta(s.conf.meta),await m.finish(),e.refresh&&await d.updateIfNeeded()}};n(h,"description","Uploads V1 package and refreshes the registry"),n(h,"flags",{registry:a.Flags.string({char:"r",summary:"full address of the registry or alias from .pl.reg",helpValue:"<address|alias>",env:"PL_REGISTRY"}),organization:a.Flags.string({char:"o",summary:"target organisation",env:"PL_PACKAGE_ORGANIZATION"}),package:a.Flags.string({char:"p",summary:"target package",env:"PL_PACKAGE_NAME"}),version:a.Flags.string({char:"v",summary:"target version",env:"PL_PACKAGE_VERSION"}),meta:a.Flags.file({char:"m",summary:"json file containing meta information to associate with tha package",exists:!0}),file:R({char:"f",summary:"package files",multiple:!0,default:[]}),refresh:a.Flags.boolean({summary:"refresh repository after adding the package",default:!0,allowNo:!0,env:"PL_REGISTRY_REFRESH"})});let b=h;const x={"upload-package-v1":b,pack:F,"build-model":y,"build-meta":w};exports.COMMANDS=x;
|
|
756
2
|
//# sourceMappingURL=cli.js.map
|
|
757
|
-
//# sourceMappingURL=cli.js.map
|