directus-template-cli 0.4.3 → 0.5.0-beta.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/apply.d.ts +22 -1
- package/dist/commands/apply.js +245 -106
- package/dist/commands/extract.d.ts +12 -0
- package/dist/commands/extract.js +81 -16
- package/dist/lib/extract/extract-access.d.ts +1 -0
- package/dist/lib/extract/extract-access.js +25 -0
- package/dist/lib/extract/extract-assets.d.ts +257 -25
- package/dist/lib/extract/extract-extensions.d.ts +4 -0
- package/dist/lib/extract/extract-extensions.js +22 -0
- package/dist/lib/extract/extract-permissions.d.ts +3 -0
- package/dist/lib/extract/extract-permissions.js +11 -4
- package/dist/lib/extract/extract-policies.d.ts +4 -0
- package/dist/lib/extract/extract-policies.js +28 -0
- package/dist/lib/extract/extract-presets.js +1 -1
- package/dist/lib/extract/index.js +6 -0
- package/dist/lib/load/index.d.ts +13 -1
- package/dist/lib/load/index.js +38 -20
- package/dist/lib/load/load-access.d.ts +1 -0
- package/dist/lib/load/load-access.js +62 -0
- package/dist/lib/load/load-collections.js +4 -4
- package/dist/lib/load/load-dashboards.js +31 -8
- package/dist/lib/load/load-data.js +29 -39
- package/dist/lib/load/load-extensions.d.ts +1 -0
- package/dist/lib/load/load-extensions.js +70 -0
- package/dist/lib/load/load-files.d.ts +1 -2
- package/dist/lib/load/load-files.js +54 -23
- package/dist/lib/load/load-flows.js +19 -7
- package/dist/lib/load/load-folders.js +33 -9
- package/dist/lib/load/load-permissions.js +16 -9
- package/dist/lib/load/load-policies.d.ts +1 -0
- package/dist/lib/load/load-policies.js +37 -0
- package/dist/lib/load/load-presets.js +25 -8
- package/dist/lib/load/load-relations.js +16 -5
- package/dist/lib/load/load-roles.js +47 -14
- package/dist/lib/load/load-settings.js +7 -4
- package/dist/lib/load/load-translations.js +24 -5
- package/dist/lib/load/load-users.js +19 -3
- package/dist/lib/sdk.d.ts +1 -1
- package/dist/lib/sdk.js +10 -3
- package/dist/lib/types/extension.d.ts +42 -0
- package/dist/lib/types/extension.js +2 -0
- package/dist/lib/utils/auth.js +8 -2
- package/dist/lib/utils/catch-error.d.ts +6 -0
- package/dist/lib/utils/catch-error.js +35 -0
- package/dist/lib/utils/check-template.js +1 -16
- package/dist/lib/utils/chunk-array.d.ts +1 -0
- package/dist/lib/utils/chunk-array.js +7 -0
- package/dist/lib/utils/get-role-ids.d.ts +3 -53
- package/dist/lib/utils/get-role-ids.js +4 -2
- package/dist/lib/utils/get-template.d.ts +8 -0
- package/dist/lib/utils/get-template.js +58 -0
- package/dist/lib/utils/logger.d.ts +12 -0
- package/dist/lib/utils/logger.js +55 -0
- package/oclif.manifest.json +192 -5
- package/package.json +4 -5
- package/dist/lib/load/load-schema.d.ts +0 -14
- package/dist/lib/load/load-schema.js +0 -95
- package/dist/lib/utils/log-error.d.ts +0 -14
- package/dist/lib/utils/log-error.js +0 -25
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const tslib_1 = require("tslib");
|
|
4
|
+
const core_1 = require("@oclif/core");
|
|
5
|
+
const sdk_1 = require("../sdk");
|
|
6
|
+
const catch_error_1 = tslib_1.__importDefault(require("../utils/catch-error"));
|
|
7
|
+
const read_file_1 = tslib_1.__importDefault(require("../utils/read-file"));
|
|
8
|
+
async function loadAccess(dir) {
|
|
9
|
+
const access = (0, read_file_1.default)('access', dir);
|
|
10
|
+
core_1.ux.action.start(`Loading ${access.length} accesses`);
|
|
11
|
+
// Fetch existing accesses
|
|
12
|
+
const existingAccesses = await sdk_1.api.client.request(() => ({
|
|
13
|
+
method: 'GET',
|
|
14
|
+
params: {
|
|
15
|
+
limit: -1,
|
|
16
|
+
},
|
|
17
|
+
path: '/access',
|
|
18
|
+
}));
|
|
19
|
+
const existingAccessById = new Map(existingAccesses.map(acc => [acc.id, acc]));
|
|
20
|
+
const existingAccessByCompositeKey = new Map(existingAccesses.map(acc => [getCompositeKey(acc), acc]));
|
|
21
|
+
for await (const acc of access) {
|
|
22
|
+
try {
|
|
23
|
+
if (existingAccessById.has(acc.id)) {
|
|
24
|
+
core_1.ux.log(`Skipping existing access with ID: ${acc.id}`);
|
|
25
|
+
continue;
|
|
26
|
+
}
|
|
27
|
+
const compositeKey = getCompositeKey(acc);
|
|
28
|
+
if (existingAccessByCompositeKey.has(compositeKey)) {
|
|
29
|
+
core_1.ux.log(`Skipping existing access with composite key: ${compositeKey}`);
|
|
30
|
+
continue;
|
|
31
|
+
}
|
|
32
|
+
// If the role is null, delete the role key to avoid errors
|
|
33
|
+
if (acc.role === null) {
|
|
34
|
+
delete acc.role;
|
|
35
|
+
}
|
|
36
|
+
await sdk_1.api.client.request(() => ({
|
|
37
|
+
body: JSON.stringify(acc),
|
|
38
|
+
method: 'POST',
|
|
39
|
+
path: '/access',
|
|
40
|
+
}));
|
|
41
|
+
// Add the new access to our maps
|
|
42
|
+
existingAccessById.set(acc.id, acc);
|
|
43
|
+
existingAccessByCompositeKey.set(compositeKey, acc);
|
|
44
|
+
}
|
|
45
|
+
catch (error) {
|
|
46
|
+
(0, catch_error_1.default)(error, {
|
|
47
|
+
context: {
|
|
48
|
+
access: acc,
|
|
49
|
+
operation: 'createAccess',
|
|
50
|
+
},
|
|
51
|
+
});
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
core_1.ux.action.stop();
|
|
55
|
+
core_1.ux.log('Loaded Accesses');
|
|
56
|
+
}
|
|
57
|
+
exports.default = loadAccess;
|
|
58
|
+
// Helper function to generate a composite key for each access
|
|
59
|
+
function getCompositeKey(acc) {
|
|
60
|
+
var _a, _b;
|
|
61
|
+
return `${(_a = acc.role) !== null && _a !== void 0 ? _a : 'null'}-${(_b = acc.user) !== null && _b !== void 0 ? _b : 'null'}-${acc.policy}`;
|
|
62
|
+
}
|
|
@@ -4,7 +4,7 @@ const tslib_1 = require("tslib");
|
|
|
4
4
|
const sdk_1 = require("@directus/sdk");
|
|
5
5
|
const core_1 = require("@oclif/core");
|
|
6
6
|
const sdk_2 = require("../sdk");
|
|
7
|
-
const
|
|
7
|
+
const catch_error_1 = tslib_1.__importDefault(require("../utils/catch-error"));
|
|
8
8
|
const read_file_1 = tslib_1.__importDefault(require("../utils/read-file"));
|
|
9
9
|
/**
|
|
10
10
|
* Load collections into the Directus instance
|
|
@@ -32,7 +32,7 @@ async function addCollections(collections, fields) {
|
|
|
32
32
|
await sdk_2.api.client.request((0, sdk_1.createCollection)(collection));
|
|
33
33
|
}
|
|
34
34
|
catch (error) {
|
|
35
|
-
(0,
|
|
35
|
+
(0, catch_error_1.default)(error);
|
|
36
36
|
}
|
|
37
37
|
}
|
|
38
38
|
}
|
|
@@ -49,7 +49,7 @@ async function updateCollections(collections) {
|
|
|
49
49
|
}
|
|
50
50
|
}
|
|
51
51
|
catch (error) {
|
|
52
|
-
(0,
|
|
52
|
+
(0, catch_error_1.default)(error);
|
|
53
53
|
}
|
|
54
54
|
}
|
|
55
55
|
}
|
|
@@ -60,7 +60,7 @@ async function addCustomFieldsOnSystemCollections(fields) {
|
|
|
60
60
|
await sdk_2.api.client.request((0, sdk_1.createField)(field.collection, field));
|
|
61
61
|
}
|
|
62
62
|
catch (error) {
|
|
63
|
-
(0,
|
|
63
|
+
(0, catch_error_1.default)(error);
|
|
64
64
|
}
|
|
65
65
|
}
|
|
66
66
|
}
|
|
@@ -5,24 +5,35 @@ const tslib_1 = require("tslib");
|
|
|
5
5
|
const sdk_1 = require("@directus/sdk");
|
|
6
6
|
const core_1 = require("@oclif/core");
|
|
7
7
|
const sdk_2 = require("../sdk");
|
|
8
|
-
const
|
|
8
|
+
const catch_error_1 = tslib_1.__importDefault(require("../utils/catch-error"));
|
|
9
9
|
const read_file_1 = tslib_1.__importDefault(require("../utils/read-file"));
|
|
10
10
|
async function loadDashboards(dir) {
|
|
11
11
|
const dashboards = (0, read_file_1.default)('dashboards', dir);
|
|
12
12
|
core_1.ux.action.start(`Loading ${dashboards.length} dashboards`);
|
|
13
|
-
|
|
13
|
+
// Fetch existing dashboards
|
|
14
|
+
const existingDashboards = await sdk_2.api.client.request((0, sdk_1.readDashboards)({
|
|
15
|
+
limit: -1,
|
|
16
|
+
}));
|
|
17
|
+
const existingDashboardIds = new Set(existingDashboards.map(dashboard => dashboard.id));
|
|
18
|
+
const filteredDashboards = dashboards.filter(dashboard => {
|
|
19
|
+
if (existingDashboardIds.has(dashboard.id)) {
|
|
20
|
+
core_1.ux.log(`Skipping existing dashboard: ${dashboard.name}`);
|
|
21
|
+
return false;
|
|
22
|
+
}
|
|
23
|
+
return true;
|
|
24
|
+
}).map(dash => {
|
|
14
25
|
const newDash = { ...dash };
|
|
15
26
|
delete newDash.panels;
|
|
16
27
|
return newDash;
|
|
17
28
|
});
|
|
18
|
-
|
|
29
|
+
await Promise.all(filteredDashboards.map(async (dashboard) => {
|
|
19
30
|
try {
|
|
20
31
|
await sdk_2.api.client.request((0, sdk_1.createDashboard)(dashboard));
|
|
21
32
|
}
|
|
22
33
|
catch (error) {
|
|
23
|
-
(0,
|
|
34
|
+
(0, catch_error_1.default)(error);
|
|
24
35
|
}
|
|
25
|
-
}
|
|
36
|
+
}));
|
|
26
37
|
await loadPanels(dir);
|
|
27
38
|
core_1.ux.action.stop();
|
|
28
39
|
core_1.ux.log('Loaded dashboards');
|
|
@@ -31,13 +42,25 @@ exports.default = loadDashboards;
|
|
|
31
42
|
/**
 * Create the template's insight panels on the target instance, skipping any
 * panel whose id already exists there.
 *
 * @param dir - Template directory containing the `panels` data file.
 */
async function loadPanels(dir) {
    const panels = (0, read_file_1.default)('panels', dir);
    core_1.ux.log(`Loading ${panels.length} panels`);
    // Fetch existing panels
    const existingPanels = await sdk_2.api.client.request((0, sdk_1.readPanels)({
        limit: -1,
    }));
    const knownPanelIds = new Set(existingPanels.map(existing => existing.id));
    // Partition: log and drop panels already present, keep the rest.
    const panelsToCreate = [];
    for (const panel of panels) {
        if (knownPanelIds.has(panel.id)) {
            core_1.ux.log(`Skipping existing panel: ${panel.id}`);
        }
        else {
            panelsToCreate.push(panel);
        }
    }
    // Create the remaining panels concurrently; failures are reported per-panel.
    await Promise.all(panelsToCreate.map(async (panel) => {
        try {
            await sdk_2.api.client.request((0, sdk_1.createPanel)(panel));
        }
        catch (error) {
            (0, catch_error_1.default)(error);
        }
    }));
}
exports.loadPanels = loadPanels;
|
|
@@ -5,8 +5,10 @@ const sdk_1 = require("@directus/sdk");
|
|
|
5
5
|
const core_1 = require("@oclif/core");
|
|
6
6
|
const node_path_1 = tslib_1.__importDefault(require("node:path"));
|
|
7
7
|
const sdk_2 = require("../sdk");
|
|
8
|
-
const
|
|
8
|
+
const catch_error_1 = tslib_1.__importDefault(require("../utils/catch-error"));
|
|
9
|
+
const chunk_array_1 = require("../utils/chunk-array");
|
|
9
10
|
const read_file_1 = tslib_1.__importDefault(require("../utils/read-file"));
|
|
11
|
+
const BATCH_SIZE = 50;
|
|
10
12
|
async function loadData(dir) {
|
|
11
13
|
const collections = (0, read_file_1.default)('collections', dir);
|
|
12
14
|
core_1.ux.action.start(`Loading data for ${collections.length} collections`);
|
|
@@ -23,50 +25,40 @@ async function loadSkeletonRecords(dir) {
|
|
|
23
25
|
const primaryKeyMap = await getCollectionPrimaryKeys(dir);
|
|
24
26
|
const userCollections = collections
|
|
25
27
|
.filter(item => !item.collection.startsWith('directus_', 0))
|
|
26
|
-
.filter(item => item.schema !== null)
|
|
27
|
-
.filter(item => !item.meta.singleton);
|
|
28
|
-
|
|
28
|
+
.filter(item => item.schema !== null)
|
|
29
|
+
.filter(item => !item.meta.singleton);
|
|
30
|
+
await Promise.all(userCollections.map(async (collection) => {
|
|
29
31
|
const name = collection.collection;
|
|
30
32
|
const primaryKeyField = getPrimaryKey(primaryKeyMap, name);
|
|
31
33
|
const sourceDir = node_path_1.default.resolve(dir, 'content');
|
|
32
34
|
const data = (0, read_file_1.default)(name, sourceDir);
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
[primaryKeyField]: entry[primaryKeyField],
|
|
37
|
-
}));
|
|
38
|
-
}
|
|
39
|
-
catch (error) {
|
|
40
|
-
(0, log_error_1.default)(error);
|
|
41
|
-
}
|
|
42
|
-
}
|
|
43
|
-
}
|
|
35
|
+
const batches = (0, chunk_array_1.chunkArray)(data, BATCH_SIZE).map(batch => batch.map(entry => ({ [primaryKeyField]: entry[primaryKeyField] })));
|
|
36
|
+
await Promise.all(batches.map(batch => uploadBatch(name, batch, sdk_1.createItems)));
|
|
37
|
+
}));
|
|
44
38
|
core_1.ux.log('Loaded skeleton records');
|
|
45
39
|
}
|
|
40
|
+
async function uploadBatch(collection, batch, method) {
|
|
41
|
+
try {
|
|
42
|
+
await sdk_2.api.client.request(method(collection, batch));
|
|
43
|
+
}
|
|
44
|
+
catch (error) {
|
|
45
|
+
(0, catch_error_1.default)(error);
|
|
46
|
+
}
|
|
47
|
+
}
|
|
46
48
|
async function loadFullData(dir) {
|
|
47
49
|
core_1.ux.log('Updating records with full data');
|
|
48
50
|
const collections = (0, read_file_1.default)('collections', dir);
|
|
49
|
-
const primaryKeyMap = await getCollectionPrimaryKeys(dir);
|
|
50
51
|
const userCollections = collections
|
|
51
52
|
.filter(item => !item.collection.startsWith('directus_', 0))
|
|
52
|
-
.filter(item => item.schema !== null)
|
|
53
|
-
.filter(item => !item.meta.singleton);
|
|
54
|
-
|
|
53
|
+
.filter(item => item.schema !== null)
|
|
54
|
+
.filter(item => !item.meta.singleton);
|
|
55
|
+
await Promise.all(userCollections.map(async (collection) => {
|
|
55
56
|
const name = collection.collection;
|
|
56
|
-
const primaryKeyField = getPrimaryKey(primaryKeyMap, name);
|
|
57
57
|
const sourceDir = node_path_1.default.resolve(dir, 'content');
|
|
58
58
|
const data = (0, read_file_1.default)(name, sourceDir);
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
delete row.user_updated;
|
|
63
|
-
await sdk_2.api.client.request((0, sdk_1.updateItem)(name, row[primaryKeyField], row));
|
|
64
|
-
}
|
|
65
|
-
}
|
|
66
|
-
catch (error) {
|
|
67
|
-
(0, log_error_1.default)(error);
|
|
68
|
-
}
|
|
69
|
-
}
|
|
59
|
+
const batches = (0, chunk_array_1.chunkArray)(data, BATCH_SIZE).map(batch => batch.map(({ user_created, user_updated, ...cleanedRow }) => cleanedRow));
|
|
60
|
+
await Promise.all(batches.map(batch => uploadBatch(name, batch, sdk_1.updateItemsBatch)));
|
|
61
|
+
}));
|
|
70
62
|
core_1.ux.log('Updated records with full data');
|
|
71
63
|
}
|
|
72
64
|
async function loadSingletons(dir) {
|
|
@@ -75,21 +67,19 @@ async function loadSingletons(dir) {
|
|
|
75
67
|
const singletonCollections = collections
|
|
76
68
|
.filter(item => !item.collection.startsWith('directus_', 0))
|
|
77
69
|
.filter(item => item.meta.singleton);
|
|
78
|
-
|
|
70
|
+
await Promise.all(singletonCollections.map(async (collection) => {
|
|
79
71
|
const name = collection.collection;
|
|
80
72
|
const sourceDir = node_path_1.default.resolve(dir, 'content');
|
|
81
73
|
const data = (0, read_file_1.default)(name, sourceDir);
|
|
82
74
|
try {
|
|
83
|
-
// @
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
delete data.user_updated;
|
|
87
|
-
await sdk_2.api.client.request((0, sdk_1.updateSingleton)(name, data));
|
|
75
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
76
|
+
const { user_created, user_updated, ...cleanedData } = data;
|
|
77
|
+
await sdk_2.api.client.request((0, sdk_1.updateSingleton)(name, cleanedData));
|
|
88
78
|
}
|
|
89
79
|
catch (error) {
|
|
90
|
-
(0,
|
|
80
|
+
(0, catch_error_1.default)(error);
|
|
91
81
|
}
|
|
92
|
-
}
|
|
82
|
+
}));
|
|
93
83
|
core_1.ux.log('Loaded data for singleton collections');
|
|
94
84
|
}
|
|
95
85
|
async function getCollectionPrimaryKeys(dir) {
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export default function loadExtensions(dir: string): Promise<void>;
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const tslib_1 = require("tslib");
|
|
4
|
+
const sdk_1 = require("@directus/sdk");
|
|
5
|
+
const core_1 = require("@oclif/core");
|
|
6
|
+
const sdk_2 = require("../sdk");
|
|
7
|
+
const catch_error_1 = tslib_1.__importDefault(require("../utils/catch-error"));
|
|
8
|
+
const read_file_1 = tslib_1.__importDefault(require("../utils/read-file"));
|
|
9
|
+
async function installExtension(extension) {
|
|
10
|
+
await sdk_2.api.client.request((0, sdk_1.customEndpoint)({
|
|
11
|
+
body: JSON.stringify({
|
|
12
|
+
extension: extension.id,
|
|
13
|
+
version: extension.version,
|
|
14
|
+
}),
|
|
15
|
+
method: 'POST',
|
|
16
|
+
path: '/extensions/registry/install',
|
|
17
|
+
}));
|
|
18
|
+
}
|
|
19
|
+
async function loadExtensions(dir) {
|
|
20
|
+
const extensions = (0, read_file_1.default)('extensions', dir);
|
|
21
|
+
if (!extensions || extensions.length === 0) {
|
|
22
|
+
core_1.ux.log('No extensions found');
|
|
23
|
+
return;
|
|
24
|
+
}
|
|
25
|
+
const installedExtensions = await sdk_2.api.client.request((0, sdk_1.readExtensions)());
|
|
26
|
+
const registryExtensions = extensions.filter(ext => { var _a; return ((_a = ext.meta) === null || _a === void 0 ? void 0 : _a.source) === 'registry' && !ext.bundle; });
|
|
27
|
+
const bundles = [...new Set(extensions.filter(ext => ext.bundle).map(ext => ext.bundle))];
|
|
28
|
+
const localExtensions = extensions.filter(ext => { var _a; return ((_a = ext.meta) === null || _a === void 0 ? void 0 : _a.source) === 'local'; });
|
|
29
|
+
const extensionsToInstall = extensions.filter(ext => {
|
|
30
|
+
var _a;
|
|
31
|
+
return ((_a = ext.meta) === null || _a === void 0 ? void 0 : _a.source) === 'registry'
|
|
32
|
+
&& !ext.bundle
|
|
33
|
+
// @ts-expect-error
|
|
34
|
+
&& !installedExtensions.some(installed => installed.id === ext.id);
|
|
35
|
+
});
|
|
36
|
+
core_1.ux.log(`Found ${extensions.length} extensions total: ${registryExtensions.length} registry extensions (including ${bundles.length} bundles), and ${localExtensions.length} local extensions`);
|
|
37
|
+
if (extensionsToInstall.length > 0) {
|
|
38
|
+
core_1.ux.action.start(`Installing ${extensionsToInstall.length} extensions`);
|
|
39
|
+
const results = await Promise.allSettled(extensionsToInstall.map(async (ext) => {
|
|
40
|
+
var _a, _b, _c;
|
|
41
|
+
try {
|
|
42
|
+
await installExtension({
|
|
43
|
+
id: ext.id,
|
|
44
|
+
// The extension version UUID is the folder name
|
|
45
|
+
version: (_a = ext.meta) === null || _a === void 0 ? void 0 : _a.folder,
|
|
46
|
+
});
|
|
47
|
+
return `Installed ${(_b = ext.schema) === null || _b === void 0 ? void 0 : _b.name}`;
|
|
48
|
+
}
|
|
49
|
+
catch (error) {
|
|
50
|
+
(0, catch_error_1.default)(error);
|
|
51
|
+
return `Failed to install ${(_c = ext.schema) === null || _c === void 0 ? void 0 : _c.name}`;
|
|
52
|
+
}
|
|
53
|
+
}));
|
|
54
|
+
for (const result of results) {
|
|
55
|
+
if (result.status === 'fulfilled') {
|
|
56
|
+
core_1.ux.log(result.value);
|
|
57
|
+
}
|
|
58
|
+
}
|
|
59
|
+
core_1.ux.action.stop();
|
|
60
|
+
core_1.ux.log('Finished installing extensions');
|
|
61
|
+
}
|
|
62
|
+
else {
|
|
63
|
+
// All extensions are already installed
|
|
64
|
+
core_1.ux.log('All extensions are already installed');
|
|
65
|
+
}
|
|
66
|
+
if (localExtensions.length > 0) {
|
|
67
|
+
core_1.ux.log(`Note: ${localExtensions.length} local extensions need to be installed manually.`);
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
exports.default = loadExtensions;
|
|
@@ -1,2 +1 @@
|
|
|
1
|
-
|
|
2
|
-
export default _default;
|
|
1
|
+
export default function loadFiles(dir: string): Promise<void>;
|
|
@@ -7,31 +7,62 @@ const formdata_node_1 = require("formdata-node");
|
|
|
7
7
|
const node_fs_1 = require("node:fs");
|
|
8
8
|
const node_path_1 = tslib_1.__importDefault(require("node:path"));
|
|
9
9
|
const sdk_2 = require("../sdk");
|
|
10
|
-
const
|
|
10
|
+
const catch_error_1 = tslib_1.__importDefault(require("../utils/catch-error"));
|
|
11
11
|
const read_file_1 = tslib_1.__importDefault(require("../utils/read-file"));
|
|
12
|
-
|
|
12
|
+
/**
 * Upload the template's files into the Directus instance, skipping any file
 * whose id or filename_disk already exists on the target.
 *
 * @param dir - Template directory; file payloads live under `assets/`.
 */
async function loadFiles(dir) {
    const files = (0, read_file_1.default)('files', dir);
    core_1.ux.action.start(`Loading ${files.length} files`);
    try {
        // Fetch only the files we're interested in
        const existingFiles = await sdk_2.api.client.request((0, sdk_1.readFiles)({
            fields: ['id', 'filename_disk'],
            filter: {
                id: {
                    _in: files.map(file => file.id),
                },
            },
            limit: -1,
        }));
        const knownIds = new Set();
        const knownNames = new Set();
        for (const existing of existingFiles) {
            knownIds.add(existing.id);
            knownNames.add(existing.filename_disk);
        }
        // Keep only files the target does not already have, logging each skip.
        const filesToUpload = [];
        for (const candidate of files) {
            if (knownIds.has(candidate.id)) {
                core_1.ux.log(`Skipping existing file with ID: ${candidate.id}`);
            }
            else if (knownNames.has(candidate.filename_disk)) {
                core_1.ux.log(`Skipping existing file with name: ${candidate.filename_disk}`);
            }
            else {
                filesToUpload.push(candidate);
            }
        }
        // Upload the remaining files concurrently; failures are reported per-file.
        const uploads = filesToUpload.map(async (asset) => {
            const fileName = asset.filename_disk;
            const assetPath = node_path_1.default.resolve(dir, 'assets', fileName);
            const blob = new Blob([(0, node_fs_1.readFileSync)(assetPath)], { type: asset.type });
            const form = new formdata_node_1.FormData();
            form.append('id', asset.id);
            if (asset.title)
                form.append('title', asset.title);
            if (asset.description)
                form.append('description', asset.description);
            if (asset.folder)
                form.append('folder', asset.folder);
            form.append('file', blob, fileName);
            try {
                await sdk_2.api.client.request((0, sdk_1.uploadFiles)(form));
            }
            catch (error) {
                (0, catch_error_1.default)(error);
            }
        });
        await Promise.all(uploads);
        core_1.ux.log(`Uploaded ${filesToUpload.length} new files`);
    }
    catch (error) {
        (0, catch_error_1.default)(error);
    }
    core_1.ux.action.stop();
    core_1.ux.log('Finished loading files');
}
exports.default = loadFiles;
|
|
@@ -5,20 +5,32 @@ const tslib_1 = require("tslib");
|
|
|
5
5
|
const sdk_1 = require("@directus/sdk");
|
|
6
6
|
const core_1 = require("@oclif/core");
|
|
7
7
|
const sdk_2 = require("../sdk");
|
|
8
|
-
const
|
|
8
|
+
const catch_error_1 = tslib_1.__importDefault(require("../utils/catch-error"));
|
|
9
9
|
const read_file_1 = tslib_1.__importDefault(require("../utils/read-file"));
|
|
10
10
|
async function loadFlows(dir) {
|
|
11
11
|
const flows = (0, read_file_1.default)('flows', dir);
|
|
12
12
|
core_1.ux.action.start(`Loading ${flows.length} flows`);
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
13
|
+
// Fetch existing flows
|
|
14
|
+
const existingFlows = await sdk_2.api.client.request((0, sdk_1.readFlows)({
|
|
15
|
+
limit: -1,
|
|
16
|
+
}));
|
|
17
|
+
const existingFlowIds = new Set(existingFlows.map(flow => flow.id));
|
|
18
|
+
const cleanedUpFlows = flows.map(flow => {
|
|
19
|
+
const cleanFlow = { ...flow };
|
|
20
|
+
delete cleanFlow.operations;
|
|
21
|
+
return cleanFlow;
|
|
22
|
+
});
|
|
23
|
+
for (const flow of cleanedUpFlows) {
|
|
17
24
|
try {
|
|
25
|
+
if (existingFlowIds.has(flow.id)) {
|
|
26
|
+
core_1.ux.log(`Skipping existing flow: ${flow.name}`);
|
|
27
|
+
continue;
|
|
28
|
+
}
|
|
18
29
|
await sdk_2.api.client.request((0, sdk_1.createFlow)(flow));
|
|
30
|
+
existingFlowIds.add(flow.id);
|
|
19
31
|
}
|
|
20
32
|
catch (error) {
|
|
21
|
-
(0,
|
|
33
|
+
(0, catch_error_1.default)(error);
|
|
22
34
|
}
|
|
23
35
|
}
|
|
24
36
|
await loadOperations(dir);
|
|
@@ -45,7 +57,7 @@ async function loadOperations(dir) {
|
|
|
45
57
|
await sdk_2.api.client.request((0, sdk_1.updateOperation)(operation.id, pl));
|
|
46
58
|
}
|
|
47
59
|
catch (error) {
|
|
48
|
-
(0,
|
|
60
|
+
(0, catch_error_1.default)(error);
|
|
49
61
|
}
|
|
50
62
|
}
|
|
51
63
|
}
|
|
@@ -4,23 +4,47 @@ const tslib_1 = require("tslib");
|
|
|
4
4
|
const sdk_1 = require("@directus/sdk");
|
|
5
5
|
const core_1 = require("@oclif/core");
|
|
6
6
|
const sdk_2 = require("../sdk");
|
|
7
|
-
const
|
|
7
|
+
const catch_error_1 = tslib_1.__importDefault(require("../utils/catch-error"));
|
|
8
8
|
const read_file_1 = tslib_1.__importDefault(require("../utils/read-file"));
|
|
9
9
|
async function loadFolders(dir) {
|
|
10
10
|
const folders = (0, read_file_1.default)('folders', dir);
|
|
11
11
|
core_1.ux.action.start(`Loading ${folders.length} folders`);
|
|
12
12
|
try {
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
// Update the folders with relationships concurrently
|
|
17
|
-
await Promise.all(folders.map(async (folder) => {
|
|
18
|
-
const { id, ...rest } = folder;
|
|
19
|
-
await sdk_2.api.client.request((0, sdk_1.updateFolder)(id, rest));
|
|
13
|
+
// Fetch existing folders
|
|
14
|
+
const existingFolders = await sdk_2.api.client.request((0, sdk_1.readFolders)({
|
|
15
|
+
limit: -1,
|
|
20
16
|
}));
|
|
17
|
+
const existingFolderIds = new Set(existingFolders.map(folder => folder.id));
|
|
18
|
+
const foldersToAdd = folders.filter(folder => {
|
|
19
|
+
if (existingFolderIds.has(folder.id)) {
|
|
20
|
+
core_1.ux.log(`Skipping existing folder: ${folder.name}`);
|
|
21
|
+
return false;
|
|
22
|
+
}
|
|
23
|
+
return true;
|
|
24
|
+
});
|
|
25
|
+
if (foldersToAdd.length > 0) {
|
|
26
|
+
const folderSkeleton = foldersToAdd.map(folder => ({ id: folder.id, name: folder.name }));
|
|
27
|
+
// Create the folders
|
|
28
|
+
await sdk_2.api.client.request((0, sdk_1.createFolders)(folderSkeleton));
|
|
29
|
+
core_1.ux.log(`Created ${foldersToAdd.length} new folders`);
|
|
30
|
+
// Update the folders with relationships concurrently
|
|
31
|
+
await Promise.all(foldersToAdd.map(async (folder) => {
|
|
32
|
+
const { id, ...rest } = folder;
|
|
33
|
+
try {
|
|
34
|
+
await sdk_2.api.client.request((0, sdk_1.updateFolder)(id, rest));
|
|
35
|
+
core_1.ux.log(`Updated relationships for folder: ${folder.name}`);
|
|
36
|
+
}
|
|
37
|
+
catch (error) {
|
|
38
|
+
(0, catch_error_1.default)(error);
|
|
39
|
+
}
|
|
40
|
+
}));
|
|
41
|
+
}
|
|
42
|
+
else {
|
|
43
|
+
core_1.ux.log('No new folders to create');
|
|
44
|
+
}
|
|
21
45
|
}
|
|
22
46
|
catch (error) {
|
|
23
|
-
(0,
|
|
47
|
+
(0, catch_error_1.default)(error);
|
|
24
48
|
}
|
|
25
49
|
core_1.ux.action.stop();
|
|
26
50
|
core_1.ux.log('Loaded folders');
|
|
@@ -4,22 +4,29 @@ const tslib_1 = require("tslib");
|
|
|
4
4
|
const sdk_1 = require("@directus/sdk");
|
|
5
5
|
const core_1 = require("@oclif/core");
|
|
6
6
|
const sdk_2 = require("../sdk");
|
|
7
|
-
const
|
|
8
|
-
const log_error_1 = tslib_1.__importDefault(require("../utils/log-error"));
|
|
7
|
+
const catch_error_1 = tslib_1.__importDefault(require("../utils/catch-error"));
|
|
9
8
|
const read_file_1 = tslib_1.__importDefault(require("../utils/read-file"));
|
|
10
9
|
/**
 * Create the template's permissions on the target instance, skipping any
 * permission whose collection/action/policy combination already exists.
 *
 * @param dir - Template directory containing the `permissions` data file.
 */
async function loadPermissions(dir) {
    const permissions = (0, read_file_1.default)('permissions', dir);
    core_1.ux.action.start(`Loading ${permissions.length} permissions`);
    try {
        const existingPermissions = await sdk_2.api.client.request((0, sdk_1.readPermissions)({
            limit: -1,
        }));
        // A permission is identified by its collection/action/policy triple.
        const keyOf = perm => `${perm.collection}:${perm.action}:${perm.policy}`;
        const existingKeys = new Set(existingPermissions.map(perm => keyOf(perm)));
        const newPermissions = permissions.filter(perm => !existingKeys.has(keyOf(perm)));
        if (newPermissions.length === 0) {
            core_1.ux.log('No new permissions to create');
        }
        else {
            await sdk_2.api.client.request((0, sdk_1.createPermissions)(newPermissions));
            core_1.ux.log(`Created ${newPermissions.length} new permissions`);
        }
    }
    catch (error) {
        (0, catch_error_1.default)(error);
    }
    core_1.ux.action.stop();
    core_1.ux.log('Loaded permissions');
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export default function loadPolicies(dir: string): Promise<void>;
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const tslib_1 = require("tslib");
|
|
4
|
+
const sdk_1 = require("@directus/sdk");
|
|
5
|
+
const core_1 = require("@oclif/core");
|
|
6
|
+
const sdk_2 = require("../sdk");
|
|
7
|
+
const catch_error_1 = tslib_1.__importDefault(require("../utils/catch-error"));
|
|
8
|
+
const read_file_1 = tslib_1.__importDefault(require("../utils/read-file"));
|
|
9
|
+
async function loadPolicies(dir) {
|
|
10
|
+
const policies = (0, read_file_1.default)('policies', dir);
|
|
11
|
+
core_1.ux.action.start(`Loading ${policies.length} policies`);
|
|
12
|
+
// Fetch existing policies
|
|
13
|
+
const existingPolicies = await sdk_2.api.client.request((0, sdk_1.readPolicies)({
|
|
14
|
+
limit: -1,
|
|
15
|
+
}));
|
|
16
|
+
const existingPolicyIds = new Set(existingPolicies.map(policy => policy.id));
|
|
17
|
+
const PUBLIC_POLICY_ID = 'abf8a154-5b1c-4a46-ac9c-7300570f4f17';
|
|
18
|
+
const policiesWithoutPublic = policies.filter(policy => policy.id !== PUBLIC_POLICY_ID);
|
|
19
|
+
for await (const policy of policiesWithoutPublic) {
|
|
20
|
+
try {
|
|
21
|
+
if (existingPolicyIds.has(policy.id)) {
|
|
22
|
+
core_1.ux.log(`Skipping existing policy: ${policy.name}`);
|
|
23
|
+
continue;
|
|
24
|
+
}
|
|
25
|
+
// Create new policy
|
|
26
|
+
await sdk_2.api.client.request((0, sdk_1.createPolicy)(policy));
|
|
27
|
+
// Add the new policy ID to our set of existing policies
|
|
28
|
+
existingPolicyIds.add(policy.id);
|
|
29
|
+
}
|
|
30
|
+
catch (error) {
|
|
31
|
+
(0, catch_error_1.default)(error);
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
core_1.ux.action.stop();
|
|
35
|
+
core_1.ux.log('Loaded policies');
|
|
36
|
+
}
|
|
37
|
+
exports.default = loadPolicies;
|