@squiz/dxp-cli-next 5.25.3 → 5.26.0-develop.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/__tests__/integration/main.spec.js +4 -0
- package/lib/dxp.js +4 -0
- package/lib/migration/create/createMigration.d.ts +3 -0
- package/lib/migration/create/createMigration.js +82 -0
- package/lib/migration/create/createMigration.spec.d.ts +1 -0
- package/lib/migration/create/createMigration.spec.js +381 -0
- package/lib/migration/index.d.ts +3 -0
- package/lib/migration/index.js +12 -0
- package/lib/migration/utils.d.ts +32 -0
- package/lib/migration/utils.js +190 -0
- package/lib/migration/utils.spec.d.ts +1 -0
- package/lib/migration/utils.spec.js +411 -0
- package/package.json +1 -1
package/lib/__tests__/integration/main.spec.js
CHANGED

@@ -17,6 +17,8 @@ describe('dxp', () => {
 'job-runner Job Runner Service Commands',
 'datastore Datastore Service Commands',
 'cdp Customer Data Platform Service Commands',
+// TODO: Migration is hidden behind feature flag.
+// 'migration AI Page Migration Service Commands',
 // TODO: Porter is hidden behind feature flag.
 // 'porter Porter Service Commands',
 ]);
@@ -33,6 +35,8 @@ describe('dxp', () => {
 'job-runner Job Runner Service Commands',
 'datastore Datastore Service Commands',
 'cdp Customer Data Platform Service Commands',
+// TODO: Migration is hidden behind feature flag.
+// 'migration AI Page Migration Service Commands',
 'porter Porter Service Commands',
 ]);
 });

package/lib/dxp.js
CHANGED
@@ -18,6 +18,7 @@ const datastore_1 = __importDefault(require("./datastore"));
 const cdp_1 = __importDefault(require("./cdp"));
 const porter_1 = __importDefault(require("./porter"));
 const page_1 = __importDefault(require("./page"));
+const migration_1 = __importDefault(require("./migration"));
 const program = new commander_1.default.Command();
 const packageJson = require('../package.json');
 const version = packageJson.version;
@@ -36,6 +37,9 @@ if (process.env.ENABLE_PORTER === 'true') {
 if (process.env.ENABLE_PAGE_LAYOUTS === 'true') {
 program.addCommand(page_1.default);
 }
+if (process.env.ENABLE_MIGRATION === 'true') {
+program.addCommand(migration_1.default);
+}
 program
 .action(() => {
 program.help();

package/lib/migration/create/createMigration.js
ADDED

@@ -0,0 +1,82 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const commander_1 = require("commander");
const chalk_1 = __importDefault(require("chalk"));
const ora_1 = __importDefault(require("ora"));
const fs_1 = __importDefault(require("fs"));
const utils_1 = require("../utils");
const createMigrationCommand = () => {
const createCommand = new commander_1.Command('create')
.name('create')
.description('Create a new migration using the AI Page migration service')
.addOption(new commander_1.Option('--asset-id <string>', 'The ID of the asset to be migrated').makeOptionMandatory())
.addOption(new commander_1.Option('--preview-asset-id <string>', 'The ID of the folder that will be used as the parent of the migrated asset generated during the Preview stage').makeOptionMandatory())
.addOption(new commander_1.Option('--matrix-url <string>', 'Matrix URL for the migration').makeOptionMandatory())
.addOption(new commander_1.Option('-t, --tenant <string>', 'Tenant ID to run against. If not provided will use configured tenant from login'))
.argument('<exportPath>', 'Path to the export folder (e.g., ./export)')
.configureOutput({
outputError(str, write) {
write(chalk_1.default.red(str));
},
})
.action((exportPath, options) => __awaiter(void 0, void 0, void 0, function* () {
yield (0, utils_1.throwErrorIfNotLoggedIn)(createCommand);
let spinner = (0, ora_1.default)('Validating export folder structure').start();
try {
// Validate export folder structure
(0, utils_1.validateExportFolder)(exportPath);
spinner.succeed('Export folder structure validated');
// Create tar file
spinner = (0, ora_1.default)('Creating tar file from export folder').start();
const tarFilePath = yield (0, utils_1.createTarFile)(exportPath);
if (!tarFilePath) {
throw new Error('Tar file creation failed');
}
spinner.succeed(`Tar file created: ${tarFilePath}`);
// Create migration
spinner = (0, ora_1.default)('Creating migration').start();
const response = yield (0, utils_1.createMigration)(options);
if (!response) {
throw new Error('Migration creation failed');
}
spinner.succeed('Migration created successfully');
// Upload file to S3
spinner = (0, ora_1.default)('Uploading file to S3').start();
const uploadUrl = response.uploadUrl;
const s3Url = yield (0, utils_1.uploadFileToS3)(uploadUrl, tarFilePath, options.tenant);
if (!s3Url) {
throw new Error('File upload failed');
}
spinner.succeed('File uploaded to S3');
// Clean up tar file
fs_1.default.unlinkSync(tarFilePath);
spinner.succeed(`Successfully created migration: ${JSON.stringify({
migrationId: response.assetMigration.migrationId,
assetId: response.assetMigration.assetId,
stage: response.assetMigration.stage,
status: response.assetMigration.status,
})}`);
}
catch (error) {
spinner.fail();
(0, utils_1.handleCommandError)(createCommand, error);
}
}));
if (process.env.ENABLE_OVERRIDE_MIGRATION_URL === 'true') {
createCommand.addOption(new commander_1.Option('-ou, --overrideUrl <string>', 'Developer option to override the entire migration url with a custom value'));
}
return createCommand;
};
exports.default = createMigrationCommand;

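The new `create` command validates the export folder, tars it, calls the migration API, then uploads the archive. A minimal sketch of driving the command programmatically, mirroring the argv used in createMigration.spec.js below; the require path and option values are illustrative assumptions, not part of the package:

// Sketch only: the relative path and IDs are assumed for illustration.
const createMigrationCommand = require('./lib/migration/create/createMigration').default;

createMigrationCommand().parseAsync([
    'node',
    'dxp-cli',
    '--asset-id', 'asset-123',
    '--preview-asset-id', 'preview-456',
    '--matrix-url', 'https://matrix.example.com',
    './export',
]);
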
package/lib/migration/create/createMigration.spec.d.ts
ADDED

@@ -0,0 +1 @@
export {};

package/lib/migration/create/createMigration.spec.js
ADDED

@@ -0,0 +1,381 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const nock_1 = __importDefault(require("nock"));
const createMigration_1 = __importDefault(require("./createMigration"));
const utils = __importStar(require("../utils"));
const ApplicationStore = __importStar(require("../../ApplicationStore"));
const fs_1 = __importDefault(require("fs"));
jest.mock('../utils');
jest.mock('../../ApplicationStore');
jest.mock('fs');
const mockUtils = utils;
const mockApplicationStore = ApplicationStore;
const mockFs = fs_1.default;
describe('createMigrationCommand', () => {
let logSpy;
let mockCreateMigrationResponse;
beforeEach(() => {
nock_1.default.cleanAll();
jest.clearAllMocks();
jest.resetAllMocks();
logSpy = jest.spyOn(console, 'log').mockImplementation(() => { });
mockApplicationStore.getApplicationFile.mockResolvedValue('session-cookie');
mockUtils.throwErrorIfNotLoggedIn.mockResolvedValue(undefined);
mockUtils.validateExportFolder.mockImplementation(() => { });
mockUtils.createTarFile.mockResolvedValue('/path/to/export_123.tar.gz');
mockUtils.uploadFileToS3.mockResolvedValue('https://s3.amazonaws.com/uploaded-file');
mockFs.unlinkSync.mockImplementation(() => { });
mockCreateMigrationResponse = {
assetMigration: {
migrationId: 'migration-123',
assetId: 'asset-456',
stage: 'pending',
status: 'created',
xmlFilePath: '/path/to/xml',
matrixUrl: 'https://matrix.example.com',
previewAssetId: 'preview-789',
created: 1234567890,
updated: 1234567890,
migrationIdAssetId: 'migration-123-asset-456',
},
uploadUrl: 'https://upload.s3.amazonaws.com',
};
mockUtils.createMigration.mockResolvedValue(mockCreateMigrationResponse);
});
afterEach(() => {
logSpy.mockRestore();
});
describe('successful migration creation', () => {
it('should create migration successfully with all required options', () => __awaiter(void 0, void 0, void 0, function* () {
const program = (0, createMigration_1.default)();
yield program.parseAsync([
'node',
'dxp-cli',
'--asset-id',
'asset-123',
'--preview-asset-id',
'preview-456',
'--matrix-url',
'https://matrix.example.com',
'/path/to/export',
]);
expect(mockUtils.throwErrorIfNotLoggedIn).toHaveBeenCalledWith(program);
expect(mockUtils.validateExportFolder).toHaveBeenCalledWith('/path/to/export');
expect(mockUtils.createTarFile).toHaveBeenCalledWith('/path/to/export');
expect(mockUtils.createMigration).toHaveBeenCalledWith({
assetId: 'asset-123',
previewAssetId: 'preview-456',
matrixUrl: 'https://matrix.example.com',
});
expect(mockUtils.uploadFileToS3).toHaveBeenCalledWith('https://upload.s3.amazonaws.com', '/path/to/export_123.tar.gz', undefined);
expect(mockFs.unlinkSync).toHaveBeenCalledWith('/path/to/export_123.tar.gz');
expect(mockUtils.validateExportFolder).toHaveBeenCalledTimes(1);
expect(mockUtils.createTarFile).toHaveBeenCalledTimes(1);
expect(mockUtils.createMigration).toHaveBeenCalledTimes(1);
expect(mockUtils.uploadFileToS3).toHaveBeenCalledTimes(1);
expect(mockFs.unlinkSync).toHaveBeenCalledTimes(1);
expect(mockUtils.handleCommandError).not.toHaveBeenCalled();
}));
it('should create migration with tenant option', () => __awaiter(void 0, void 0, void 0, function* () {
const program = (0, createMigration_1.default)();
yield program.parseAsync([
'node',
'dxp-cli',
'--asset-id',
'asset-123',
'--preview-asset-id',
'preview-456',
'--matrix-url',
'https://matrix.example.com',
'--tenant',
'test-tenant',
'/path/to/export',
]);
expect(mockUtils.createMigration).toHaveBeenCalledWith({
assetId: 'asset-123',
previewAssetId: 'preview-456',
matrixUrl: 'https://matrix.example.com',
tenant: 'test-tenant',
});
expect(mockUtils.uploadFileToS3).toHaveBeenCalledWith('https://upload.s3.amazonaws.com', '/path/to/export_123.tar.gz', 'test-tenant');
}));
it('should create migration with override URL when environment variable is set', () => __awaiter(void 0, void 0, void 0, function* () {
const originalEnv = process.env.ENABLE_OVERRIDE_MIGRATION_URL;
process.env.ENABLE_OVERRIDE_MIGRATION_URL = 'true';
const program = (0, createMigration_1.default)();
yield program.parseAsync([
'node',
'dxp-cli',
'--asset-id',
'asset-123',
'--preview-asset-id',
'preview-456',
'--matrix-url',
'https://matrix.example.com',
'--overrideUrl',
'https://custom.migration.url',
'/path/to/export',
]);
expect(mockUtils.createMigration).toHaveBeenCalledWith({
assetId: 'asset-123',
previewAssetId: 'preview-456',
matrixUrl: 'https://matrix.example.com',
overrideUrl: 'https://custom.migration.url',
});
process.env.ENABLE_OVERRIDE_MIGRATION_URL = originalEnv;
}));
});
describe('error scenarios', () => {
it('should handle validation error for export folder', () => __awaiter(void 0, void 0, void 0, function* () {
const validationError = new Error('Export folder does not exist');
mockUtils.validateExportFolder.mockImplementation(() => {
throw validationError;
});
mockUtils.handleCommandError.mockImplementation(() => { });
const program = (0, createMigration_1.default)();
yield program.parseAsync([
'node',
'dxp-cli',
'--asset-id',
'asset-123',
'--preview-asset-id',
'preview-456',
'--matrix-url',
'https://matrix.example.com',
'/invalid/path',
]);
expect(mockUtils.handleCommandError).toHaveBeenCalledWith(program, validationError);
expect(mockUtils.createTarFile).not.toHaveBeenCalled();
}));
it('should handle tar file creation failure', () => __awaiter(void 0, void 0, void 0, function* () {
mockUtils.createTarFile.mockResolvedValue(null);
const program = (0, createMigration_1.default)();
yield program.parseAsync([
'node',
'dxp-cli',
'--asset-id',
'asset-123',
'--preview-asset-id',
'preview-456',
'--matrix-url',
'https://matrix.example.com',
'/path/to/export',
]);
expect(mockUtils.createMigration).not.toHaveBeenCalled();
expect(mockUtils.uploadFileToS3).not.toHaveBeenCalled();
}));
it('should handle migration creation failure', () => __awaiter(void 0, void 0, void 0, function* () {
mockUtils.createMigration.mockResolvedValue(null);
const program = (0, createMigration_1.default)();
yield program.parseAsync([
'node',
'dxp-cli',
'--asset-id',
'asset-123',
'--preview-asset-id',
'preview-456',
'--matrix-url',
'https://matrix.example.com',
'/path/to/export',
]);
expect(mockUtils.uploadFileToS3).not.toHaveBeenCalled();
expect(mockFs.unlinkSync).not.toHaveBeenCalled();
}));
it('should handle S3 upload failure', () => __awaiter(void 0, void 0, void 0, function* () {
mockUtils.uploadFileToS3.mockResolvedValue(null);
const program = (0, createMigration_1.default)();
yield program.parseAsync([
'node',
'dxp-cli',
'--asset-id',
'asset-123',
'--preview-asset-id',
'preview-456',
'--matrix-url',
'https://matrix.example.com',
'/path/to/export',
]);
expect(mockFs.unlinkSync).not.toHaveBeenCalled();
}));
it('should handle migration API error', () => __awaiter(void 0, void 0, void 0, function* () {
const apiError = new Error('Migration API failed');
mockUtils.createMigration.mockRejectedValue(apiError);
mockUtils.handleCommandError.mockImplementation(() => { });
const program = (0, createMigration_1.default)();
yield program.parseAsync([
'node',
'dxp-cli',
'--asset-id',
'asset-123',
'--preview-asset-id',
'preview-456',
'--matrix-url',
'https://matrix.example.com',
'/path/to/export',
]);
expect(mockUtils.handleCommandError).toHaveBeenCalledWith(program, apiError);
}));
it('should handle not being logged in', () => __awaiter(void 0, void 0, void 0, function* () {
const loginError = new Error('Not logged in');
mockUtils.throwErrorIfNotLoggedIn.mockImplementation(() => {
throw loginError;
});
const program = (0, createMigration_1.default)();
yield expect(program.parseAsync([
'node',
'dxp-cli',
'--asset-id',
'asset-123',
'--preview-asset-id',
'preview-456',
'--matrix-url',
'https://matrix.example.com',
'/path/to/export',
])).rejects.toThrow('Not logged in');
expect(mockUtils.validateExportFolder).not.toHaveBeenCalled();
}));
});
describe('required options validation', () => {
it('should require assetid option', () => {
const program = (0, createMigration_1.default)().exitOverride();
expect(() => {
program.parse([
'node',
'dxp-cli',
'--preview-asset-id',
'preview-456',
'--matrix-url',
'https://matrix.example.com',
'/path/to/export',
]);
}).toThrow();
});
it('should require previewAssetid option', () => {
const program = (0, createMigration_1.default)().exitOverride();
expect(() => {
program.parse([
'node',
'dxp-cli',
'--asset-id',
'asset-123',
'--matrix-url',
'https://matrix.example.com',
'/path/to/export',
]);
}).toThrow();
});
it('should require matrixUrl option', () => {
const program = (0, createMigration_1.default)().exitOverride();
expect(() => {
program.parse([
'node',
'dxp-cli',
'--asset-id',
'asset-123',
'--preview-asset-id',
'preview-456',
'/path/to/export',
]);
}).toThrow();
});
it('should require exportPath argument', () => {
const program = (0, createMigration_1.default)().exitOverride();
expect(() => {
program.parse([
'node',
'dxp-cli',
'--asset-id',
'asset-123',
'--preview-asset-id',
'preview-456',
'--matrix-url',
'https://matrix.example.com',
]);
}).toThrow();
});
});
describe('command configuration', () => {
it('should have correct command name and description', () => {
const program = (0, createMigration_1.default)();
expect(program.name()).toBe('create');
expect(program.description()).toBe('Create a new migration using the AI Page migration service');
});
it('should parse options correctly', () => {
const program = (0, createMigration_1.default)();
program.parse([
'node',
'dxp-cli',
'--asset-id',
'asset-123',
'--preview-asset-id',
'preview-456',
'--matrix-url',
'https://matrix.example.com',
'--tenant',
'test-tenant',
'/path/to/export',
]);
const opts = program.opts();
expect(opts.assetId).toBe('asset-123');
expect(opts.previewAssetId).toBe('preview-456');
expect(opts.matrixUrl).toBe('https://matrix.example.com');
expect(opts.tenant).toBe('test-tenant');
});
});
describe('spinner and output behavior', () => {
it('should display appropriate spinner messages during execution', () => __awaiter(void 0, void 0, void 0, function* () {
const program = (0, createMigration_1.default)();
yield program.parseAsync([
'node',
'dxp-cli',
'--asset-id',
'asset-123',
'--preview-asset-id',
'preview-456',
'--matrix-url',
'https://matrix.example.com',
'/path/to/export',
]);
expect(mockUtils.validateExportFolder).toHaveBeenCalledWith('/path/to/export');
expect(mockUtils.createTarFile).toHaveBeenCalledWith('/path/to/export');
expect(mockUtils.createMigration).toHaveBeenCalled();
expect(mockUtils.uploadFileToS3).toHaveBeenCalled();
expect(mockFs.unlinkSync).toHaveBeenCalled();
}));
});
});

package/lib/migration/index.js
ADDED

@@ -0,0 +1,12 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const commander_1 = require("commander");
const createMigration_1 = __importDefault(require("./create/createMigration"));
const migrationCommand = new commander_1.Command('migration');
migrationCommand
.description('AI Page Migration Service Commands')
.addCommand((0, createMigration_1.default)());
exports.default = migrationCommand;

package/lib/migration/utils.d.ts
ADDED

@@ -0,0 +1,32 @@
import { Command } from 'commander';
export interface CreateMigrationOptions {
assetId: string;
previewAssetId: string;
matrixUrl: string;
tenant?: string;
overrideUrl?: string;
}
export interface CreateMigrationAPIResponse {
assetMigration: {
migrationId: string;
assetId: string;
xmlFilePath: string;
matrixUrl: string;
previewAssetId: string;
stage: string;
status: string;
created: number;
updated: number;
migrationIdAssetId: string;
};
uploadUrl: string;
}
export declare function handleCommandError(command: Command, error: Error): void;
export declare function throwErrorIfNotLoggedIn(command: Command): Promise<void>;
export declare function buildMigrationUrl(tenantID?: string, overrideUrl?: string): Promise<string>;
export declare function validateAxiosStatus(status: number): boolean;
export declare function validateExportFolder(exportPath: string): void;
export declare function createTarFile(exportPath: string): Promise<string>;
export declare function getMigrationHeaders(tenantID?: string): Promise<Record<string, string>>;
export declare function uploadFileToS3(uploadUrl: string, filePath: string, tenantID?: string): Promise<string>;
export declare function createMigration(options: CreateMigrationOptions): Promise<CreateMigrationAPIResponse>;

package/lib/migration/utils.js
ADDED

@@ -0,0 +1,190 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.createMigration = exports.uploadFileToS3 = exports.getMigrationHeaders = exports.createTarFile = exports.validateExportFolder = exports.validateAxiosStatus = exports.buildMigrationUrl = exports.throwErrorIfNotLoggedIn = exports.handleCommandError = void 0;
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const chalk_1 = __importDefault(require("chalk"));
const ApplicationConfig_1 = require("../ApplicationConfig");
const axios_1 = __importDefault(require("axios"));
const ApplicationStore_1 = require("../ApplicationStore");
const child_process_1 = require("child_process");
const ApiService_1 = require("../ApiService");
function handleCommandError(command, error) {
var _a, _b, _c, _d;
if (axios_1.default.isAxiosError(error)) {
let message = `${error.message}`;
if ((_b = (_a = error.response) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.message) {
message += `: ${error.response.data.message}`;
}
if ((_d = (_c = error.response) === null || _c === void 0 ? void 0 : _c.data) === null || _d === void 0 ? void 0 : _d.details) {
message += ` - ${error.response.data.details}`;
}
command.error(chalk_1.default.red(message));
}
else {
if (!!process.env.DEBUG && error.stack) {
command.error(error.stack);
}
if (error.message) {
command.error(chalk_1.default.red(error.message));
}
else {
command.error(chalk_1.default.red('An unknown error occurred'));
}
}
}
exports.handleCommandError = handleCommandError;
function throwErrorIfNotLoggedIn(command) {
return __awaiter(this, void 0, void 0, function* () {
if (!(yield (0, ApplicationStore_1.getApplicationFile)(ApplicationStore_1.STORE_FILES.sessionCookie))) {
command.error(chalk_1.default.red('You must login to interact with the migration service. See `dxp-next auth login`'));
}
});
}
exports.throwErrorIfNotLoggedIn = throwErrorIfNotLoggedIn;
function buildMigrationUrl(tenantID, overrideUrl) {
return __awaiter(this, void 0, void 0, function* () {
if (!overrideUrl) {
const existingConfig = yield (0, ApplicationConfig_1.fetchApplicationConfig)(tenantID);
return `${existingConfig.baseUrl}/__dxp/service/aiapps/migration`;
}
else {
return overrideUrl;
}
});
}
exports.buildMigrationUrl = buildMigrationUrl;
function validateAxiosStatus(status) {
return status < 400;
}
exports.validateAxiosStatus = validateAxiosStatus;
function validateExportFolder(exportPath) {
// Check if the export folder exists
if (!fs_1.default.existsSync(exportPath)) {
throw new Error(`Export folder does not exist: ${exportPath}`);
}
// Check if it's a directory
if (!fs_1.default.statSync(exportPath).isDirectory()) {
throw new Error(`Export path is not a directory: ${exportPath}`);
}
// Check for nested export folder structure (e.g., ./export/export/...)
const nestedExportPath = path_1.default.join(exportPath, 'export');
if (!fs_1.default.existsSync(nestedExportPath)) {
throw new Error(`Nested export folder does not exist: ${nestedExportPath}`);
}
if (!fs_1.default.statSync(nestedExportPath).isDirectory()) {
throw new Error(`Nested export path is not a directory: ${nestedExportPath}`);
}
// Check for export.xml file in the nested export directory
const exportXmlPath = path_1.default.join(nestedExportPath, 'export.xml');
if (!fs_1.default.existsSync(exportXmlPath)) {
throw new Error(`export.xml file does not exist in: ${nestedExportPath}`);
}
if (!fs_1.default.statSync(exportXmlPath).isFile()) {
throw new Error(`export.xml is not a valid file: ${exportXmlPath}`);
}
}
exports.validateExportFolder = validateExportFolder;
function createTarFile(exportPath) {
return __awaiter(this, void 0, void 0, function* () {
const tarFileName = `export_${Date.now()}.tar.gz`;
const tarFilePath = path_1.default.join(process.cwd(), tarFileName);
return new Promise((resolve, reject) => {
const tar = (0, child_process_1.spawn)('tar', [
'-czf',
tarFilePath,
'-C',
path_1.default.dirname(exportPath),
path_1.default.basename(exportPath),
]);
tar.on('close', code => {
if (code === 0) {
resolve(tarFilePath);
}
else {
reject(new Error(`tar command failed with exit code ${code}`));
}
});
tar.on('error', error => {
reject(new Error(`Failed to create tar file: ${error.message}`));
});
});
});
}
exports.createTarFile = createTarFile;
function getMigrationHeaders(tenantID) {
return __awaiter(this, void 0, void 0, function* () {
const existingConfig = yield (0, ApplicationConfig_1.fetchApplicationConfig)(tenantID);
return {
'x-dxp-tenant': existingConfig.tenant,
};
});
}
exports.getMigrationHeaders = getMigrationHeaders;
function uploadFileToS3(uploadUrl, filePath, tenantID) {
return __awaiter(this, void 0, void 0, function* () {
const fileBuffer = fs_1.default.readFileSync(filePath);
const response = yield fetch(uploadUrl, {
method: 'PUT',
body: fileBuffer,
headers: Object.assign({}, (yield getMigrationHeaders(tenantID))),
});
return response.url;
});
}
exports.uploadFileToS3 = uploadFileToS3;
function createMigration(options) {
return __awaiter(this, void 0, void 0, function* () {
const apiService = new ApiService_1.ApiService({
validateStatus: validateAxiosStatus,
});
const migrationUrl = yield buildMigrationUrl(options.tenant, options.overrideUrl);
try {
const payload = {
assetId: options.assetId,
previewAssetId: options.previewAssetId,
matrixUrl: options.matrixUrl,
};
const response = yield apiService.client.post(`${migrationUrl}/migrations`, payload, {
headers: Object.assign({ 'Content-Type': 'application/json' }, (yield getMigrationHeaders(options.tenant))),
});
if (response.status !== 200 && response.status !== 201) {
throw new Error(`Migration creation failed with status: ${response.status}`);
}
// Validate response structure
const { assetMigration, uploadUrl } = response.data || {};
if (!(assetMigration === null || assetMigration === void 0 ? void 0 : assetMigration.migrationId) ||
!(assetMigration === null || assetMigration === void 0 ? void 0 : assetMigration.assetId) ||
!(assetMigration === null || assetMigration === void 0 ? void 0 : assetMigration.stage) ||
!(assetMigration === null || assetMigration === void 0 ? void 0 : assetMigration.status)) {
throw new Error('Invalid response format from migration service');
}
if (!uploadUrl) {
throw new Error('Upload URL not found in response');
}
return {
assetMigration,
uploadUrl,
};
}
catch (error) {
if (error instanceof Error) {
throw error;
}
throw new Error(`Failed to create migration: ${error}`);
}
});
}
exports.createMigration = createMigration;

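Taken together, these utils cover the full happy path the command runs through. A short sketch of calling them directly, assuming the compiled module is required from the package's lib folder; the folder layout and IDs are illustrative assumptions:

// Sketch only: uses the exports added in lib/migration/utils.js; path and values are assumed.
const { validateExportFolder, createTarFile, createMigration, uploadFileToS3 } = require('./lib/migration/utils');

(async () => {
    validateExportFolder('./export'); // expects ./export/export/export.xml per the checks above
    const tarFilePath = await createTarFile('./export'); // writes export_<timestamp>.tar.gz to cwd
    const { assetMigration, uploadUrl } = await createMigration({
        assetId: 'asset-123',
        previewAssetId: 'preview-456',
        matrixUrl: 'https://matrix.example.com',
    });
    await uploadFileToS3(uploadUrl, tarFilePath);
    console.log(assetMigration.migrationId);
})();
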
package/lib/migration/utils.spec.d.ts
ADDED

@@ -0,0 +1 @@
export {};

package/lib/migration/utils.spec.js
ADDED

@@ -0,0 +1,411 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const child_process_1 = require("child_process");
const axios_1 = __importDefault(require("axios"));
const ApplicationConfig_1 = require("../ApplicationConfig");
const ApplicationStore_1 = require("../ApplicationStore");
const ApiService_1 = require("../ApiService");
const utils_1 = require("./utils");
// Mock all external dependencies
jest.mock('fs');
jest.mock('path');
jest.mock('child_process');
jest.mock('axios');
jest.mock('../ApplicationConfig');
jest.mock('../ApplicationStore');
jest.mock('../ApiService');
// Mock global fetch
const mockFetch = jest.fn();
global.fetch = mockFetch;
const mockFs = fs_1.default;
const mockPath = path_1.default;
const mockSpawn = child_process_1.spawn;
const mockAxios = axios_1.default;
const mockFetchApplicationConfig = ApplicationConfig_1.fetchApplicationConfig;
const mockGetApplicationFile = ApplicationStore_1.getApplicationFile;
const mockApiService = ApiService_1.ApiService;
describe('Migration Utils', () => {
beforeEach(() => {
jest.clearAllMocks();
jest.resetAllMocks();
});
describe('handleCommandError', () => {
let mockCommand;
beforeEach(() => {
mockCommand = {
error: jest.fn(),
};
});
it('handles axios errors with response data message', () => {
const axiosError = {
message: 'Network Error',
response: {
data: {
message: 'Server Error',
details: 'Additional details',
},
},
};
mockAxios.isAxiosError.mockReturnValue(true);
(0, utils_1.handleCommandError)(mockCommand, axiosError);
expect(mockCommand.error).toHaveBeenCalledWith(expect.stringContaining('Network Error: Server Error - Additional details'));
});
it('handles axios errors without response data', () => {
const axiosError = {
message: 'Network Error',
};
mockAxios.isAxiosError.mockReturnValue(true);
(0, utils_1.handleCommandError)(mockCommand, axiosError);
expect(mockCommand.error).toHaveBeenCalledWith(expect.stringContaining('Network Error'));
});
it('handles regular errors with message', () => {
const error = new Error('Regular error');
mockAxios.isAxiosError.mockReturnValue(false);
(0, utils_1.handleCommandError)(mockCommand, error);
expect(mockCommand.error).toHaveBeenCalledWith(expect.stringContaining('Regular error'));
});
it('handles errors without message', () => {
const error = new Error();
error.message = '';
mockAxios.isAxiosError.mockReturnValue(false);
(0, utils_1.handleCommandError)(mockCommand, error);
expect(mockCommand.error).toHaveBeenCalledWith(expect.stringContaining('An unknown error occurred'));
});
it('shows stack trace in debug mode', () => {
const originalDebug = process.env.DEBUG;
process.env.DEBUG = 'true';
const error = new Error('Test error');
error.stack = 'Error stack trace';
mockAxios.isAxiosError.mockReturnValue(false);
(0, utils_1.handleCommandError)(mockCommand, error);
expect(mockCommand.error).toHaveBeenCalledWith('Error stack trace');
process.env.DEBUG = originalDebug;
});
});
describe('throwErrorIfNotLoggedIn', () => {
let mockCommand;
beforeEach(() => {
mockCommand = {
error: jest.fn(),
};
});
it('throws error when not logged in', () => __awaiter(void 0, void 0, void 0, function* () {
mockGetApplicationFile.mockResolvedValue(undefined);
yield (0, utils_1.throwErrorIfNotLoggedIn)(mockCommand);
expect(mockCommand.error).toHaveBeenCalledWith(expect.stringContaining('You must login to interact with the migration service'));
}));
it('does not throw error when logged in', () => __awaiter(void 0, void 0, void 0, function* () {
mockGetApplicationFile.mockResolvedValue('session-cookie');
yield (0, utils_1.throwErrorIfNotLoggedIn)(mockCommand);
expect(mockCommand.error).not.toHaveBeenCalled();
}));
});
describe('buildMigrationUrl', () => {
it('builds url without override', () => __awaiter(void 0, void 0, void 0, function* () {
const mockConfig = {
baseUrl: 'https://example.com',
tenant: 'test-tenant',
region: 'au',
};
mockFetchApplicationConfig.mockResolvedValue(mockConfig);
const result = yield (0, utils_1.buildMigrationUrl)('tenant-id');
expect(result).toBe('https://example.com/__dxp/service/aiapps/migration');
expect(mockFetchApplicationConfig).toHaveBeenCalledWith('tenant-id');
}));
it('returns override url when provided', () => __awaiter(void 0, void 0, void 0, function* () {
const overrideUrl = 'https://override.com';
const result = yield (0, utils_1.buildMigrationUrl)('tenant-id', overrideUrl);
expect(result).toBe(overrideUrl);
expect(mockFetchApplicationConfig).not.toHaveBeenCalled();
}));
});
describe('validateAxiosStatus', () => {
it('returns true for successful status codes', () => {
expect((0, utils_1.validateAxiosStatus)(200)).toBe(true);
expect((0, utils_1.validateAxiosStatus)(201)).toBe(true);
expect((0, utils_1.validateAxiosStatus)(399)).toBe(true);
});
it('returns false for error status codes', () => {
expect((0, utils_1.validateAxiosStatus)(400)).toBe(false);
expect((0, utils_1.validateAxiosStatus)(404)).toBe(false);
expect((0, utils_1.validateAxiosStatus)(500)).toBe(false);
});
});
describe('validateExportFolder', () => {
beforeEach(() => {
mockPath.join.mockImplementation((...paths) => paths.join('/'));
});
it('validates successful export folder structure', () => {
mockFs.existsSync.mockImplementation(filePath => {
return [
'/export/path',
'/export/path/export',
'/export/path/export/export.xml',
].includes(filePath);
});
mockFs.statSync.mockImplementation(filePath => ({
isDirectory: () => filePath !== '/export/path/export/export.xml',
isFile: () => filePath === '/export/path/export/export.xml',
}));
expect(() => (0, utils_1.validateExportFolder)('/export/path')).not.toThrow();
});
it('throws error when export folder does not exist', () => {
mockFs.existsSync.mockReturnValue(false);
expect(() => (0, utils_1.validateExportFolder)('/nonexistent')).toThrow('Export folder does not exist: /nonexistent');
});
it('throws error when export path is not a directory', () => {
mockFs.existsSync.mockReturnValue(true);
mockFs.statSync.mockReturnValue({
isDirectory: () => false,
isFile: () => true,
});
expect(() => (0, utils_1.validateExportFolder)('/export/file.txt')).toThrow('Export path is not a directory: /export/file.txt');
});
it('throws error when nested export folder does not exist', () => {
mockFs.existsSync.mockImplementation(filePath => filePath === '/export/path');
mockFs.statSync.mockReturnValue({
isDirectory: () => true,
});
expect(() => (0, utils_1.validateExportFolder)('/export/path')).toThrow('Nested export folder does not exist: /export/path/export');
});
it('throws error when export.xml does not exist', () => {
mockFs.existsSync.mockImplementation(filePath => {
return ['/export/path', '/export/path/export'].includes(filePath);
});
mockFs.statSync.mockReturnValue({
isDirectory: () => true,
});
expect(() => (0, utils_1.validateExportFolder)('/export/path')).toThrow('export.xml file does not exist in: /export/path/export');
});
it('throws error when export.xml is not a file', () => {
mockFs.existsSync.mockReturnValue(true);
mockFs.statSync.mockImplementation(filePath => ({
isDirectory: () => filePath !== '/export/path/export/export.xml',
isFile: () => false,
}));
expect(() => (0, utils_1.validateExportFolder)('/export/path')).toThrow('export.xml is not a valid file: /export/path/export/export.xml');
});
it('throws error when nested export path is not a directory', () => {
mockFs.existsSync.mockReturnValue(true);
mockFs.statSync.mockImplementation(filePath => {
if (filePath === '/export/path') {
return { isDirectory: () => true };
}
else if (filePath === '/export/path/export') {
return { isDirectory: () => false, isFile: () => true };
}
return { isDirectory: () => false, isFile: () => false };
});
expect(() => (0, utils_1.validateExportFolder)('/export/path')).toThrow('Nested export path is not a directory: /export/path/export');
});
});
describe('createTarFile', () => {
let mockChildProcess;
beforeEach(() => {
mockChildProcess = {
on: jest.fn(),
};
mockSpawn.mockReturnValue(mockChildProcess);
mockPath.join.mockImplementation((...paths) => paths.join('/'));
mockPath.dirname.mockReturnValue('/parent');
mockPath.basename.mockReturnValue('export');
// Mock Date.now() to return consistent timestamp
jest.spyOn(Date, 'now').mockReturnValue(1234567890);
});
afterEach(() => {
jest.restoreAllMocks();
});
it('creates tar file successfully', () => __awaiter(void 0, void 0, void 0, function* () {
const promise = (0, utils_1.createTarFile)('/parent/export');
// Simulate successful tar creation
const closeCallback = mockChildProcess.on.mock.calls.find((call) => call[0] === 'close')[1];
closeCallback(0);
const result = yield promise;
expect(result).toBe(`${process.cwd()}/export_1234567890.tar.gz`);
expect(mockSpawn).toHaveBeenCalledWith('tar', [
'-czf',
`${process.cwd()}/export_1234567890.tar.gz`,
'-C',
'/parent',
'export',
]);
}));
it('handles tar command failure', () => __awaiter(void 0, void 0, void 0, function* () {
const promise = (0, utils_1.createTarFile)('/parent/export');
// Simulate tar command failure
const closeCallback = mockChildProcess.on.mock.calls.find((call) => call[0] === 'close')[1];
closeCallback(1);
yield expect(promise).rejects.toThrow('tar command failed with exit code 1');
}));
it('handles spawn error', () => __awaiter(void 0, void 0, void 0, function* () {
const promise = (0, utils_1.createTarFile)('/parent/export');
// Simulate spawn error
const errorCallback = mockChildProcess.on.mock.calls.find((call) => call[0] === 'error')[1];
errorCallback(new Error('Spawn failed'));
yield expect(promise).rejects.toThrow('Failed to create tar file: Spawn failed');
}));
});
describe('getMigrationHeaders', () => {
it('returns headers with tenant from config', () => __awaiter(void 0, void 0, void 0, function* () {
const mockConfig = {
tenant: 'test-tenant',
baseUrl: 'https://example.com',
region: 'au',
};
mockFetchApplicationConfig.mockResolvedValue(mockConfig);
const result = yield (0, utils_1.getMigrationHeaders)('tenant-id');
expect(result).toEqual({
'x-dxp-tenant': 'test-tenant',
});
expect(mockFetchApplicationConfig).toHaveBeenCalledWith('tenant-id');
}));
});
describe('uploadFileToS3', () => {
beforeEach(() => {
mockFs.readFileSync.mockReturnValue(Buffer.from('file content'));
mockFetchApplicationConfig.mockResolvedValue({
tenant: 'test-tenant',
baseUrl: 'https://example.com',
region: 'au',
});
});
it('uploads file successfully', () => __awaiter(void 0, void 0, void 0, function* () {
const mockResponse = {
url: 'https://s3.amazonaws.com/uploaded-file',
};
mockFetch.mockResolvedValue(mockResponse);
const result = yield (0, utils_1.uploadFileToS3)('https://upload.url', '/path/to/file.tar.gz', 'tenant-id');
expect(result).toBe('https://s3.amazonaws.com/uploaded-file');
expect(mockFetch).toHaveBeenCalledWith('https://upload.url', {
method: 'PUT',
body: Buffer.from('file content'),
headers: {
'x-dxp-tenant': 'test-tenant',
},
});
}));
it('handles file read error', () => __awaiter(void 0, void 0, void 0, function* () {
mockFs.readFileSync.mockImplementation(() => {
throw new Error('File not found');
});
yield expect((0, utils_1.uploadFileToS3)('https://upload.url', '/nonexistent/file.tar.gz')).rejects.toThrow('File not found');
}));
});
describe('createMigration', () => {
let mockApiServiceInstance;
let mockOptions;
beforeEach(() => {
mockApiServiceInstance = {
client: {
post: jest.fn(),
},
};
mockApiService.mockImplementation(() => mockApiServiceInstance);
mockOptions = {
assetId: 'asset-123',
previewAssetId: 'preview-456',
matrixUrl: 'https://matrix.example.com',
tenant: 'test-tenant',
};
mockFetchApplicationConfig.mockResolvedValue({
tenant: 'test-tenant',
baseUrl: 'https://example.com',
region: 'au',
});
});
it('creates migration successfully', () => __awaiter(void 0, void 0, void 0, function* () {
const mockResponse = {
status: 201,
data: {
assetMigration: {
migrationId: 'migration-123',
assetId: 'asset-123',
stage: 'pending',
status: 'created',
xmlFilePath: '/path/to/xml',
matrixUrl: 'https://matrix.example.com',
previewAssetId: 'preview-456',
created: 1234567890,
updated: 1234567890,
migrationIdAssetId: 'migration-123-asset-123',
},
uploadUrl: 'https://upload.s3.amazonaws.com',
},
};
mockApiServiceInstance.client.post.mockResolvedValue(mockResponse);
const result = yield (0, utils_1.createMigration)(mockOptions);
expect(result).toEqual(mockResponse.data);
expect(mockApiServiceInstance.client.post).toHaveBeenCalledWith('https://example.com/__dxp/service/aiapps/migration/migrations', {
assetId: 'asset-123',
previewAssetId: 'preview-456',
matrixUrl: 'https://matrix.example.com',
}, {
headers: {
'Content-Type': 'application/json',
'x-dxp-tenant': 'test-tenant',
},
});
}));
it('handles non-success status codes', () => __awaiter(void 0, void 0, void 0, function* () {
const mockResponse = {
status: 400,
data: {},
};
mockApiServiceInstance.client.post.mockResolvedValue(mockResponse);
yield expect((0, utils_1.createMigration)(mockOptions)).rejects.toThrow('Migration creation failed with status: 400');
}));
it('handles invalid response format - missing migrationId', () => __awaiter(void 0, void 0, void 0, function* () {
const mockResponse = {
status: 200,
data: {
assetMigration: {
assetId: 'asset-123',
stage: 'pending',
status: 'created',
},
uploadUrl: 'https://upload.s3.amazonaws.com',
},
};
mockApiServiceInstance.client.post.mockResolvedValue(mockResponse);
yield expect((0, utils_1.createMigration)(mockOptions)).rejects.toThrow('Invalid response format from migration service');
}));
it('handles missing upload URL', () => __awaiter(void 0, void 0, void 0, function* () {
const mockResponse = {
status: 200,
data: {
assetMigration: {
migrationId: 'migration-123',
assetId: 'asset-123',
stage: 'pending',
status: 'created',
},
},
};
mockApiServiceInstance.client.post.mockResolvedValue(mockResponse);
yield expect((0, utils_1.createMigration)(mockOptions)).rejects.toThrow('Upload URL not found in response');
}));
it('handles API service errors', () => __awaiter(void 0, void 0, void 0, function* () {
const error = new Error('Network error');
mockApiServiceInstance.client.post.mockRejectedValue(error);
yield expect((0, utils_1.createMigration)(mockOptions)).rejects.toThrow('Network error');
}));
it('handles unknown errors', () => __awaiter(void 0, void 0, void 0, function* () {
mockApiServiceInstance.client.post.mockRejectedValue('Unknown error');
yield expect((0, utils_1.createMigration)(mockOptions)).rejects.toThrow('Failed to create migration: Unknown error');
}));
});
});