sfcc-metadata-cli 0.0.1 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/check.yml +80 -0
- package/AGENTS.md +82 -0
- package/LICENSE +661 -0
- package/README.md +249 -0
- package/biome.json +32 -0
- package/commands/create-migration.js +157 -0
- package/commands/custom-object.js +426 -0
- package/commands/site-preference.js +503 -0
- package/commands/system-object.js +572 -0
- package/index.js +34 -0
- package/lib/merge.js +271 -0
- package/lib/templates.js +315 -0
- package/lib/utils.js +188 -0
- package/package.json +24 -15
- package/test/merge.test.js +84 -0
- package/test/templates.test.js +133 -0
- package/test/utils.test.js +79 -0
package/lib/utils.js
ADDED
@@ -0,0 +1,188 @@
+/**
+ * Utility functions for migration helper
+ */
+
+const fs = require('node:fs');
+const path = require('node:path');
+
+/**
+ * Generates a migration name using timestamp format: YYYYMMDD_HHMMSS
+ * This ensures unique, sortable migration names without conflicts.
+ * Optionally appends a description: YYYYMMDD_HHMMSS_description
+ * @param {string} migrationsDir - Path to migrations directory (unused, kept for compatibility)
+ * @param {string} [description] - Optional description to append
+ * @returns {string} Migration name
+ */
+function generateMigrationName(migrationsDir, description) {
+  const now = new Date();
+  const year = now.getFullYear();
+  const month = (now.getMonth() + 1).toString().padStart(2, '0');
+  const day = now.getDate().toString().padStart(2, '0');
+  const hours = now.getHours().toString().padStart(2, '0');
+  const minutes = now.getMinutes().toString().padStart(2, '0');
+  const seconds = now.getSeconds().toString().padStart(2, '0');
+
+  const timestamp = `${year}${month}${day}_${hours}${minutes}${seconds}`;
+
+  if (description) {
+    // Sanitize description: lowercase, replace spaces with underscores, remove special chars
+    const sanitized = description
+      .toLowerCase()
+      .replace(/\s+/g, '_')
+      .replace(/[^a-z0-9_]/g, '')
+      .substring(0, 50); // Limit length
+    return `${timestamp}_${sanitized}`;
+  }
+
+  return timestamp;
+}
+
+/**
+ * Generates a short migration name using date only: YYYYMMDD_N
+ * Falls back to this format if multiple migrations on same day
+ * @param {string} migrationsDir - Path to migrations directory
+ * @param {string} [description] - Optional description to append
+ * @returns {string} Migration name
+ */
+function generateShortMigrationName(migrationsDir, description) {
+  const now = new Date();
+  const year = now.getFullYear();
+  const month = (now.getMonth() + 1).toString().padStart(2, '0');
+  const day = now.getDate().toString().padStart(2, '0');
+  const datePrefix = `${year}${month}${day}`;
+
+  // Find existing migrations for today
+  let todayCount = 0;
+  if (fs.existsSync(migrationsDir)) {
+    todayCount = fs
+      .readdirSync(migrationsDir)
+      .filter((name) => name.startsWith(datePrefix)).length;
+  }
+
+  const sequence = (todayCount + 1).toString().padStart(2, '0');
+  const base = `${datePrefix}_${sequence}`;
+
+  if (description) {
+    const sanitized = description
+      .toLowerCase()
+      .replace(/\s+/g, '_')
+      .replace(/[^a-z0-9_]/g, '')
+      .substring(0, 50);
+    return `${base}_${sanitized}`;
+  }
+
+  return base;
+}
+
+/**
+ * Lists all existing migrations sorted by name (chronological for timestamp format)
+ * @param {string} migrationsDir - Path to migrations directory
+ * @returns {string[]} Array of migration names
+ */
+function listMigrations(migrationsDir) {
+  if (!fs.existsSync(migrationsDir)) {
+    return [];
+  }
+
+  return fs
+    .readdirSync(migrationsDir)
+    .filter((name) => {
+      const fullPath = path.join(migrationsDir, name);
+      // Match both old format (YY.MM.N) and new format (YYYYMMDD_*)
+      return (
+        fs.statSync(fullPath).isDirectory() &&
+        (/^\d{2}\.\d{2}\.\d+/.test(name) || /^\d{8}_/.test(name))
+      );
+    })
+    .sort(compareMigrationVersions);
+}
+
+/**
+ * Compares two migration version strings
+ * Handles both old format (YY.MM.N) and new format (YYYYMMDD_HHMMSS)
+ * @param {string} a - First version
+ * @param {string} b - Second version
+ * @returns {number} Comparison result
+ */
+function compareMigrationVersions(a, b) {
+  // New timestamp format sorts alphabetically correctly
+  // Old format needs special handling
+  const isOldFormatA = /^\d{2}\.\d{2}\.\d+/.test(a);
+  const isOldFormatB = /^\d{2}\.\d{2}\.\d+/.test(b);
+
+  // Old format migrations come before new format
+  if (isOldFormatA && !isOldFormatB) return -1;
+  if (!isOldFormatA && isOldFormatB) return 1;
+
+  // Both old format: compare by parts
+  if (isOldFormatA && isOldFormatB) {
+    const partsA = a.split('.').map((p) => Number.parseInt(p, 10) || 0);
+    const partsB = b.split('.').map((p) => Number.parseInt(p, 10) || 0);
+
+    const maxLength = Math.max(partsA.length, partsB.length);
+    for (let i = 0; i < maxLength; i++) {
+      const numA = partsA[i] || 0;
+      const numB = partsB[i] || 0;
+      if (numA !== numB) {
+        return numA - numB;
+      }
+    }
+    return 0;
+  }
+
+  // Both new format: simple string comparison works
+  return a.localeCompare(b);
+}
+
+/**
+ * Ensures a directory exists, creating it if necessary
+ * @param {string} dirPath - Path to directory
+ */
+function ensureDir(dirPath) {
+  if (!fs.existsSync(dirPath)) {
+    fs.mkdirSync(dirPath, { recursive: true });
+  }
+}
+
+/**
+ * Writes content to a file, creating directories as needed
+ * @param {string} filePath - Path to file
+ * @param {string} content - File content
+ */
+function writeFile(filePath, content) {
+  ensureDir(path.dirname(filePath));
+  fs.writeFileSync(filePath, content, 'utf8');
+}
+
+/**
+ * Reads and parses an existing XML file or returns null
+ * @param {string} filePath - Path to XML file
+ * @returns {string|null} File content or null
+ */
+function readXmlFile(filePath) {
+  if (fs.existsSync(filePath)) {
+    return fs.readFileSync(filePath, 'utf8');
+  }
+  return null;
+}
+
+/**
+ * Gets the latest migration folder
+ * @param {string} migrationsDir - Path to migrations directory
+ * @returns {string|null} Latest migration name or null
+ */
+function getLatestMigration(migrationsDir) {
+  const migrations = listMigrations(migrationsDir);
+  return migrations.length > 0 ? migrations[migrations.length - 1] : null;
+}
+
+module.exports = {
+  generateMigrationName,
+  generateShortMigrationName,
+  listMigrations,
+  compareMigrationVersions,
+  ensureDir,
+  writeFile,
+  readXmlFile,
+  getLatestMigration,
+};
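
A quick sketch of how these helpers combine (the require path and the migrations directory below are illustrative assumptions; the package's own commands, which actually call these functions, are not shown in this excerpt):

    const path = require('node:path');
    const {
      generateMigrationName,
      listMigrations,
      getLatestMigration,
    } = require('./lib/utils');

    // Illustrative location only; the CLI's commands choose the real folder.
    const migrationsDir = path.join(process.cwd(), 'migrations');

    // Timestamp plus sanitized description, e.g. "20251231_143052_add_order_fields"
    const name = generateMigrationName(migrationsDir, 'Add Order Fields!');
    console.log(name);

    // Old-format folders (YY.MM.N) sort before timestamp folders (YYYYMMDD_*)
    console.log(listMigrations(migrationsDir));
    console.log(getLatestMigration(migrationsDir)); // latest folder name, or null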
package/package.json
CHANGED
@@ -1,17 +1,26 @@
 {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  "name": "sfcc-metadata-cli",
+  "version": "1.0.0",
+  "description": "CLI tool for creating SFCC B2C metadata migrations",
+  "main": "index.js",
+  "bin": {
+    "sfcc-metadata": "./index.js"
+  },
+  "scripts": {
+    "start": "node index.js",
+    "test": "node --test",
+    "check": "biome check .",
+    "check:write": "biome check --write ."
+  },
+  "keywords": ["sfcc", "b2c", "migration", "salesforce", "metadata", "cli"],
+  "author": "",
+  "license": "AGPL-3.0-or-later",
+  "dependencies": {
+    "chalk": "^4.1.2",
+    "inquirer": "^8.2.6",
+    "yargs": "^17.7.2"
+  },
+  "devDependencies": {
+    "@biomejs/biome": "^1.9.4"
+  }
 }
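
The new bin entry maps an sfcc-metadata executable to index.js, so once the package is installed the tool can be invoked by name, and npm test runs the node:test suites added under test/. The actual subcommands come from the yargs/inquirer wiring in index.js and the commands/ files, which are not reproduced in this excerpt, so the invocation below is only a hedged illustration:

    npm install -g sfcc-metadata-cli
    sfcc-metadata --help    # yargs prints its generated usage text
    npm test                # inside the repo: runs test/*.test.js via node --test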

package/test/merge.test.js
ADDED

@@ -0,0 +1,84 @@
+const { describe, it } = require('node:test');
+const assert = require('node:assert');
+const { mergeSystemObjectExtension } = require('../lib/merge');
+
+const BASE_XML = `<?xml version="1.0" encoding="UTF-8"?>
+<metadata xmlns="http://www.demandware.com/xml/impex/metadata/2006-10-31">
+</metadata>`;
+
+const EXISTING_TYPE_XML = `<?xml version="1.0" encoding="UTF-8"?>
+<metadata xmlns="http://www.demandware.com/xml/impex/metadata/2006-10-31">
+  <type-extension type-id="Order">
+    <custom-attribute-definitions>
+      <attribute-definition attribute-id="existingAttr">
+        <type>string</type>
+      </attribute-definition>
+    </custom-attribute-definitions>
+    <group-definitions>
+      <attribute-group group-id="Order_Custom">
+        <display-name xml:lang="x-default">Order Custom</display-name>
+        <attribute attribute-id="existingAttr"/>
+      </attribute-group>
+    </group-definitions>
+  </type-extension>
+</metadata>`;
+
+describe('mergeSystemObjectExtension', () => {
+  it('should add new type extension to empty metadata', () => {
+    const result = mergeSystemObjectExtension(BASE_XML, {
+      objectTypeId: 'Order',
+      attributeId: 'newAttr',
+      displayName: 'New Attribute',
+      type: 'string',
+      groupId: 'Order_Custom',
+    });
+
+    assert.strictEqual(result.typeExtensionExisted, false);
+    assert.strictEqual(result.groupExisted, false);
+    assert.ok(result.xml.includes('type-extension type-id="Order"'));
+    assert.ok(result.xml.includes('attribute-id="newAttr"'));
+  });
+
+  it('should merge into existing type extension', () => {
+    const result = mergeSystemObjectExtension(EXISTING_TYPE_XML, {
+      objectTypeId: 'Order',
+      attributeId: 'anotherAttr',
+      displayName: 'Another Attribute',
+      type: 'boolean',
+      groupId: 'Order_Custom',
+    });
+
+    assert.strictEqual(result.typeExtensionExisted, true);
+    assert.strictEqual(result.groupExisted, true);
+    assert.ok(result.xml.includes('attribute-id="existingAttr"'));
+    assert.ok(result.xml.includes('attribute-id="anotherAttr"'));
+  });
+
+  it('should create new group if not exists', () => {
+    const result = mergeSystemObjectExtension(EXISTING_TYPE_XML, {
+      objectTypeId: 'Order',
+      attributeId: 'newGroupAttr',
+      displayName: 'New Group Attr',
+      type: 'string',
+      groupId: 'Order_NewGroup',
+      groupDisplayName: 'New Group',
+    });
+
+    assert.strictEqual(result.groupExisted, false);
+    assert.ok(result.xml.includes('group-id="Order_NewGroup"'));
+  });
+
+  it('should add new type extension for different object type', () => {
+    const result = mergeSystemObjectExtension(EXISTING_TYPE_XML, {
+      objectTypeId: 'Product',
+      attributeId: 'productAttr',
+      displayName: 'Product Attribute',
+      type: 'string',
+      groupId: 'Product_Custom',
+    });
+
+    assert.strictEqual(result.typeExtensionExisted, false);
+    assert.ok(result.xml.includes('type-extension type-id="Order"'));
+    assert.ok(result.xml.includes('type-extension type-id="Product"'));
+  });
+});
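
Taken together, these assertions pin down the merge contract: mergeSystemObjectExtension accepts the current metadata XML string plus an attribute descriptor and returns the updated XML along with flags saying whether the type extension and attribute group already existed. A sketch of a call, inferred only from the tests above (lib/merge.js itself is not reproduced in this diff, and currentXml is a placeholder variable):

    const { mergeSystemObjectExtension } = require('./lib/merge');

    const { xml, typeExtensionExisted, groupExisted } = mergeSystemObjectExtension(
      currentXml, // a full <metadata> document as a string
      {
        objectTypeId: 'Order',
        attributeId: 'newAttr',
        displayName: 'New Attribute',
        type: 'string',
        groupId: 'Order_Custom',
        groupDisplayName: 'Order Custom', // used when the group has to be created
      },
    );
    // xml now contains the new attribute-definition and its group assignment;
    // the flags tell the caller whether anything had to be created from scratch.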

package/test/templates.test.js
ADDED

@@ -0,0 +1,133 @@
+const { describe, it } = require('node:test');
+const assert = require('node:assert');
+const {
+  escapeXml,
+  wrapInMetadata,
+  generateAttributeDefinition,
+  XML_HEADER,
+  METADATA_NAMESPACE,
+} = require('../lib/templates');
+
+describe('escapeXml', () => {
+  it('should escape ampersand', () => {
+    assert.strictEqual(escapeXml('foo & bar'), 'foo &amp; bar');
+  });
+
+  it('should escape less than', () => {
+    assert.strictEqual(escapeXml('a < b'), 'a &lt; b');
+  });
+
+  it('should escape greater than', () => {
+    assert.strictEqual(escapeXml('a > b'), 'a &gt; b');
+  });
+
+  it('should escape quotes', () => {
+    assert.strictEqual(escapeXml('"hello"'), '&quot;hello&quot;');
+  });
+
+  it('should escape apostrophes', () => {
+    assert.strictEqual(escapeXml("it's"), 'it&apos;s');
+  });
+
+  it('should handle empty string', () => {
+    assert.strictEqual(escapeXml(''), '');
+  });
+
+  it('should handle null/undefined', () => {
+    assert.strictEqual(escapeXml(null), '');
+    assert.strictEqual(escapeXml(undefined), '');
+  });
+
+  it('should escape multiple special characters', () => {
+    assert.strictEqual(
+      escapeXml('<tag attr="value">content & more</tag>'),
+      '&lt;tag attr=&quot;value&quot;&gt;content &amp; more&lt;/tag&gt;',
+    );
+  });
+});
+
+describe('wrapInMetadata', () => {
+  it('should wrap content with XML header and metadata element', () => {
+    const content = ' <test>content</test>';
+    const result = wrapInMetadata(content);
+
+    assert.ok(result.startsWith(XML_HEADER));
+    assert.ok(result.includes(`xmlns="${METADATA_NAMESPACE}"`));
+    assert.ok(result.includes(content));
+    assert.ok(result.includes('</metadata>'));
+  });
+});
+
+describe('generateAttributeDefinition', () => {
+  it('should generate string attribute definition', () => {
+    const xml = generateAttributeDefinition({
+      id: 'testAttr',
+      displayName: 'Test Attribute',
+      type: 'string',
+    });
+
+    assert.ok(xml.includes('attribute-id="testAttr"'));
+    assert.ok(xml.includes('<type>string</type>'));
+    assert.ok(
+      xml.includes(
+        '<display-name xml:lang="x-default">Test Attribute</display-name>',
+      ),
+    );
+    assert.ok(xml.includes('<min-length>0</min-length>'));
+  });
+
+  it('should generate boolean attribute definition', () => {
+    const xml = generateAttributeDefinition({
+      id: 'isEnabled',
+      type: 'boolean',
+      defaultValue: 'false',
+    });
+
+    assert.ok(xml.includes('<type>boolean</type>'));
+    assert.ok(xml.includes('<default-value>false</default-value>'));
+  });
+
+  it('should include description when provided', () => {
+    const xml = generateAttributeDefinition({
+      id: 'testAttr',
+      type: 'string',
+      description: 'This is a test',
+    });
+
+    assert.ok(
+      xml.includes(
+        '<description xml:lang="x-default">This is a test</description>',
+      ),
+    );
+  });
+
+  it('should set mandatory flag', () => {
+    const xml = generateAttributeDefinition({
+      id: 'requiredAttr',
+      type: 'string',
+      mandatory: true,
+    });
+
+    assert.ok(xml.includes('<mandatory-flag>true</mandatory-flag>'));
+  });
+
+  it('should generate enum attribute with values', () => {
+    const xml = generateAttributeDefinition({
+      id: 'status',
+      type: 'enum-of-string',
+      enumValues: [
+        { value: 'active', display: 'Active' },
+        { value: 'inactive', display: 'Inactive' },
+      ],
+    });
+
+    assert.ok(xml.includes('<type>enum-of-string</type>'));
+    assert.ok(xml.includes('<value>active</value>'));
+    assert.ok(
+      xml.includes('<display xml:lang="x-default">Active</display>'),
+    );
+    assert.ok(
+      xml.includes('<select-multiple-flag>false</select-multiple-flag>'),
+    );
+  });
+});
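
The escapeXml assertions above imply the usual five XML entity replacements plus a guard for null/undefined. lib/templates.js itself is not included in this diff view, so the following is only a sketch of an implementation that would satisfy these tests, not the package's actual code:

    // Order matters: escape '&' first so previously produced entities are not double-escaped.
    function escapeXml(value) {
      if (value === null || value === undefined) return '';
      return String(value)
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/"/g, '&quot;')
        .replace(/'/g, '&apos;');
    }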

package/test/utils.test.js
ADDED

@@ -0,0 +1,79 @@
+const { describe, it } = require('node:test');
+const assert = require('node:assert');
+const {
+  compareMigrationVersions,
+  generateMigrationName,
+} = require('../lib/utils');
+
+describe('compareMigrationVersions', () => {
+  it('should sort old format migrations correctly', () => {
+    const migrations = ['24.01.2', '24.01.1', '24.02.1', '23.12.1'];
+    const sorted = migrations.sort(compareMigrationVersions);
+    assert.deepStrictEqual(sorted, [
+      '23.12.1',
+      '24.01.1',
+      '24.01.2',
+      '24.02.1',
+    ]);
+  });
+
+  it('should sort new format migrations correctly', () => {
+    const migrations = [
+      '20251231_143052',
+      '20251231_100000',
+      '20251230_235959',
+    ];
+    const sorted = migrations.sort(compareMigrationVersions);
+    assert.deepStrictEqual(sorted, [
+      '20251230_235959',
+      '20251231_100000',
+      '20251231_143052',
+    ]);
+  });
+
+  it('should place old format before new format', () => {
+    const migrations = ['20251231_143052', '24.01.1'];
+    const sorted = migrations.sort(compareMigrationVersions);
+    assert.deepStrictEqual(sorted, ['24.01.1', '20251231_143052']);
+  });
+
+  it('should handle migrations with descriptions', () => {
+    const migrations = [
+      '20251231_143052_add_fields',
+      '20251231_100000_initial',
+    ];
+    const sorted = migrations.sort(compareMigrationVersions);
+    assert.deepStrictEqual(sorted, [
+      '20251231_100000_initial',
+      '20251231_143052_add_fields',
+    ]);
+  });
+});
+
+describe('generateMigrationName', () => {
+  it('should generate timestamp format without description', () => {
+    const name = generateMigrationName('./migrations');
+    assert.match(name, /^\d{8}_\d{6}$/);
+  });
+
+  it('should append sanitized description', () => {
+    const name = generateMigrationName('./migrations', 'Add Order Fields');
+    assert.match(name, /^\d{8}_\d{6}_add_order_fields$/);
+  });
+
+  it('should remove special characters from description', () => {
+    const name = generateMigrationName(
+      './migrations',
+      'Fix bug #123 (urgent!)',
+    );
+    assert.match(name, /^\d{8}_\d{6}_fix_bug_123_urgent$/);
+  });
+
+  it('should truncate long descriptions to 50 characters', () => {
+    const longDesc =
+      'this is a very long description that should be truncated to fifty characters maximum';
+    const name = generateMigrationName('./migrations', longDesc);
+    const descPart = name.split('_').slice(2).join('_');
+    assert.ok(descPart.length <= 50);
+  });
+});