@garthub/gart-npm 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/archive.js ADDED
@@ -0,0 +1,271 @@
1
+ const fs = require('node:fs');
2
+ const path = require('node:path');
3
+ const zlib = require('node:zlib');
4
+ const {listFilesRecursively} = require('./hash');
5
+
6
+ const LOCAL_FILE_HEADER_SIGNATURE = 0x04034b50;
7
+ const CENTRAL_DIRECTORY_HEADER_SIGNATURE = 0x02014b50;
8
+ const END_OF_CENTRAL_DIRECTORY_SIGNATURE = 0x06054b50;
9
+ const COMPRESSION_STORE = 0;
10
+ const COMPRESSION_DEFLATE = 8;
11
+ const VERSION_NEEDED = 20;
12
+
13
// Creates a ZIP archive at zipPath containing every file found under
// sourceDirectory. Entries are sorted by relative path and timestamped
// from each file's mtime.
// NOTE(review): entry count and offsets are written as 16/32-bit fields,
// so archives with >65535 files or >4GiB payloads would overflow — confirm
// the expected scale.
function createZipArchive(
  sourceDirectory,
  zipPath,
) {
  // Ensure the target directory exists and start from a fresh file.
  fs.mkdirSync(path.dirname(zipPath), {recursive: true});
  if (fs.existsSync(zipPath)) {
    fs.unlinkSync(zipPath);
  }

  // Gather every file with its archive-relative (forward-slash) path.
  // NOTE(review): localeCompare ordering depends on the host locale; if
  // byte-for-byte reproducible archives matter, confirm this is intended.
  const files = listFilesRecursively(sourceDirectory)
    .map((absolutePath) => ({
      absolutePath,
      relativePath: normalizeArchivePath(path.relative(sourceDirectory, absolutePath)),
      stats: fs.statSync(absolutePath),
    }))
    .sort((
      left,
      right,
    ) => left.relativePath.localeCompare(right.relativePath));

  const localParts = [];    // local header + name + payload per entry
  const centralParts = [];  // central directory record + name per entry
  let offset = 0;           // byte offset of the next local header

  for (const file of files) {
    const fileName = Buffer.from(file.relativePath, 'utf8');
    const content = fs.readFileSync(file.absolutePath);
    const compressed = zlib.deflateRawSync(content);
    // Store uncompressed when deflate does not actually shrink the data.
    const compressionMethod = compressed.length < content.length ? COMPRESSION_DEFLATE : COMPRESSION_STORE;
    const payload = compressionMethod === COMPRESSION_DEFLATE ? compressed : content;
    const crc32 = computeCrc32(content);
    const timestamp = toDosDateTime(file.stats.mtime);

    // Local file header: 30 fixed bytes.
    const localHeader = Buffer.alloc(30);
    localHeader.writeUInt32LE(LOCAL_FILE_HEADER_SIGNATURE, 0);
    localHeader.writeUInt16LE(VERSION_NEEDED, 4);
    localHeader.writeUInt16LE(0, 6);                  // general purpose flags
    localHeader.writeUInt16LE(compressionMethod, 8);
    localHeader.writeUInt16LE(timestamp.time, 10);
    localHeader.writeUInt16LE(timestamp.date, 12);
    localHeader.writeUInt32LE(crc32, 14);
    localHeader.writeUInt32LE(payload.length, 18);    // compressed size
    localHeader.writeUInt32LE(content.length, 22);    // uncompressed size
    localHeader.writeUInt16LE(fileName.length, 26);
    localHeader.writeUInt16LE(0, 28);                 // extra field length

    localParts.push(localHeader, fileName, payload);

    // Matching central directory record: 46 fixed bytes.
    const centralHeader = Buffer.alloc(46);
    centralHeader.writeUInt32LE(CENTRAL_DIRECTORY_HEADER_SIGNATURE, 0);
    centralHeader.writeUInt16LE(VERSION_NEEDED, 4);   // version made by
    centralHeader.writeUInt16LE(VERSION_NEEDED, 6);   // version needed
    centralHeader.writeUInt16LE(0, 8);                // general purpose flags
    centralHeader.writeUInt16LE(compressionMethod, 10);
    centralHeader.writeUInt16LE(timestamp.time, 12);
    centralHeader.writeUInt16LE(timestamp.date, 14);
    centralHeader.writeUInt32LE(crc32, 16);
    centralHeader.writeUInt32LE(payload.length, 20);
    centralHeader.writeUInt32LE(content.length, 24);
    centralHeader.writeUInt16LE(fileName.length, 28);
    centralHeader.writeUInt16LE(0, 30);               // extra field length
    centralHeader.writeUInt16LE(0, 32);               // file comment length
    centralHeader.writeUInt16LE(0, 34);               // disk number start
    centralHeader.writeUInt16LE(0, 36);               // internal attributes
    centralHeader.writeUInt32LE(0, 38);               // external attributes
    centralHeader.writeUInt32LE(offset, 42);          // local header offset
    centralParts.push(centralHeader, fileName);

    offset += localHeader.length + fileName.length + payload.length;
  }

  // End-of-central-directory record (22 bytes). At this point `offset`
  // is the byte position where the central directory begins.
  const centralDirectory = Buffer.concat(centralParts);
  const endOfCentralDirectory = Buffer.alloc(22);
  endOfCentralDirectory.writeUInt32LE(END_OF_CENTRAL_DIRECTORY_SIGNATURE, 0);
  endOfCentralDirectory.writeUInt16LE(0, 4);          // this disk number
  endOfCentralDirectory.writeUInt16LE(0, 6);          // central dir disk
  endOfCentralDirectory.writeUInt16LE(files.length, 8);
  endOfCentralDirectory.writeUInt16LE(files.length, 10);
  endOfCentralDirectory.writeUInt32LE(centralDirectory.length, 12);
  endOfCentralDirectory.writeUInt32LE(offset, 16);    // central dir offset
  endOfCentralDirectory.writeUInt16LE(0, 20);         // comment length

  fs.writeFileSync(zipPath, Buffer.concat([...localParts, centralDirectory, endOfCentralDirectory]));
}
97
+
98
/**
 * Extracts every file entry of the ZIP archive at zipPath into
 * destinationPath, recreating the destination directory from scratch.
 */
function extractZipArchive(
  zipPath,
  destinationPath,
) {
  fs.rmSync(destinationPath, {recursive: true, force: true});
  fs.mkdirSync(destinationPath, {recursive: true});

  const archive = fs.readFileSync(zipPath);

  for (const entry of readCentralDirectoryEntries(archive)) {
    // Directory entries carry a trailing slash and have no payload.
    const isDirectoryEntry = entry.fileName.endsWith('/');
    if (isDirectoryEntry) {
      continue;
    }

    const targetPath = resolveEntryDestination(destinationPath, entry.fileName);
    const content = readEntryContent(archive, entry);

    fs.mkdirSync(path.dirname(targetPath), {recursive: true});
    fs.writeFileSync(targetPath, content);
  }
}
120
+
121
/**
 * Parses the archive's central directory and returns one descriptor per
 * entry: {fileName, compressionMethod, crc32, compressedSize,
 * uncompressedSize, localHeaderOffset}.
 * @throws when the end record or an entry signature is missing/invalid.
 */
function readCentralDirectoryEntries(archive) {
  const endRecordOffset = findEndOfCentralDirectoryOffset(archive);
  if (endRecordOffset < 0) {
    throw new Error('Failed to extract ZIP archive: end of central directory record was not found.');
  }

  const entryCount = archive.readUInt16LE(endRecordOffset + 10);
  const entries = [];
  let cursor = archive.readUInt32LE(endRecordOffset + 16);
  let remaining = entryCount;

  while (remaining > 0) {
    remaining -= 1;

    if (archive.readUInt32LE(cursor) !== CENTRAL_DIRECTORY_HEADER_SIGNATURE) {
      throw new Error('Failed to extract ZIP archive: central directory entry is invalid.');
    }

    // Variable-length trailer sizes needed to advance to the next record.
    const fileNameLength = archive.readUInt16LE(cursor + 28);
    const extraFieldLength = archive.readUInt16LE(cursor + 30);
    const fileCommentLength = archive.readUInt16LE(cursor + 32);

    entries.push({
      fileName: archive.slice(cursor + 46, cursor + 46 + fileNameLength).toString('utf8'),
      compressionMethod: archive.readUInt16LE(cursor + 10),
      crc32: archive.readUInt32LE(cursor + 16),
      compressedSize: archive.readUInt32LE(cursor + 20),
      uncompressedSize: archive.readUInt32LE(cursor + 24),
      localHeaderOffset: archive.readUInt32LE(cursor + 42),
    });

    cursor += 46 + fileNameLength + extraFieldLength + fileCommentLength;
  }

  return entries;
}
162
+
163
/**
 * Reads and decompresses the payload for one central-directory entry,
 * validating the local header signature, the uncompressed size, and the
 * CRC-32 checksum before returning the content buffer.
 */
function readEntryContent(
  archive,
  entry,
) {
  const headerOffset = entry.localHeaderOffset;

  if (archive.readUInt32LE(headerOffset) !== LOCAL_FILE_HEADER_SIGNATURE) {
    throw new Error(`Failed to extract ZIP archive: local header is invalid for ${entry.fileName}.`);
  }

  // Skip the 30-byte fixed header plus the variable name/extra fields.
  const nameLength = archive.readUInt16LE(headerOffset + 26);
  const extraLength = archive.readUInt16LE(headerOffset + 28);
  const dataStart = headerOffset + 30 + nameLength + extraLength;
  const rawPayload = archive.slice(dataStart, dataStart + entry.compressedSize);

  let content;

  switch (entry.compressionMethod) {
    case COMPRESSION_STORE:
      content = rawPayload;
      break;
    case COMPRESSION_DEFLATE:
      content = zlib.inflateRawSync(rawPayload);
      break;
    default:
      throw new Error(
        `Failed to extract ZIP archive: unsupported compression method ${entry.compressionMethod} for ${entry.fileName}.`,
      );
  }

  if (content.length !== entry.uncompressedSize) {
    throw new Error(`Failed to extract ZIP archive: size mismatch for ${entry.fileName}.`);
  }

  if (computeCrc32(content) !== entry.crc32) {
    throw new Error(`Failed to extract ZIP archive: CRC mismatch for ${entry.fileName}.`);
  }

  return content;
}
198
+
199
/**
 * Scans backwards for the end-of-central-directory signature. Returns its
 * byte offset, or -1 when no record exists within the maximum possible
 * trailer span (22-byte record plus a 65535-byte comment).
 */
function findEndOfCentralDirectoryOffset(archive) {
  const lowestCandidate = Math.max(0, archive.length - 65557);
  let candidate = archive.length - 22;

  while (candidate >= lowestCandidate) {
    if (archive.readUInt32LE(candidate) === END_OF_CENTRAL_DIRECTORY_SIGNATURE) {
      return candidate;
    }
    candidate -= 1;
  }

  return -1;
}
210
+
211
/**
 * Maps an archive entry name to a safe absolute path inside
 * destinationPath, rejecting empty names and any traversal attempt.
 */
function resolveEntryDestination(
  destinationPath,
  fileName,
) {
  const segments = normalizeArchivePath(fileName)
    .split('/')
    .filter(Boolean);

  if (segments.length === 0) {
    throw new Error('Failed to extract ZIP archive: encountered an empty file entry.');
  }

  for (const segment of segments) {
    if (segment === '..') {
      throw new Error(`Failed to extract ZIP archive: unsafe path ${fileName}.`);
    }
  }

  // Defence in depth: even without '..' segments, verify the resolved
  // path stays inside the destination directory.
  const resolvedPath = path.resolve(destinationPath, ...segments);
  const relativePath = path.relative(destinationPath, resolvedPath);
  const escapesDestination = relativePath.startsWith('..') || path.isAbsolute(relativePath);

  if (escapesDestination) {
    throw new Error(`Failed to extract ZIP archive: unsafe path ${fileName}.`);
  }

  return resolvedPath;
}
235
+
236
/**
 * Converts a platform relative path to ZIP form by turning every
 * backslash into a forward slash.
 */
function normalizeArchivePath(relativePath) {
  return String(relativePath).split('\\').join('/');
}
239
+
240
/**
 * Encodes a Date (or date-like value) as MS-DOS date/time fields for ZIP
 * headers. Uses local time; the year is clamped to the DOS range
 * 1980-2107 and seconds are stored at two-second resolution.
 * @returns {{time: number, date: number}} packed 16-bit field values.
 */
function toDosDateTime(date) {
  const when = date instanceof Date ? date : new Date(date);
  const clampedYear = Math.min(Math.max(when.getFullYear(), 1980), 2107);

  const dosDate = (((clampedYear - 1980) & 0x7f) << 9)
    | (((when.getMonth() + 1) & 0x0f) << 5)
    | (when.getDate() & 0x1f);
  const dosTime = ((when.getHours() & 0x1f) << 11)
    | ((when.getMinutes() & 0x3f) << 5)
    | Math.floor(when.getSeconds() / 2);

  return {time: dosTime, date: dosDate};
}
249
+
250
/**
 * Computes the CRC-32 checksum (reflected polynomial 0xEDB88320, as used
 * by ZIP/gzip) of a buffer, one bit at a time, returning an unsigned
 * 32-bit integer.
 */
function computeCrc32(buffer) {
  let register = ~0;

  for (let position = 0; position < buffer.length; position += 1) {
    register ^= buffer[position];

    for (let bit = 0; bit < 8; bit += 1) {
      register = (register >>> 1) ^ ((register & 1) ? 0xedb88320 : 0);
    }
  }

  return ~register >>> 0;
}
264
+
265
// Public API: archive creation/extraction plus the lower-level helpers
// exposed for direct testing.
module.exports = {
  createZipArchive,
  extractZipArchive,
  computeCrc32,
  readCentralDirectoryEntries,
  resolveEntryDestination,
};
@@ -0,0 +1,150 @@
1
+ const fs = require('node:fs');
2
+ const path = require('node:path');
3
+ const {computeFileSha256, listFilesRecursively} = require('./hash');
4
+ const {createZipArchive} = require('./archive');
5
+
6
/**
 * Builds the skills ZIP artifact: collects files from every configured
 * (and existing) source directory, stages them under
 * `<buildDir>/build/staging`, zips the staging tree into
 * `<buildDir>/dist/<artifactId>-skills.zip`, and writes a `build.json`
 * manifest with SHA-256 checksums of the archive and each staged file.
 *
 * @param config - expects {cwd, srcDirs, buildDir}.
 * @param options.archiver - optional (sourceDir, zipPath) function,
 *   defaulting to createZipArchive; lets tests stub out zipping.
 * @returns {archivePath, archiveSha256, manifestPath, manifest}
 * @throws when two source directories provide the same relative path.
 */
function buildSkillsArchive(
  config,
  options = {},
) {
  const archiver = options.archiver || createZipArchive;
  const projectInfo = readProjectInfo(config.cwd);
  const existingSourceDirectories = collectExistingSourceDirectories(config);
  const collectedFiles = collectSourceFiles(existingSourceDirectories);
  // Fail fast before touching the filesystem if inputs collide.
  validateDuplicateRelativePaths(collectedFiles);

  const stagingDirectory = path.join(config.buildDir, 'build', 'staging');
  const distDirectory = path.join(config.buildDir, 'dist');
  const archivePath = path.join(distDirectory, `${projectInfo.artifactId}-skills.zip`);
  const manifestPath = path.join(config.buildDir, 'build.json');

  // Rebuild the staging tree from scratch so stale files never leak in.
  fs.rmSync(stagingDirectory, {recursive: true, force: true});
  fs.mkdirSync(stagingDirectory, {recursive: true});
  fs.mkdirSync(distDirectory, {recursive: true});

  stageCollectedFiles(stagingDirectory, collectedFiles);
  archiver(stagingDirectory, archivePath);
  const archiveSha256 = computeFileSha256(archivePath);

  const manifest = {
    version: 1,
    generatedAt: new Date().toISOString(),
    projectName: projectInfo.packageName,
    artifactId: projectInfo.artifactId,
    archivePath,
    archiveSha256,
    stagingDirectory,
    sourceDirectories: existingSourceDirectories.map((sourceDirectory) => ({
      configuredPath: sourceDirectory.configuredPath,
      absolutePath: sourceDirectory.absolutePath,
    })),
    files: collectedFiles.map((file) => ({
      relativePath: file.relativePath,
      sourceDirectory: file.sourceDirectory,
      sha256: computeFileSha256(file.absolutePath),
    })),
  };

  fs.writeFileSync(manifestPath, JSON.stringify(manifest, null, 2));

  return {
    archivePath,
    archiveSha256,
    manifestPath,
    manifest,
  };
}
57
+
58
/**
 * Resolves config.srcDirs against config.cwd and keeps only entries that
 * exist on disk and are directories.
 * @returns array of {configuredPath, absolutePath}.
 */
function collectExistingSourceDirectories(config) {
  const resolvedDirectories = config.srcDirs.map((configuredPath) => ({
    configuredPath,
    absolutePath: path.resolve(config.cwd, configuredPath),
  }));

  return resolvedDirectories.filter(({absolutePath}) => {
    if (!fs.existsSync(absolutePath)) {
      return false;
    }
    return fs.statSync(absolutePath).isDirectory();
  });
}
66
+
67
/**
 * Walks every source directory and returns one record per file:
 * {absolutePath, relativePath (forward-slash form), sourceDirectory
 * (the configured path it came from)}.
 */
function collectSourceFiles(sourceDirectories) {
  return sourceDirectories.flatMap((sourceDirectory) =>
    listFilesRecursively(sourceDirectory.absolutePath).map((absolutePath) => ({
      absolutePath,
      relativePath: normalizeRelativePath(path.relative(sourceDirectory.absolutePath, absolutePath)),
      sourceDirectory: sourceDirectory.configuredPath,
    })));
}
82
+
83
/**
 * Throws when two collected files map to the same relative path; returns
 * silently otherwise. The error names both conflicting source directories.
 */
function validateDuplicateRelativePaths(collectedFiles) {
  const firstOwnerByPath = new Map();

  for (const file of collectedFiles) {
    const existing = firstOwnerByPath.get(file.relativePath);

    if (existing !== undefined) {
      throw new Error(
        `Duplicate build resource path detected: ${file.relativePath} from ${existing.sourceDirectory} and ${file.sourceDirectory}.`,
      );
    }

    firstOwnerByPath.set(file.relativePath, file);
  }
}
98
+
99
/**
 * Copies every collected file into the staging directory, recreating its
 * relative path (stored with '/' separators) as nested directories.
 */
function stageCollectedFiles(
  stagingDirectory,
  collectedFiles,
) {
  for (const {absolutePath, relativePath} of collectedFiles) {
    const segments = relativePath.split('/');
    const targetPath = path.join(stagingDirectory, ...segments);

    fs.mkdirSync(path.dirname(targetPath), {recursive: true});
    fs.copyFileSync(absolutePath, targetPath);
  }
}
109
+
110
/**
 * Derives project naming from package.json in cwd. Falls back to the
 * directory basename when package.json is absent or lacks a name.
 * @returns {packageName, artifactId, version} (version null when unknown).
 */
function readProjectInfo(cwd) {
  const packageJsonPath = path.join(cwd, 'package.json');

  if (!fs.existsSync(packageJsonPath)) {
    // No manifest: use the directory name for both identifiers.
    const fallbackName = path.basename(cwd);
    return {packageName: fallbackName, artifactId: fallbackName, version: null};
  }

  const manifest = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));
  const packageName = manifest.name || path.basename(cwd);

  return {
    packageName,
    artifactId: toArtifactId(packageName),
    version: manifest.version || null,
  };
}
130
+
131
/**
 * Strips any scope/namespace prefix from an npm package name, e.g.
 * "@scope/pkg" -> "pkg"; names without '/' are returned unchanged.
 */
function toArtifactId(packageName) {
  const name = String(packageName);
  const separatorIndex = name.lastIndexOf('/');

  return separatorIndex === -1 ? name : name.slice(separatorIndex + 1);
}
138
+
139
/**
 * Rewrites a platform-specific relative path with '/' separators so
 * manifests and archive entries are platform independent.
 */
function normalizeRelativePath(relativePath) {
  return relativePath.replaceAll(path.sep, '/');
}
142
+
143
// buildSkillsArchive is the primary entry point; the helpers are exported
// for unit testing.
module.exports = {
  buildSkillsArchive,
  collectExistingSourceDirectories,
  collectSourceFiles,
  validateDuplicateRelativePaths,
  readProjectInfo,
  toArtifactId,
};
package/src/cli.js ADDED
@@ -0,0 +1,49 @@
1
+ #!/usr/bin/env node
2
+ const {loadConfig} = require('./config');
3
+ const {runResolve} = require('./commands/resolve');
4
+ const {runBuild} = require('./commands/build');
5
+ const {runClean} = require('./commands/clean');
6
+ const {runPublish} = require('./commands/publish');
7
+ const {resolveExecutionCwd} = require('./runtime');
8
+
9
// Maps each CLI sub-command name to its async handler.
const COMMANDS = {
  resolve: runResolve,
  build: runBuild,
  clean: runClean,
  publish: runPublish,
};
15
+
16
/**
 * CLI entry point: parses process.argv, resolves the effective working
 * directory and configuration, then dispatches to the requested
 * sub-command handler.
 * @throws on unknown command names.
 */
async function main() {
  const [, , commandName, ...args] = process.argv;
  const wantsHelp = !commandName || commandName === '--help' || commandName === '-h';

  if (wantsHelp) {
    printHelp();
    return;
  }

  const command = COMMANDS[commandName];
  if (!command) {
    throw new Error(`Unknown command: ${commandName}`);
  }

  const cwd = resolveExecutionCwd(commandName, args, process.env, process.cwd());
  const context = {commandName, args, cwd};
  context.config = loadConfig(context.cwd);

  await command(context);
}
40
+
41
// Prints the one-line usage summary for the CLI.
function printHelp() {
  const usage = 'gart <resolve|build|clean|publish>';
  console.log(usage);
}
44
+
45
// Top-level runner: report any failure on stderr with the CLI prefix and
// signal failure via the exit code (exitCode, not exit(), so pending I/O
// can flush).
main().catch((error) => {
  const message = error && error.message ? error.message : String(error);
  console.error(`[gart] ${message}`);
  process.exitCode = 1;
});
@@ -0,0 +1,19 @@
1
+ const fs = require('node:fs');
2
+ const {buildSkillsArchive, collectExistingSourceDirectories} = require('../build-resources');
3
+
4
/**
 * `gart build` handler: ensures the build directory exists, produces the
 * skills archive and manifest, then reports where everything was written.
 */
async function runBuild(context) {
  const {config, archiver} = context;

  fs.mkdirSync(config.buildDir, {recursive: true});

  const existingSourceDirectories = collectExistingSourceDirectories(config);
  const build = buildSkillsArchive(config, {archiver});
  const configuredList = config.srcDirs.join(', ') || '(none)';
  const existingList = existingSourceDirectories.map((entry) => entry.configuredPath).join(', ') || '(none)';

  console.log('[gart] Build completed.');
  console.log(`[gart] Configured source directories: ${configuredList}.`);
  console.log(`[gart] Existing source directories: ${existingList}.`);
  console.log(`[gart] Archive written to ${build.archivePath}.`);
  console.log(`[gart] Build manifest written to ${build.manifestPath}.`);
}
16
+
17
// Command entry point consumed by the CLI dispatcher.
module.exports = {
  runBuild,
};
@@ -0,0 +1,147 @@
1
+ const fs = require('node:fs');
2
+ const path = require('node:path');
3
+ const {computeFileSha256} = require('../hash');
4
+ const {cleanupManagedSkills, readManagedIndex} = require('../managed-skills');
5
+
6
/**
 * `gart clean` handler: removes managed skill files and previously cached
 * artifacts, then rewrites the managed index (`gart.json`) to describe the
 * outcome. On failure the index is rewritten with status 'cleanFailed'
 * and the error is rethrown.
 */
async function runClean(context) {
  const {config} = context;
  const indexPath = path.join(config.managedSkillsDir, 'gart.json');

  // Without an index file there is nothing this tool manages.
  if (!fs.existsSync(indexPath)) {
    console.log('[gart] Nothing to clean. Managed index file was not found.');
    return;
  }

  const previousIndex = readManagedIndex(config.managedSkillsDir);

  try {
    const cleaned = cleanupManagedSkills(config);
    const cacheCleanup = cleanupResolvedArtifactCache(previousIndex.resolvedArtifacts || []);
    // Successor index: keep prior fields, record what was removed/skipped,
    // and reset the resolution bookkeeping.
    const nextIndex = {
      ...previousIndex,
      version: previousIndex.version || 1,
      generatedAt: new Date().toISOString(),
      status: 'skillsCleaned',
      managedDir: config.managedSkillsDir,
      managedFiles: cleaned.managedFiles,
      removedFiles: cleaned.removedFiles,
      removedCacheFiles: cacheCleanup.removedCacheFiles,
      skippedCacheFiles: cacheCleanup.skippedCacheFiles,
      writtenFiles: [],
      resolvedArtifacts: [],
      unresolved: [],
      note: 'Managed skill cleanup completed successfully.',
    };

    fs.writeFileSync(indexPath, JSON.stringify(nextIndex, null, 2));

    console.log(`[gart] Removed ${cleaned.removedFiles.length} managed skill file(s).`);
    console.log(`[gart] Removed ${cacheCleanup.removedCacheFiles.length} cached artifact file(s).`);
    if (cacheCleanup.skippedCacheFiles.length > 0) {
      console.log(`[gart] Skipped ${cacheCleanup.skippedCacheFiles.length} cached artifact file(s) because their checksums no longer match the managed index.`);
    }
    console.log(`[gart] Clean index written to ${indexPath}.`);
  } catch (error) {
    // Record the failure in the index so later runs can see what happened.
    const failedIndex = {
      ...previousIndex,
      version: previousIndex.version || 1,
      generatedAt: new Date().toISOString(),
      status: 'cleanFailed',
      managedDir: config.managedSkillsDir,
      cleanError: error.message,
      note: 'Managed skill cleanup failed.',
    };

    fs.writeFileSync(indexPath, JSON.stringify(failedIndex, null, 2));
    throw error;
  }
}
59
+
60
/**
 * Deletes cached artifact/POM files recorded in the previous managed
 * index. A file whose on-disk SHA-256 no longer matches the recorded
 * checksum is left in place and reported as skipped; empty parent
 * directories of deleted files are pruned.
 * @returns {removedCacheFiles, skippedCacheFiles}
 */
function cleanupResolvedArtifactCache(resolvedArtifacts) {
  const removedCacheFiles = [];
  const skippedCacheFiles = [];

  for (const entry of collectCacheCleanupEntries(resolvedArtifacts)) {
    const filePath = entry.path;

    if (!fs.existsSync(filePath)) {
      continue;
    }

    if (entry.sha256) {
      const actualSha256 = computeFileSha256(filePath);

      if (actualSha256 !== entry.sha256) {
        // Checksum drift means the file is no longer the one we wrote;
        // refuse to delete it.
        skippedCacheFiles.push({
          path: filePath,
          kind: entry.kind,
          expectedSha256: entry.sha256,
          actualSha256,
          reason: 'sha256Mismatch',
        });
        continue;
      }
    }

    fs.unlinkSync(filePath);
    removeEmptyParentDirectories(path.dirname(filePath));
    removedCacheFiles.push(filePath);
  }

  return {removedCacheFiles, skippedCacheFiles};
}
95
+
96
/**
 * Flattens resolved-artifact records into deduplicated cleanup candidates
 * ({path, sha256, kind}); entries without a usable string path are
 * dropped, and duplicates of the same kind+path appear only once.
 */
function collectCacheCleanupEntries(resolvedArtifacts) {
  const candidateEntries = [];
  const seen = new Set();

  const addCandidate = (entry) => {
    if (!entry.path || typeof entry.path !== 'string') {
      return;
    }

    const key = `${entry.kind}:${entry.path}`;
    if (seen.has(key)) {
      return;
    }

    seen.add(key);
    candidateEntries.push(entry);
  };

  for (const artifact of resolvedArtifacts) {
    addCandidate({
      path: artifact.artifactPath,
      sha256: artifact.archiveSha256 || null,
      kind: 'artifact',
    });
    addCandidate({
      path: artifact.pomPath,
      sha256: artifact.pomSha256 || null,
      kind: 'pom',
    });
  }

  return candidateEntries;
}
129
+
130
/**
 * Walks upward from startDir, removing each directory that is empty.
 * Stops at the first non-empty directory, at stopDir (when given; the
 * boundary itself is never removed), or at the filesystem root.
 *
 * Fixes: the climb previously had no upper bound, so a chain of empty
 * ancestors could be deleted far above the cache area, and at the
 * filesystem root `path.dirname` stops changing, making the final
 * iteration attempt `rmdirSync` on the root instead of stopping cleanly.
 *
 * @param startDir directory to start pruning from.
 * @param stopDir optional inclusive boundary (e.g. the cache root);
 *   defaults to null, preserving the original climb-until-non-empty
 *   behavior for existing callers.
 */
function removeEmptyParentDirectories(startDir, stopDir = null) {
  const boundary = stopDir === null ? null : path.resolve(stopDir);
  let currentDir = startDir;

  while (fs.existsSync(currentDir) && fs.statSync(currentDir).isDirectory()) {
    if (boundary !== null && path.resolve(currentDir) === boundary) {
      return; // never remove the boundary directory itself
    }

    if (fs.readdirSync(currentDir).length > 0) {
      return;
    }

    fs.rmdirSync(currentDir);

    const parentDir = path.dirname(currentDir);
    if (parentDir === currentDir) {
      return; // reached the filesystem root
    }
    currentDir = parentDir;
  }
}
142
+
143
// runClean is the CLI entry point; the cache helpers are exported for
// unit testing.
module.exports = {
  runClean,
  cleanupResolvedArtifactCache,
  collectCacheCleanupEntries,
};
@@ -0,0 +1,15 @@
1
+ const {publishSkillsPackage} = require('../publish-artifacts');
2
+
3
/**
 * `gart publish` handler: delegates to publishSkillsPackage and reports
 * the published coordinates, artifact/POM locations, and manifest path.
 */
async function runPublish(context) {
  const {config, archiver} = context;
  const publication = await publishSkillsPackage(config, {archiver});
  const {coordinates, publishedArtifact, publishedPom, publishManifestPath} = publication;

  console.log(`[gart] Published ${coordinates.coordinates}.`);
  console.log(`[gart] Archive published to ${publishedArtifact.location}.`);
  console.log(`[gart] POM published to ${publishedPom.location}.`);
  console.log(`[gart] Publish manifest written to ${publishManifestPath}.`);
}
12
+
13
// Command entry point consumed by the CLI dispatcher.
module.exports = {
  runPublish,
};