@intecoag/inteco-cli 1.4.0 → 1.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@intecoag/inteco-cli",
3
- "version": "1.4.0",
3
+ "version": "1.5.0",
4
4
  "description": "CLI-Tools for Inteco",
5
5
  "main": "src/index.js",
6
6
  "type": "module",
@@ -12,6 +12,8 @@
12
12
  "license": "ISC",
13
13
  "repository": "https://github.com/intecoag/IntecoCLI",
14
14
  "dependencies": {
15
+ "@azure/identity": "^4.13.0",
16
+ "@azure/storage-blob": "^12.31.0",
15
17
  "7zip-bin": "^5.2.0",
16
18
  "adb-ts": "^6.2.0",
17
19
  "application-config": "^3.0.0",
@@ -26,7 +28,7 @@
26
28
  "mysql-await": "^2.1.8",
27
29
  "n-readlines": "^3.4.0",
28
30
  "node-7z": "^3.0.0",
29
- "ora": "^9.0.0",
31
+ "ora": "^9.3.0",
30
32
  "prompts": "^2.4.2",
31
33
  "properties-parser": "^0.6.0",
32
34
  "readline": "^1.3.0",
@@ -37,8 +39,8 @@
37
39
  "inteco": "src/index.js"
38
40
  },
39
41
  "devDependencies": {
40
- "@commitlint/cli": "^20.4.0",
41
- "@commitlint/config-conventional": "^20.4.0",
42
+ "@commitlint/cli": "^20.4.1",
43
+ "@commitlint/config-conventional": "^20.4.1",
42
44
  "husky": "^9.1.7",
43
45
  "semantic-release": "^25.0.3"
44
46
  },
package/src/index.js CHANGED
@@ -20,6 +20,7 @@ import extdSearch from './modules/extdSearch.js';
20
20
  import syncConfig from './modules/syncConfig.js';
21
21
  import configMutation from './modules/configMutation.js';
22
22
  import bundleProduct from './modules/bundleProduct.js';
23
+ import { azureCreateSyncConfig, azurePush, azurePull } from './modules/azureSync.js';
23
24
 
24
25
  import updateNotifier from 'update-notifier';
25
26
 
@@ -97,6 +98,15 @@ switch (cli.input[0]) {
97
98
  case "changelog":
98
99
  showChangelog();
99
100
  break;
101
+ case "azure_sync_config":
102
+ azureCreateSyncConfig();
103
+ break;
104
+ case "azure_sync_push":
105
+ azurePush();
106
+ break;
107
+ case "azure_sync_pull":
108
+ azurePull();
109
+ break;
100
110
  default:
101
111
  cli.showHelp()
102
112
  break;
@@ -0,0 +1,540 @@
1
+ import chalk from "chalk";
2
+ import crypto from "crypto";
3
+ import fs from "fs";
4
+ import path from "path";
5
+ import prompts from "prompts";
6
+ import YAML from "yaml";
7
+ import ora from "ora";
8
+ import { AzureHelper } from "../utils/azure/azure.js";
9
+
10
+ const defaultAccountName = "intecodev";
11
+
12
// Creates (or overwrites) the .az-sync configuration file in the current
// working directory. Prompts interactively for the storage account, the
// target container (with autocompletion when the container list can be
// fetched) and the include globs, pre-filling answers from any existing config.
export async function azureCreateSyncConfig() {
    console.log()

    const configPath = path.join(process.cwd(), ".az-sync");
    let existingConfig = {};

    // Load the previous config (if any) so its values can be offered as
    // prompt defaults; a parse failure is reported but not fatal.
    if (fs.existsSync(configPath)) {
        try {
            const rawConfig = fs.readFileSync(configPath, "utf-8");
            existingConfig = YAML.parse(rawConfig) ?? {};
        } catch (error) {
            console.log();
            console.log(chalk.red("Failed to read existing .az-sync file."));
            console.log(error?.message ?? error);
            console.log();
        }
    }

    // Present stored includes as a single comma-separated string for editing.
    const defaultincludes = Array.isArray(existingConfig.includes)
        ? existingConfig.includes.join(", ")
        : (existingConfig.includes ?? "");

    // Flipped to false by the onCancel handlers when the user aborts (Ctrl-C).
    let success = true;

    const storageResponse = await prompts([
        {
            type: "text",
            name: "storageAccount",
            message: "Storage account?",
            initial: existingConfig.storageAccount ?? defaultAccountName
        }
    ], {
        onCancel: () => {
            console.log();
            console.log(chalk.red("Cancelled operation."));
            console.log();
            success = false;
        }
    });

    if (!success) {
        return;
    }

    const storageAccount = storageResponse.storageAccount ?? "";

    // Try to list the account's containers so the next prompt can offer
    // autocompletion; on failure fall back to free-text input.
    let containerChoices = [];
    if (storageAccount) {
        try {
            const azure = new AzureHelper();
            const containers = await azure.listContainers(storageAccount);
            containerChoices = containers.map(name => ({ title: name, value: name }));
        } catch (error) {
            console.log();
            console.log(chalk.yellow("Could not load containers. Falling back to manual input."));
            console.log(error?.message ?? error);
            console.log();
            containerChoices = [];
        }
    }

    const responses = await prompts([
        {
            // NOTE(review): for the "autocomplete" type, prompts expects
            // `initial` to be a choice index; passing the stored container
            // name may not preselect it — confirm against prompts docs.
            type: containerChoices.length > 0 ? "autocomplete" : "text",
            name: "container",
            message: "Azure container?",
            choices: containerChoices,
            initial: existingConfig.container ?? ""
        },
        {
            type: "text",
            name: "includes",
            message: "File types (glob, comma-separated)?",
            initial: defaultincludes
        }
    ], {
        onCancel: () => {
            console.log();
            console.log(chalk.red("Cancelled operation."));
            console.log();
            success = false;
        }
    });

    if (!success) {
        return;
    }

    // Split the comma-separated globs back into a trimmed, non-empty list.
    const includes = (responses.includes ?? "")
        .split(",")
        .map(value => value.trim())
        .filter(Boolean);

    const config = {
        storageAccount,
        container: responses.container ?? "",
        includes
    };

    fs.writeFileSync(configPath, YAML.stringify(config), "utf-8");
    console.log();
    console.log(chalk.green("Saved .az-sync configuration."));
    console.log();
}
117
+
118
// Pushes local changes to Azure Blob Storage based on every .az-sync
// configuration found beneath the current directory. Remote blobs without a
// matching local file are deleted.
export async function azurePush() {
    console.log();

    const syncState = await collectSyncState(process.cwd());

    if (!syncState || syncState.size === 0) {
        console.log(chalk.yellow("No files found to sync."));
        console.log();
        return;
    }

    const helper = new AzureHelper();
    const plannedOperations = await buildPushOperations(syncState, helper);
    await executeOperations("push", plannedOperations);
}
135
+
136
// Pulls remote blobs into local files based on every .az-sync configuration
// found beneath the current directory. Never deletes local files.
export async function azurePull() {
    console.log();

    const syncState = await collectSyncState(process.cwd());

    if (!syncState || syncState.size === 0) {
        console.log(chalk.yellow("No files found to sync."));
        console.log();
        return;
    }

    const helper = new AzureHelper();
    const plannedOperations = await buildPullOperations(syncState, helper);
    await executeOperations("pull", plannedOperations);
}
153
+
154
// Walks the tree rooted at startDir and returns a Map of sync entries,
// keyed by the path of the .az-sync file that governs each entry.
async function collectSyncState(startDir) {
    const collected = new Map();
    await walkSyncTree(startDir, null, null, collected);
    return collected;
}
159
+
160
// Recursively walks the directory tree, tracking the nearest .az-sync config
// (a config in a subdirectory overrides the inherited one for that subtree)
// and recording every file that matches the active config's include globs.
// `.git` and `node_modules` directories are skipped entirely.
async function walkSyncTree(currentDir, activeConfig, activeRootDir, state) {
    const configPath = path.join(currentDir, ".az-sync");

    // A .az-sync in this directory replaces the inherited config for this subtree.
    if (fs.existsSync(configPath)) {
        const loadedConfig = loadSyncConfig(configPath);
        if (loadedConfig) {
            activeConfig = loadedConfig;
            activeRootDir = currentDir;
        }
    }

    // Register an entry for the active config (keyed by its file path) so
    // even configs with zero matching files appear in the state map.
    if (activeConfig && activeRootDir) {
        const key = activeConfig.configPath;
        if (!state.has(key)) {
            state.set(key, {
                config: activeConfig,
                rootDir: activeRootDir,
                localFiles: new Map()
            });
        }
    }

    const entries = await fs.promises.readdir(currentDir, { withFileTypes: true });

    for (const entry of entries) {
        if (entry.name === ".git" || entry.name === "node_modules") {
            continue;
        }

        const entryPath = path.join(currentDir, entry.name);

        if (entry.isDirectory()) {
            await walkSyncTree(entryPath, activeConfig, activeRootDir, state);
            continue;
        }

        // Only regular files are synced; the config file itself is excluded.
        if (!entry.isFile() || entry.name === ".az-sync") {
            continue;
        }

        // Files outside any .az-sync scope are ignored.
        if (!activeConfig || !activeRootDir) {
            continue;
        }

        const relativePath = path.relative(activeRootDir, entryPath);
        if (!matchesGlob(relativePath, activeConfig.includes)) {
            continue;
        }

        // Incomplete configs never contribute files.
        if (!activeConfig.storageAccount || !activeConfig.container) {
            continue;
        }

        const key = activeConfig.configPath;
        const entryState = state.get(key);
        if (!entryState) {
            continue;
        }

        const fileStats = await fs.promises.stat(entryPath);

        // Keyed by the forward-slash blob path so lookups match Azure blob names.
        entryState.localFiles.set(relativePath.split(path.sep).join("/"), {
            filePath: entryPath,
            relativePath,
            blobPath: relativePath.split(path.sep).join("/"),
            mtimeMs: fileStats.mtimeMs,
            size: fileStats.size
        });
    }
}
230
+
231
// Reads and normalizes a .az-sync YAML file. `includes` may be a string or a
// list; the legacy `fileTypeRegexes` key is honored when `includes` is empty.
// Returns null (after logging) when the file cannot be read or parsed.
function loadSyncConfig(configPath) {
    try {
        const parsed = YAML.parse(fs.readFileSync(configPath, "utf-8")) ?? {};

        // Coerce a scalar or missing value into a list.
        const toList = (value) => (Array.isArray(value) ? value : (value ? [value] : []));

        const includes = toList(parsed.includes);
        const legacyIncludes = toList(parsed.fileTypeRegexes);

        return {
            storageAccount: parsed.storageAccount ?? "",
            container: parsed.container ?? "",
            includes: includes.length > 0 ? includes : legacyIncludes,
            configPath
        };
    } catch (error) {
        console.log();
        console.log(chalk.red(`Failed to read ${configPath}.`));
        console.log(error?.message ?? error);
        console.log();
        return null;
    }
}
257
+
258
// Tests a (possibly OS-separated) relative path against a list of glob
// patterns. Returns false for an empty/non-array list; patterns that fail to
// compile are silently treated as non-matching.
function matchesGlob(relativePath, globList) {
    if (!Array.isArray(globList) || globList.length === 0) {
        return false;
    }

    const candidate = relativePath.split(path.sep).join("/");

    return globList.some((pattern) => {
        try {
            return globToRegExp(pattern).test(candidate);
        } catch {
            return false; // invalid pattern: skip it
        }
    });
}
278
+
279
// Translates a glob pattern into an anchored RegExp:
//   "**/" matches zero or more directory levels, "**" matches anything,
//   "*" matches within one path segment, "?" matches one non-slash char;
//   everything else is escaped literally.
function globToRegExp(pattern) {
    const source = pattern.split(path.sep).join("/").trim();
    let body = "";

    for (let i = 0; i < source.length; ) {
        const ch = source[i];

        if (ch === "*" && source[i + 1] === "*") {
            if (source[i + 2] === "/") {
                body += "(?:.*/)?"; // "**/" may collapse to nothing
                i += 3;
            } else {
                body += ".*";
                i += 2;
            }
        } else if (ch === "*") {
            body += "[^/]*"; // single "*" never crosses a slash
            i += 1;
        } else if (ch === "?") {
            body += "[^/]";
            i += 1;
        } else {
            body += ch.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
            i += 1;
        }
    }

    return new RegExp(`^${body}$`);
}
313
+
314
// Recursively creates dirPath when one is given; a falsy path is a no-op.
async function ensureDirectory(dirPath) {
    if (dirPath) {
        await fs.promises.mkdir(dirPath, { recursive: true });
    }
}
321
+
322
// Prints the planned operations, asks the user for confirmation, then applies
// them sequentially with an ora spinner. Failures are collected and reported
// at the end instead of aborting the run.
async function executeOperations(mode, operations) {
    if (operations.length === 0) {
        console.log(chalk.yellow("No changes detected."));
        console.log();
        return;
    }

    // Per-type counts for the summary line.
    const uploadOps = operations.filter(operation => operation.type === "upload");
    const downloadOps = operations.filter(operation => operation.type === "download");
    const deleteOps = operations.filter(operation => operation.type === "delete");

    console.log(chalk.gray(`Planned ${mode} changes: ${operations.length} total (${uploadOps.length} upload, ${downloadOps.length} download, ${deleteOps.length} delete)`));
    operations.forEach(operation => {
        // Deletes carry no localFile, so fall back to the blob path.
        const name = operation.localFile?.relativePath ?? operation.blobPath;
        console.log(chalk.gray(`- ${operation.type}: ${name}`));
    });
    console.log();

    const confirmation = await prompts([
        {
            type: "confirm",
            name: "proceed",
            message: `Apply ${operations.length} ${mode} changes?`,
            initial: false
        }
    ], {
        onCancel: () => {
            console.log();
            console.log(chalk.red("Cancelled operation."));
            console.log();
            // NOTE(review): returning false aborts the prompt, so `proceed`
            // stays undefined and the guard below treats Ctrl-C as a decline.
            return false;
        }
    });

    if (!confirmation.proceed) {
        console.log(chalk.yellow("No changes applied."));
        console.log();
        return;
    }

    const spinner = ora(`Syncing ${operations.length} changes...`).start();
    const failures = [];

    // Operations run strictly one after another; the spinner text tracks progress.
    for (let index = 0; index < operations.length; index += 1) {
        const operation = operations[index];
        try {
            if (operation.type === "upload") {
                spinner.text = `Uploading ${operation.localFile.relativePath} (${index + 1}/${operations.length})`;
                // Reuse the hash cached during planning; compute it otherwise.
                const md5Base64 = operation.localFile.md5Base64
                    ?? await getLocalMd5Base64(operation.localFile.filePath);
                await operation.azure.uploadFile(
                    operation.containerClient,
                    operation.localFile.blobPath,
                    operation.localFile.filePath,
                    md5Base64
                );
            } else if (operation.type === "download") {
                spinner.text = `Downloading ${operation.localFile.relativePath} (${index + 1}/${operations.length})`;
                // Create the parent directory before the SDK writes the file.
                await ensureDirectory(path.dirname(operation.localFile.filePath));
                await operation.azure.downloadToFile(
                    operation.containerClient,
                    operation.localFile.blobPath,
                    operation.localFile.filePath
                );
            } else if (operation.type === "delete") {
                spinner.text = `Deleting ${operation.blobPath} (${index + 1}/${operations.length})`;
                await operation.azure.deleteBlob(operation.containerClient, operation.blobPath);
            }
        } catch (error) {
            // Record the failure, log a warning, and restart the spinner for
            // the remaining operations.
            failures.push({ operation, error });
            spinner.warn(`Failed ${operation.type}: ${operation.localFile?.relativePath ?? operation.blobPath}`);
            spinner.start();
        }
    }

    if (failures.length > 0) {
        spinner.warn(`Completed with ${failures.length} failures.`);
        failures.forEach(({ operation, error }) => {
            const name = operation.localFile?.relativePath ?? operation.blobPath;
            console.log(chalk.red(`- ${name}: ${error?.message ?? error}`));
        });
    } else {
        spinner.succeed(`Synced ${operations.length} changes.`);
    }

    console.log();
}
409
+
410
// Streams a file through an MD5 hash and resolves with the base64 digest —
// the same encoding Azure stores in a blob's Content-MD5 property.
async function getLocalMd5Base64(filePath) {
    return new Promise((resolve, reject) => {
        const digest = crypto.createHash("md5");

        fs.createReadStream(filePath)
            .on("error", reject)
            .on("data", (chunk) => digest.update(chunk))
            .on("end", () => resolve(digest.digest("base64")));
    });
}
420
+
421
// Plans the push operations (uploads + remote deletes) that make each
// container mirror its matching local files:
//   - local file missing remotely            -> upload
//   - MD5 mismatch, or either hash unknown   -> upload
//   - remote blob with no local counterpart  -> delete
async function buildPushOperations(syncState, azure) {
    const operations = [];

    for (const entry of syncState.values()) {
        // Skip configs missing account/container or includes (logged inside).
        if (!validateConfig(entry.config)) {
            continue;
        }

        const containerClient = azure.getContainerClient(entry.config.storageAccount, entry.config.container);
        // Remote blobs matching the include globs; names are removed as local
        // counterparts are found, so the leftovers become deletes.
        const remoteBlobs = await listRemoteBlobNames(containerClient, entry.config.includes);

        for (const localFile of entry.localFiles.values()) {
            const existsRemote = remoteBlobs.has(localFile.blobPath);

            if (!existsRemote) {
                operations.push({
                    type: "upload",
                    azure,
                    containerClient,
                    localFile
                });
                continue;
            }

            const remoteHash = await azure.getBlobMd5Base64(containerClient, localFile.blobPath);
            const localHash = await getLocalMd5Base64(localFile.filePath);

            // Upload when hashes differ or either side has no MD5 — a blob
            // without a stored Content-MD5 is conservatively re-uploaded.
            if (!remoteHash || !localHash || remoteHash !== localHash) {
                // Cache the hash so executeOperations doesn't recompute it.
                localFile.md5Base64 = localHash;
                operations.push({
                    type: "upload",
                    azure,
                    containerClient,
                    localFile
                });
            }

            remoteBlobs.delete(localFile.blobPath);
        }

        // Anything still in the set exists remotely but not locally.
        for (const blobName of remoteBlobs.values()) {
            operations.push({
                type: "delete",
                azure,
                containerClient,
                blobPath: blobName
            });
        }
    }

    return operations;
}
473
+
474
// Plans the pull operations (downloads only — pull never deletes local
// files). A blob is downloaded when the local file is missing, the MD5 hashes
// differ, or either hash is unavailable.
async function buildPullOperations(syncState, azure) {
    const operations = [];

    for (const entry of syncState.values()) {
        // Skip configs missing account/container or includes (logged inside).
        if (!validateConfig(entry.config)) {
            continue;
        }

        const containerClient = azure.getContainerClient(entry.config.storageAccount, entry.config.container);
        const blobs = await azure.listBlobs(containerClient);

        for (const blob of blobs) {
            // Only blobs matching the configured include globs are synced.
            if (!matchesGlob(blob.name, entry.config.includes)) {
                continue;
            }

            // Map the blob path back onto the local tree rooted at the config's directory.
            const localPath = path.join(entry.rootDir, blob.name.split("/").join(path.sep));
            const localExists = fs.existsSync(localPath);
            const remoteHash = await azure.getBlobMd5Base64(containerClient, blob.name);
            let localHash = null;

            if (localExists) {
                localHash = await getLocalMd5Base64(localPath);
            }

            // NOTE(review): a blob without a stored Content-MD5 (remoteHash
            // null) is re-downloaded on every pull — confirm this is intended.
            if (!remoteHash || !localHash || remoteHash !== localHash) {
                operations.push({
                    type: "download",
                    azure,
                    containerClient,
                    localFile: {
                        filePath: localPath,
                        relativePath: path.relative(entry.rootDir, localPath),
                        blobPath: blob.name
                    }
                });
            }
        }
    }

    return operations;
}
516
+
517
// Returns true when a sync config is complete enough to act on; otherwise
// logs a warning naming the offending .az-sync file and returns false.
function validateConfig(config) {
    const { storageAccount, container, includes, configPath } = config;

    if (!storageAccount || !container) {
        console.log(chalk.yellow(`Skipping config at ${configPath} (missing storage account or container).`));
        return false;
    }

    if (!includes || includes.length === 0) {
        console.log(chalk.yellow(`Skipping config at ${configPath} (no includes configured).`));
        return false;
    }

    return true;
}
530
+
531
// Collects the names of all blobs in the container that match the configured
// include globs, returned as a Set for O(1) membership checks during push.
async function listRemoteBlobNames(containerClient, includes) {
    const names = new Set();

    for await (const blob of containerClient.listBlobsFlat()) {
        if (matchesGlob(blob.name, includes)) {
            names.add(blob.name);
        }
    }

    return names;
}
@@ -49,5 +49,14 @@
49
49
  },
50
50
  "changelog": {
51
51
  "desc": "Shows the changelog history"
52
+ },
53
+ "azure_sync_config": {
54
+ "desc": "Creates or updates the .az-sync configuration file"
55
+ },
56
+ "azure_sync_push": {
57
+ "desc": "Pushes local files to Azure Blob Storage using checksum comparison"
58
+ },
59
+ "azure_sync_pull": {
60
+ "desc": "Pulls blobs to local files using checksum comparison"
52
61
  }
53
62
  }
@@ -0,0 +1,76 @@
1
+ import { BlobServiceClient } from "@azure/storage-blob";
2
+ import { DefaultAzureCredential } from "@azure/identity";
3
+
4
// Thin wrapper around the Azure Blob Storage SDK. Authenticates with
// DefaultAzureCredential (environment variables, managed identity, Azure
// CLI, ...) and caches container clients per account/container pair.
export class AzureHelper {
    constructor(credential = new DefaultAzureCredential()) {
        this.credential = credential;
        // Map of "account|container" -> ContainerClient so clients are reused.
        this.clientCache = new Map();
    }

    // Returns a (cached) ContainerClient for the given storage account and container.
    getContainerClient(storageAccount, container) {
        const cacheKey = `${storageAccount}|${container}`;
        let containerClient = this.clientCache.get(cacheKey);
        if (containerClient) {
            return containerClient;
        }

        const blobServiceClient = new BlobServiceClient(
            `https://${storageAccount}.blob.core.windows.net`,
            this.credential
        );
        containerClient = blobServiceClient.getContainerClient(container);
        this.clientCache.set(cacheKey, containerClient);
        return containerClient;
    }

    // Lists the names of all containers in the given storage account.
    async listContainers(storageAccount) {
        const blobServiceClient = new BlobServiceClient(
            `https://${storageAccount}.blob.core.windows.net`,
            this.credential
        );
        const containers = [];
        for await (const container of blobServiceClient.listContainers()) {
            containers.push(container.name);
        }
        return containers;
    }

    // Collects all blobs of a container (flat listing) into an array.
    async listBlobs(containerClient) {
        const blobs = [];
        for await (const blob of containerClient.listBlobsFlat()) {
            blobs.push(blob);
        }
        return blobs;
    }

    // Returns the blob's stored Content-MD5 as a base64 string, or null when
    // the blob is missing, has no MD5 set, or the property fetch fails.
    // NOTE(review): contentMD5 is assumed to arrive as a Buffer; if the SDK
    // ever yields a plain Uint8Array this returns null — confirm for Node.
    async getBlobMd5Base64(containerClient, blobPath) {
        try {
            const properties = await containerClient.getBlobClient(blobPath).getProperties();
            const contentMd5 = properties?.contentMD5;
            if (!contentMd5) {
                return null;
            }
            return Buffer.isBuffer(contentMd5) ? contentMd5.toString("base64") : null;
        } catch (error) {
            // Treat any lookup failure as "hash unknown" so callers fall back
            // to a conservative re-transfer.
            return null;
        }
    }

    // Uploads a local file to blobPath, attaching the provided base64 MD5 as
    // the blob's Content-MD5 header when one is given.
    async uploadFile(containerClient, blobPath, filePath, md5Base64) {
        const blockBlobClient = containerClient.getBlockBlobClient(blobPath);
        const md5Buffer = md5Base64 ? Buffer.from(md5Base64, "base64") : undefined;

        await blockBlobClient.uploadFile(filePath, {
            blobHTTPHeaders: md5Buffer ? { blobContentMD5: md5Buffer } : undefined
        });
    }

    // Downloads blobPath into the given local file path; callers are expected
    // to have created the parent directory first (see executeOperations).
    async downloadToFile(containerClient, blobPath, filePath) {
        const blockBlobClient = containerClient.getBlockBlobClient(blobPath);
        await blockBlobClient.downloadToFile(filePath);
    }

    // Deletes a single blob from the container.
    async deleteBlob(containerClient, blobPath) {
        await containerClient.deleteBlob(blobPath);
    }
}