contensis-cli 1.3.1-beta.0 → 1.3.1-beta.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. package/README.md +1 -1
  2. package/dist/commands/copy.js.map +1 -1
  3. package/dist/commands/create.js +1 -2
  4. package/dist/commands/create.js.map +1 -1
  5. package/dist/commands/globalOptions.js +3 -0
  6. package/dist/commands/globalOptions.js.map +1 -1
  7. package/dist/commands/index.js +0 -2
  8. package/dist/commands/index.js.map +1 -1
  9. package/dist/commands/list.js.map +1 -1
  10. package/dist/commands/login.js +1 -2
  11. package/dist/commands/login.js.map +1 -1
  12. package/dist/commands/push.js +13 -10
  13. package/dist/commands/push.js.map +1 -1
  14. package/dist/commands/remove.js +2 -4
  15. package/dist/commands/remove.js.map +1 -1
  16. package/dist/commands/set.js +2 -4
  17. package/dist/commands/set.js.map +1 -1
  18. package/dist/factories/RequestHandlerFactory.js +12 -5
  19. package/dist/factories/RequestHandlerFactory.js.map +2 -2
  20. package/dist/index.js +4 -0
  21. package/dist/index.js.map +1 -1
  22. package/dist/localisation/en-GB.js +8 -1
  23. package/dist/localisation/en-GB.js.map +1 -1
  24. package/dist/mappers/ContensisCliService-to-RequestHanderSiteConfigYaml.js.map +1 -1
  25. package/dist/mappers/DevInit-to-CIWorkflow.js +11 -6
  26. package/dist/mappers/DevInit-to-CIWorkflow.js.map +1 -1
  27. package/dist/mappers/DevRequests-to-RequestHanderCliArgs.js +4 -2
  28. package/dist/mappers/DevRequests-to-RequestHanderCliArgs.js.map +1 -1
  29. package/dist/providers/CredentialProvider.js +11 -4
  30. package/dist/providers/CredentialProvider.js.map +1 -1
  31. package/dist/providers/GitHubCliModuleProvider.js +8 -10
  32. package/dist/providers/GitHubCliModuleProvider.js.map +1 -1
  33. package/dist/providers/HttpProvider.js +5 -4
  34. package/dist/providers/HttpProvider.js.map +1 -1
  35. package/dist/providers/ManifestProvider.js +1 -4
  36. package/dist/providers/ManifestProvider.js.map +1 -1
  37. package/dist/providers/SessionCacheProvider.js +8 -8
  38. package/dist/providers/SessionCacheProvider.js.map +1 -1
  39. package/dist/providers/file-provider.js +13 -11
  40. package/dist/providers/file-provider.js.map +1 -1
  41. package/dist/services/ContensisAuthService.js +1 -2
  42. package/dist/services/ContensisAuthService.js.map +1 -1
  43. package/dist/services/ContensisCliService.js +67 -94
  44. package/dist/services/ContensisCliService.js.map +2 -2
  45. package/dist/services/ContensisDevService.js +15 -18
  46. package/dist/services/ContensisDevService.js.map +2 -2
  47. package/dist/services/ContensisRoleService.js +8 -10
  48. package/dist/services/ContensisRoleService.js.map +1 -1
  49. package/dist/shell.js +10 -6
  50. package/dist/shell.js.map +1 -1
  51. package/dist/util/api-ids.js.map +1 -1
  52. package/dist/util/console.printer.js +12 -16
  53. package/dist/util/console.printer.js.map +1 -1
  54. package/dist/util/csv.formatter.js +8 -15
  55. package/dist/util/csv.formatter.js.map +2 -2
  56. package/dist/util/diff.js +6 -4
  57. package/dist/util/diff.js.map +1 -1
  58. package/dist/util/dotenv.js +1 -2
  59. package/dist/util/dotenv.js.map +1 -1
  60. package/dist/util/error.js.map +1 -1
  61. package/dist/util/fetch.js +4 -0
  62. package/dist/util/fetch.js.map +1 -1
  63. package/dist/util/git.js +8 -8
  64. package/dist/util/git.js.map +1 -1
  65. package/dist/util/gitignore.js +4 -0
  66. package/dist/util/gitignore.js.map +1 -1
  67. package/dist/util/index.js +5 -1
  68. package/dist/util/index.js.map +2 -2
  69. package/dist/util/json.formatter.js +6 -4
  70. package/dist/util/json.formatter.js.map +1 -1
  71. package/dist/util/logger.js +45 -50
  72. package/dist/util/logger.js.map +2 -2
  73. package/dist/util/os.js +4 -0
  74. package/dist/util/os.js.map +1 -1
  75. package/dist/util/xml.formatter.js +4 -0
  76. package/dist/util/xml.formatter.js.map +1 -1
  77. package/dist/util/yaml.js +1 -2
  78. package/dist/util/yaml.js.map +1 -1
  79. package/dist/version.js +1 -1
  80. package/dist/version.js.map +1 -1
  81. package/esbuild.config.js +10 -14
  82. package/package.json +5 -5
  83. package/src/factories/RequestHandlerFactory.ts +1 -1
  84. package/src/services/ContensisCliService.ts +78 -91
  85. package/src/services/ContensisDevService.ts +2 -2
  86. package/src/util/csv.formatter.ts +1 -1
  87. package/src/util/index.ts +1 -1
  88. package/src/util/logger.ts +15 -14
  89. package/src/version.ts +1 -1
  90. package/tsconfig.json +1 -1
@@ -31,8 +31,7 @@ const csvFormatter = async (entries) => {
  for (const entry of entries) {
  flatEntries.push((0, import_json.flattenObject)(entry));
  }
- else
- flatEntries.push((0, import_json.flattenObject)(entries));
+ else flatEntries.push((0, import_json.flattenObject)(entries));
  const columns = new Set(flatEntries.map((e) => Object.keys(e)).flat());
  const csv = await new Promise((resolve, reject) => {
  (0, import_csv.stringify)(
@@ -43,8 +42,7 @@ const csvFormatter = async (entries) => {
  columns: [...columns]
  },
  (err, data) => {
- if (err)
- reject(err);
+ if (err) reject(err);
  resolve(data);
  }
  );
@@ -61,8 +59,7 @@ const csvToJson = async (data) => {
  cast: (value, context) => {
  if (context.header || context.column === "sys.version.versionNo")
  return value;
- if (value === "")
- return void 0;
+ if (value === "") return void 0;
  try {
  return JSON.parse(value);
  } catch (e) {
@@ -71,8 +68,7 @@ const csvToJson = async (data) => {
  }
  },
  (err, records) => {
- if (err)
- reject(err);
+ if (err) reject(err);
  resolve(records);
  }
  );
@@ -80,15 +76,13 @@ const csvToJson = async (data) => {
  };
  const detectCsv = (chunk, opts) => {
  opts = opts || {};
- if (Buffer.isBuffer(chunk))
- chunk = chunk + "";
+ if (Buffer.isBuffer(chunk)) chunk = chunk + "";
  const delimiters = opts.delimiters || [",", ";", " ", "|"];
  const newlines = opts.newlines || ["\n", "\r"];
  const lines = chunk.split(/[\n\r]+/g);
  const delimiter = determineMost(lines[0], delimiters);
  const newline = determineMost(chunk, newlines);
- if (!delimiter)
- return null;
+ if (!delimiter) return null;
  return {
  delimiter,
  newline
@@ -103,9 +97,8 @@ const determineMost = (chunk, items) => {
  items.forEach((item) => {
  itemCount[item] = 0;
  });
- for (var i = 0; i < chunk.length; i++) {
- if (chunk[i] === '"')
- ignoreString = !ignoreString;
+ for (let i = 0; i < chunk.length; i++) {
+ if (chunk[i] === '"') ignoreString = !ignoreString;
  else if (!ignoreString && chunk[i] in itemCount) {
  currValue = ++itemCount[chunk[i]];
  if (currValue > maxValue) {
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/util/csv.formatter.ts"],
- "sourcesContent": ["import { parse, stringify } from 'csv';\n// import { parse, stringify } from 'csv/sync';\nimport { flattenObject } from './json.formatter';\n\nexport const csvFormatter = async <T>(entries: T | T[]) => {\n // Flatten the passed in object\n const flatEntries = [] as any[];\n if (Array.isArray(entries))\n for (const entry of entries) {\n flatEntries.push(flattenObject(entry));\n }\n else flatEntries.push(flattenObject(entries));\n\n // Parse the flattened object to csv\n // const csv = stringify(flatEntries, { header: true });\n // Create an exhaustive list of columns from the entries array\n const columns = new Set<string>(flatEntries.map(e => Object.keys(e)).flat());\n const csv = await new Promise<string>((resolve, reject) => {\n stringify(\n flatEntries,\n {\n header: true,\n cast: { boolean: (value, context) => `${value}` },\n columns: [...columns],\n },\n (err, data) => {\n if (err) reject(err);\n resolve(data);\n }\n );\n });\n return csv;\n};\n\nexport const csvToJson = async <T = any>(data: string): Promise<T[]> => {\n return new Promise((resolve, reject) => {\n parse(\n data,\n {\n columns: true,\n skip_empty_lines: true,\n cast: (value, context) => {\n if (context.header || context.column === 'sys.version.versionNo')\n return value;\n if (value === '') return undefined;\n try {\n return JSON.parse(value);\n } catch (e) {\n return value;\n }\n },\n },\n (err, records) => {\n if (err) reject(err);\n resolve(records);\n }\n );\n });\n};\n\nexport const detectCsv = (\n chunk: string,\n opts?: { delimiters?: string[]; newlines?: string[] }\n) => {\n opts = opts || {};\n if (Buffer.isBuffer(chunk)) chunk = chunk + '';\n const delimiters = opts.delimiters || [',', ';', '\\t', '|'];\n const newlines = opts.newlines || ['\\n', '\\r'];\n\n const lines = chunk.split(/[\\n\\r]+/g);\n\n const delimiter = determineMost(lines[0], delimiters);\n const newline = determineMost(chunk, newlines);\n\n if (!delimiter) return null;\n\n return {\n delimiter: delimiter,\n newline: newline,\n };\n};\n\nconst determineMost = (chunk: string, items: string[]) => {\n const itemCount = {} as any;\n let ignoreString = false;\n let maxValue = 0;\n let maxChar;\n let currValue;\n items.forEach(item => {\n itemCount[item] = 0;\n });\n for (var i = 0; i < chunk.length; i++) {\n if (chunk[i] === '\"') ignoreString = !ignoreString;\n else if (!ignoreString && chunk[i] in itemCount) {\n currValue = ++itemCount[chunk[i]];\n if (currValue > maxValue) {\n maxValue = currValue;\n maxChar = chunk[i];\n }\n }\n }\n return maxChar;\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAiC;AAEjC,kBAA8B;AAEvB,MAAM,eAAe,OAAU,YAAqB;AAEzD,QAAM,cAAc,CAAC;AACrB,MAAI,MAAM,QAAQ,OAAO;AACvB,eAAW,SAAS,SAAS;AAC3B,kBAAY,SAAK,2BAAc,KAAK,CAAC;AAAA,IACvC;AAAA;AACG,gBAAY,SAAK,2BAAc,OAAO,CAAC;AAK5C,QAAM,UAAU,IAAI,IAAY,YAAY,IAAI,OAAK,OAAO,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC;AAC3E,QAAM,MAAM,MAAM,IAAI,QAAgB,CAAC,SAAS,WAAW;AACzD;AAAA,MACE;AAAA,MACA;AAAA,QACE,QAAQ;AAAA,QACR,MAAM,EAAE,SAAS,CAAC,OAAO,YAAY,GAAG,QAAQ;AAAA,QAChD,SAAS,CAAC,GAAG,OAAO;AAAA,MACtB;AAAA,MACA,CAAC,KAAK,SAAS;AACb,YAAI;AAAK,iBAAO,GAAG;AACnB,gBAAQ,IAAI;AAAA,MACd;AAAA,IACF;AAAA,EACF,CAAC;AACD,SAAO;AACT;AAEO,MAAM,YAAY,OAAgB,SAA+B;AACtE,SAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC;AAAA,MACE;AAAA,MACA;AAAA,QACE,SAAS;AAAA,QACT,kBAAkB;AAAA,QAClB,MAAM,CAAC,OAAO,YAAY;AACxB,cAAI,QAAQ,UAAU,QAAQ,WAAW;AACvC,mBAAO;AACT,cAAI,UAAU;AAAI,mBAAO;AACzB,cAAI;AACF,mBAAO,KAAK,MAAM,KAAK;AAAA,UACzB,SAAS,GAAP;AACA,mBAAO;AAAA,UACT;AAAA,QACF;AAAA,MACF;AAAA,MACA,CAAC,KAAK,YAAY;AAChB,YAAI;AAAK,iBAAO,GAAG;AACnB,gBAAQ,OAAO;AAAA,MACjB;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEO,MAAM,YAAY,CACvB,OACA,SACG;AACH,SAAO,QAAQ,CAAC;AAChB,MAAI,OAAO,SAAS,KAAK;AAAG,YAAQ,QAAQ;AAC5C,QAAM,aAAa,KAAK,cAAc,CAAC,KAAK,KAAK,KAAM,GAAG;AAC1D,QAAM,WAAW,KAAK,YAAY,CAAC,MAAM,IAAI;AAE7C,QAAM,QAAQ,MAAM,MAAM,UAAU;AAEpC,QAAM,YAAY,cAAc,MAAM,IAAI,UAAU;AACpD,QAAM,UAAU,cAAc,OAAO,QAAQ;AAE7C,MAAI,CAAC;AAAW,WAAO;AAEvB,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AAEA,MAAM,gBAAgB,CAAC,OAAe,UAAoB;AACxD,QAAM,YAAY,CAAC;AACnB,MAAI,eAAe;AACnB,MAAI,WAAW;AACf,MAAI;AACJ,MAAI;AACJ,QAAM,QAAQ,UAAQ;AACpB,cAAU,QAAQ;AAAA,EACpB,CAAC;AACD,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,QAAI,MAAM,OAAO;AAAK,qBAAe,CAAC;AAAA,aAC7B,CAAC,gBAAgB,MAAM,MAAM,WAAW;AAC/C,kBAAY,EAAE,UAAU,MAAM;AAC9B,UAAI,YAAY,UAAU;AACxB,mBAAW;AACX,kBAAU,MAAM;AAAA,MAClB;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;",
+ "sourcesContent": ["import { parse, stringify } from 'csv';\n// import { parse, stringify } from 'csv/sync';\nimport { flattenObject } from './json.formatter';\n\nexport const csvFormatter = async <T>(entries: T | T[]) => {\n // Flatten the passed in object\n const flatEntries = [] as any[];\n if (Array.isArray(entries))\n for (const entry of entries) {\n flatEntries.push(flattenObject(entry));\n }\n else flatEntries.push(flattenObject(entries));\n\n // Parse the flattened object to csv\n // const csv = stringify(flatEntries, { header: true });\n // Create an exhaustive list of columns from the entries array\n const columns = new Set<string>(flatEntries.map(e => Object.keys(e)).flat());\n const csv = await new Promise<string>((resolve, reject) => {\n stringify(\n flatEntries,\n {\n header: true,\n cast: { boolean: (value, context) => `${value}` },\n columns: [...columns],\n },\n (err, data) => {\n if (err) reject(err);\n resolve(data);\n }\n );\n });\n return csv;\n};\n\nexport const csvToJson = async <T = any>(data: string): Promise<T[]> => {\n return new Promise((resolve, reject) => {\n parse(\n data,\n {\n columns: true,\n skip_empty_lines: true,\n cast: (value, context) => {\n if (context.header || context.column === 'sys.version.versionNo')\n return value;\n if (value === '') return undefined;\n try {\n return JSON.parse(value);\n } catch (e) {\n return value;\n }\n },\n },\n (err, records) => {\n if (err) reject(err);\n resolve(records);\n }\n );\n });\n};\n\nexport const detectCsv = (\n chunk: string,\n opts?: { delimiters?: string[]; newlines?: string[] }\n) => {\n opts = opts || {};\n if (Buffer.isBuffer(chunk)) chunk = chunk + '';\n const delimiters = opts.delimiters || [',', ';', '\\t', '|'];\n const newlines = opts.newlines || ['\\n', '\\r'];\n\n const lines = chunk.split(/[\\n\\r]+/g);\n\n const delimiter = determineMost(lines[0], delimiters);\n const newline = determineMost(chunk, newlines);\n\n if (!delimiter) return null;\n\n return {\n delimiter: delimiter,\n newline: newline,\n };\n};\n\nconst determineMost = (chunk: string, items: string[]) => {\n const itemCount = {} as any;\n let ignoreString = false;\n let maxValue = 0;\n let maxChar;\n let currValue;\n items.forEach(item => {\n itemCount[item] = 0;\n });\n for (let i = 0; i < chunk.length; i++) {\n if (chunk[i] === '\"') ignoreString = !ignoreString;\n else if (!ignoreString && chunk[i] in itemCount) {\n currValue = ++itemCount[chunk[i]];\n if (currValue > maxValue) {\n maxValue = currValue;\n maxChar = chunk[i];\n }\n }\n }\n return maxChar;\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAiC;AAEjC,kBAA8B;AAEvB,MAAM,eAAe,OAAU,YAAqB;AAEzD,QAAM,cAAc,CAAC;AACrB,MAAI,MAAM,QAAQ,OAAO;AACvB,eAAW,SAAS,SAAS;AAC3B,kBAAY,SAAK,2BAAc,KAAK,CAAC;AAAA,IACvC;AAAA,MACG,aAAY,SAAK,2BAAc,OAAO,CAAC;AAK5C,QAAM,UAAU,IAAI,IAAY,YAAY,IAAI,OAAK,OAAO,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC;AAC3E,QAAM,MAAM,MAAM,IAAI,QAAgB,CAAC,SAAS,WAAW;AACzD;AAAA,MACE;AAAA,MACA;AAAA,QACE,QAAQ;AAAA,QACR,MAAM,EAAE,SAAS,CAAC,OAAO,YAAY,GAAG,KAAK,GAAG;AAAA,QAChD,SAAS,CAAC,GAAG,OAAO;AAAA,MACtB;AAAA,MACA,CAAC,KAAK,SAAS;AACb,YAAI,IAAK,QAAO,GAAG;AACnB,gBAAQ,IAAI;AAAA,MACd;AAAA,IACF;AAAA,EACF,CAAC;AACD,SAAO;AACT;AAEO,MAAM,YAAY,OAAgB,SAA+B;AACtE,SAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC;AAAA,MACE;AAAA,MACA;AAAA,QACE,SAAS;AAAA,QACT,kBAAkB;AAAA,QAClB,MAAM,CAAC,OAAO,YAAY;AACxB,cAAI,QAAQ,UAAU,QAAQ,WAAW;AACvC,mBAAO;AACT,cAAI,UAAU,GAAI,QAAO;AACzB,cAAI;AACF,mBAAO,KAAK,MAAM,KAAK;AAAA,UACzB,SAAS,GAAG;AACV,mBAAO;AAAA,UACT;AAAA,QACF;AAAA,MACF;AAAA,MACA,CAAC,KAAK,YAAY;AAChB,YAAI,IAAK,QAAO,GAAG;AACnB,gBAAQ,OAAO;AAAA,MACjB;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEO,MAAM,YAAY,CACvB,OACA,SACG;AACH,SAAO,QAAQ,CAAC;AAChB,MAAI,OAAO,SAAS,KAAK,EAAG,SAAQ,QAAQ;AAC5C,QAAM,aAAa,KAAK,cAAc,CAAC,KAAK,KAAK,KAAM,GAAG;AAC1D,QAAM,WAAW,KAAK,YAAY,CAAC,MAAM,IAAI;AAE7C,QAAM,QAAQ,MAAM,MAAM,UAAU;AAEpC,QAAM,YAAY,cAAc,MAAM,CAAC,GAAG,UAAU;AACpD,QAAM,UAAU,cAAc,OAAO,QAAQ;AAE7C,MAAI,CAAC,UAAW,QAAO;AAEvB,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AAEA,MAAM,gBAAgB,CAAC,OAAe,UAAoB;AACxD,QAAM,YAAY,CAAC;AACnB,MAAI,eAAe;AACnB,MAAI,WAAW;AACf,MAAI;AACJ,MAAI;AACJ,QAAM,QAAQ,UAAQ;AACpB,cAAU,IAAI,IAAI;AAAA,EACpB,CAAC;AACD,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,QAAI,MAAM,CAAC,MAAM,IAAK,gBAAe,CAAC;AAAA,aAC7B,CAAC,gBAAgB,MAAM,CAAC,KAAK,WAAW;AAC/C,kBAAY,EAAE,UAAU,MAAM,CAAC,CAAC;AAChC,UAAI,YAAY,UAAU;AACxB,mBAAW;AACX,kBAAU,MAAM,CAAC;AAAA,MACnB;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;",
  "names": []
  }
package/dist/util/diff.js CHANGED
@@ -18,6 +18,10 @@ var __copyProps = (to, from, except, desc) => {
  return to;
  };
  var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
  ));
@@ -71,10 +75,8 @@ ${part.value.split("\n").map(
  (ln, idx) => ln.trim() !== "" ? `${part.startLineNumber ? part.startLineNumber + idx : lnSpaces}${part.added ? "+" : part.removed ? "-" : " "} ${import_chalk.default[colour](`${ln}`)}` : ln
  ).join("\n")}`
  );
- } else
- needsNewLine = true;
- } else
- needsNewLine = true;
+ } else needsNewLine = true;
+ } else needsNewLine = true;
  }
  return output.join("").replaceAll("\n\n\n", "\n\n");
  };
@@ -2,6 +2,6 @@
  "version": 3,
  "sources": ["../../src/util/diff.ts"],
  "sourcesContent": ["import chalk from 'chalk';\nimport { Change, diffLines } from 'diff';\nimport { normaliseLineEndings } from './os';\n\nexport const diffLogStrings = (updates: string, previous: string) => {\n const lastFewLines = previous.split('\\n').slice(-10);\n const incomingLines = updates.split('\\n');\n\n // Find the line indices in the incoming lines\n // of the last few lines previously rendered\n const incomingLineIndices = [];\n for (const lastRenderedLine of lastFewLines) {\n if (lastRenderedLine.length > 10)\n incomingLineIndices.push(incomingLines.lastIndexOf(lastRenderedLine));\n }\n\n // Get the new lines from the next position on from the last of the already shown lines\n const differentFromPos = Math.max(...incomingLineIndices, 0) + 1;\n // Return just the incoming lines from the position we matched\n return incomingLines.slice(differentFromPos).join('\\n');\n};\n\nexport const diffFileContent = (\n existingContent: string,\n newContent: string\n) => {\n const existingContentNormalised = normaliseLineEndings(existingContent, '\\n');\n const newContentNormalised = normaliseLineEndings(newContent, '\\n');\n\n const diff = diffLines(existingContentNormalised, newContentNormalised, {\n newlineIsToken: true,\n });\n const diffRanges = addDiffPositionInfo(diff);\n\n // Create formatted output for console\n const output: string[] = [];\n const lnSpaceLength = Math.max(\n ...diffRanges.map(d => d.startLineNumber.toString().length),\n 0\n );\n\n const lnSpaces = Array(lnSpaceLength).join(' ');\n\n let needsNewLine = false;\n for (let i = 0; i < diffRanges.length; i++) {\n const part = diffRanges[i];\n if (part.added || part.removed) {\n const colour = part.added ? 'green' : part.removed ? 'red' : 'grey';\n\n if (part.value !== '\\n') {\n if (needsNewLine) {\n output.push('\\n### --');\n needsNewLine = false;\n }\n output.push(\n `\\n${part.value\n .split('\\n')\n .map((ln, idx) =>\n ln.trim() !== ''\n ? `${\n part.startLineNumber ? part.startLineNumber + idx : lnSpaces\n }${part.added ? '+' : part.removed ? '-' : ' '} ${chalk[\n colour\n ](`${ln}`)}`\n : ln\n )\n .join('\\n')}`\n );\n } else needsNewLine = true;\n } else needsNewLine = true;\n }\n\n return output.join('').replaceAll('\\n\\n\\n', '\\n\\n');\n};\n\nconst addDiffPositionInfo = (diff: Change[]) => {\n const diffRanges: (Change & {\n startLineNumber: number;\n startColumn: number;\n endLineNumber: number;\n endColumn: number;\n })[] = [];\n\n let lineNumber = 0;\n let column = 0;\n for (let partIndex = 0; partIndex < diff.length; partIndex++) {\n const part = diff[partIndex];\n\n // // Skip any parts that aren't in `after`\n // if (part.removed === true) {\n // continue;\n // }\n\n const startLineNumber = lineNumber;\n const startColumn = column;\n\n // Split the part into lines. Loop throug these lines to find\n // the line no. 
and column at the end of this part.\n const substring = part.value;\n const lines = substring.split('\\n');\n lines.forEach((line, lineIndex) => {\n // The first `line` is actually just a continuation of the last line\n if (lineIndex === 0) {\n column += line.length;\n // All other lines come after a line break.\n } else if (lineIndex > 0) {\n lineNumber += 1;\n column = line.length;\n }\n });\n\n // Save a range for all of the parts with position info added\n if (part.added === true || part.removed === true) {\n diffRanges.push({\n startLineNumber: startLineNumber + 1,\n startColumn: startColumn,\n endLineNumber: lineNumber,\n endColumn: column,\n ...part,\n });\n }\n }\n return diffRanges;\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAkB;AAClB,kBAAkC;AAClC,gBAAqC;AAE9B,MAAM,iBAAiB,CAAC,SAAiB,aAAqB;AACnE,QAAM,eAAe,SAAS,MAAM,IAAI,EAAE,MAAM,GAAG;AACnD,QAAM,gBAAgB,QAAQ,MAAM,IAAI;AAIxC,QAAM,sBAAsB,CAAC;AAC7B,aAAW,oBAAoB,cAAc;AAC3C,QAAI,iBAAiB,SAAS;AAC5B,0BAAoB,KAAK,cAAc,YAAY,gBAAgB,CAAC;AAAA,EACxE;AAGA,QAAM,mBAAmB,KAAK,IAAI,GAAG,qBAAqB,CAAC,IAAI;AAE/D,SAAO,cAAc,MAAM,gBAAgB,EAAE,KAAK,IAAI;AACxD;AAEO,MAAM,kBAAkB,CAC7B,iBACA,eACG;AACH,QAAM,gCAA4B,gCAAqB,iBAAiB,IAAI;AAC5E,QAAM,2BAAuB,gCAAqB,YAAY,IAAI;AAElE,QAAM,WAAO,uBAAU,2BAA2B,sBAAsB;AAAA,IACtE,gBAAgB;AAAA,EAClB,CAAC;AACD,QAAM,aAAa,oBAAoB,IAAI;AAG3C,QAAM,SAAmB,CAAC;AAC1B,QAAM,gBAAgB,KAAK;AAAA,IACzB,GAAG,WAAW,IAAI,OAAK,EAAE,gBAAgB,SAAS,EAAE,MAAM;AAAA,IAC1D;AAAA,EACF;AAEA,QAAM,WAAW,MAAM,aAAa,EAAE,KAAK,GAAG;AAE9C,MAAI,eAAe;AACnB,WAAS,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;AAC1C,UAAM,OAAO,WAAW;AACxB,QAAI,KAAK,SAAS,KAAK,SAAS;AAC9B,YAAM,SAAS,KAAK,QAAQ,UAAU,KAAK,UAAU,QAAQ;AAE7D,UAAI,KAAK,UAAU,MAAM;AACvB,YAAI,cAAc;AAChB,iBAAO,KAAK,UAAU;AACtB,yBAAe;AAAA,QACjB;AACA,eAAO;AAAA,UACL;AAAA,EAAK,KAAK,MACP,MAAM,IAAI,EACV;AAAA,YAAI,CAAC,IAAI,QACR,GAAG,KAAK,MAAM,KACV,GACE,KAAK,kBAAkB,KAAK,kBAAkB,MAAM,WACnD,KAAK,QAAQ,MAAM,KAAK,UAAU,MAAM,OAAO,aAAAA,QAChD,QACA,GAAG,IAAI,MACT;AAAA,UACN,EACC,KAAK,IAAI;AAAA,QACd;AAAA,MACF;AAAO,uBAAe;AAAA,IACxB;AAAO,qBAAe;AAAA,EACxB;AAEA,SAAO,OAAO,KAAK,EAAE,EAAE,WAAW,UAAU,MAAM;AACpD;AAEA,MAAM,sBAAsB,CAAC,SAAmB;AAC9C,QAAM,aAKC,CAAC;AAER,MAAI,aAAa;AACjB,MAAI,SAAS;AACb,WAAS,YAAY,GAAG,YAAY,KAAK,QAAQ,aAAa;AAC5D,UAAM,OAAO,KAAK;AAOlB,UAAM,kBAAkB;AACxB,UAAM,cAAc;AAIpB,UAAM,YAAY,KAAK;AACvB,UAAM,QAAQ,UAAU,MAAM,IAAI;AAClC,UAAM,QAAQ,CAAC,MAAM,cAAc;AAEjC,UAAI,cAAc,GAAG;AACnB,kBAAU,KAAK;AAAA,MAEjB,WAAW,YAAY,GAAG;AACxB,sBAAc;AACd,iBAAS,KAAK;AAAA,MAChB;AAAA,IACF,CAAC;AAGD,QAAI,KAAK,UAAU,QAAQ,KAAK,YAAY,MAAM;AAChD,iBAAW,KAAK;AAAA,QACd,iBAAiB,kBAAkB;AAAA,QACnC;AAAA,QACA,eAAe;AAAA,QACf,WAAW;AAAA,QACX,GAAG;AAAA,MACL,CAAC;AAAA,IACH;AAAA,EACF;AACA,SAAO;AACT;",
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAkB;AAClB,kBAAkC;AAClC,gBAAqC;AAE9B,MAAM,iBAAiB,CAAC,SAAiB,aAAqB;AACnE,QAAM,eAAe,SAAS,MAAM,IAAI,EAAE,MAAM,GAAG;AACnD,QAAM,gBAAgB,QAAQ,MAAM,IAAI;AAIxC,QAAM,sBAAsB,CAAC;AAC7B,aAAW,oBAAoB,cAAc;AAC3C,QAAI,iBAAiB,SAAS;AAC5B,0BAAoB,KAAK,cAAc,YAAY,gBAAgB,CAAC;AAAA,EACxE;AAGA,QAAM,mBAAmB,KAAK,IAAI,GAAG,qBAAqB,CAAC,IAAI;AAE/D,SAAO,cAAc,MAAM,gBAAgB,EAAE,KAAK,IAAI;AACxD;AAEO,MAAM,kBAAkB,CAC7B,iBACA,eACG;AACH,QAAM,gCAA4B,gCAAqB,iBAAiB,IAAI;AAC5E,QAAM,2BAAuB,gCAAqB,YAAY,IAAI;AAElE,QAAM,WAAO,uBAAU,2BAA2B,sBAAsB;AAAA,IACtE,gBAAgB;AAAA,EAClB,CAAC;AACD,QAAM,aAAa,oBAAoB,IAAI;AAG3C,QAAM,SAAmB,CAAC;AAC1B,QAAM,gBAAgB,KAAK;AAAA,IACzB,GAAG,WAAW,IAAI,OAAK,EAAE,gBAAgB,SAAS,EAAE,MAAM;AAAA,IAC1D;AAAA,EACF;AAEA,QAAM,WAAW,MAAM,aAAa,EAAE,KAAK,GAAG;AAE9C,MAAI,eAAe;AACnB,WAAS,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;AAC1C,UAAM,OAAO,WAAW,CAAC;AACzB,QAAI,KAAK,SAAS,KAAK,SAAS;AAC9B,YAAM,SAAS,KAAK,QAAQ,UAAU,KAAK,UAAU,QAAQ;AAE7D,UAAI,KAAK,UAAU,MAAM;AACvB,YAAI,cAAc;AAChB,iBAAO,KAAK,UAAU;AACtB,yBAAe;AAAA,QACjB;AACA,eAAO;AAAA,UACL;AAAA,EAAK,KAAK,MACP,MAAM,IAAI,EACV;AAAA,YAAI,CAAC,IAAI,QACR,GAAG,KAAK,MAAM,KACV,GACE,KAAK,kBAAkB,KAAK,kBAAkB,MAAM,QACtD,GAAG,KAAK,QAAQ,MAAM,KAAK,UAAU,MAAM,GAAG,IAAI,aAAAA,QAChD,MACF,EAAE,GAAG,EAAE,EAAE,CAAC,KACV;AAAA,UACN,EACC,KAAK,IAAI,CAAC;AAAA,QACf;AAAA,MACF,MAAO,gBAAe;AAAA,IACxB,MAAO,gBAAe;AAAA,EACxB;AAEA,SAAO,OAAO,KAAK,EAAE,EAAE,WAAW,UAAU,MAAM;AACpD;AAEA,MAAM,sBAAsB,CAAC,SAAmB;AAC9C,QAAM,aAKC,CAAC;AAER,MAAI,aAAa;AACjB,MAAI,SAAS;AACb,WAAS,YAAY,GAAG,YAAY,KAAK,QAAQ,aAAa;AAC5D,UAAM,OAAO,KAAK,SAAS;AAO3B,UAAM,kBAAkB;AACxB,UAAM,cAAc;AAIpB,UAAM,YAAY,KAAK;AACvB,UAAM,QAAQ,UAAU,MAAM,IAAI;AAClC,UAAM,QAAQ,CAAC,MAAM,cAAc;AAEjC,UAAI,cAAc,GAAG;AACnB,kBAAU,KAAK;AAAA,MAEjB,WAAW,YAAY,GAAG;AACxB,sBAAc;AACd,iBAAS,KAAK;AAAA,MAChB;AAAA,IACF,CAAC;AAGD,QAAI,KAAK,UAAU,QAAQ,KAAK,YAAY,MAAM;AAChD,iBAAW,KAAK;AAAA,QACd,iBAAiB,kBAAkB;AAAA,QACnC;AAAA,QACA,eAAe;AAAA,QACf,WAAW;AAAA,QACX,GAAG;AAAA,MACL,CAAC;AAAA,IACH;AAAA,EACF;AACA,SAAO;AACT;",
  "names": ["chalk"]
  }
@@ -36,8 +36,7 @@ const mergeDotEnvFileContents = (existingFileLines, envContentsToAdd) => {
  newline = `${k}=${v}`;
  updatedEnvKeys.push(k);
  }
- if (newline || ln)
- envFileLines.push(newline || ln);
+ if (newline || ln) envFileLines.push(newline || ln);
  }
  for (const addKey of Object.keys(envContentsToAdd).filter(
  (efl) => !updatedEnvKeys.find((uek) => {
@@ -2,6 +2,6 @@
  "version": 3,
  "sources": ["../../src/util/dotenv.ts"],
  "sourcesContent": ["import { EnvContentsToAdd } from \"~/models/DevService\";\n\nexport const mergeDotEnvFileContents = (\n existingFileLines: string[],\n envContentsToAdd: EnvContentsToAdd\n): string[] => {\n const envFileLines: string[] = []; // the new .env file\n if (existingFileLines.length === 0) {\n // There is no env file, just create one from envContentsToAdd\n envFileLines.push(\n ...Object.entries(envContentsToAdd).map(([k, v]) => `${k}=${v}`)\n );\n } else {\n const updatedEnvKeys: string[] = [];\n // Find lines in env that already exist for the keys in envContentsToAdd\n // update them if they exist and add them to envFileLines\n for (const ln of existingFileLines) {\n let newline = '';\n for (const [k, v] of Object.entries(envContentsToAdd))\n if (ln.startsWith(`${k}=`)) {\n newline = `${k}=${v}`;\n updatedEnvKeys.push(k);\n }\n // Ensure an updated line or other lines from the existing env file are re-added\n if (newline || ln) envFileLines.push(newline || ln);\n }\n\n // Add the envContentsToAdd lines to the file that did not previously exist or had an update\n for (const addKey of Object.keys(envContentsToAdd).filter(\n efl =>\n !updatedEnvKeys.find(uek => uek.startsWith(`${efl.split('=')?.[0]}`))\n ) as (keyof typeof envContentsToAdd)[]) {\n envFileLines.push(`${addKey}=${envContentsToAdd[addKey]}`);\n }\n }\n return envFileLines;\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEO,MAAM,0BAA0B,CACrC,mBACA,qBACa;AACb,QAAM,eAAyB,CAAC;AAChC,MAAI,kBAAkB,WAAW,GAAG;AAElC,iBAAa;AAAA,MACX,GAAG,OAAO,QAAQ,gBAAgB,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,KAAK,GAAG;AAAA,IACjE;AAAA,EACF,OAAO;AACL,UAAM,iBAA2B,CAAC;AAGlC,eAAW,MAAM,mBAAmB;AAClC,UAAI,UAAU;AACd,iBAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,gBAAgB;AAClD,YAAI,GAAG,WAAW,GAAG,IAAI,GAAG;AAC1B,oBAAU,GAAG,KAAK;AAClB,yBAAe,KAAK,CAAC;AAAA,QACvB;AAEF,UAAI,WAAW;AAAI,qBAAa,KAAK,WAAW,EAAE;AAAA,IACpD;AAGA,eAAW,UAAU,OAAO,KAAK,gBAAgB,EAAE;AAAA,MACjD,SACE,CAAC,eAAe,KAAK,SAAI;AA9BjC;AA8BoC,mBAAI,WAAW,IAAG,SAAI,MAAM,GAAG,MAAb,mBAAiB,IAAI;AAAA,OAAC;AAAA,IACxE,GAAwC;AACtC,mBAAa,KAAK,GAAG,UAAU,iBAAiB,SAAS;AAAA,IAC3D;AAAA,EACF;AACA,SAAO;AACT;",
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEO,MAAM,0BAA0B,CACrC,mBACA,qBACa;AACb,QAAM,eAAyB,CAAC;AAChC,MAAI,kBAAkB,WAAW,GAAG;AAElC,iBAAa;AAAA,MACX,GAAG,OAAO,QAAQ,gBAAgB,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,EAAE;AAAA,IACjE;AAAA,EACF,OAAO;AACL,UAAM,iBAA2B,CAAC;AAGlC,eAAW,MAAM,mBAAmB;AAClC,UAAI,UAAU;AACd,iBAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,gBAAgB;AAClD,YAAI,GAAG,WAAW,GAAG,CAAC,GAAG,GAAG;AAC1B,oBAAU,GAAG,CAAC,IAAI,CAAC;AACnB,yBAAe,KAAK,CAAC;AAAA,QACvB;AAEF,UAAI,WAAW,GAAI,cAAa,KAAK,WAAW,EAAE;AAAA,IACpD;AAGA,eAAW,UAAU,OAAO,KAAK,gBAAgB,EAAE;AAAA,MACjD,SACE,CAAC,eAAe,KAAK,SAAI;AA9BjC;AA8BoC,mBAAI,WAAW,IAAG,SAAI,MAAM,GAAG,MAAb,mBAAiB,EAAE,EAAE;AAAA,OAAC;AAAA,IACxE,GAAwC;AACtC,mBAAa,KAAK,GAAG,MAAM,IAAI,iBAAiB,MAAM,CAAC,EAAE;AAAA,IAC3D;AAAA,EACF;AACA,SAAO;AACT;",
  "names": []
  }
@@ -2,6 +2,6 @@
  "version": 3,
  "sources": ["../../src/util/error.ts"],
  "sourcesContent": ["export const deconstructApiError = (error: MappedError) => {\n let inner = '';\n if (error.data?.[0]) {\n inner = `${error.data?.[0].Field}: ${error.data?.[0].Message}`;\n }\n return `${error.message} ${inner}`;\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAO,MAAM,sBAAsB,CAAC,UAAuB;AAA3D;AACE,MAAI,QAAQ;AACZ,OAAI,WAAM,SAAN,mBAAa,IAAI;AACnB,YAAQ,IAAG,WAAM,SAAN,mBAAa,GAAG,WAAU,WAAM,SAAN,mBAAa,GAAG;AAAA,EACvD;AACA,SAAO,GAAG,MAAM,WAAW;AAC7B;",
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAO,MAAM,sBAAsB,CAAC,UAAuB;AAA3D;AACE,MAAI,QAAQ;AACZ,OAAI,WAAM,SAAN,mBAAa,IAAI;AACnB,YAAQ,IAAG,WAAM,SAAN,mBAAa,GAAG,KAAK,MAAK,WAAM,SAAN,mBAAa,GAAG,OAAO;AAAA,EAC9D;AACA,SAAO,GAAG,MAAM,OAAO,IAAI,KAAK;AAClC;",
  "names": []
  }
@@ -52,8 +52,12 @@ const doRetry = ({ silent = false } = {}) => {
  return fn;
  };
  const fetchDefaults = {
+ // The timeout to apply to requests that do not supply a timeout option
  timeout: applyTimeout(60),
+ // Retry policy for all fetch requests
  retry: retryPolicy,
+ // Do retry function to examine failures and apply custom retry logic
+ // return true to retry the fetch call
  doRetry: doRetry()
  };
  const enhancedFetch = (0, import_enterprise_fetch.fetchWithDefaults)(fetchDefaults);
@@ -2,6 +2,6 @@
  "version": 3,
  "sources": ["../../src/util/fetch.ts"],
  "sourcesContent": ["import { DoRetry, FetchInit, fetchWithDefaults } from 'enterprise-fetch';\nimport { Logger } from './logger';\n\nconst retryPolicy = {\n retries: 2,\n minTimeout: 400,\n factor: 2,\n};\n\nconst applyTimeout = (s: number) =>\n process.env.NODE_ENV === 'test' ? 2 * 1000 : s * 1000;\n\nexport const doRetry = ({ silent = false } = {}) => {\n const fn: DoRetry = async (\n attempt: number,\n res: Response | AppError,\n { url, options } = { url: '', options: {} }\n ) => {\n // Get the retry policy from options or fetchDefaults\n const { retry = retryPolicy } = options || ({} as any);\n let shouldRetry = false;\n\n // Retry request on any network error, or 4xx or 5xx status codes\n if (\n !res.status ||\n (res.status >= 400 && ![400, 404, 409, 422, 500].includes(res.status))\n )\n if (\n !('message' in res) ||\n ('message' in res &&\n !(res.message as string).includes('Nock: No match'))\n )\n shouldRetry = true;\n\n if (attempt <= retry.retries) {\n // If a res has a status it is a HTTP error\n // With no status it could be a fetch error or app error\n const errorMessage = !res.status\n ? `${('name' in res && res.name) || ('type' in res && res.type)}: ${\n 'message' in res && res.message\n }`\n : `${res.status}: ${res.statusText}`;\n\n if (!silent)\n Logger.warning(\n `[fetch] ${\n shouldRetry\n ? `attempt ${attempt}/${retry.retries}`\n : 'non-retriable'\n } ${errorMessage} ${url || ''}`\n );\n } else {\n shouldRetry = false;\n }\n return await Promise.resolve(shouldRetry);\n };\n return fn;\n};\n\nconst fetchDefaults = {\n // The timeout to apply to requests that do not supply a timeout option\n timeout: applyTimeout(60),\n // Retry policy for all fetch requests\n retry: retryPolicy,\n // Do retry function to examine failures and apply custom retry logic\n // return true to retry the fetch call\n doRetry: doRetry(),\n} as FetchInit;\n\nexport const enhancedFetch = fetchWithDefaults(fetchDefaults);\n// export const assetFetch = fetchWithDefaults({\n// ...fetchDefaults,\n// timeout: applyTimeout(1200),\n// });\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,8BAAsD;AACtD,oBAAuB;AAEvB,MAAM,cAAc;AAAA,EAClB,SAAS;AAAA,EACT,YAAY;AAAA,EACZ,QAAQ;AACV;AAEA,MAAM,eAAe,CAAC,MACpB,QAAQ,IAAI,aAAa,SAAS,IAAI,MAAO,IAAI;AAE5C,MAAM,UAAU,CAAC,EAAE,SAAS,MAAM,IAAI,CAAC,MAAM;AAClD,QAAM,KAAc,OAClB,SACA,KACA,EAAE,KAAK,QAAQ,IAAI,EAAE,KAAK,IAAI,SAAS,CAAC,EAAE,MACvC;AAEH,UAAM,EAAE,QAAQ,YAAY,IAAI,WAAY,CAAC;AAC7C,QAAI,cAAc;AAGlB,QACE,CAAC,IAAI,UACJ,IAAI,UAAU,OAAO,CAAC,CAAC,KAAK,KAAK,KAAK,KAAK,GAAG,EAAE,SAAS,IAAI,MAAM;AAEpE,UACE,EAAE,aAAa,QACd,aAAa,OACZ,CAAE,IAAI,QAAmB,SAAS,gBAAgB;AAEpD,sBAAc;AAAA;AAElB,QAAI,WAAW,MAAM,SAAS;AAG5B,YAAM,eAAe,CAAC,IAAI,SACtB,GAAI,UAAU,OAAO,IAAI,QAAU,UAAU,OAAO,IAAI,SACtD,aAAa,OAAO,IAAI,YAE1B,GAAG,IAAI,WAAW,IAAI;AAE1B,UAAI,CAAC;AACH,6BAAO;AAAA,UACL,WACE,cACI,WAAW,WAAW,MAAM,YAC5B,mBACF,gBAAgB,OAAO;AAAA,QAC7B;AAAA,IACJ,OAAO;AACL,oBAAc;AAAA,IAChB;AACA,WAAO,MAAM,QAAQ,QAAQ,WAAW;AAAA,EAC1C;AACA,SAAO;AACT;AAEA,MAAM,gBAAgB;AAAA,EAEpB,SAAS,aAAa,EAAE;AAAA,EAExB,OAAO;AAAA,EAGP,SAAS,QAAQ;AACnB;AAEO,MAAM,oBAAgB,2CAAkB,aAAa;",
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,8BAAsD;AACtD,oBAAuB;AAEvB,MAAM,cAAc;AAAA,EAClB,SAAS;AAAA,EACT,YAAY;AAAA,EACZ,QAAQ;AACV;AAEA,MAAM,eAAe,CAAC,MACpB,QAAQ,IAAI,aAAa,SAAS,IAAI,MAAO,IAAI;AAE5C,MAAM,UAAU,CAAC,EAAE,SAAS,MAAM,IAAI,CAAC,MAAM;AAClD,QAAM,KAAc,OAClB,SACA,KACA,EAAE,KAAK,QAAQ,IAAI,EAAE,KAAK,IAAI,SAAS,CAAC,EAAE,MACvC;AAEH,UAAM,EAAE,QAAQ,YAAY,IAAI,WAAY,CAAC;AAC7C,QAAI,cAAc;AAGlB,QACE,CAAC,IAAI,UACJ,IAAI,UAAU,OAAO,CAAC,CAAC,KAAK,KAAK,KAAK,KAAK,GAAG,EAAE,SAAS,IAAI,MAAM;AAEpE,UACE,EAAE,aAAa,QACd,aAAa,OACZ,CAAE,IAAI,QAAmB,SAAS,gBAAgB;AAEpD,sBAAc;AAAA;AAElB,QAAI,WAAW,MAAM,SAAS;AAG5B,YAAM,eAAe,CAAC,IAAI,SACtB,GAAI,UAAU,OAAO,IAAI,QAAU,UAAU,OAAO,IAAI,IAAK,KAC3D,aAAa,OAAO,IAAI,OAC1B,KACA,GAAG,IAAI,MAAM,KAAK,IAAI,UAAU;AAEpC,UAAI,CAAC;AACH,6BAAO;AAAA,UACL,WACE,cACI,WAAW,OAAO,IAAI,MAAM,OAAO,KACnC,eACN,IAAI,YAAY,IAAI,OAAO,EAAE;AAAA,QAC/B;AAAA,IACJ,OAAO;AACL,oBAAc;AAAA,IAChB;AACA,WAAO,MAAM,QAAQ,QAAQ,WAAW;AAAA,EAC1C;AACA,SAAO;AACT;AAEA,MAAM,gBAAgB;AAAA;AAAA,EAEpB,SAAS,aAAa,EAAE;AAAA;AAAA,EAExB,OAAO;AAAA;AAAA;AAAA,EAGP,SAAS,QAAQ;AACnB;AAEO,MAAM,oBAAgB,2CAAkB,aAAa;",
  "names": []
  }
package/dist/util/git.js CHANGED
@@ -18,6 +18,10 @@ var __copyProps = (to, from, except, desc) => {
  return to;
  };
  var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
  ));
@@ -58,8 +62,7 @@ class GitHelper {
  get originUrl() {
  var _a, _b, _c;
  const originUrl = (_c = (_b = (_a = this == null ? void 0 : this.config) == null ? void 0 : _a.remote) == null ? void 0 : _b.origin) == null ? void 0 : _c.url;
- if (originUrl)
- return originUrl;
+ if (originUrl) return originUrl;
  }
  get secretsUri() {
  return `${this.type === "github" ? `${this.home}/settings/secrets/actions` : `${this.home}/-/settings/ci_cd`}`;
@@ -81,10 +84,8 @@ class GitHelper {
  gitInfo = (url = this.originUrl) => import_hosted_git_info.default.fromUrl(url);
  hostType = (url = this.originUrl) => {
  if (url) {
- if (url.includes("github.com"))
- return "github";
- else
- return "gitlab";
+ if (url.includes("github.com")) return "github";
+ else return "gitlab";
  }
  };
  gitConfig = (cwd = this.gitRepoPath) => {
@@ -93,8 +94,7 @@ class GitHelper {
  path: ".git/config",
  expandKeys: true
  });
- if (Object.keys(config || {}).length)
- return config;
+ if (Object.keys(config || {}).length) return config;
  const pathParts = (0, import_os.linuxSlash)(cwd).split("/");
  for (let i = 1; i <= pathParts.length; i++) {
  const relPath = `${Array(i).fill("..").join("/")}/.git/config`;
@@ -2,6 +2,6 @@
  "version": 3,
  "sources": ["../../src/util/git.ts"],
  "sourcesContent": ["import giturl from 'giturl';\nimport hostedGitInfo from 'hosted-git-info';\nimport parseGitConfig from 'parse-git-config';\nimport path from 'path';\n\nimport { linuxSlash } from './os';\nimport { readFile, readFiles } from '~/providers/file-provider';\nimport { Logger } from './logger';\n\nconst GITLAB_CI_FILENAME = '.gitlab-ci.yml';\n\ntype GitConfig = parseGitConfig.Config;\n\nexport type GitTypes = hostedGitInfo.Hosts;\n\nexport class GitHelper {\n private gitRepoPath: string;\n private ciFile?: string;\n\n config = {} as GitConfig;\n info: hostedGitInfo | undefined;\n home: string | undefined;\n\n set ciFileName(fileName: string) {\n this.ciFile = fileName;\n }\n\n get ciFileName() {\n return (\n this.ciFile ||\n (this.workflows\n ? this.type === 'github'\n ? this.workflows.length > 1\n ? '[multiple workflows]'\n : this.workflows?.[0]\n : GITLAB_CI_FILENAME\n : '[unknown]')\n );\n }\n get ciFilePath() {\n return `${this.gitRepoPath}/${this.ciFileName}`;\n }\n get name() {\n return (\n this.info?.project || this.home?.split('/').pop() || '[set arg --name]'\n );\n }\n get originUrl() {\n const originUrl = this?.config?.remote?.origin?.url;\n if (originUrl) return originUrl;\n }\n get secretsUri() {\n return `${\n this.type === 'github'\n ? `${this.home}/settings/secrets/actions`\n : `${this.home}/-/settings/ci_cd`\n }`;\n }\n get type() {\n return this.info?.type || this.hostType();\n }\n get workflows() {\n return this.type === 'github'\n ? this.githubWorkflows()\n : this.gitlabWorkflow();\n }\n constructor(gitRepoPath: string = process.cwd()) {\n this.gitRepoPath = gitRepoPath;\n this.config = this.gitConfig();\n this.home = giturl.parse(this.originUrl);\n this.info = this.gitInfo();\n // console.log(this.config);\n // console.log(this.home);\n // console.log(this.info);\n }\n gitcwd = () => path.join(this.gitRepoPath);\n gitInfo = (url: string = this.originUrl) => hostedGitInfo.fromUrl(url);\n hostType = (url: string = this.originUrl): GitTypes | undefined => {\n if (url) {\n if (url.includes('github.com')) return 'github';\n else return 'gitlab';\n }\n\n // if (url.includes('gitlab.com')) return 'gl';\n // if (url.includes('gitlab.zengenti.com')) return 'gl';\n };\n gitConfig = (cwd = this.gitRepoPath) => {\n // Find .git/config in project cwd\n const config = parseGitConfig.sync({\n cwd,\n path: '.git/config',\n expandKeys: true,\n });\n // console.log(cwd, config);\n if (Object.keys(config || {}).length) return config;\n\n // Recursively check the directory heirarchy for existance of a .git/config\n const pathParts = linuxSlash(cwd).split('/');\n for (let i = 1; i <= pathParts.length; i++) {\n const relPath = `${Array(i).fill('..').join('/')}/.git/config`;\n // Does not appear to work when using a shortened cwd, using relative path instead\n const config = parseGitConfig.sync({\n path: relPath,\n expandKeys: true,\n });\n // console.log(relPath, config);\n if (Object.keys(config || {}).length) {\n this.gitRepoPath = path.join(\n this.gitRepoPath,\n Array(i).fill('..').join('/')\n );\n return config;\n }\n }\n return config;\n };\n githubWorkflows = () => {\n const workflowPath = path.join(this.gitcwd(), '.github/workflows');\n const workflowFiles = readFiles(workflowPath, false);\n const addFolderSuffix = (files: string[]) =>\n files.map(f => `.github/workflows/${f}`);\n\n if (workflowFiles.some(f => f.includes('build'))) {\n return addFolderSuffix(workflowFiles.filter(f => f.includes('build')));\n } else {\n return addFolderSuffix(workflowFiles);\n }\n };\n 
gitlabWorkflow = (ciFileName = GITLAB_CI_FILENAME) => {\n const workflowPath = this.gitcwd();\n const workflowFilePath = path.join(workflowPath, ciFileName);\n const workflowFile = readFile(workflowFilePath);\n // console.log(ciFileName, workflowFile);\n\n return workflowFile;\n };\n checkIsRepo = () => {\n if (\n this.config &&\n this.config.core &&\n this.config.core.repositoryformatversion\n ) {\n Logger.success('You are inside a Git repository.');\n return true;\n } else {\n Logger.error('You are not inside a Git repository.');\n return false;\n }\n };\n}\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAmB;AACnB,6BAA0B;AAC1B,8BAA2B;AAC3B,kBAAiB;AAEjB,gBAA2B;AAC3B,2BAAoC;AACpC,oBAAuB;AAEvB,MAAM,qBAAqB;AAMpB,MAAM,UAAU;AAAA,EACb;AAAA,EACA;AAAA,EAER,SAAS,CAAC;AAAA,EACV;AAAA,EACA;AAAA,EAEA,IAAI,WAAW,UAAkB;AAC/B,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,aAAa;AA3BnB;AA4BI,WACE,KAAK,WACJ,KAAK,YACF,KAAK,SAAS,WACZ,KAAK,UAAU,SAAS,IACtB,0BACA,UAAK,cAAL,mBAAiB,KACnB,qBACF;AAAA,EAER;AAAA,EACA,IAAI,aAAa;AACf,WAAO,GAAG,KAAK,eAAe,KAAK;AAAA,EACrC;AAAA,EACA,IAAI,OAAO;AA1Cb;AA2CI,aACE,UAAK,SAAL,mBAAW,cAAW,UAAK,SAAL,mBAAW,MAAM,KAAK,UAAS;AAAA,EAEzD;AAAA,EACA,IAAI,YAAY;AA/ClB;AAgDI,UAAM,aAAY,8CAAM,WAAN,mBAAc,WAAd,mBAAsB,WAAtB,mBAA8B;AAChD,QAAI;AAAW,aAAO;AAAA,EACxB;AAAA,EACA,IAAI,aAAa;AACf,WAAO,GACL,KAAK,SAAS,WACV,GAAG,KAAK,kCACR,GAAG,KAAK;AAAA,EAEhB;AAAA,EACA,IAAI,OAAO;AA1Db;AA2DI,aAAO,UAAK,SAAL,mBAAW,SAAQ,KAAK,SAAS;AAAA,EAC1C;AAAA,EACA,IAAI,YAAY;AACd,WAAO,KAAK,SAAS,WACjB,KAAK,gBAAgB,IACrB,KAAK,eAAe;AAAA,EAC1B;AAAA,EACA,YAAY,cAAsB,QAAQ,IAAI,GAAG;AAC/C,SAAK,cAAc;AACnB,SAAK,SAAS,KAAK,UAAU;AAC7B,SAAK,OAAO,cAAAA,QAAO,MAAM,KAAK,SAAS;AACvC,SAAK,OAAO,KAAK,QAAQ;AAAA,EAI3B;AAAA,EACA,SAAS,MAAM,YAAAC,QAAK,KAAK,KAAK,WAAW;AAAA,EACzC,UAAU,CAAC,MAAc,KAAK,cAAc,uBAAAC,QAAc,QAAQ,GAAG;AAAA,EACrE,WAAW,CAAC,MAAc,KAAK,cAAoC;AACjE,QAAI,KAAK;AACP,UAAI,IAAI,SAAS,YAAY;AAAG,eAAO;AAAA;AAClC,eAAO;AAAA,IACd;AAAA,EAIF;AAAA,EACA,YAAY,CAAC,MAAM,KAAK,gBAAgB;AAEtC,UAAM,SAAS,wBAAAC,QAAe,KAAK;AAAA,MACjC;AAAA,MACA,MAAM;AAAA,MACN,YAAY;AAAA,IACd,CAAC;AAED,QAAI,OAAO,KAAK,UAAU,CAAC,CAAC,EAAE;AAAQ,aAAO;AAG7C,UAAM,gBAAY,sBAAW,GAAG,EAAE,MAAM,GAAG;AAC3C,aAAS,IAAI,GAAG,KAAK,UAAU,QAAQ,KAAK;AAC1C,YAAM,UAAU,GAAG,MAAM,CAAC,EAAE,KAAK,IAAI,EAAE,KAAK,GAAG;AAE/C,YAAMC,UAAS,wBAAAD,QAAe,KAAK;AAAA,QACjC,MAAM;AAAA,QACN,YAAY;AAAA,MACd,CAAC;AAED,UAAI,OAAO,KAAKC,WAAU,CAAC,CAAC,EAAE,QAAQ;AACpC,aAAK,cAAc,YAAAH,QAAK;AAAA,UACtB,KAAK;AAAA,UACL,MAAM,CAAC,EAAE,KAAK,IAAI,EAAE,KAAK,GAAG;AAAA,QAC9B;AACA,eAAOG;AAAA,MACT;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EACA,kBAAkB,MAAM;AACtB,UAAM,eAAe,YAAAH,QAAK,KAAK,KAAK,OAAO,GAAG,mBAAmB;AACjE,UAAM,oBAAgB,gCAAU,cAAc,KAAK;AACnD,UAAM,kBAAkB,CAAC,UACvB,MAAM,IAAI,OAAK,qBAAqB,GAAG;AAEzC,QAAI,cAAc,KAAK,OAAK,EAAE,SAAS,OAAO,CAAC,GAAG;AAChD,aAAO,gBAAgB,cAAc,OAAO,OAAK,EAAE,SAAS,OAAO,CAAC,CAAC;AAAA,IACvE,OAAO;AACL,aAAO,gBAAgB,aAAa;AAAA,IACtC;AAAA,EACF;AAAA,EACA,iBAAiB,CAAC,aAAa,uBAAuB;AACpD,UAAM,eAAe,KAAK,OAAO;AACjC,UAAM,mBAAmB,YAAAA,QAAK,KAAK,cAAc,UAAU;AAC3D,UAAM,mBAAe,+BAAS,gBAAgB;AAG9C,WAAO;AAAA,EACT;AAAA,EACA,cAAc,MAAM;AAClB,QACE,KAAK,UACL,KAAK,OAAO,QACZ,KAAK,OAAO,KAAK,yBACjB;AACA,2BAAO,QAAQ,kCAAkC;AACjD,aAAO;AAAA,IACT,OAAO;AACL,2BAAO,MAAM,sCAAsC;AACnD,aAAO;AAAA,IACT;AAAA,EACF;AACF;",
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAmB;AACnB,6BAA0B;AAC1B,8BAA2B;AAC3B,kBAAiB;AAEjB,gBAA2B;AAC3B,2BAAoC;AACpC,oBAAuB;AAEvB,MAAM,qBAAqB;AAMpB,MAAM,UAAU;AAAA,EACb;AAAA,EACA;AAAA,EAER,SAAS,CAAC;AAAA,EACV;AAAA,EACA;AAAA,EAEA,IAAI,WAAW,UAAkB;AAC/B,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,aAAa;AA3BnB;AA4BI,WACE,KAAK,WACJ,KAAK,YACF,KAAK,SAAS,WACZ,KAAK,UAAU,SAAS,IACtB,0BACA,UAAK,cAAL,mBAAiB,KACnB,qBACF;AAAA,EAER;AAAA,EACA,IAAI,aAAa;AACf,WAAO,GAAG,KAAK,WAAW,IAAI,KAAK,UAAU;AAAA,EAC/C;AAAA,EACA,IAAI,OAAO;AA1Cb;AA2CI,aACE,UAAK,SAAL,mBAAW,cAAW,UAAK,SAAL,mBAAW,MAAM,KAAK,UAAS;AAAA,EAEzD;AAAA,EACA,IAAI,YAAY;AA/ClB;AAgDI,UAAM,aAAY,8CAAM,WAAN,mBAAc,WAAd,mBAAsB,WAAtB,mBAA8B;AAChD,QAAI,UAAW,QAAO;AAAA,EACxB;AAAA,EACA,IAAI,aAAa;AACf,WAAO,GACL,KAAK,SAAS,WACV,GAAG,KAAK,IAAI,8BACZ,GAAG,KAAK,IAAI,mBAClB;AAAA,EACF;AAAA,EACA,IAAI,OAAO;AA1Db;AA2DI,aAAO,UAAK,SAAL,mBAAW,SAAQ,KAAK,SAAS;AAAA,EAC1C;AAAA,EACA,IAAI,YAAY;AACd,WAAO,KAAK,SAAS,WACjB,KAAK,gBAAgB,IACrB,KAAK,eAAe;AAAA,EAC1B;AAAA,EACA,YAAY,cAAsB,QAAQ,IAAI,GAAG;AAC/C,SAAK,cAAc;AACnB,SAAK,SAAS,KAAK,UAAU;AAC7B,SAAK,OAAO,cAAAA,QAAO,MAAM,KAAK,SAAS;AACvC,SAAK,OAAO,KAAK,QAAQ;AAAA,EAI3B;AAAA,EACA,SAAS,MAAM,YAAAC,QAAK,KAAK,KAAK,WAAW;AAAA,EACzC,UAAU,CAAC,MAAc,KAAK,cAAc,uBAAAC,QAAc,QAAQ,GAAG;AAAA,EACrE,WAAW,CAAC,MAAc,KAAK,cAAoC;AACjE,QAAI,KAAK;AACP,UAAI,IAAI,SAAS,YAAY,EAAG,QAAO;AAAA,UAClC,QAAO;AAAA,IACd;AAAA,EAIF;AAAA,EACA,YAAY,CAAC,MAAM,KAAK,gBAAgB;AAEtC,UAAM,SAAS,wBAAAC,QAAe,KAAK;AAAA,MACjC;AAAA,MACA,MAAM;AAAA,MACN,YAAY;AAAA,IACd,CAAC;AAED,QAAI,OAAO,KAAK,UAAU,CAAC,CAAC,EAAE,OAAQ,QAAO;AAG7C,UAAM,gBAAY,sBAAW,GAAG,EAAE,MAAM,GAAG;AAC3C,aAAS,IAAI,GAAG,KAAK,UAAU,QAAQ,KAAK;AAC1C,YAAM,UAAU,GAAG,MAAM,CAAC,EAAE,KAAK,IAAI,EAAE,KAAK,GAAG,CAAC;AAEhD,YAAMC,UAAS,wBAAAD,QAAe,KAAK;AAAA,QACjC,MAAM;AAAA,QACN,YAAY;AAAA,MACd,CAAC;AAED,UAAI,OAAO,KAAKC,WAAU,CAAC,CAAC,EAAE,QAAQ;AACpC,aAAK,cAAc,YAAAH,QAAK;AAAA,UACtB,KAAK;AAAA,UACL,MAAM,CAAC,EAAE,KAAK,IAAI,EAAE,KAAK,GAAG;AAAA,QAC9B;AACA,eAAOG;AAAA,MACT;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EACA,kBAAkB,MAAM;AACtB,UAAM,eAAe,YAAAH,QAAK,KAAK,KAAK,OAAO,GAAG,mBAAmB;AACjE,UAAM,oBAAgB,gCAAU,cAAc,KAAK;AACnD,UAAM,kBAAkB,CAAC,UACvB,MAAM,IAAI,OAAK,qBAAqB,CAAC,EAAE;AAEzC,QAAI,cAAc,KAAK,OAAK,EAAE,SAAS,OAAO,CAAC,GAAG;AAChD,aAAO,gBAAgB,cAAc,OAAO,OAAK,EAAE,SAAS,OAAO,CAAC,CAAC;AAAA,IACvE,OAAO;AACL,aAAO,gBAAgB,aAAa;AAAA,IACtC;AAAA,EACF;AAAA,EACA,iBAAiB,CAAC,aAAa,uBAAuB;AACpD,UAAM,eAAe,KAAK,OAAO;AACjC,UAAM,mBAAmB,YAAAA,QAAK,KAAK,cAAc,UAAU;AAC3D,UAAM,mBAAe,+BAAS,gBAAgB;AAG9C,WAAO;AAAA,EACT;AAAA,EACA,cAAc,MAAM;AAClB,QACE,KAAK,UACL,KAAK,OAAO,QACZ,KAAK,OAAO,KAAK,yBACjB;AACA,2BAAO,QAAQ,kCAAkC;AACjD,aAAO;AAAA,IACT,OAAO;AACL,2BAAO,MAAM,sCAAsC;AACnD,aAAO;AAAA,IACT;AAAA,EACF;AACF;",
  "names": ["giturl", "path", "hostedGitInfo", "parseGitConfig", "config"]
  }
@@ -18,6 +18,10 @@ var __copyProps = (to, from, except, desc) => {
  return to;
  };
  var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
  ));
@@ -2,6 +2,6 @@
  "version": 3,
  "sources": ["../../src/util/gitignore.ts"],
  "sourcesContent": ["import fs from 'fs';\nimport { Logger } from './logger';\n\nexport const mergeContentsToAddWithGitignore = (\n filename: string,\n contentsToAdd: string[]\n) => {\n // Check if .gitignore file already exists\n if (fs.existsSync(filename)) {\n // Read the existing .gitignore file\n const existingContent = fs.readFileSync(filename, 'utf-8');\n\n // Create a Set from existing patterns for fast look-up\n const existingContentSet = new Set(\n existingContent.split('\\n').filter(line => line.trim() !== '')\n );\n\n // Filter out patterns that already exist\n const newContents = contentsToAdd.filter(\n contentsItem => !existingContentSet.has(contentsItem)\n );\n\n if (newContents.length >= 1) {\n // Append the new patterns to the end of the existing .gitignore content\n fs.appendFileSync(filename, '\\n' + newContents.join('\\n'));\n Logger.success('.gitignore file updated');\n } else {\n Logger.success('.gitignore checked, nothing to update');\n }\n } else {\n // If .gitignore doesn't exist, create one and add the contents\n const gitignoreContent = contentsToAdd.join('\\n');\n fs.writeFileSync(filename, gitignoreContent);\n\n Logger.success('.gitignore file created and updated');\n }\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAe;AACf,oBAAuB;AAEhB,MAAM,kCAAkC,CAC7C,UACA,kBACG;AAEH,MAAI,UAAAA,QAAG,WAAW,QAAQ,GAAG;AAE3B,UAAM,kBAAkB,UAAAA,QAAG,aAAa,UAAU,OAAO;AAGzD,UAAM,qBAAqB,IAAI;AAAA,MAC7B,gBAAgB,MAAM,IAAI,EAAE,OAAO,UAAQ,KAAK,KAAK,MAAM,EAAE;AAAA,IAC/D;AAGA,UAAM,cAAc,cAAc;AAAA,MAChC,kBAAgB,CAAC,mBAAmB,IAAI,YAAY;AAAA,IACtD;AAEA,QAAI,YAAY,UAAU,GAAG;AAE3B,gBAAAA,QAAG,eAAe,UAAU,OAAO,YAAY,KAAK,IAAI,CAAC;AACzD,2BAAO,QAAQ,yBAAyB;AAAA,IAC1C,OAAO;AACL,2BAAO,QAAQ,uCAAuC;AAAA,IACxD;AAAA,EACF,OAAO;AAEL,UAAM,mBAAmB,cAAc,KAAK,IAAI;AAChD,cAAAA,QAAG,cAAc,UAAU,gBAAgB;AAE3C,yBAAO,QAAQ,qCAAqC;AAAA,EACtD;AACF;",
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAe;AACf,oBAAuB;AAEhB,MAAM,kCAAkC,CAC7C,UACA,kBACG;AAEH,MAAI,UAAAA,QAAG,WAAW,QAAQ,GAAG;AAE3B,UAAM,kBAAkB,UAAAA,QAAG,aAAa,UAAU,OAAO;AAGzD,UAAM,qBAAqB,IAAI;AAAA,MAC7B,gBAAgB,MAAM,IAAI,EAAE,OAAO,UAAQ,KAAK,KAAK,MAAM,EAAE;AAAA,IAC/D;AAGA,UAAM,cAAc,cAAc;AAAA,MAChC,kBAAgB,CAAC,mBAAmB,IAAI,YAAY;AAAA,IACtD;AAEA,QAAI,YAAY,UAAU,GAAG;AAE3B,gBAAAA,QAAG,eAAe,UAAU,OAAO,YAAY,KAAK,IAAI,CAAC;AACzD,2BAAO,QAAQ,yBAAyB;AAAA,IAC1C,OAAO;AACL,2BAAO,QAAQ,uCAAuC;AAAA,IACxD;AAAA,EACF,OAAO;AAEL,UAAM,mBAAmB,cAAc,KAAK,IAAI;AAChD,cAAAA,QAAG,cAAc,UAAU,gBAAgB;AAE3C,yBAAO,QAAQ,qCAAqC;AAAA,EACtD;AACF;",
  "names": ["fs"]
  }
@@ -18,6 +18,10 @@ var __copyProps = (to, from, except, desc) => {
  return to;
  };
  var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
  ));
@@ -73,7 +77,7 @@ const url = (alias, project) => {
  };
  const Logging = async (language = "en-GB") => {
  const defaultMessages = import_en_GB.LogMessages;
- let localisedMessages = defaultMessages;
+ const localisedMessages = defaultMessages;
  if (language === "en-GB") {
  }
  return {
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/util/index.ts"],
- "sourcesContent": ["import mergeWith from 'lodash/mergeWith';\nimport { Logger } from './logger';\nimport { LogMessages as enGB } from '../localisation/en-GB.js';\n\nexport const isSharedSecret = (str = '') =>\n str.length > 80 && str.split('-').length === 3 ? str : undefined;\n\nexport const isPassword = (str = '') =>\n !isSharedSecret(str) ? str : undefined;\n\nexport const tryParse = (str: any) => {\n try {\n return typeof str === 'object' ? str : JSON.parse(str);\n } catch (e) {\n return false;\n }\n};\n\nexport const isJson = (str?: string) =>\n typeof str === 'object' || !!tryParse(str);\n\nexport const tryStringify = (obj: any) => {\n try {\n return typeof obj === 'object' ? JSON.stringify(obj) : obj;\n } catch (e) {\n return obj;\n }\n};\n\nexport const isSysError = (error: any): error is Error =>\n error?.message !== undefined && error.stack;\n\nexport const isUuid = (str: string) => {\n // Regular expression to check if string is a valid UUID\n const regexExp =\n /^[0-9a-fA-F]{8}\\b-[0-9a-fA-F]{4}\\b-[0-9a-fA-F]{4}\\b-[0-9a-fA-F]{4}\\b-[0-9a-fA-F]{12}$/gi;\n\n return regexExp.test(str);\n};\n\nexport const url = (alias: string, project: string) => {\n const projectAndAlias =\n project && project.toLowerCase() !== 'website'\n ? `${project.toLowerCase()}-${alias}`\n : alias;\n return {\n api: `https://api-${alias}.cloud.contensis.com`,\n cms: `https://cms-${alias}.cloud.contensis.com`,\n liveWeb: `https://live-${projectAndAlias}.cloud.contensis.com`,\n previewWeb: `https://preview-${projectAndAlias}.cloud.contensis.com`,\n iisWeb: `https://iis-live-${projectAndAlias}.cloud.contensis.com`,\n iisPreviewWeb: `https://iis-preview-${projectAndAlias}.cloud.contensis.com`,\n };\n};\n\nexport const Logging = async (language = 'en-GB') => {\n const defaultMessages = enGB;\n // const { LogMessages: defaultMessages } = await import(\n // `../localisation/en-GB.js`\n // );\n let localisedMessages = defaultMessages;\n\n if (language === 'en-GB') {\n // Using a variable import e.g. `import(`../localisation/${language}.js`);`\n // does not play well with packaged executables\n // So we have to hard code the import for each language individually\n }\n return {\n messages: mergeWith(\n localisedMessages,\n defaultMessages,\n (v, s) => v || s\n ) as typeof defaultMessages,\n Log: Logger,\n };\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAAsB;AACtB,oBAAuB;AACvB,mBAAoC;AAE7B,MAAM,iBAAiB,CAAC,MAAM,OACnC,IAAI,SAAS,MAAM,IAAI,MAAM,GAAG,EAAE,WAAW,IAAI,MAAM;AAElD,MAAM,aAAa,CAAC,MAAM,OAC/B,CAAC,eAAe,GAAG,IAAI,MAAM;AAExB,MAAM,WAAW,CAAC,QAAa;AACpC,MAAI;AACF,WAAO,OAAO,QAAQ,WAAW,MAAM,KAAK,MAAM,GAAG;AAAA,EACvD,SAAS,GAAP;AACA,WAAO;AAAA,EACT;AACF;AAEO,MAAM,SAAS,CAAC,QACrB,OAAO,QAAQ,YAAY,CAAC,CAAC,SAAS,GAAG;AAEpC,MAAM,eAAe,CAAC,QAAa;AACxC,MAAI;AACF,WAAO,OAAO,QAAQ,WAAW,KAAK,UAAU,GAAG,IAAI;AAAA,EACzD,SAAS,GAAP;AACA,WAAO;AAAA,EACT;AACF;AAEO,MAAM,aAAa,CAAC,WACzB,+BAAO,aAAY,UAAa,MAAM;AAEjC,MAAM,SAAS,CAAC,QAAgB;AAErC,QAAM,WACJ;AAEF,SAAO,SAAS,KAAK,GAAG;AAC1B;AAEO,MAAM,MAAM,CAAC,OAAe,YAAoB;AACrD,QAAM,kBACJ,WAAW,QAAQ,YAAY,MAAM,YACjC,GAAG,QAAQ,YAAY,KAAK,UAC5B;AACN,SAAO;AAAA,IACL,KAAK,eAAe;AAAA,IACpB,KAAK,eAAe;AAAA,IACpB,SAAS,gBAAgB;AAAA,IACzB,YAAY,mBAAmB;AAAA,IAC/B,QAAQ,oBAAoB;AAAA,IAC5B,eAAe,uBAAuB;AAAA,EACxC;AACF;AAEO,MAAM,UAAU,OAAO,WAAW,YAAY;AACnD,QAAM,kBAAkB,aAAAA;AAIxB,MAAI,oBAAoB;AAExB,MAAI,aAAa,SAAS;AAAA,EAI1B;AACA,SAAO;AAAA,IACL,cAAU,iBAAAC;AAAA,MACR;AAAA,MACA;AAAA,MACA,CAAC,GAAG,MAAM,KAAK;AAAA,IACjB;AAAA,IACA,KAAK;AAAA,EACP;AACF;",
+ "sourcesContent": ["import mergeWith from 'lodash/mergeWith';\nimport { Logger } from './logger';\nimport { LogMessages as enGB } from '../localisation/en-GB.js';\n\nexport const isSharedSecret = (str = '') =>\n str.length > 80 && str.split('-').length === 3 ? str : undefined;\n\nexport const isPassword = (str = '') =>\n !isSharedSecret(str) ? str : undefined;\n\nexport const tryParse = (str: any) => {\n try {\n return typeof str === 'object' ? str : JSON.parse(str);\n } catch (e) {\n return false;\n }\n};\n\nexport const isJson = (str?: string) =>\n typeof str === 'object' || !!tryParse(str);\n\nexport const tryStringify = (obj: any) => {\n try {\n return typeof obj === 'object' ? JSON.stringify(obj) : obj;\n } catch (e) {\n return obj;\n }\n};\n\nexport const isSysError = (error: any): error is Error =>\n error?.message !== undefined && error.stack;\n\nexport const isUuid = (str: string) => {\n // Regular expression to check if string is a valid UUID\n const regexExp =\n /^[0-9a-fA-F]{8}\\b-[0-9a-fA-F]{4}\\b-[0-9a-fA-F]{4}\\b-[0-9a-fA-F]{4}\\b-[0-9a-fA-F]{12}$/gi;\n\n return regexExp.test(str);\n};\n\nexport const url = (alias: string, project: string) => {\n const projectAndAlias =\n project && project.toLowerCase() !== 'website'\n ? `${project.toLowerCase()}-${alias}`\n : alias;\n return {\n api: `https://api-${alias}.cloud.contensis.com`,\n cms: `https://cms-${alias}.cloud.contensis.com`,\n liveWeb: `https://live-${projectAndAlias}.cloud.contensis.com`,\n previewWeb: `https://preview-${projectAndAlias}.cloud.contensis.com`,\n iisWeb: `https://iis-live-${projectAndAlias}.cloud.contensis.com`,\n iisPreviewWeb: `https://iis-preview-${projectAndAlias}.cloud.contensis.com`,\n };\n};\n\nexport const Logging = async (language = 'en-GB') => {\n const defaultMessages = enGB;\n // const { LogMessages: defaultMessages } = await import(\n // `../localisation/en-GB.js`\n // );\n const localisedMessages = defaultMessages;\n\n if (language === 'en-GB') {\n // Using a variable import e.g. `import(`../localisation/${language}.js`);`\n // does not play well with packaged executables\n // So we have to hard code the import for each language individually\n }\n return {\n messages: mergeWith(\n localisedMessages,\n defaultMessages,\n (v, s) => v || s\n ) as typeof defaultMessages,\n Log: Logger,\n };\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAAsB;AACtB,oBAAuB;AACvB,mBAAoC;AAE7B,MAAM,iBAAiB,CAAC,MAAM,OACnC,IAAI,SAAS,MAAM,IAAI,MAAM,GAAG,EAAE,WAAW,IAAI,MAAM;AAElD,MAAM,aAAa,CAAC,MAAM,OAC/B,CAAC,eAAe,GAAG,IAAI,MAAM;AAExB,MAAM,WAAW,CAAC,QAAa;AACpC,MAAI;AACF,WAAO,OAAO,QAAQ,WAAW,MAAM,KAAK,MAAM,GAAG;AAAA,EACvD,SAAS,GAAG;AACV,WAAO;AAAA,EACT;AACF;AAEO,MAAM,SAAS,CAAC,QACrB,OAAO,QAAQ,YAAY,CAAC,CAAC,SAAS,GAAG;AAEpC,MAAM,eAAe,CAAC,QAAa;AACxC,MAAI;AACF,WAAO,OAAO,QAAQ,WAAW,KAAK,UAAU,GAAG,IAAI;AAAA,EACzD,SAAS,GAAG;AACV,WAAO;AAAA,EACT;AACF;AAEO,MAAM,aAAa,CAAC,WACzB,+BAAO,aAAY,UAAa,MAAM;AAEjC,MAAM,SAAS,CAAC,QAAgB;AAErC,QAAM,WACJ;AAEF,SAAO,SAAS,KAAK,GAAG;AAC1B;AAEO,MAAM,MAAM,CAAC,OAAe,YAAoB;AACrD,QAAM,kBACJ,WAAW,QAAQ,YAAY,MAAM,YACjC,GAAG,QAAQ,YAAY,CAAC,IAAI,KAAK,KACjC;AACN,SAAO;AAAA,IACL,KAAK,eAAe,KAAK;AAAA,IACzB,KAAK,eAAe,KAAK;AAAA,IACzB,SAAS,gBAAgB,eAAe;AAAA,IACxC,YAAY,mBAAmB,eAAe;AAAA,IAC9C,QAAQ,oBAAoB,eAAe;AAAA,IAC3C,eAAe,uBAAuB,eAAe;AAAA,EACvD;AACF;AAEO,MAAM,UAAU,OAAO,WAAW,YAAY;AACnD,QAAM,kBAAkB,aAAAA;AAIxB,QAAM,oBAAoB;AAE1B,MAAI,aAAa,SAAS;AAAA,EAI1B;AACA,SAAO;AAAA,IACL,cAAU,iBAAAC;AAAA,MACR;AAAA,MACA;AAAA,MACA,CAAC,GAAG,MAAM,KAAK;AAAA,IACjB;AAAA,IACA,KAAK;AAAA,EACP;AACF;",
  "names": ["enGB", "mergeWith"]
  }
@@ -18,6 +18,10 @@ var __copyProps = (to, from, except, desc) => {
  return to;
  };
  var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
  ));
@@ -36,12 +40,10 @@ const jsonFormatter = (obj, fields) => JSON.stringify(limitFields(obj, fields),
  const flattenObject = (obj) => (0, import_flat.flatten)((0, import_deep_cleaner.default)(obj, ["workflow"]), { maxDepth: 99 });
  const unflattenObject = (obj) => (0, import_flat.unflatten)(obj, { overwrite: true });
  const limitFields = (obj, fields) => {
- if (!fields)
- return obj;
+ if (!fields) return obj;
  if (obj && Array.isArray(obj)) {
  const arr = [];
- for (const child of obj)
- arr.push(limitFields(child, fields));
+ for (const child of obj) arr.push(limitFields(child, fields));
  return arr;
  }
  if (obj && typeof obj === "object") {
@@ -2,6 +2,6 @@
  "version": 3,
  "sources": ["../../src/util/json.formatter.ts"],
  "sourcesContent": ["import { flatten, unflatten } from 'flat';\nimport cleaner from 'deep-cleaner';\n\n// Format a JSON object for a nice output\nexport const jsonFormatter = <T>(obj: T, fields?: string[]) =>\n JSON.stringify(limitFields(obj, fields), null, 2);\n\n// Flatten a JSON object such as an entry so there are no\n// nested object and the keys are presented like \"sys.version.versionNo\": \"1.0\"\nexport const flattenObject = (obj: any) =>\n flatten(cleaner(obj, ['workflow']), { maxDepth: 99 });\n\n// Unflatten a JSON object such as an entry so the arrays and\n// nested objects are reconstructed - the opposite of flattenObject\nexport const unflattenObject = (obj: any) =>\n unflatten(obj, { overwrite: true });\n\n// Will limit and sort an object's keys by an array of supplied fields\nexport const limitFields = (obj: any, fields?: string[]): any => {\n if (!fields) return obj;\n if (obj && Array.isArray(obj)) {\n const arr = [];\n for (const child of obj) arr.push(limitFields(child, fields));\n return arr;\n }\n\n if (obj && typeof obj === 'object') {\n const flattenedObj = flatten(obj) as any;\n const sortedObj = {} as any;\n for (const field of fields) {\n sortedObj[field] = flattenedObj[field];\n }\n\n return unflatten(sortedObj);\n }\n\n return obj;\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAmC;AACnC,0BAAoB;AAGb,MAAM,gBAAgB,CAAI,KAAQ,WACvC,KAAK,UAAU,YAAY,KAAK,MAAM,GAAG,MAAM,CAAC;AAI3C,MAAM,gBAAgB,CAAC,YAC5B,yBAAQ,oBAAAA,SAAQ,KAAK,CAAC,UAAU,CAAC,GAAG,EAAE,UAAU,GAAG,CAAC;AAI/C,MAAM,kBAAkB,CAAC,YAC9B,uBAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAG7B,MAAM,cAAc,CAAC,KAAU,WAA2B;AAC/D,MAAI,CAAC;AAAQ,WAAO;AACpB,MAAI,OAAO,MAAM,QAAQ,GAAG,GAAG;AAC7B,UAAM,MAAM,CAAC;AACb,eAAW,SAAS;AAAK,UAAI,KAAK,YAAY,OAAO,MAAM,CAAC;AAC5D,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,OAAO,QAAQ,UAAU;AAClC,UAAM,mBAAe,qBAAQ,GAAG;AAChC,UAAM,YAAY,CAAC;AACnB,eAAW,SAAS,QAAQ;AAC1B,gBAAU,SAAS,aAAa;AAAA,IAClC;AAEA,eAAO,uBAAU,SAAS;AAAA,EAC5B;AAEA,SAAO;AACT;",
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAmC;AACnC,0BAAoB;AAGb,MAAM,gBAAgB,CAAI,KAAQ,WACvC,KAAK,UAAU,YAAY,KAAK,MAAM,GAAG,MAAM,CAAC;AAI3C,MAAM,gBAAgB,CAAC,YAC5B,yBAAQ,oBAAAA,SAAQ,KAAK,CAAC,UAAU,CAAC,GAAG,EAAE,UAAU,GAAG,CAAC;AAI/C,MAAM,kBAAkB,CAAC,YAC9B,uBAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAG7B,MAAM,cAAc,CAAC,KAAU,WAA2B;AAC/D,MAAI,CAAC,OAAQ,QAAO;AACpB,MAAI,OAAO,MAAM,QAAQ,GAAG,GAAG;AAC7B,UAAM,MAAM,CAAC;AACb,eAAW,SAAS,IAAK,KAAI,KAAK,YAAY,OAAO,MAAM,CAAC;AAC5D,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,OAAO,QAAQ,UAAU;AAClC,UAAM,mBAAe,qBAAQ,GAAG;AAChC,UAAM,YAAY,CAAC;AACnB,eAAW,SAAS,QAAQ;AAC1B,gBAAU,KAAK,IAAI,aAAa,KAAK;AAAA,IACvC;AAEA,eAAO,uBAAU,SAAS;AAAA,EAC5B;AAEA,SAAO;AACT;",
  "names": ["cleaner"]
  }
@@ -18,6 +18,10 @@ var __copyProps = (to, from, except, desc) => {
  return to;
  };
  var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
  ));
@@ -38,7 +42,7 @@ var import__ = require(".");
  class Logger {
  static isUserTerminal = !!process.stdout.columns;
  static getPrefix = () => {
- return Logger.isUserTerminal ? Logger.infoText(`[cli]`) : `[${(0, import_dateformat.default)(new Date(), "dd/mm HH:MM:ss")}]`;
+ return Logger.isUserTerminal ? Logger.infoText(`[cli]`) : `[${(0, import_dateformat.default)(/* @__PURE__ */ new Date(), "dd/mm HH:MM:ss")}]`;
  };
  static errorText = import_chalk.default.bold.red;
  static warningText = import_chalk.default.keyword("orange");
@@ -62,63 +66,49 @@ ${Logger.infoText(
  (0, import__.isSysError)(err) ? err.toString() : JSON.stringify(err, null, 2)
  )}` : ""}`
  )}${newline}`;
- if (progress.active)
- progress.current.interrupt(message);
- else
- console.log(message);
+ if (progress.active) progress.current.interrupt(message);
+ else console.log(message);
  };
  static warning = (content) => {
  const message = `${Logger.getPrefix()} ${Logger.warningText(
  `${Logger.isUserTerminal ? "\u26A0\uFE0F " : "[WARN]"} ${content}`
  )}
  `;
- if (progress.active)
- progress.current.interrupt(message);
- else
- console.log(message);
+ if (progress.active) progress.current.interrupt(message);
+ else console.log(message);
  };
  static success = (content) => {
  const message = `${Logger.getPrefix()} ${Logger.successText(
  `${Logger.isUserTerminal ? "\u2705" : "[OK]"} ${content}`
  )}`;
- if (progress.active)
- progress.current.interrupt(message);
- else
- console.log(message);
+ if (progress.active) progress.current.interrupt(message);
+ else console.log(message);
  };
  static info = (content) => {
  const message = `${Logger.getPrefix()} ${Logger.isUserTerminal ? import_chalk.default.bgCyan(" \u2139 ") : "[INFO]"} ${Logger.infoText(content)}`;
- if (progress.active)
- progress.current.interrupt(message);
- else
- console.log(message);
+ if (progress.active) progress.current.interrupt(message);
+ else console.log(message);
  };
  static help = (content) => {
  const message = `${Logger.getPrefix()} ${import_chalk.default.blue(
  `${Logger.isUserTerminal ? "\u23E9" : "[HELP]"} ${content}`
  )}
  `;
- if (progress.active)
- progress.current.interrupt(message);
- else
- console.log(message);
+ if (progress.active) progress.current.interrupt(message);
+ else console.log(message);
  };
  static standard = (content) => {
  const message = `${Logger.getPrefix()} ${Logger.isUserTerminal ? "\u25FB\uFE0F" : "[STD]"}
  ${Logger.standardText(content)}`;
- if (progress.active)
- progress.current.interrupt(message);
- else
- console.log(message);
+ if (progress.active) progress.current.interrupt(message);
+ else console.log(message);
  progress.current.interrupt(message);
  };
  static debug = (content) => {
  if (["true", "1"].includes(process.env.debug || "")) {
  const message = `${Logger.getPrefix()} ${Logger.isUserTerminal ? import_chalk.default.bgGrey(" \u2699 ") : "[DEBUG]"} ${Logger.infoText(content)}`;
- if (progress.active)
- progress.current.interrupt(message);
- else
- console.log(message);
+ if (progress.active) progress.current.interrupt(message);
+ else console.log(message);
  }
  };
  static json = (content, depth = 9) => console.dir((0, import_deep_cleaner.default)(content), { colors: true, depth });
@@ -166,32 +156,32 @@ ${Logger.standardText(content)}`;
  if (Array.isArray(item) && depth > 3)
  if (item.length)
  Logger.raw(import_chalk.default.grey(`${indent} [${item.join(", ")}]`));
- else
+ else Logger.objectRecurse(item, depth + 1, `${indent} `);
+ else {
+ if (Array.isArray(item))
+ Logger.raw(
+ `${indent}${import_chalk.default.grey(`[`)}${item.join(", ")}${import_chalk.default.grey(
+ `]`
+ )}`
+ );
+ else if (typeof item === "object" && item)
  Logger.objectRecurse(item, depth + 1, `${indent} `);
- else
- Array.isArray(item) ? Logger.raw(
- `${indent}${import_chalk.default.grey(`[`)}${item.join(", ")}${import_chalk.default.grey(
- `]`
- )}`
- ) : typeof item === "object" && item ? Logger.objectRecurse(item, depth + 1, `${indent} `) : Logger.raw(`${indent}${item}`);
- } else
- Logger.raw(`${indent}${item}`);
+ else Logger.raw(`${indent}${item}`);
+ }
+ } else Logger.raw(`${indent}${item}`);
  }
  } else {
  let pos = 0;
  for (const [key, value] of Object.entries(content)) {
- if (key === "stack")
- continue;
+ if (key === "stack") continue;
  const thisIndent = pos === 0 ? `${indent.substring(0, indent.length - 2)}- ` : indent;
  if (Array.isArray(value)) {
- if (value.length)
- Logger.raw(`${thisIndent}${import_chalk.default.bold.grey(key)}:`);
+ if (value.length) Logger.raw(`${thisIndent}${import_chalk.default.bold.grey(key)}:`);
  for (const item of value) {
  if (item && typeof item === "object") {
  if (Array.isArray(item) && depth > 3)
  Logger.raw(import_chalk.default.grey(`${indent} [${item.join(", ")}]`));
- else
- Logger.objectRecurse(item, depth + 1, `${indent} `);
+ else Logger.objectRecurse(item, depth + 1, `${indent} `);
  } else {
  Logger.raw(`${indent} ${item}`);
  }
@@ -207,10 +197,8 @@ ${Logger.standardText(content)}`;
  }
  };
  static raw = (content) => {
- if (progress.active)
- progress.current.interrupt(content);
- else
- console.log(content);
+ if (progress.active) progress.current.interrupt(content);
+ else console.log(content);
  };
  static limits = (content, displayLength = 30, consoleWidth = process.stdout.columns, logMethod = console.info) => {
  if (consoleWidth) {
@@ -247,8 +235,7 @@ const logError = (err = new Error("Undefined error"), msg, level = "error") => {
  `);
  return;
  }
- if ("stack" in error)
- Logger.raw(` ${Logger.infoText(error.stack)}
+ if ("stack" in error) Logger.raw(` ${Logger.infoText(error.stack)}
  `);
  if ("data" in error)
  Logger.raw(` ${Logger.infoText((0, import__.tryStringify)(error.data))}
@@ -288,6 +275,14 @@ const progress = {
  current: { interrupt: (x) => {
  } },
  active: false
+ // done: () => new ProgressBar('', 0),
+ // colours: { green: '\u001b[42m \u001b[0m', red: '\u001b[41m \u001b[0m' },
+ // current: new ProgressBar(`:bar`, {
+ // complete: '=',
+ // incomplete: ' ',
+ // width: 20,
+ // total: 100,
+ // }),
  };
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {