contensis-cli 1.1.2-beta.1 → 1.1.2-beta.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/copy.js +70 -0
- package/dist/commands/copy.js.map +7 -0
- package/dist/commands/create.js +1 -1
- package/dist/commands/create.js.map +2 -2
- package/dist/commands/dev.js +11 -4
- package/dist/commands/dev.js.map +2 -2
- package/dist/commands/get.js +1 -0
- package/dist/commands/get.js.map +2 -2
- package/dist/commands/globalOptions.js +24 -3
- package/dist/commands/globalOptions.js.map +2 -2
- package/dist/commands/import.js +1 -6
- package/dist/commands/import.js.map +2 -2
- package/dist/commands/index.js +7 -3
- package/dist/commands/index.js.map +2 -2
- package/dist/factories/RequestHandlerFactory.js +24 -13
- package/dist/factories/RequestHandlerFactory.js.map +2 -2
- package/dist/localisation/en-GB.js +3 -1
- package/dist/localisation/en-GB.js.map +2 -2
- package/dist/mappers/DevRequests-to-RequestHanderCliArgs.js +128 -88
- package/dist/mappers/DevRequests-to-RequestHanderCliArgs.js.map +3 -3
- package/dist/providers/GitHubCliModuleProvider.js +14 -4
- package/dist/providers/GitHubCliModuleProvider.js.map +2 -2
- package/dist/providers/file-provider.js +3 -0
- package/dist/providers/file-provider.js.map +2 -2
- package/dist/services/ContensisCliService.js +61 -0
- package/dist/services/ContensisCliService.js.map +2 -2
- package/dist/services/ContensisDevService.js +30 -5
- package/dist/services/ContensisDevService.js.map +3 -3
- package/dist/shell.js +1 -0
- package/dist/shell.js.map +2 -2
- package/dist/util/api-ids.js +110 -0
- package/dist/util/api-ids.js.map +7 -0
- package/dist/util/console.printer.js.map +2 -2
- package/dist/version.js +1 -1
- package/dist/version.js.map +1 -1
- package/package.json +2 -3
- package/src/commands/copy.ts +79 -0
- package/src/commands/create.ts +1 -2
- package/src/commands/dev.ts +14 -6
- package/src/commands/get.ts +12 -11
- package/src/commands/globalOptions.ts +25 -2
- package/src/commands/import.ts +4 -8
- package/src/commands/index.ts +7 -3
- package/src/factories/RequestHandlerFactory.ts +38 -17
- package/src/localisation/en-GB.ts +3 -2
- package/src/mappers/DevRequests-to-RequestHanderCliArgs.ts +147 -92
- package/src/providers/GitHubCliModuleProvider.ts +19 -6
- package/src/providers/file-provider.ts +4 -0
- package/src/services/ContensisCliService.ts +82 -0
- package/src/services/ContensisDevService.ts +37 -6
- package/src/shell.ts +2 -1
- package/src/util/api-ids.ts +111 -0
- package/src/util/console.printer.ts +2 -1
- package/src/version.ts +1 -1
package/dist/mappers/DevRequests-to-RequestHanderCliArgs.js.map

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/mappers/DevRequests-to-RequestHanderCliArgs.ts"],
-  "sourcesContent": ["…"],
-  "mappings": "…",
-  "names": ["contensis", "err"]
+  "sourcesContent": ["…"],
+  "mappings": "…",
+  "names": ["contensis", "PQueue", "err"]
 }

The regenerated `sourcesContent` embeds the updated mapper source, which now imports `PQueue` from `p-queue` and runs its `GetBlockVersions` lookups through a queue with `concurrency: 4` while building the site config; the `mappings` string is rebuilt to match.
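For readers unfamiliar with `p-queue`, here is a minimal sketch of the queueing pattern now embedded in this mapper's source, with an invented stand-in function in place of `contensis.blocks.GetBlockVersions`:

```ts
import PQueue from 'p-queue';

// Stand-in for the real block-version lookup; the CLI code gets
// [err, versions] tuples back from contensis.blocks.GetBlockVersions.
const getLatestVersion = async (blockId: string) => ({ id: blockId, versionNo: 1 });

const collectBlockVersions = async (blockIds: string[]) => {
  const results: { id: string; versionNo: number }[] = [];
  // Mirrors `new PQueue({ concurrency: 4 })` in the mapper: at most four
  // lookups are in flight at any one time.
  const queue = new PQueue({ concurrency: 4 });
  for (const id of blockIds) {
    queue.add(async () => {
      results.push(await getLatestVersion(id));
    });
  }
  // Resolves once every queued task has settled.
  await queue.onIdle();
  return results;
};

// Usage (block ids invented for illustration):
collectBlockVersions(['website', 'blog']).then(console.log);
```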
package/dist/providers/GitHubCliModuleProvider.js

@@ -47,7 +47,7 @@ class GitHubCliModuleProvider {
     this.http = new import_HttpProvider.default();
     this.repo = repo;
   }
-  async FindLatestRelease() {
+  async FindLatestRelease(version) {
     const { http, latest_release_url, releases_url } = this;
     const responses = await Promise.all([
       http.get(latest_release_url, {
@@ -61,18 +61,26 @@ class GitHubCliModuleProvider {
       throw new Error(`Unable to get releases`, { cause: releasesErr });
     } else if (!releases || releases.length === 0)
       throw new Error(`No releases available`);
-    else if (latestErr && !latest) {
+    else if (version) {
+      const release = releases.find(
+        (r) => r.tag_name.toLowerCase() === version.toLowerCase()
+      );
+      if (release)
+        return release;
+      else
+        throw new Error(`No release for ${version} found`);
+    } else if (latestErr && !latest) {
       if ((latestResponse == null ? void 0 : latestResponse.status) === 404 && (releases == null ? void 0 : releases.length)) {
         const release = releases.find((r) => r.prerelease) || releases[0];
-        if (release) {
+        if (release)
           return release;
-        }
       }
     } else {
       return latest;
     }
   }
   async DownloadRelease(release, {
+    cmd,
     path,
     platforms,
     unzip = true
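Taken together with the earlier hunk, `FindLatestRelease` now supports pinning a module to a specific GitHub release tag. A rough standalone sketch of the selection rules this hunk compiles down from (the `Release` shape is trimmed to the two fields used here):

```ts
type Release = { tag_name: string; prerelease: boolean };

const pickRelease = (releases: Release[], version?: string): Release => {
  if (releases.length === 0) throw new Error(`No releases available`);
  if (version) {
    // Tags are compared case-insensitively, so 'v1.1.2-beta.11' matches 'V1.1.2-BETA.11'
    const release = releases.find(
      r => r.tag_name.toLowerCase() === version.toLowerCase()
    );
    if (release) return release;
    throw new Error(`No release for ${version} found`);
  }
  // Fallback used when no usable "latest" release is returned:
  // prefer a prerelease, otherwise take the first (most recent) release.
  return releases.find(r => r.prerelease) || releases[0];
};
```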
@@ -94,6 +102,8 @@ class GitHubCliModuleProvider {
         zipFile.extractAllTo(path);
         (0, import_file_provider.removeFile)(filePath);
       }
+      if (import_os.default.platform() !== "win32")
+        (0, import_file_provider.addExecutePermission)((0, import_file_provider.joinPath)(path, cmd));
     } else
       throw new Error(
         `no asset found in release ${release.tag_name} for platform ${import_os.default.platform()}
package/dist/providers/GitHubCliModuleProvider.js.map

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/providers/GitHubCliModuleProvider.ts"],
-  "sourcesContent": ["…"],
-  "mappings": "…",
+  "sourcesContent": ["…"],
+  "mappings": "…",
   "names": ["HttpProvider", "os", "Zip"]
 }

The regenerated `sourcesContent` embeds the updated provider source: `FindLatestRelease` takes an optional `version` argument matched case-insensitively against release `tag_name`, `DownloadRelease` takes a `cmd` option, and `addExecutePermission` is imported from `./file-provider` and applied to the downloaded `cmd` on non-Windows platforms; the `mappings` string is rebuilt to match.
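The embedded source also shows the companion change to `DownloadRelease`, which now accepts the `cmd` file name it should mark as executable after unzipping. A hypothetical call, with the repository, install path and platform-to-asset mapping all invented for illustration:

```ts
import GitHubCliModuleProvider from '~/providers/GitHubCliModuleProvider';

const installExampleModule = async () => {
  // Only the option names and types come from the diff; every value is made up.
  const provider = new GitHubCliModuleProvider('example-org/example-cli-module');
  const release = await provider.FindLatestRelease('v1.0.0'); // omit the tag to resolve the latest release
  if (!release) return;
  await provider.DownloadRelease(release, {
    cmd: 'example-cli',              // file inside the extracted asset to chmod on non-Windows
    path: 'cli-modules/example-cli', // resolved against the CLI's app root by the file provider
    platforms: [
      ['win32', 'win'],              // [NodeJS.Platform, substring expected in the asset name]
      ['darwin', 'macos'],
      ['linux', 'linux'],
    ],
    unzip: true,
  });
};
```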
package/dist/providers/file-provider.js

@@ -24,6 +24,7 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 var file_provider_exports = {};
 __export(file_provider_exports, {
+  addExecutePermission: () => addExecutePermission,
   appPath: () => appPath,
   appRootDir: () => appRootDir,
   checkDir: () => checkDir,
@@ -113,6 +114,7 @@ const checkDir = (filePath) => {
 const appPath = (filePath) => import_path.default.isAbsolute(filePath) ? filePath : import_path.default.join(appRootDir, filePath);
 const cwdPath = (filePath) => import_path.default.isAbsolute(filePath) ? filePath : import_path.default.join(process.cwd(), filePath);
 const joinPath = import_path.default.join;
+const addExecutePermission = (filePath) => import_fs.default.chmodSync(filePath, import_fs.default.constants.S_IRWXU);
 const detectFileType = (fromFile) => {
   const fileData = readFile(fromFile);
   if (!fileData)
@@ -153,6 +155,7 @@ const readFileAsJSON = async (fromFile) => {
 };
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
+  addExecutePermission,
   appPath,
   appRootDir,
   checkDir,
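These three hunks export a new `addExecutePermission` helper from the compiled file provider. A small sketch of the same idea, with an invented binary path, showing why the call is guarded on Windows (where `fs.constants.S_IRWXU` is undefined and `chmodSync` would throw):

```ts
import fs from 'fs';
import os from 'os';
import path from 'path';

// Equivalent of the new helper: owner read/write/execute (0o700) on the file.
const addExecutePermission = (filePath: string) =>
  fs.chmodSync(filePath, fs.constants.S_IRWXU);

// Invented path for illustration; the CLI keeps its files under ~/.contensis
const binPath = path.join(os.homedir(), '.contensis', 'example-cli');
if (os.platform() !== 'win32') addExecutePermission(binPath);
```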
package/dist/providers/file-provider.js.map

@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/providers/file-provider.ts"],
-  "sourcesContent": ["…"],
-  "mappings": "…",
+  "sourcesContent": ["…"],
+  "mappings": "…",
   "names": ["path", "fs"]
 }

The regenerated `sourcesContent` embeds the updated file provider source, which adds the exported `addExecutePermission` helper (`fs.chmodSync` with `fs.constants.S_IRWXU`, with a comment noting the call fails on Windows because the mode constant is undefined there); the `mappings` string is rebuilt to match.
package/dist/services/ContensisCliService.js

@@ -41,6 +41,7 @@ var import_file_provider = require("../providers/file-provider");
 var import_SessionCacheProvider = __toESM(require("../providers/SessionCacheProvider"));
 var import_CredentialProvider = __toESM(require("../providers/CredentialProvider"));
 var import_util = require("../util");
+var import_api_ids = require("../util/api-ids");
 var import_console = require("../util/console.printer");
 var import_csv = require("../util/csv.formatter");
 var import_json = require("../util/json.formatter");
@@ -100,6 +101,7 @@ class ContensisCli {
     }
   }
   constructor(args, outputOpts, contensisOpts = {}) {
+    var _a;
     const [exe, script, verb = "", noun = "", ...restArgs] = args;
     this.verb = verb == null ? void 0 : verb.toLowerCase();
     this.noun = noun == null ? void 0 : noun.toLowerCase();
@@ -107,6 +109,11 @@ class ContensisCli {
     const commandText = `${this.verb} ${this.noun} ${restArgs ? restArgs.join(" ") : ""}`.trim();
     this.session = new import_SessionCacheProvider.default();
     this.contensisOpts = contensisOpts;
+    if (Array.isArray((_a = this.contensisOpts.query) == null ? void 0 : _a.fields)) {
+      this.contensisOpts.query.fields = (0, import_api_ids.sanitiseIds)(
+        this.contensisOpts.query.fields
+      );
+    }
     this.format = outputOpts == null ? void 0 : outputOpts.format;
     this.output = (outputOpts == null ? void 0 : outputOpts.output) && import_path.default.join(process.cwd(), outputOpts.output);
     const currentEnvironment = (outputOpts == null ? void 0 : outputOpts.alias) || this.currentEnv;
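A rough source-level reading of the compiled guard above. `sanitiseIds` comes from the new `src/util/api-ids.ts`, whose implementation is not part of this hunk, so its signature is assumed here to be `(ids: string[]) => string[]` and the example field ids are invented:

```ts
import { sanitiseIds } from '~/util/api-ids';

// contensisOpts is the third constructor argument of ContensisCli; only the
// shape this guard touches is declared here.
const contensisOpts: { query?: { fields?: string[] } } = {
  query: { fields: ['entryTitle', 'sys.version.versionNo'] }, // invented example ids
};

if (contensisOpts.query && Array.isArray(contensisOpts.query.fields)) {
  // Normalise the requested field ids before the query options are used.
  contensisOpts.query.fields = sanitiseIds(contensisOpts.query.fields);
}
```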
@@ -1410,6 +1417,60 @@ Components:`));
       log.help(messages.connect.tip());
     }
   };
+  CopyEntryField = async ({
+    commit,
+    fromFile,
+    logOutput
+  }) => {
+    var _a, _b, _c, _d, _e, _f, _g;
+    const { currentEnv, currentProject, log, messages } = this;
+    const contensis = await this.ConnectContensisImport({
+      commit,
+      fromFile,
+      importDataType: "entries"
+    });
+    if (contensis) {
+      log.line();
+      if (contensis.isPreview) {
+        console.log(log.successText(` -- IMPORT PREVIEW -- `));
+      } else {
+        console.log(log.warningText(` *** COMMITTING IMPORT *** `));
+      }
+      const [err, result] = await (0, import_await_to_js.default)(
+        contensis.content.copy.MigrateFieldContent()
+      );
+      if (err)
+        (0, import_logger.logError)(err);
+      if (result)
+        await this.HandleFormattingAndOutput(result, () => {
+          (0, import_console.printEntriesMigrateResult)(this, result, {
+            showAll: logOutput === "all",
+            showDiff: logOutput === "all" || logOutput === "changes",
+            showChanged: logOutput === "changes"
+          });
+        });
+      if (result && !err && !((_a = result.errors) == null ? void 0 : _a.length) && (!commit && result.entriesToMigrate[currentProject].totalCount || commit && (((_b = result.migrateResult) == null ? void 0 : _b.created) || ((_c = result.migrateResult) == null ? void 0 : _c.updated)))) {
+        log.success(
+          messages.entries.imported(
+            currentEnv,
+            commit,
+            commit ? (((_d = result.migrateResult) == null ? void 0 : _d.created) || 0) + (((_e = result.migrateResult) == null ? void 0 : _e.updated) || 0) : result.entriesToMigrate[currentProject].totalCount
+          )
+        );
+        if (!commit) {
+          log.raw(``);
+          log.help(messages.entries.commitTip());
+        }
+      } else {
+        log.error(messages.entries.failedImport(currentEnv), err);
+        if (!((_g = (_f = result == null ? void 0 : result.entriesToMigrate) == null ? void 0 : _f[currentProject]) == null ? void 0 : _g.totalCount))
+          log.help(messages.entries.notFound(currentEnv));
+      }
+    } else {
+      log.warning(messages.models.noList(currentProject));
+      log.help(messages.connect.tip());
+    }
+  };
   GetNodes = async (rootPath, depth = 0) => {
     const { currentProject, log, messages } = this;
     const contensis = await this.ConnectContensis();