contensis-cli 1.1.1-beta.0 → 1.1.1-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +1 -1
- package/dist/commands/import.js +1 -0
- package/dist/commands/import.js.map +2 -2
- package/dist/providers/file-provider.js +52 -17
- package/dist/providers/file-provider.js.map +2 -2
- package/dist/services/ContensisCliService.js +5 -5
- package/dist/services/ContensisCliService.js.map +2 -2
- package/dist/shell.js.map +1 -1
- package/dist/util/csv.formatter.js +52 -5
- package/dist/util/csv.formatter.js.map +2 -2
- package/dist/util/json.formatter.js +5 -2
- package/dist/util/json.formatter.js.map +2 -2
- package/dist/util/xml.formatter.js +10 -3
- package/dist/util/xml.formatter.js.map +2 -2
- package/dist/version.js +1 -1
- package/dist/version.js.map +1 -1
- package/package.json +1 -2
- package/src/commands/import.ts +1 -0
- package/src/providers/file-provider.ts +60 -12
- package/src/services/ContensisCliService.ts +13 -9
- package/src/shell.ts +1 -1
- package/src/util/csv.formatter.ts +53 -3
- package/src/util/json.formatter.ts +4 -0
- package/src/util/xml.formatter.ts +7 -1
- package/src/version.ts +1 -1
package/dist/shell.js.map
CHANGED
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../src/shell.ts"],
-
"sourcesContent": ["import figlet from 'figlet';\nimport inquirer from 'inquirer';\nimport inquirerPrompt from 'inquirer-command-prompt';\nimport commands from './commands';\nimport { LogMessages } from './localisation/en-GB';\nimport CredentialProvider from './providers/CredentialProvider';\nimport { appRootDir } from './providers/file-provider';\nimport ContensisCli, { cliCommand } from './services/ContensisCliService';\nimport { Logging } from './util';\nimport { logError, Logger } from './util/logger';\nimport { LIB_VERSION } from './version';\n\nclass ContensisShell {\n private currentEnvironment!: string;\n private emptyInputCounter: number = 0;\n private env!: EnvironmentCache;\n private firstStart = true;\n private userId: string = '';\n private log = Logger;\n private messages = LogMessages;\n\n private refreshEnvironment = () => {\n // Reload any persisted changes from the disk cache\n const {\n cache: { currentEnvironment = '', environments = {} },\n } = new ContensisCli([]);\n // console.log(`refreshing env w/${currentEnvironment}`);\n this.currentEnvironment = currentEnvironment;\n this.env = environments[currentEnvironment];\n\n // Reload logging here to support changing language\n Logging('en-GB').then(({ messages, Log }) => {\n this.log = Log;\n this.messages = messages;\n });\n };\n\n constructor() {\n this.refreshEnvironment();\n inquirerPrompt.setConfig({\n history: {\n save: true,\n folder: appRootDir,\n limit: 100,\n blacklist: ['quit'],\n },\n });\n\n const { log, messages } = this;\n\n figlet.text(\n messages.app.contensis(),\n {\n font: 'Block',\n horizontalLayout: 'default',\n verticalLayout: 'default',\n width: process.stdout.columns,\n whitespaceBreak: true,\n },\n (err, data) => {\n if (err) {\n log.error(messages.app.unknownError());\n console.dir(err);\n return;\n }\n console.log(log.successText(data));\n console.log(log.infoText(messages.app.startup(LIB_VERSION)));\n console.log(log.helpText(messages.app.help()));\n\n this.start().catch(ex => log.error(ex));\n }\n );\n }\n\n restart = async () => {\n this.firstStart = false;\n this.log.line(); // add a line so we can see where the shell has been restarted\n await this.start();\n };\n\n start = async () => {\n this.refreshEnvironment();\n this.userId = '';\n const { currentEnvironment, env, log, messages } = this;\n\n if (env?.lastUserId) {\n const [credsErr, credentials] = await new CredentialProvider(\n {\n userId: env.lastUserId,\n alias: currentEnvironment,\n },\n env.passwordFallback\n ).Init();\n if (credsErr && !credentials.current) {\n log.error(credsErr.message);\n }\n if (credentials.current) {\n if (this.firstStart) {\n const token = await cliCommand(['login', env.lastUserId]).Login(\n env.lastUserId,\n {\n promptPassword: false,\n silent: true,\n }\n );\n if (token) {\n this.userId = env.lastUserId;\n if (!env.currentProject) log.warning(messages.projects.tip());\n }\n this.firstStart = false;\n this.refreshEnvironment();\n } else {\n this.userId = env.lastUserId;\n }\n }\n }\n await this.contensisPrompt();\n };\n\n contensisPrompt = async (): Promise<any> => {\n const { currentEnvironment, env, log, messages, userId } = this;\n\n const availableCommands = [\n {\n filter: (str: string) => {\n return str.replace(/ \\[.*$/, '');\n },\n },\n 'connect',\n 'list envs',\n 'quit',\n ];\n\n if (currentEnvironment)\n availableCommands.push('login', 'list projects', 'set project');\n if (userId)\n availableCommands.push(\n 'create key',\n 'create project',\n 'create role',\n 'diff models',\n 'execute block 
action release',\n 'execute block action makelive',\n 'execute block action rollback',\n 'execute block action markasbroken',\n 'get assets',\n 'get block',\n 'get block logs',\n 'get contenttype',\n 'get component',\n 'get entries',\n 'get nodes',\n 'get model',\n 'get project',\n 'get proxy',\n 'get renderer',\n 'get role',\n 'get token',\n 'get version',\n 'get webhook',\n 'get workflow',\n 'import contenttypes',\n 'import components',\n 'import entries',\n 'import models',\n 'import nodes',\n 'list blocks',\n 'list contenttypes',\n 'list components',\n 'list keys',\n 'list models',\n 'list proxies',\n 'list renderers',\n 'list roles',\n 'list webhooks',\n 'list workflows',\n 'push block',\n 'remove components',\n 'remove contenttypes',\n 'remove key',\n 'remove entries',\n 'remove nodes',\n 'remove role',\n 'set project name',\n 'set project description',\n 'set role name',\n 'set role description',\n 'set role assignments',\n 'set role enabled',\n 'set role permissions'\n );\n\n const prompt = inquirer.createPromptModule();\n prompt.registerPrompt('command', inquirerPrompt);\n return prompt([\n {\n type: 'command',\n name: 'cmd',\n autoCompletion: availableCommands.sort(),\n autocompletePrompt: log.infoText(messages.app.autocomplete()),\n message: `${userId ? `${userId}@` : ''}${currentEnvironment || ''}>`,\n context: 0,\n validate: (val: string) => {\n if (!val) this.emptyInputCounter++;\n if (this.emptyInputCounter > 1)\n console.log(this.log.infoText(this.messages.app.suggestions()));\n if (val) {\n this.emptyInputCounter = 0;\n return true;\n }\n },\n prefix: `${env?.currentProject || log.infoText('contensis')}`,\n short: true,\n },\n ])\n .then(async (answers: { cmd: string }) => {\n if (answers.cmd === 'quit') {\n this.quit();\n } else {\n try {\n if (answers.cmd) {\n const program = commands();\n await program.parseAsync(\n answers.cmd\n .match(/\"[^\"]+\"|[^\\s]+/g)\n ?.map(e => e.replace(/\"(.+)\"/, '$1')),\n {\n from: 'user',\n }\n );\n }\n } catch (ex: any) {\n const str = ex.toString();\n if (!str.includes('CommanderError'))\n logError(\n ex,\n `Shell ${\n ex instanceof Error\n ? 
ex.toString()\n : JSON.stringify(ex, null, 2)\n }`\n );\n } finally {\n return this.contensisPrompt();\n }\n }\n })\n .catch((err: Error) => {\n log.error(err.message);\n this.quit();\n });\n };\n\n quit = (error?: Error) => {\n const { log, messages } = this;\n process.removeAllListeners('exit');\n\n if (error) {\n log.error(error.message);\n process.exit(1);\n } else {\n log.success(messages.app.quit());\n process.exitCode = 0;\n process.exit(0);\n }\n };\n}\n\nlet globalShell: ContensisShell;\n\nexport const shell = () => {\n // Return a benign function for shell().restart() when used in cli context\n // as some commands need to restart the shell to show an updated prompt\n // after successful connect / login / set project\n if (typeof process.argv?.[2] !== 'undefined')\n return {\n quit: ContensisCli.quit,\n restart() {},\n } as any;\n if (!globalShell) globalShell = new ContensisShell();\n return globalShell;\n};\n\nprocess.on('uncaughtException', function (err) {\n // Handle the error safely\n console.log(err);\n});\n\nprocess.on('SIGINT', () => {\n Logger.warning('received SIGINT');\n shell().quit();\n // setTimeout(() => {\n // }, 2000);\n});\n\nprocess.on('SIGTERM', () => {\n Logger.warning('received SIGTERM');\n shell().quit();\n});\n\nprocess.stdin.on('data', key => {\n if ((key as any) == '\\u0003') {\n console.log('');\n Logger.info(`[CTRL]+[C] detected, exiting shell...`);\n shell().quit();\n }\n});\n\n// process.env.http_proxy = 'http://127.0.0.1:8888';\n"],
+
"sourcesContent": ["import figlet from 'figlet';\nimport inquirer from 'inquirer';\nimport inquirerPrompt from 'inquirer-command-prompt';\nimport commands from './commands';\nimport { LogMessages } from './localisation/en-GB';\nimport CredentialProvider from './providers/CredentialProvider';\nimport { appRootDir } from './providers/file-provider';\nimport ContensisCli, { cliCommand } from './services/ContensisCliService';\nimport { Logging } from './util';\nimport { logError, Logger } from './util/logger';\nimport { LIB_VERSION } from './version';\n\nclass ContensisShell {\n private currentEnvironment!: string;\n private emptyInputCounter: number = 0;\n private env!: EnvironmentCache;\n private firstStart = true;\n private userId: string = '';\n private log = Logger;\n private messages = LogMessages;\n\n private refreshEnvironment = () => {\n // Reload any persisted changes from the disk cache\n const {\n cache: { currentEnvironment = '', environments = {} },\n } = new ContensisCli([]);\n // console.log(`refreshing env w/${currentEnvironment}`);\n this.currentEnvironment = currentEnvironment;\n this.env = environments[currentEnvironment];\n\n // Reload logging here to support changing language\n Logging('en-GB').then(({ messages, Log }) => {\n this.log = Log;\n this.messages = messages;\n });\n };\n\n constructor() {\n this.refreshEnvironment();\n inquirerPrompt.setConfig({\n history: {\n save: true,\n folder: appRootDir,\n limit: 100,\n blacklist: ['quit'],\n },\n });\n\n const { log, messages } = this;\n\n figlet.text(\n messages.app.contensis(),\n {\n font: 'Block',\n horizontalLayout: 'default',\n verticalLayout: 'default',\n width: process.stdout.columns,\n whitespaceBreak: true,\n },\n (err, data) => {\n if (err) {\n log.error(messages.app.unknownError());\n console.dir(err);\n return;\n }\n console.log(log.successText(data));\n console.log(log.infoText(messages.app.startup(LIB_VERSION)));\n console.log(log.helpText(messages.app.help()));\n\n this.start().catch(ex => log.error(ex));\n }\n );\n }\n\n restart = async () => {\n this.firstStart = false;\n this.log.line(); // add a line so we can see where the shell has been restarted\n await this.start();\n };\n\n start = async () => {\n this.refreshEnvironment();\n this.userId = '';\n const { currentEnvironment, env, log, messages } = this;\n\n if (env?.lastUserId) {\n const [credsErr, credentials] = await new CredentialProvider(\n {\n userId: env.lastUserId,\n alias: currentEnvironment,\n },\n env.passwordFallback\n ).Init();\n if (credsErr && !credentials.current) {\n log.error(credsErr.message);\n }\n if (credentials.current) {\n if (this.firstStart) {\n const token = await cliCommand(['login', env.lastUserId]).Login(\n env.lastUserId,\n {\n promptPassword: false,\n silent: true,\n }\n );\n if (token) {\n this.userId = env.lastUserId;\n if (!env.currentProject) log.warning(messages.projects.tip());\n }\n this.firstStart = false;\n this.refreshEnvironment();\n } else {\n this.userId = env.lastUserId;\n }\n }\n }\n await this.contensisPrompt();\n };\n\n contensisPrompt = async (): Promise<any> => {\n const { currentEnvironment, env, log, messages, userId } = this;\n\n const availableCommands = [\n {\n filter: (str: string) => {\n return str.replace(/ \\[.*$/, '');\n },\n },\n 'connect',\n 'list envs',\n 'quit',\n ];\n\n if (currentEnvironment)\n availableCommands.push('login', 'list projects', 'set project');\n if (userId)\n availableCommands.push(\n 'create key',\n 'create project',\n 'create role',\n 'diff models',\n 'execute block 
action release',\n 'execute block action makelive',\n 'execute block action rollback',\n 'execute block action markasbroken',\n 'get assets',\n 'get block',\n 'get block logs',\n 'get contenttype',\n 'get component',\n 'get entries',\n 'get nodes',\n 'get model',\n 'get project',\n 'get proxy',\n 'get renderer',\n 'get role',\n 'get token',\n 'get version',\n 'get webhook',\n 'get workflow',\n 'import contenttypes',\n 'import components',\n 'import entries',\n 'import models',\n 'import nodes',\n 'list blocks',\n 'list contenttypes',\n 'list components',\n 'list keys',\n 'list models',\n 'list proxies',\n 'list renderers',\n 'list roles',\n 'list webhooks',\n 'list workflows',\n 'push block',\n 'remove components',\n 'remove contenttypes',\n 'remove key',\n 'remove entries',\n 'remove nodes',\n 'remove role',\n 'set project name',\n 'set project description',\n 'set role name',\n 'set role description',\n 'set role assignments',\n 'set role enabled',\n 'set role permissions'\n );\n\n const prompt = inquirer.createPromptModule();\n prompt.registerPrompt('command', inquirerPrompt);\n return prompt([\n {\n type: 'command',\n name: 'cmd',\n autoCompletion: availableCommands.sort(),\n autocompletePrompt: log.infoText(messages.app.autocomplete()),\n message: `${userId ? `${userId}@` : ''}${currentEnvironment || ''}>`,\n context: 0,\n validate: (val: string) => {\n if (!val) this.emptyInputCounter++;\n if (this.emptyInputCounter > 1)\n console.log(this.log.infoText(this.messages.app.suggestions()));\n if (val) {\n this.emptyInputCounter = 0;\n return true;\n }\n },\n prefix: `${env?.currentProject || log.infoText('contensis')}`,\n short: true,\n },\n ])\n .then(async (answers: { cmd: string }) => {\n if (answers.cmd === 'quit') {\n this.quit();\n } else {\n try {\n if (answers.cmd) {\n const program = commands();\n await program.parseAsync(\n answers.cmd\n .match(/\"[^\"]+\"|[^\\s]+/g)\n ?.map(e => e.replace(/\"(.+)\"/, '$1')),\n {\n from: 'user',\n }\n );\n }\n } catch (ex: any) {\n const str = ex.toString();\n if (!str.includes('CommanderError'))\n logError(\n ex,\n `Shell ${\n ex instanceof Error\n ? 
ex.toString()\n : JSON.stringify(ex, null, 2)\n }`\n );\n } finally {\n return this.contensisPrompt();\n }\n }\n })\n .catch((err: Error) => {\n log.error(err.message);\n this.quit();\n });\n };\n\n quit = (error?: Error) => {\n const { log, messages } = this;\n process.removeAllListeners('exit');\n\n if (error) {\n log.error(error.message);\n process.exit(1);\n } else {\n log.success(messages.app.quit());\n process.exitCode = 0;\n process.exit(0);\n }\n };\n}\n\nlet globalShell: ContensisShell;\n\nexport const shell = () => {\n // Return a benign function for shell().restart() when used in cli context\n // as some commands need to restart the shell to show an updated prompt\n // after successful connect / login / set project\n if (typeof process.argv?.[2] !== 'undefined')\n return {\n quit: ContensisCli.quit,\n restart() {},\n } as any;\n if (!globalShell) globalShell = new ContensisShell();\n return globalShell;\n};\n\nprocess.on('uncaughtException', function (err) {\n // Handle the error safely\n console.log(err);\n});\n\nprocess.on('SIGINT', () => {\n Logger.warning('received SIGINT');\n shell().quit();\n // setTimeout(() => {\n // }, 2000);\n});\n\nprocess.on('SIGTERM', () => {\n Logger.warning('received SIGTERM');\n shell().quit();\n});\n\nprocess.stdin.on('data', key => {\n if ((key as any) == '\\u0003') {\n console.log('');\n Logger.info(`[CTRL]+[C] detected, exiting shell...`);\n shell().quit();\n }\n});\n\n// // process.env.http_proxy = 'http://127.0.0.1:8888';\n"],
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAmB;AACnB,sBAAqB;AACrB,qCAA2B;AAC3B,sBAAqB;AACrB,mBAA4B;AAC5B,gCAA+B;AAC/B,2BAA2B;AAC3B,iCAAyC;AACzC,kBAAwB;AACxB,oBAAiC;AACjC,qBAA4B;AAE5B,MAAM,eAAe;AAAA,EACX;AAAA,EACA,oBAA4B;AAAA,EAC5B;AAAA,EACA,aAAa;AAAA,EACb,SAAiB;AAAA,EACjB,MAAM;AAAA,EACN,WAAW;AAAA,EAEX,qBAAqB,MAAM;AAEjC,UAAM;AAAA,MACJ,OAAO,EAAE,qBAAqB,IAAI,eAAe,CAAC,EAAE;AAAA,IACtD,IAAI,IAAI,2BAAAA,QAAa,CAAC,CAAC;AAEvB,SAAK,qBAAqB;AAC1B,SAAK,MAAM,aAAa;AAGxB,6BAAQ,OAAO,EAAE,KAAK,CAAC,EAAE,UAAU,IAAI,MAAM;AAC3C,WAAK,MAAM;AACX,WAAK,WAAW;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EAEA,cAAc;AACZ,SAAK,mBAAmB;AACxB,mCAAAC,QAAe,UAAU;AAAA,MACvB,SAAS;AAAA,QACP,MAAM;AAAA,QACN,QAAQ;AAAA,QACR,OAAO;AAAA,QACP,WAAW,CAAC,MAAM;AAAA,MACpB;AAAA,IACF,CAAC;AAED,UAAM,EAAE,KAAK,SAAS,IAAI;AAE1B,kBAAAC,QAAO;AAAA,MACL,SAAS,IAAI,UAAU;AAAA,MACvB;AAAA,QACE,MAAM;AAAA,QACN,kBAAkB;AAAA,QAClB,gBAAgB;AAAA,QAChB,OAAO,QAAQ,OAAO;AAAA,QACtB,iBAAiB;AAAA,MACnB;AAAA,MACA,CAAC,KAAK,SAAS;AACb,YAAI,KAAK;AACP,cAAI,MAAM,SAAS,IAAI,aAAa,CAAC;AACrC,kBAAQ,IAAI,GAAG;AACf;AAAA,QACF;AACA,gBAAQ,IAAI,IAAI,YAAY,IAAI,CAAC;AACjC,gBAAQ,IAAI,IAAI,SAAS,SAAS,IAAI,QAAQ,0BAAW,CAAC,CAAC;AAC3D,gBAAQ,IAAI,IAAI,SAAS,SAAS,IAAI,KAAK,CAAC,CAAC;AAE7C,aAAK,MAAM,EAAE,MAAM,QAAM,IAAI,MAAM,EAAE,CAAC;AAAA,MACxC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,UAAU,YAAY;AACpB,SAAK,aAAa;AAClB,SAAK,IAAI,KAAK;AACd,UAAM,KAAK,MAAM;AAAA,EACnB;AAAA,EAEA,QAAQ,YAAY;AAClB,SAAK,mBAAmB;AACxB,SAAK,SAAS;AACd,UAAM,EAAE,oBAAoB,KAAK,KAAK,SAAS,IAAI;AAEnD,QAAI,2BAAK,YAAY;AACnB,YAAM,CAAC,UAAU,WAAW,IAAI,MAAM,IAAI,0BAAAC;AAAA,QACxC;AAAA,UACE,QAAQ,IAAI;AAAA,UACZ,OAAO;AAAA,QACT;AAAA,QACA,IAAI;AAAA,MACN,EAAE,KAAK;AACP,UAAI,YAAY,CAAC,YAAY,SAAS;AACpC,YAAI,MAAM,SAAS,OAAO;AAAA,MAC5B;AACA,UAAI,YAAY,SAAS;AACvB,YAAI,KAAK,YAAY;AACnB,gBAAM,QAAQ,UAAM,uCAAW,CAAC,SAAS,IAAI,UAAU,CAAC,EAAE;AAAA,YACxD,IAAI;AAAA,YACJ;AAAA,cACE,gBAAgB;AAAA,cAChB,QAAQ;AAAA,YACV;AAAA,UACF;AACA,cAAI,OAAO;AACT,iBAAK,SAAS,IAAI;AAClB,gBAAI,CAAC,IAAI;AAAgB,kBAAI,QAAQ,SAAS,SAAS,IAAI,CAAC;AAAA,UAC9D;AACA,eAAK,aAAa;AAClB,eAAK,mBAAmB;AAAA,QAC1B,OAAO;AACL,eAAK,SAAS,IAAI;AAAA,QACpB;AAAA,MACF;AAAA,IACF;AACA,UAAM,KAAK,gBAAgB;AAAA,EAC7B;AAAA,EAEA,kBAAkB,YAA0B;AAC1C,UAAM,EAAE,oBAAoB,KAAK,KAAK,UAAU,OAAO,IAAI;AAE3D,UAAM,oBAAoB;AAAA,MACxB;AAAA,QACE,QAAQ,CAAC,QAAgB;AACvB,iBAAO,IAAI,QAAQ,UAAU,EAAE;AAAA,QACjC;AAAA,MACF;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,QAAI;AACF,wBAAkB,KAAK,SAAS,iBAAiB,aAAa;AAChE,QAAI;AACF,wBAAkB;AAAA,QAChB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAEF,UAAM,SAAS,gBAAAC,QAAS,mBAAmB;AAC3C,WAAO,eAAe,WAAW,+BAAAH,OAAc;AAC/C,WAAO,OAAO;AAAA,MACZ;AAAA,QACE,MAAM;AAAA,QACN,MAAM;AAAA,QACN,gBAAgB,kBAAkB,KAAK;AAAA,QACvC,oBAAoB,IAAI,SAAS,SAAS,IAAI,aAAa,CAAC;AAAA,QAC5D,SAAS,GAAG,SAAS,GAAG,YAAY,KAAK,sBAAsB;AAAA,QAC/D,SAAS;AAAA,QACT,UAAU,CAAC,QAAgB;AACzB,cAAI,CAAC;AAAK,iBAAK;AACf,cAAI,KAAK,oBAAoB;AAC3B,oBAAQ,IAAI,KAAK,IAAI,SAAS,KAAK,SAAS,IAAI,YAAY,CAAC,CAAC;AAChE,cAAI,KAAK;AACP,iBAAK,oBAAoB;AACzB,mBAAO;AAAA,UACT;AAAA,QACF;AAAA,QACA,QAAQ,IAAG,2BAAK,mBAAkB,IAAI,SAAS,WAAW;AAAA,QAC1D,OAAO;AAAA,MACT;AAAA,IACF,CAAC,EACE,KAAK,OAAO,YAA6B;AAvNhD;AAwNQ,UAAI,QAAQ,QAAQ,QAAQ;AA
C1B,aAAK,KAAK;AAAA,MACZ,OAAO;AACL,YAAI;AACF,cAAI,QAAQ,KAAK;AACf,kBAAM,cAAU,gBAAAI,SAAS;AACzB,kBAAM,QAAQ;AAAA,eACZ,aAAQ,IACL,MAAM,iBAAiB,MAD1B,mBAEI,IAAI,OAAK,EAAE,QAAQ,UAAU,IAAI;AAAA,cACrC;AAAA,gBACE,MAAM;AAAA,cACR;AAAA,YACF;AAAA,UACF;AAAA,QACF,SAAS,IAAP;AACA,gBAAM,MAAM,GAAG,SAAS;AACxB,cAAI,CAAC,IAAI,SAAS,gBAAgB;AAChC;AAAA,cACE;AAAA,cACA,SACE,cAAc,QACV,GAAG,SAAS,IACZ,KAAK,UAAU,IAAI,MAAM,CAAC;AAAA,YAElC;AAAA,QACJ,UAAE;AACA,iBAAO,KAAK,gBAAgB;AAAA,QAC9B;AAAA,MACF;AAAA,IACF,CAAC,EACA,MAAM,CAAC,QAAe;AACrB,UAAI,MAAM,IAAI,OAAO;AACrB,WAAK,KAAK;AAAA,IACZ,CAAC;AAAA,EACL;AAAA,EAEA,OAAO,CAAC,UAAkB;AACxB,UAAM,EAAE,KAAK,SAAS,IAAI;AAC1B,YAAQ,mBAAmB,MAAM;AAEjC,QAAI,OAAO;AACT,UAAI,MAAM,MAAM,OAAO;AACvB,cAAQ,KAAK,CAAC;AAAA,IAChB,OAAO;AACL,UAAI,QAAQ,SAAS,IAAI,KAAK,CAAC;AAC/B,cAAQ,WAAW;AACnB,cAAQ,KAAK,CAAC;AAAA,IAChB;AAAA,EACF;AACF;AAEA,IAAI;AAEG,MAAM,QAAQ,MAAM;AA9Q3B;AAkRE,MAAI,SAAO,aAAQ,SAAR,mBAAe,QAAO;AAC/B,WAAO;AAAA,MACL,MAAM,2BAAAL,QAAa;AAAA,MACnB,UAAU;AAAA,MAAC;AAAA,IACb;AACF,MAAI,CAAC;AAAa,kBAAc,IAAI,eAAe;AACnD,SAAO;AACT;AAEA,QAAQ,GAAG,qBAAqB,SAAU,KAAK;AAE7C,UAAQ,IAAI,GAAG;AACjB,CAAC;AAED,QAAQ,GAAG,UAAU,MAAM;AACzB,uBAAO,QAAQ,iBAAiB;AAChC,QAAM,EAAE,KAAK;AAGf,CAAC;AAED,QAAQ,GAAG,WAAW,MAAM;AAC1B,uBAAO,QAAQ,kBAAkB;AACjC,QAAM,EAAE,KAAK;AACf,CAAC;AAED,QAAQ,MAAM,GAAG,QAAQ,SAAO;AAC9B,MAAK,OAAe,KAAU;AAC5B,YAAQ,IAAI,EAAE;AACd,yBAAO,KAAK,uCAAuC;AACnD,UAAM,EAAE,KAAK;AAAA,EACf;AACF,CAAC;",
"names": ["ContensisCli", "inquirerPrompt", "figlet", "CredentialProvider", "inquirer", "commands"]
}
package/dist/util/csv.formatter.js
CHANGED
@@ -18,10 +18,12 @@ var __copyProps = (to, from, except, desc) => {
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
var csv_formatter_exports = {};
__export(csv_formatter_exports, {
- csvFormatter: () => csvFormatter
+ csvFormatter: () => csvFormatter,
+ csvToJson: () => csvToJson,
+ detectCsv: () => detectCsv
});
module.exports = __toCommonJS(csv_formatter_exports);
- var
+ var import_sync = require("csv/sync");
var import_json = require("./json.formatter");
const csvFormatter = (entries) => {
const flatEntries = [];
@@ -31,12 +33,57 @@ const csvFormatter = (entries) => {
}
else
flatEntries.push((0, import_json.flattenObject)(entries));
- const
- const csv = json2csvParser.parse(flatEntries);
+ const csv = (0, import_sync.stringify)(flatEntries, { header: true });
return csv;
};
+ const csvToJson = (data) => {
+ return (0, import_sync.parse)(data, {
+ columns: true,
+ skip_empty_lines: true
+ });
+ };
+ const detectCsv = (chunk, opts) => {
+ opts = opts || {};
+ if (Buffer.isBuffer(chunk))
+ chunk = chunk + "";
+ const delimiters = opts.delimiters || [",", ";", "\t", "|"];
+ const newlines = opts.newlines || ["\n", "\r"];
+ const lines = chunk.split(/[\n\r]+/g);
+ const delimiter = determineMost(lines[0], delimiters);
+ const newline = determineMost(chunk, newlines);
+ if (!delimiter)
+ return null;
+ return {
+ delimiter,
+ newline
+ };
+ };
+ const determineMost = (chunk, items) => {
+ const itemCount = {};
+ let ignoreString = false;
+ let maxValue = 0;
+ let maxChar;
+ let currValue;
+ items.forEach((item) => {
+ itemCount[item] = 0;
+ });
+ for (var i = 0; i < chunk.length; i++) {
+ if (chunk[i] === '"')
+ ignoreString = !ignoreString;
+ else if (!ignoreString && chunk[i] in itemCount) {
+ currValue = ++itemCount[chunk[i]];
+ if (currValue > maxValue) {
+ maxValue = currValue;
+ maxChar = chunk[i];
+ }
+ }
+ }
+ return maxChar;
+ };
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
- csvFormatter
+ csvFormatter,
+ csvToJson,
+ detectCsv
});
//# sourceMappingURL=csv.formatter.js.map
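As a rough illustration of the delimiter detection added above (the sample data and the relative import path are invented for this note, not taken from the package):

import { detectCsv } from './csv.formatter';

// detectCsv tallies each candidate delimiter (',' ';' tab '|') in the first line,
// skipping characters inside double quotes, and returns the most frequent one
const chunk = '"Smith, Jane",jane@example.com\n"Jones, Bob",bob@example.com\n';

detectCsv(chunk);
// => { delimiter: ',', newline: '\n' } (the quoted commas are not counted)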
package/dist/util/csv.formatter.js.map
CHANGED
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../src/util/csv.formatter.ts"],
- "sourcesContent": ["import {
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,
+
"sourcesContent": ["import { parse, stringify } from 'csv/sync';\nimport { flattenObject } from './json.formatter';\n\nexport const csvFormatter = <T>(entries: T | T[]) => {\n // Flatten the passed in object\n const flatEntries = [];\n if (Array.isArray(entries))\n for (const entry of entries) {\n flatEntries.push(flattenObject(entry));\n }\n else flatEntries.push(flattenObject(entries));\n\n // Parse the flattened object to csv\n const csv = stringify(flatEntries, { header: true });\n\n return csv;\n};\n\nexport const csvToJson = <T>(data: string): T[] => {\n return parse(data, {\n columns: true,\n skip_empty_lines: true,\n });\n};\n\nexport const detectCsv = (\n chunk: string,\n opts?: { delimiters?: string[]; newlines?: string[] }\n) => {\n opts = opts || {};\n if (Buffer.isBuffer(chunk)) chunk = chunk + '';\n const delimiters = opts.delimiters || [',', ';', '\\t', '|'];\n const newlines = opts.newlines || ['\\n', '\\r'];\n\n const lines = chunk.split(/[\\n\\r]+/g);\n\n const delimiter = determineMost(lines[0], delimiters);\n const newline = determineMost(chunk, newlines);\n\n if (!delimiter) return null;\n\n return {\n delimiter: delimiter,\n newline: newline,\n };\n};\n\nconst determineMost = (chunk: string, items: string[]) => {\n const itemCount = {} as any;\n let ignoreString = false;\n let maxValue = 0;\n let maxChar;\n let currValue;\n items.forEach(item => {\n itemCount[item] = 0;\n });\n for (var i = 0; i < chunk.length; i++) {\n if (chunk[i] === '\"') ignoreString = !ignoreString;\n else if (!ignoreString && chunk[i] in itemCount) {\n currValue = ++itemCount[chunk[i]];\n if (currValue > maxValue) {\n maxValue = currValue;\n maxChar = chunk[i];\n }\n }\n }\n return maxChar;\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiC;AACjC,kBAA8B;AAEvB,MAAM,eAAe,CAAI,YAAqB;AAEnD,QAAM,cAAc,CAAC;AACrB,MAAI,MAAM,QAAQ,OAAO;AACvB,eAAW,SAAS,SAAS;AAC3B,kBAAY,SAAK,2BAAc,KAAK,CAAC;AAAA,IACvC;AAAA;AACG,gBAAY,SAAK,2BAAc,OAAO,CAAC;AAG5C,QAAM,UAAM,uBAAU,aAAa,EAAE,QAAQ,KAAK,CAAC;AAEnD,SAAO;AACT;AAEO,MAAM,YAAY,CAAI,SAAsB;AACjD,aAAO,mBAAM,MAAM;AAAA,IACjB,SAAS;AAAA,IACT,kBAAkB;AAAA,EACpB,CAAC;AACH;AAEO,MAAM,YAAY,CACvB,OACA,SACG;AACH,SAAO,QAAQ,CAAC;AAChB,MAAI,OAAO,SAAS,KAAK;AAAG,YAAQ,QAAQ;AAC5C,QAAM,aAAa,KAAK,cAAc,CAAC,KAAK,KAAK,KAAM,GAAG;AAC1D,QAAM,WAAW,KAAK,YAAY,CAAC,MAAM,IAAI;AAE7C,QAAM,QAAQ,MAAM,MAAM,UAAU;AAEpC,QAAM,YAAY,cAAc,MAAM,IAAI,UAAU;AACpD,QAAM,UAAU,cAAc,OAAO,QAAQ;AAE7C,MAAI,CAAC;AAAW,WAAO;AAEvB,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AAEA,MAAM,gBAAgB,CAAC,OAAe,UAAoB;AACxD,QAAM,YAAY,CAAC;AACnB,MAAI,eAAe;AACnB,MAAI,WAAW;AACf,MAAI;AACJ,MAAI;AACJ,QAAM,QAAQ,UAAQ;AACpB,cAAU,QAAQ;AAAA,EACpB,CAAC;AACD,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,QAAI,MAAM,OAAO;AAAK,qBAAe,CAAC;AAAA,aAC7B,CAAC,gBAAgB,MAAM,MAAM,WAAW;AAC/C,kBAAY,EAAE,UAAU,MAAM;AAC9B,UAAI,YAAY,UAAU;AACxB,mBAAW;AACX,kBAAU,MAAM;AAAA,MAClB;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;",
"names": []
}
package/dist/util/json.formatter.js
CHANGED
@@ -26,13 +26,15 @@ var json_formatter_exports = {};
__export(json_formatter_exports, {
flattenObject: () => flattenObject,
jsonFormatter: () => jsonFormatter,
- limitFields: () => limitFields
+ limitFields: () => limitFields,
+ unflattenObject: () => unflattenObject
});
module.exports = __toCommonJS(json_formatter_exports);
var import_flat = require("flat");
var import_deep_cleaner = __toESM(require("deep-cleaner"));
const jsonFormatter = (obj, fields) => JSON.stringify(limitFields(obj, fields), null, 2);
const flattenObject = (obj) => (0, import_flat.flatten)((0, import_deep_cleaner.default)(obj, ["workflow"]));
+ const unflattenObject = (obj) => (0, import_flat.unflatten)(obj);
const limitFields = (obj, fields) => {
if (!fields)
return obj;
@@ -56,6 +58,7 @@ const limitFields = (obj, fields) => {
0 && (module.exports = {
flattenObject,
jsonFormatter,
- limitFields
+ limitFields,
+ unflattenObject
});
//# sourceMappingURL=json.formatter.js.map
package/dist/util/json.formatter.js.map
CHANGED
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../src/util/json.formatter.ts"],
-
"sourcesContent": ["import { flatten, unflatten } from 'flat';\nimport cleaner from 'deep-cleaner';\n\n// Format a JSON object for a nice output\nexport const jsonFormatter = <T>(obj: T, fields?: string[]) =>\n JSON.stringify(limitFields(obj, fields), null, 2);\n\n// Flatten a JSON object such as an entry so there are no\n// nested object and the keys are presented like \"sys.version.versionNo\": \"1.0\"\nexport const flattenObject = (obj: any) => flatten(cleaner(obj, ['workflow']));\n\n// Will limit and sort an object's keys by an array of supplied fields\nexport const limitFields = (obj: any, fields?: string[]): any => {\n if (!fields) return obj;\n if (obj && Array.isArray(obj)) {\n const arr = [];\n for (const child of obj) arr.push(limitFields(child, fields));\n return arr;\n }\n\n if (obj && typeof obj === 'object') {\n const flattenedObj = flatten(obj) as any;\n const sortedObj = {} as any;\n for (const field of fields) {\n sortedObj[field] = flattenedObj[field];\n }\n\n return unflatten(sortedObj);\n }\n\n return obj;\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAmC;AACnC,0BAAoB;AAGb,MAAM,gBAAgB,CAAI,KAAQ,WACvC,KAAK,UAAU,YAAY,KAAK,MAAM,GAAG,MAAM,CAAC;AAI3C,MAAM,gBAAgB,CAAC,YAAa,yBAAQ,oBAAAA,SAAQ,KAAK,CAAC,UAAU,CAAC,CAAC;
+
"sourcesContent": ["import { flatten, unflatten } from 'flat';\nimport cleaner from 'deep-cleaner';\n\n// Format a JSON object for a nice output\nexport const jsonFormatter = <T>(obj: T, fields?: string[]) =>\n JSON.stringify(limitFields(obj, fields), null, 2);\n\n// Flatten a JSON object such as an entry so there are no\n// nested object and the keys are presented like \"sys.version.versionNo\": \"1.0\"\nexport const flattenObject = (obj: any) => flatten(cleaner(obj, ['workflow']));\n\n// Unflatten a JSON object such as an entry so the arrays and\n// nested objects are reconstructed - the opposite of flattenObject\nexport const unflattenObject = (obj: any) => unflatten(obj);\n\n// Will limit and sort an object's keys by an array of supplied fields\nexport const limitFields = (obj: any, fields?: string[]): any => {\n if (!fields) return obj;\n if (obj && Array.isArray(obj)) {\n const arr = [];\n for (const child of obj) arr.push(limitFields(child, fields));\n return arr;\n }\n\n if (obj && typeof obj === 'object') {\n const flattenedObj = flatten(obj) as any;\n const sortedObj = {} as any;\n for (const field of fields) {\n sortedObj[field] = flattenedObj[field];\n }\n\n return unflatten(sortedObj);\n }\n\n return obj;\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAmC;AACnC,0BAAoB;AAGb,MAAM,gBAAgB,CAAI,KAAQ,WACvC,KAAK,UAAU,YAAY,KAAK,MAAM,GAAG,MAAM,CAAC;AAI3C,MAAM,gBAAgB,CAAC,YAAa,yBAAQ,oBAAAA,SAAQ,KAAK,CAAC,UAAU,CAAC,CAAC;AAItE,MAAM,kBAAkB,CAAC,YAAa,uBAAU,GAAG;AAGnD,MAAM,cAAc,CAAC,KAAU,WAA2B;AAC/D,MAAI,CAAC;AAAQ,WAAO;AACpB,MAAI,OAAO,MAAM,QAAQ,GAAG,GAAG;AAC7B,UAAM,MAAM,CAAC;AACb,eAAW,SAAS;AAAK,UAAI,KAAK,YAAY,OAAO,MAAM,CAAC;AAC5D,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,OAAO,QAAQ,UAAU;AAClC,UAAM,mBAAe,qBAAQ,GAAG;AAChC,UAAM,YAAY,CAAC;AACnB,eAAW,SAAS,QAAQ;AAC1B,gBAAU,SAAS,aAAa;AAAA,IAClC;AAEA,eAAO,uBAAU,SAAS;AAAA,EAC5B;AAEA,SAAO;AACT;",
"names": ["cleaner"]
}
package/dist/util/xml.formatter.js
CHANGED
@@ -24,7 +24,8 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
var xml_formatter_exports = {};
__export(xml_formatter_exports, {
- xmlFormatter: () => xmlFormatter
+ xmlFormatter: () => xmlFormatter,
+ xmlToJson: () => xmlToJson
});
module.exports = __toCommonJS(xml_formatter_exports);
var import_xml2js = __toESM(require("xml2js"));
@@ -40,12 +41,18 @@ const xmlFormatter = (entries) => {
const xml = builder.buildObject({ Entry: cleanedEntries });
return xml;
} catch (ex) {
- import_logger.Logger.error(`Problem building XML from
+ import_logger.Logger.error(`Problem building XML from json data`, ex);
return "";
}
};
+ const xmlToJson = async (data) => {
+ var _a;
+ const json = await import_xml2js.default.parseStringPromise(data, { explicitArray: false });
+ return ((_a = json.Items) == null ? void 0 : _a.Entry) || json;
+ };
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
- xmlFormatter
+ xmlFormatter,
+ xmlToJson
});
//# sourceMappingURL=xml.formatter.js.map
package/dist/util/xml.formatter.js.map
CHANGED
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../src/util/xml.formatter.ts"],
-
"sourcesContent": ["import xml2js from 'xml2js';\nimport cleaner from 'deep-cleaner';\nimport { Logger } from './logger';\n\nexport const xmlFormatter = <T>(entries: T | T[]) => {\n try {\n const cleanedEntries = cleaner(cleaner(entries, ['workflow']));\n\n const builder = new xml2js.Builder({\n cdata: true,\n rootName: 'Items',\n });\n const xml = builder.buildObject({ Entry: cleanedEntries });\n\n return xml;\n } catch (ex) {\n Logger.error(`Problem building XML from
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAmB;AACnB,0BAAoB;AACpB,oBAAuB;AAEhB,MAAM,eAAe,CAAI,YAAqB;AACnD,MAAI;AACF,UAAM,qBAAiB,oBAAAA,aAAQ,oBAAAA,SAAQ,SAAS,CAAC,UAAU,CAAC,CAAC;AAE7D,UAAM,UAAU,IAAI,cAAAC,QAAO,QAAQ;AAAA,MACjC,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,CAAC;AACD,UAAM,MAAM,QAAQ,YAAY,EAAE,OAAO,eAAe,CAAC;AAEzD,WAAO;AAAA,EACT,SAAS,IAAP;AACA,yBAAO,MAAM,
+
"sourcesContent": ["import xml2js from 'xml2js';\nimport cleaner from 'deep-cleaner';\nimport { Logger } from './logger';\n\nexport const xmlFormatter = <T>(entries: T | T[]) => {\n try {\n const cleanedEntries = cleaner(cleaner(entries, ['workflow']));\n\n const builder = new xml2js.Builder({\n cdata: true,\n rootName: 'Items',\n });\n const xml = builder.buildObject({ Entry: cleanedEntries });\n\n return xml;\n } catch (ex) {\n Logger.error(`Problem building XML from json data`, ex);\n return '';\n }\n};\n\nexport const xmlToJson = async <T>(data: string) => {\n const json = await xml2js.parseStringPromise(data, { explicitArray: false });\n\n return json.Items?.Entry || json;\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAmB;AACnB,0BAAoB;AACpB,oBAAuB;AAEhB,MAAM,eAAe,CAAI,YAAqB;AACnD,MAAI;AACF,UAAM,qBAAiB,oBAAAA,aAAQ,oBAAAA,SAAQ,SAAS,CAAC,UAAU,CAAC,CAAC;AAE7D,UAAM,UAAU,IAAI,cAAAC,QAAO,QAAQ;AAAA,MACjC,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,CAAC;AACD,UAAM,MAAM,QAAQ,YAAY,EAAE,OAAO,eAAe,CAAC;AAEzD,WAAO;AAAA,EACT,SAAS,IAAP;AACA,yBAAO,MAAM,uCAAuC,EAAE;AACtD,WAAO;AAAA,EACT;AACF;AAEO,MAAM,YAAY,OAAU,SAAiB;AArBpD;AAsBE,QAAM,OAAO,MAAM,cAAAA,QAAO,mBAAmB,MAAM,EAAE,eAAe,MAAM,CAAC;AAE3E,WAAO,UAAK,UAAL,mBAAY,UAAS;AAC9B;",
"names": ["cleaner", "xml2js"]
}
package/dist/version.js
CHANGED
@@ -21,7 +21,7 @@ __export(version_exports, {
LIB_VERSION: () => LIB_VERSION
});
module.exports = __toCommonJS(version_exports);
- const LIB_VERSION = "1.1.1-beta.
+ const LIB_VERSION = "1.1.1-beta.1";
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
LIB_VERSION
package/dist/version.js.map
CHANGED
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../src/version.ts"],
- "sourcesContent": ["export const LIB_VERSION = \"1.1.1-beta.
+ "sourcesContent": ["export const LIB_VERSION = \"1.1.1-beta.1\";\n"],
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAO,MAAM,cAAc;",
"names": []
}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
{
"name": "contensis-cli",
- "version": "1.1.1-beta.
+ "version": "1.1.1-beta.1",
"description": "A fully featured Contensis command line interface with a shell UI provides simple and intuitive ways to manage or profile your content in any NodeJS terminal.",
"repository": "https://github.com/contensis/cli",
"homepage": "https://github.com/contensis/cli/tree/main/packages/contensis-cli#readme",
@@ -36,7 +36,6 @@
"giturl": "^2.0.0",
"hosted-git-info": "^6.1.1",
"inquirer-command-prompt": "^0.1.0",
- "json2csv": "^5.0.7",
"jsonpath-mapper": "^1.1.0",
"keytar": "^7.9.0",
"lodash": "^4.17.21",
package/src/commands/import.ts
CHANGED
@@ -127,6 +127,7 @@ Example call:
`
Example call:
> import entries --source-cms example-dev --source-project-id microsite --zenql "sys.contentTypeId = blog"
+ > import entries --from-file myImportData.json --preserve-guids
`
)
.action(async (phrase: string, opts, cmd) => {
package/src/providers/file-provider.ts
CHANGED
@@ -2,6 +2,10 @@ import fs from 'fs';
import { homedir } from 'os';
import path from 'path';
import { tryParse } from '~/util';
+ import { csvToJson, detectCsv } from '~/util/csv.formatter';
+ import { unflattenObject } from '~/util/json.formatter';
+ import { Logger } from '~/util/logger';
+ import { xmlToJson } from '~/util/xml.formatter';

const userHomeDir = homedir();

@@ -10,14 +14,8 @@ export const appRootDir =
? process.cwd()
: path.join(userHomeDir, '.contensis/');

- export const readJsonFile = <T>(filePath: string) => {
- const directoryPath = cwdPath(filePath);
- const file = readFile(directoryPath);
- if (file) return tryParse(file) as T | string;
- return undefined;
- };
export const readFile = (filePath: string) => {
- const directoryPath =
+ const directoryPath = appPath(filePath);
if (fs.existsSync(directoryPath)) {
const file = fs.readFileSync(directoryPath, 'utf8');
return file;
@@ -27,7 +25,7 @@ export const readFile = (filePath: string) => {
};

export const readFiles = (directory: string, createDirectory = true) => {
- const directoryPath =
+ const directoryPath = appPath(directory);
if (fs.existsSync(directoryPath)) {
const files = fs.readdirSync(directoryPath);
return files;
@@ -41,12 +39,12 @@ export const readFiles = (directory: string, createDirectory = true) => {
};

export const writeFile = (filePath: string, content: string) => {
- const directoryPath =
+ const directoryPath = appPath(filePath);
fs.writeFileSync(directoryPath, content, { encoding: 'utf-8' });
};

export const removeFile = (filePath: string) => {
- const directoryPath =
+ const directoryPath = appPath(filePath);
if (fs.existsSync(directoryPath)) {
fs.rmSync(directoryPath);
}
@@ -75,13 +73,63 @@ export const moveFile = (file: string, fromPath: string, toPath: string) => {
};

export const checkDir = (filePath: string) => {
- const directoryPath = path.dirname(
+ const directoryPath = path.dirname(appPath(filePath));
if (!fs.existsSync(directoryPath))
fs.mkdirSync(directoryPath, { recursive: true });
};

- export const
+ export const appPath = (filePath: string) =>
path.isAbsolute(filePath) ? filePath : path.join(appRootDir, filePath);

export const cwdPath = (filePath: string) =>
path.isAbsolute(filePath) ? filePath : path.join(process.cwd(), filePath);
+
+ type DetectedFileType =
+ | { type: 'json'; contents: any }
+ | { type: 'xml' | 'csv'; contents: string };
+
+ const detectFileType = (
+ fromFile: string
+ ): DetectedFileType | undefined => {
+ try {
+ const fileData = readFile(fromFile);
+ if (fileData) {
+ // if XML
+ if (fileData.startsWith('<')) return { contents: fileData, type: 'xml' };
+
+ // if JSON
+ const jsonData = tryParse(fileData);
+ if (jsonData) return { contents: jsonData, type: 'json' };
+
+ // if CSV
+ const csv = detectCsv(fileData);
+ if (csv) return { contents: fileData, type: 'csv' };
+ }
+ } catch (ex) {
+ Logger.error(`Problem detecting file type ${fromFile}`, ex);
+ }
+ };
+
+ export const readFileAsJSON = async <T = any>(
+ fromFile: string
+ ): Promise<T | undefined> => {
+ const detectedFile = detectFileType(cwdPath(fromFile));
+ if (!detectedFile) return undefined;
+ try {
+ switch (detectedFile.type) {
+ case 'csv': {
+ const flatJson = csvToJson(detectedFile.contents);
+ const unflattenedJson = flatJson.map(record => unflattenObject(record));
+ return unflattenedJson as T;
+ }
+ case 'xml':
+ return (await xmlToJson(detectedFile.contents)) as T;
+
+ case 'json':
+ default:
+ return detectedFile.contents;
+ }
+ } catch (ex) {
+ Logger.error(`Problem converting file from ${detectedFile.type}`, ex);
+ }
+ };
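A minimal sketch of how the new readFileAsJSON helper is meant to be consumed (loadImportData and the file name are hypothetical; the '~/' path alias and the string guard mirror how ContensisCliService uses it):

import { readFileAsJSON } from '~/providers/file-provider';

const loadImportData = async (fromFile: string) => {
  // JSON comes back parsed as-is, XML goes through xmlToJson, and CSV rows are
  // parsed with csvToJson then unflattened back into nested entry objects
  const fileData = (await readFileAsJSON(fromFile)) || [];
  if (typeof fileData === 'string')
    throw new Error(`Import file format must be of type JSON`);
  return fileData;
};

// e.g. loadImportData('myImportData.csv')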
package/src/services/ContensisCliService.ts
CHANGED
@@ -6,7 +6,7 @@ import fetch from 'node-fetch';
import path from 'path';

import { Component, ContentType, Project } from 'contensis-core-api';
- import {
+ import { Role } from 'contensis-management-api/lib/models';
import {
ContensisMigrationService,
MigrateRequest,
@@ -24,7 +24,7 @@ import ContensisAuthService from './ContensisAuthService';
import { LogMessages } from '~/localisation/en-GB';
import { OutputFormat, OutputOptionsConstructorArg } from '~/models/CliService';

- import {
+ import { readFileAsJSON } from '~/providers/file-provider';
import SessionCacheProvider from '../providers/SessionCacheProvider';
import CredentialProvider from '~/providers/CredentialProvider';

@@ -297,9 +297,7 @@ class ContensisCli {
}) => {
const source: 'contensis' | 'file' = fromFile ? 'file' : 'contensis';

- const fileData = fromFile
- ? readJsonFile<(Entry | ContentType | Component)[]>(fromFile) || []
- : [];
+ const fileData = fromFile ? (await readFileAsJSON(fromFile)) || [] : [];

if (typeof fileData === 'string')
throw new Error(`Import file format must be of type JSON`);
@@ -1279,7 +1277,7 @@ class ContensisCli {
const { currentProject, log, messages } = this;

const fileData = fromFile
- ?
+ ? (await readFileAsJSON<(ContentType | Component)[]>(fromFile)) || []
: [];
if (typeof fileData === 'string')
throw new Error(`Import file format must be of type JSON`);
@@ -1428,7 +1426,9 @@ class ContensisCli {
) => {
const { currentProject, log, messages } = this;

- let fileData = fromFile
+ let fileData = fromFile
+ ? (await readFileAsJSON<ContentType[]>(fromFile)) || []
+ : [];
if (typeof fileData === 'string')
throw new Error(`Import file format must be of type JSON`);

@@ -1476,7 +1476,9 @@ class ContensisCli {
) => {
const { log } = this;

- let fileData = fromFile
+ let fileData = fromFile
+ ? (await readFileAsJSON<ContentType[]>(fromFile)) || []
+ : [];
if (typeof fileData === 'string')
throw new Error(`Import file format must be of type JSON`);

@@ -1608,7 +1610,9 @@ class ContensisCli {
) => {
const { currentProject, log, messages } = this;

- let fileData = fromFile
+ let fileData = fromFile
+ ? (await readFileAsJSON<Component[]>(fromFile)) || []
+ : [];
if (typeof fileData === 'string')
throw new Error(`Import file format must be of type JSON`);

package/src/shell.ts
CHANGED
package/src/util/csv.formatter.ts
CHANGED
@@ -1,4 +1,4 @@
- import {
+ import { parse, stringify } from 'csv/sync';
import { flattenObject } from './json.formatter';

export const csvFormatter = <T>(entries: T | T[]) => {
@@ -11,8 +11,58 @@ export const csvFormatter = <T>(entries: T | T[]) => {
else flatEntries.push(flattenObject(entries));

// Parse the flattened object to csv
- const
- const csv = json2csvParser.parse(flatEntries);
+ const csv = stringify(flatEntries, { header: true });

return csv;
};
+
+ export const csvToJson = <T>(data: string): T[] => {
+ return parse(data, {
+ columns: true,
+ skip_empty_lines: true,
+ });
+ };
+
+ export const detectCsv = (
+ chunk: string,
+ opts?: { delimiters?: string[]; newlines?: string[] }
+ ) => {
+ opts = opts || {};
+ if (Buffer.isBuffer(chunk)) chunk = chunk + '';
+ const delimiters = opts.delimiters || [',', ';', '\t', '|'];
+ const newlines = opts.newlines || ['\n', '\r'];
+
+ const lines = chunk.split(/[\n\r]+/g);
+
+ const delimiter = determineMost(lines[0], delimiters);
+ const newline = determineMost(chunk, newlines);
+
+ if (!delimiter) return null;
+
+ return {
+ delimiter: delimiter,
+ newline: newline,
+ };
+ };
+
+ const determineMost = (chunk: string, items: string[]) => {
+ const itemCount = {} as any;
+ let ignoreString = false;
+ let maxValue = 0;
+ let maxChar;
+ let currValue;
+ items.forEach(item => {
+ itemCount[item] = 0;
+ });
+ for (var i = 0; i < chunk.length; i++) {
+ if (chunk[i] === '"') ignoreString = !ignoreString;
+ else if (!ignoreString && chunk[i] in itemCount) {
+ currValue = ++itemCount[chunk[i]];
+ if (currValue > maxValue) {
+ maxValue = currValue;
+ maxChar = chunk[i];
+ }
+ }
+ }
+ return maxChar;
+ };
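A rough sketch of the CSV round trip these helpers enable (the sample entry is invented for this note):

import { csvFormatter, csvToJson } from './csv.formatter';
import { unflattenObject } from './json.formatter';

const entry = { entryTitle: 'Hello', sys: { language: 'en-GB' } };

const csv = csvFormatter(entry);
// roughly: "entryTitle,sys.language\nHello,en-GB\n"

const rows = csvToJson<Record<string, string>>(csv);
// [{ entryTitle: 'Hello', 'sys.language': 'en-GB' }]

const restored = rows.map(row => unflattenObject(row));
// [{ entryTitle: 'Hello', sys: { language: 'en-GB' } }]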
package/src/util/json.formatter.ts
CHANGED
@@ -9,6 +9,10 @@ export const jsonFormatter = <T>(obj: T, fields?: string[]) =>
// nested object and the keys are presented like "sys.version.versionNo": "1.0"
export const flattenObject = (obj: any) => flatten(cleaner(obj, ['workflow']));

+ // Unflatten a JSON object such as an entry so the arrays and
+ // nested objects are reconstructed - the opposite of flattenObject
+ export const unflattenObject = (obj: any) => unflatten(obj);
+
// Will limit and sort an object's keys by an array of supplied fields
export const limitFields = (obj: any, fields?: string[]): any => {
if (!fields) return obj;
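A small sketch of the flatten/unflatten round trip (sample object invented for this note); array items become numbered keys when flattened, which is what lets CSV columns map back onto arrays:

import { flattenObject, unflattenObject } from './json.formatter';

const flat = flattenObject({ title: 'Post', tags: ['news', 'blog'] });
// { title: 'Post', 'tags.0': 'news', 'tags.1': 'blog' }

const restored = unflattenObject(flat);
// { title: 'Post', tags: ['news', 'blog'] }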
package/src/util/xml.formatter.ts
CHANGED
@@ -14,7 +14,13 @@ export const xmlFormatter = <T>(entries: T | T[]) => {

return xml;
} catch (ex) {
- Logger.error(`Problem building XML from
+ Logger.error(`Problem building XML from json data`, ex);
return '';
}
};
+
+ export const xmlToJson = async <T>(data: string) => {
+ const json = await xml2js.parseStringPromise(data, { explicitArray: false });
+
+ return json.Items?.Entry || json;
+ };
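A short sketch of the XML round trip (roundTrip and the sample entries are invented for this note; the generated XML is shown without its declaration and indentation):

import { xmlFormatter, xmlToJson } from './xml.formatter';

const roundTrip = async () => {
  const xml = xmlFormatter([{ title: 'One' }, { title: 'Two' }]);
  // <Items><Entry><title>One</title></Entry><Entry><title>Two</title></Entry></Items>

  // with explicitArray: false, repeated <Entry> elements still parse to an array,
  // so json.Items.Entry is returned directly
  return xmlToJson(xml); // [{ title: 'One' }, { title: 'Two' }]
};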
package/src/version.ts
CHANGED
@@ -1 +1 @@
- export const LIB_VERSION = "1.1.1-beta.
+ export const LIB_VERSION = "1.1.1-beta.1";