contensis-cli 1.1.1-beta.0 → 1.1.1-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,6 +1,6 @@
  # Changelog

- ## [1.1.0](https://github.com/contensis/cli/compare/v1.0.11...v1.1.0) (2023-12-01)
+ ## [1.1.0](https://github.com/contensis/cli/compare/contensis-cli-v1.0.11...contensis-cli-v1.1.0) (2023-12-01)


  ### Features
@@ -109,6 +109,7 @@ Example call:
  `
  Example call:
  > import entries --source-cms example-dev --source-project-id microsite --zenql "sys.contentTypeId = blog"
+ > import entries --from-file myImportData.json --preserve-guids
  `
  ).action(async (phrase, opts, cmd) => {
  await (0, import_ContensisCliService.cliCommand)(
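The added help line documents importing entries from a previously exported file; per the option description, --preserve-guids is used when re-importing entries you exported earlier and wish to update. For readability, this is the relevant action handler as it appears in the TypeScript source embedded in the source map below (excerpt only, one comment added):

    .action(async (phrase: string, opts, cmd) => {
      await cliCommand(
        ['import', 'entries'],
        opts,
        mapContensisOpts({ phrase, ...opts })
      ).ImportEntries({
        commit: opts.commit,
        fromFile: opts.fromFile, // set by --from-file myImportData.json
        logOutput: opts.outputEntries,
      });
    });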
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/commands/import.ts"],
- "sourcesContent": ["import { Command, Option } from 'commander';\nimport { cliCommand } from '~/services/ContensisCliService';\nimport {\n commit,\n getEntryOptions,\n ignoreErrors,\n mapContensisOpts,\n} from './globalOptions';\n\nexport const makeImportCommand = () => {\n const program = new Command()\n .command('import')\n .description('import command')\n .addHelpText('after', `\\n`)\n .showHelpAfterError(true)\n .exitOverride();\n\n program\n .command('models')\n .description('import complete content models')\n .argument('[modelIds...]', 'ids of the content models to import (optional)')\n .addOption(commit)\n .addHelpText(\n 'after',\n `\nExample call:\n > import models blogPost --from-file contentmodels-backup.json\n > import models --source-alias example-dev\n`\n )\n .action(async (modelIds: string[], opts) => {\n await cliCommand(\n ['import', 'models', modelIds.join(' ')],\n opts,\n mapContensisOpts({ modelIds, ...opts })\n ).ImportContentModels({\n fromFile: opts.fromFile,\n commit: opts.commit,\n });\n });\n\n program\n .command('contenttypes')\n .description('import content types')\n .argument(\n '[contentTypeIds...]',\n 'Optional list of API id(s) of the content type(s) to import'\n )\n .addOption(commit)\n .addHelpText(\n 'after',\n `\nExample call:\n > import contenttypes {contentTypeIds} --from-file contenttypes-backup.json\n > import contenttypes {contentTypeIds} --source-alias example-dev\n`\n )\n .action(async (contentTypeIds: string[], opts) => {\n await cliCommand(\n ['import', 'contenttypes'],\n opts,\n mapContensisOpts({ contentTypeIds, ...opts })\n ).ImportContentTypes(\n {\n fromFile: opts.fromFile,\n commit: opts.commit,\n },\n contentTypeIds\n );\n });\n\n program\n .command('components')\n .description('import components')\n .argument(\n '[componentIds...]',\n 'Optional list of API id(s) of the component(s) to import'\n )\n .addOption(commit)\n .addHelpText(\n 'after',\n `\nExample call:\n > import components {componentIds} --from-file component-backup.json\n > import components {componentIds} --source-alias example-dev\n`\n )\n .action(async (componentIds: string[], opts) => {\n await cliCommand(\n ['import', 'component'],\n opts,\n mapContensisOpts({ componentIds, ...opts })\n ).ImportComponents(\n {\n fromFile: opts.fromFile,\n commit: opts.commit,\n },\n componentIds\n );\n });\n\n getEntryOptions(\n program\n .command('entries')\n .description('import entries')\n .argument(\n '[search phrase]',\n 'get entries with the search phrase, use quotes for multiple words'\n )\n )\n .addOption(commit)\n .option(\n '-preserve --preserve-guids',\n 'include this flag when you are importing entries that you have previously exported and wish to update'\n )\n .addOption(\n new Option(\n '-oe --output-entries <outputEntries>',\n 'which details of the entries included in the import to output'\n )\n .choices(['errors', 'changes', 'all'])\n .default('errors')\n )\n .addOption(ignoreErrors)\n .addHelpText(\n 'after',\n `\nExample call:\n > import entries --source-cms example-dev --source-project-id microsite --zenql \"sys.contentTypeId = blog\"\n`\n )\n .action(async (phrase: string, opts, cmd) => {\n await cliCommand(\n ['import', 'entries'],\n opts,\n mapContensisOpts({ phrase, ...opts })\n ).ImportEntries({\n commit: opts.commit,\n fromFile: opts.fromFile,\n logOutput: opts.outputEntries,\n });\n });\n\n // TODO: add options to import one an array of nodes? 
nodeIds: string[]\n program\n .command('nodes')\n .description('import nodes')\n .argument('[root]', 'import nodes from the specified path e.g. /blog', '/')\n .option(\n '-preserve --preserve-guids',\n 'include this flag when you are importing nodes that you have previously exported and wish to update'\n )\n .addOption(ignoreErrors)\n .addOption(commit)\n .addOption(\n new Option(\n '-od --output-detail <outputDetail>',\n 'how much detail to output from the import'\n )\n .choices(['errors', 'changes', 'all'])\n .default('errors')\n )\n .option(\n '-ol --output-limit <outputLimit>',\n 'expand or limit the number of records output to the console',\n '200'\n )\n .addHelpText(\n 'after',\n `\nExample call:\n > import nodes /blog --source-alias example-alias --source-project-id example-project\n > import nodes --from-file site-backup.json --preserve-guids\n`\n )\n .action(async (root: string, opts) => {\n await cliCommand(\n ['import', 'nodes'],\n opts,\n mapContensisOpts({ paths: root.split(' '), ...opts })\n ).ImportNodes({\n commit: opts.commit,\n fromFile: opts.fromFile,\n logOutput: opts.outputDetail,\n logLimit: Number(opts.outputLimit),\n });\n });\n\n return program;\n};\n\nexport const get = makeImportCommand();\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAAgC;AAChC,iCAA2B;AAC3B,2BAKO;AAEA,MAAM,oBAAoB,MAAM;AACrC,QAAM,UAAU,IAAI,yBAAQ,EACzB,QAAQ,QAAQ,EAChB,YAAY,gBAAgB,EAC5B,YAAY,SAAS;AAAA,CAAI,EACzB,mBAAmB,IAAI,EACvB,aAAa;AAEhB,UACG,QAAQ,QAAQ,EAChB,YAAY,gCAAgC,EAC5C,SAAS,iBAAiB,gDAAgD,EAC1E,UAAU,2BAAM,EAChB;AAAA,IACC;AAAA,IACA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EACC,OAAO,OAAO,UAAoB,SAAS;AAC1C,cAAM;AAAA,MACJ,CAAC,UAAU,UAAU,SAAS,KAAK,GAAG,CAAC;AAAA,MACvC;AAAA,UACA,uCAAiB,EAAE,UAAU,GAAG,KAAK,CAAC;AAAA,IACxC,EAAE,oBAAoB;AAAA,MACpB,UAAU,KAAK;AAAA,MACf,QAAQ,KAAK;AAAA,IACf,CAAC;AAAA,EACH,CAAC;AAEH,UACG,QAAQ,cAAc,EACtB,YAAY,sBAAsB,EAClC;AAAA,IACC;AAAA,IACA;AAAA,EACF,EACC,UAAU,2BAAM,EAChB;AAAA,IACC;AAAA,IACA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EACC,OAAO,OAAO,gBAA0B,SAAS;AAChD,cAAM;AAAA,MACJ,CAAC,UAAU,cAAc;AAAA,MACzB;AAAA,UACA,uCAAiB,EAAE,gBAAgB,GAAG,KAAK,CAAC;AAAA,IAC9C,EAAE;AAAA,MACA;AAAA,QACE,UAAU,KAAK;AAAA,QACf,QAAQ,KAAK;AAAA,MACf;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC;AAEH,UACG,QAAQ,YAAY,EACpB,YAAY,mBAAmB,EAC/B;AAAA,IACC;AAAA,IACA;AAAA,EACF,EACC,UAAU,2BAAM,EAChB;AAAA,IACC;AAAA,IACA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EACC,OAAO,OAAO,cAAwB,SAAS;AAC9C,cAAM;AAAA,MACJ,CAAC,UAAU,WAAW;AAAA,MACtB;AAAA,UACA,uCAAiB,EAAE,cAAc,GAAG,KAAK,CAAC;AAAA,IAC5C,EAAE;AAAA,MACA;AAAA,QACE,UAAU,KAAK;AAAA,QACf,QAAQ,KAAK;AAAA,MACf;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC;AAEH;AAAA,IACE,QACG,QAAQ,SAAS,EACjB,YAAY,gBAAgB,EAC5B;AAAA,MACC;AAAA,MACA;AAAA,IACF;AAAA,EACJ,EACG,UAAU,2BAAM,EAChB;AAAA,IACC;AAAA,IACA;AAAA,EACF,EACC;AAAA,IACC,IAAI;AAAA,MACF;AAAA,MACA;AAAA,IACF,EACG,QAAQ,CAAC,UAAU,WAAW,KAAK,CAAC,EACpC,QAAQ,QAAQ;AAAA,EACrB,EACC,UAAU,iCAAY,EACtB;AAAA,IACC;AAAA,IACA;AAAA;AAAA;AAAA;AAAA,EAIF,EACC,OAAO,OAAO,QAAgB,MAAM,QAAQ;AAC3C,cAAM;AAAA,MACJ,CAAC,UAAU,SAAS;AAAA,MACpB;AAAA,UACA,uCAAiB,EAAE,QAAQ,GAAG,KAAK,CAAC;AAAA,IACtC,EAAE,cAAc;AAAA,MACd,QAAQ,KAAK;AAAA,MACb,UAAU,KAAK;AAAA,MACf,WAAW,KAAK;AAAA,IAClB,CAAC;AAAA,EACH,CAAC;AAGH,UACG,QAAQ,OAAO,EACf,YAAY,cAAc,EAC1B,SAAS,UAAU,mDAAmD,GAAG,EACzE;AAAA,IACC;AAAA,IACA;AAAA,EACF,EACC,UAAU,iCAAY,EACtB,UAAU,2BAAM,EAChB;AAAA,IACC,IAAI;AAAA,MACF;AAAA,MACA;AAAA,IACF,EACG,QAAQ,CAAC,UAAU,WAAW,KAAK,CAAC,EACpC,QAAQ,QAAQ;AAAA,EACrB,EACC;AAAA,IACC;AAAA,IACA;AAAA,IACA;AAAA,EACF,EACC;AAAA,IACC;AAAA,IACA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EACC,OAAO,OAAO,MAAc,SAAS;AACpC,cAAM;AAAA,MACJ,CAAC,UAAU,OAAO;AAAA,MAClB;AAAA,UACA,uCAAiB,EAAE,OAAO,KAAK,MAAM,GAAG,GAAG,GAAG,KAAK,CAAC;AAAA,IACtD,EAAE,YAAY;AAAA,MACZ,QAAQ,KAAK;AAAA,MACb,UAAU,KAAK;AAAA,MACf,WAAW,KAAK;AAAA,MAChB,UAAU,OAAO,KAAK,WAAW;AAAA,IACnC,CAAC;AAAA,EACH,CAAC;AAEH,SAAO;AACT;AAEO,MAAM,MAAM,kBAAkB;",
+ "sourcesContent": ["import { Command, Option } from 'commander';\nimport { cliCommand } from '~/services/ContensisCliService';\nimport {\n commit,\n getEntryOptions,\n ignoreErrors,\n mapContensisOpts,\n} from './globalOptions';\n\nexport const makeImportCommand = () => {\n const program = new Command()\n .command('import')\n .description('import command')\n .addHelpText('after', `\\n`)\n .showHelpAfterError(true)\n .exitOverride();\n\n program\n .command('models')\n .description('import complete content models')\n .argument('[modelIds...]', 'ids of the content models to import (optional)')\n .addOption(commit)\n .addHelpText(\n 'after',\n `\nExample call:\n > import models blogPost --from-file contentmodels-backup.json\n > import models --source-alias example-dev\n`\n )\n .action(async (modelIds: string[], opts) => {\n await cliCommand(\n ['import', 'models', modelIds.join(' ')],\n opts,\n mapContensisOpts({ modelIds, ...opts })\n ).ImportContentModels({\n fromFile: opts.fromFile,\n commit: opts.commit,\n });\n });\n\n program\n .command('contenttypes')\n .description('import content types')\n .argument(\n '[contentTypeIds...]',\n 'Optional list of API id(s) of the content type(s) to import'\n )\n .addOption(commit)\n .addHelpText(\n 'after',\n `\nExample call:\n > import contenttypes {contentTypeIds} --from-file contenttypes-backup.json\n > import contenttypes {contentTypeIds} --source-alias example-dev\n`\n )\n .action(async (contentTypeIds: string[], opts) => {\n await cliCommand(\n ['import', 'contenttypes'],\n opts,\n mapContensisOpts({ contentTypeIds, ...opts })\n ).ImportContentTypes(\n {\n fromFile: opts.fromFile,\n commit: opts.commit,\n },\n contentTypeIds\n );\n });\n\n program\n .command('components')\n .description('import components')\n .argument(\n '[componentIds...]',\n 'Optional list of API id(s) of the component(s) to import'\n )\n .addOption(commit)\n .addHelpText(\n 'after',\n `\nExample call:\n > import components {componentIds} --from-file component-backup.json\n > import components {componentIds} --source-alias example-dev\n`\n )\n .action(async (componentIds: string[], opts) => {\n await cliCommand(\n ['import', 'component'],\n opts,\n mapContensisOpts({ componentIds, ...opts })\n ).ImportComponents(\n {\n fromFile: opts.fromFile,\n commit: opts.commit,\n },\n componentIds\n );\n });\n\n getEntryOptions(\n program\n .command('entries')\n .description('import entries')\n .argument(\n '[search phrase]',\n 'get entries with the search phrase, use quotes for multiple words'\n )\n )\n .addOption(commit)\n .option(\n '-preserve --preserve-guids',\n 'include this flag when you are importing entries that you have previously exported and wish to update'\n )\n .addOption(\n new Option(\n '-oe --output-entries <outputEntries>',\n 'which details of the entries included in the import to output'\n )\n .choices(['errors', 'changes', 'all'])\n .default('errors')\n )\n .addOption(ignoreErrors)\n .addHelpText(\n 'after',\n `\nExample call:\n > import entries --source-cms example-dev --source-project-id microsite --zenql \"sys.contentTypeId = blog\"\n > import entries --from-file myImportData.json --preserve-guids\n`\n )\n .action(async (phrase: string, opts, cmd) => {\n await cliCommand(\n ['import', 'entries'],\n opts,\n mapContensisOpts({ phrase, ...opts })\n ).ImportEntries({\n commit: opts.commit,\n fromFile: opts.fromFile,\n logOutput: opts.outputEntries,\n });\n });\n\n // TODO: add options to import one an array of nodes? 
nodeIds: string[]\n program\n .command('nodes')\n .description('import nodes')\n .argument('[root]', 'import nodes from the specified path e.g. /blog', '/')\n .option(\n '-preserve --preserve-guids',\n 'include this flag when you are importing nodes that you have previously exported and wish to update'\n )\n .addOption(ignoreErrors)\n .addOption(commit)\n .addOption(\n new Option(\n '-od --output-detail <outputDetail>',\n 'how much detail to output from the import'\n )\n .choices(['errors', 'changes', 'all'])\n .default('errors')\n )\n .option(\n '-ol --output-limit <outputLimit>',\n 'expand or limit the number of records output to the console',\n '200'\n )\n .addHelpText(\n 'after',\n `\nExample call:\n > import nodes /blog --source-alias example-alias --source-project-id example-project\n > import nodes --from-file site-backup.json --preserve-guids\n`\n )\n .action(async (root: string, opts) => {\n await cliCommand(\n ['import', 'nodes'],\n opts,\n mapContensisOpts({ paths: root.split(' '), ...opts })\n ).ImportNodes({\n commit: opts.commit,\n fromFile: opts.fromFile,\n logOutput: opts.outputDetail,\n logLimit: Number(opts.outputLimit),\n });\n });\n\n return program;\n};\n\nexport const get = makeImportCommand();\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAAgC;AAChC,iCAA2B;AAC3B,2BAKO;AAEA,MAAM,oBAAoB,MAAM;AACrC,QAAM,UAAU,IAAI,yBAAQ,EACzB,QAAQ,QAAQ,EAChB,YAAY,gBAAgB,EAC5B,YAAY,SAAS;AAAA,CAAI,EACzB,mBAAmB,IAAI,EACvB,aAAa;AAEhB,UACG,QAAQ,QAAQ,EAChB,YAAY,gCAAgC,EAC5C,SAAS,iBAAiB,gDAAgD,EAC1E,UAAU,2BAAM,EAChB;AAAA,IACC;AAAA,IACA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EACC,OAAO,OAAO,UAAoB,SAAS;AAC1C,cAAM;AAAA,MACJ,CAAC,UAAU,UAAU,SAAS,KAAK,GAAG,CAAC;AAAA,MACvC;AAAA,UACA,uCAAiB,EAAE,UAAU,GAAG,KAAK,CAAC;AAAA,IACxC,EAAE,oBAAoB;AAAA,MACpB,UAAU,KAAK;AAAA,MACf,QAAQ,KAAK;AAAA,IACf,CAAC;AAAA,EACH,CAAC;AAEH,UACG,QAAQ,cAAc,EACtB,YAAY,sBAAsB,EAClC;AAAA,IACC;AAAA,IACA;AAAA,EACF,EACC,UAAU,2BAAM,EAChB;AAAA,IACC;AAAA,IACA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EACC,OAAO,OAAO,gBAA0B,SAAS;AAChD,cAAM;AAAA,MACJ,CAAC,UAAU,cAAc;AAAA,MACzB;AAAA,UACA,uCAAiB,EAAE,gBAAgB,GAAG,KAAK,CAAC;AAAA,IAC9C,EAAE;AAAA,MACA;AAAA,QACE,UAAU,KAAK;AAAA,QACf,QAAQ,KAAK;AAAA,MACf;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC;AAEH,UACG,QAAQ,YAAY,EACpB,YAAY,mBAAmB,EAC/B;AAAA,IACC;AAAA,IACA;AAAA,EACF,EACC,UAAU,2BAAM,EAChB;AAAA,IACC;AAAA,IACA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EACC,OAAO,OAAO,cAAwB,SAAS;AAC9C,cAAM;AAAA,MACJ,CAAC,UAAU,WAAW;AAAA,MACtB;AAAA,UACA,uCAAiB,EAAE,cAAc,GAAG,KAAK,CAAC;AAAA,IAC5C,EAAE;AAAA,MACA;AAAA,QACE,UAAU,KAAK;AAAA,QACf,QAAQ,KAAK;AAAA,MACf;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC;AAEH;AAAA,IACE,QACG,QAAQ,SAAS,EACjB,YAAY,gBAAgB,EAC5B;AAAA,MACC;AAAA,MACA;AAAA,IACF;AAAA,EACJ,EACG,UAAU,2BAAM,EAChB;AAAA,IACC;AAAA,IACA;AAAA,EACF,EACC;AAAA,IACC,IAAI;AAAA,MACF;AAAA,MACA;AAAA,IACF,EACG,QAAQ,CAAC,UAAU,WAAW,KAAK,CAAC,EACpC,QAAQ,QAAQ;AAAA,EACrB,EACC,UAAU,iCAAY,EACtB;AAAA,IACC;AAAA,IACA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EACC,OAAO,OAAO,QAAgB,MAAM,QAAQ;AAC3C,cAAM;AAAA,MACJ,CAAC,UAAU,SAAS;AAAA,MACpB;AAAA,UACA,uCAAiB,EAAE,QAAQ,GAAG,KAAK,CAAC;AAAA,IACtC,EAAE,cAAc;AAAA,MACd,QAAQ,KAAK;AAAA,MACb,UAAU,KAAK;AAAA,MACf,WAAW,KAAK;AAAA,IAClB,CAAC;AAAA,EACH,CAAC;AAGH,UACG,QAAQ,OAAO,EACf,YAAY,cAAc,EAC1B,SAAS,UAAU,mDAAmD,GAAG,EACzE;AAAA,IACC;AAAA,IACA;AAAA,EACF,EACC,UAAU,iCAAY,EACtB,UAAU,2BAAM,EAChB;AAAA,IACC,IAAI;AAAA,MACF;AAAA,MACA;AAAA,IACF,EACG,QAAQ,CAAC,UAAU,WAAW,KAAK,CAAC,EACpC,QAAQ,QAAQ;AAAA,EACrB,EACC;AAAA,IACC;AAAA,IACA;AAAA,IACA;AAAA,EACF,EACC;AAAA,IACC;AAAA,IACA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EACC,OAAO,OAAO,MAAc,SAAS;AACpC,cAAM;AAAA,MACJ,CAAC,UAAU,OAAO;AAAA,MAClB;AAAA,UACA,uCAAiB,EAAE,OAAO,KAAK,MAAM,GAAG,GAAG,GAAG,KAAK,CAAC;AAAA,IACtD,EAAE,YAAY;AAAA,MACZ,QAAQ,KAAK;AAAA,MACb,UAAU,KAAK;AAAA,MACf,WAAW,KAAK;AAAA,MAChB,UAAU,OAAO,KAAK,WAAW;AAAA,IACnC,CAAC;AAAA,EACH,CAAC;AAEH,SAAO;AACT;AAEO,MAAM,MAAM,kBAAkB;",
  "names": []
  }
@@ -24,14 +24,14 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
  var file_provider_exports = {};
  __export(file_provider_exports, {
+ appPath: () => appPath,
  appRootDir: () => appRootDir,
  checkDir: () => checkDir,
  cwdPath: () => cwdPath,
- localPath: () => localPath,
  moveFile: () => moveFile,
  readFile: () => readFile,
+ readFileAsJSON: () => readFileAsJSON,
  readFiles: () => readFiles,
- readJsonFile: () => readJsonFile,
  removeFile: () => removeFile,
  writeFile: () => writeFile
  });
@@ -40,17 +40,14 @@ var import_fs = __toESM(require("fs"));
  var import_os = require("os");
  var import_path = __toESM(require("path"));
  var import_util = require("../util");
+ var import_csv = require("../util/csv.formatter");
+ var import_json = require("../util/json.formatter");
+ var import_logger = require("../util/logger");
+ var import_xml = require("../util/xml.formatter");
  const userHomeDir = (0, import_os.homedir)();
  const appRootDir = process.env.CONTAINER_CONTEXT === "true" ? process.cwd() : import_path.default.join(userHomeDir, ".contensis/");
- const readJsonFile = (filePath) => {
- const directoryPath = cwdPath(filePath);
- const file = readFile(directoryPath);
- if (file)
- return (0, import_util.tryParse)(file);
- return void 0;
- };
  const readFile = (filePath) => {
- const directoryPath = localPath(filePath);
+ const directoryPath = appPath(filePath);
  if (import_fs.default.existsSync(directoryPath)) {
  const file = import_fs.default.readFileSync(directoryPath, "utf8");
  return file;
@@ -59,7 +56,7 @@ const readFile = (filePath) => {
  }
  };
  const readFiles = (directory, createDirectory = true) => {
- const directoryPath = localPath(directory);
+ const directoryPath = appPath(directory);
  if (import_fs.default.existsSync(directoryPath)) {
  const files = import_fs.default.readdirSync(directoryPath);
  return files;
@@ -71,11 +68,11 @@ const readFiles = (directory, createDirectory = true) => {
  }
  };
  const writeFile = (filePath, content) => {
- const directoryPath = localPath(filePath);
+ const directoryPath = appPath(filePath);
  import_fs.default.writeFileSync(directoryPath, content, { encoding: "utf-8" });
  };
  const removeFile = (filePath) => {
- const directoryPath = localPath(filePath);
+ const directoryPath = appPath(filePath);
  if (import_fs.default.existsSync(directoryPath)) {
  import_fs.default.rmSync(directoryPath);
  }
@@ -101,22 +98,60 @@ File does not exist!`
  }
  };
  const checkDir = (filePath) => {
- const directoryPath = import_path.default.dirname(localPath(filePath));
+ const directoryPath = import_path.default.dirname(appPath(filePath));
  if (!import_fs.default.existsSync(directoryPath))
  import_fs.default.mkdirSync(directoryPath, { recursive: true });
  };
- const localPath = (filePath) => import_path.default.isAbsolute(filePath) ? filePath : import_path.default.join(appRootDir, filePath);
+ const appPath = (filePath) => import_path.default.isAbsolute(filePath) ? filePath : import_path.default.join(appRootDir, filePath);
  const cwdPath = (filePath) => import_path.default.isAbsolute(filePath) ? filePath : import_path.default.join(process.cwd(), filePath);
+ const detectFileType = (fromFile) => {
+ try {
+ const fileData = readFile(fromFile);
+ if (fileData) {
+ if (fileData.startsWith("<"))
+ return { contents: fileData, type: "xml" };
+ const jsonData = (0, import_util.tryParse)(fileData);
+ if (jsonData)
+ return { contents: jsonData, type: "json" };
+ const csv = (0, import_csv.detectCsv)(fileData);
+ if (csv)
+ return { contents: fileData, type: "csv" };
+ }
+ } catch (ex) {
+ import_logger.Logger.error(`Problem detecting file type ${fromFile}`, ex);
+ }
+ };
+ const readFileAsJSON = async (fromFile) => {
+ const detectedFile = detectFileType(cwdPath(fromFile));
+ if (!detectedFile)
+ return void 0;
+ try {
+ switch (detectedFile.type) {
+ case "csv": {
+ const flatJson = (0, import_csv.csvToJson)(detectedFile.contents);
+ const unflattenedJson = flatJson.map((record) => (0, import_json.unflattenObject)(record));
+ return unflattenedJson;
+ }
+ case "xml":
+ return await (0, import_xml.xmlToJson)(detectedFile.contents);
+ case "json":
+ default:
+ return detectedFile.contents;
+ }
+ } catch (ex) {
+ import_logger.Logger.error(`Problem converting file from ${detectedFile.type}`, ex);
+ }
+ };
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
+ appPath,
  appRootDir,
  checkDir,
  cwdPath,
- localPath,
  moveFile,
  readFile,
+ readFileAsJSON,
  readFiles,
- readJsonFile,
  removeFile,
  writeFile
  });
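In short, the synchronous readJsonFile helper is replaced by an async readFileAsJSON that sniffs the input file and converts CSV or XML data to JSON before import. A condensed sketch of that flow, using the same helpers imported above (error logging omitted; not the exact shipped code):

    const readFileAsJSON = async <T = any>(fromFile: string): Promise<T | undefined> => {
      const raw = readFile(cwdPath(fromFile));
      if (!raw) return undefined;
      // Detection order matches detectFileType above: XML, then JSON, then CSV
      if (raw.startsWith('<')) return (await xmlToJson(raw)) as T;
      const json = tryParse(raw);
      if (json) return json as T;
      if (detectCsv(raw))
        // CSV rows come back flat, so dotted columns are unflattened into nested objects
        return csvToJson(raw).map(record => unflattenObject(record)) as T;
      return undefined;
    };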
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/providers/file-provider.ts"],
- "sourcesContent": ["import fs from 'fs';\nimport { homedir } from 'os';\nimport path from 'path';\nimport { tryParse } from '~/util';\n\nconst userHomeDir = homedir();\n\nexport const appRootDir =\n process.env.CONTAINER_CONTEXT === 'true'\n ? process.cwd()\n : path.join(userHomeDir, '.contensis/');\n\nexport const readJsonFile = <T>(filePath: string) => {\n const directoryPath = cwdPath(filePath);\n const file = readFile(directoryPath);\n if (file) return tryParse(file) as T | string;\n return undefined;\n};\nexport const readFile = (filePath: string) => {\n const directoryPath = localPath(filePath);\n if (fs.existsSync(directoryPath)) {\n const file = fs.readFileSync(directoryPath, 'utf8');\n return file;\n } else {\n return undefined;\n }\n};\n\nexport const readFiles = (directory: string, createDirectory = true) => {\n const directoryPath = localPath(directory);\n if (fs.existsSync(directoryPath)) {\n const files = fs.readdirSync(directoryPath);\n return files;\n } else if (createDirectory) {\n fs.mkdirSync(directoryPath, { recursive: true });\n return [];\n } else {\n throw new Error(`ENOENT: Directory does not exist ${directoryPath}`);\n // return undefined;\n }\n};\n\nexport const writeFile = (filePath: string, content: string) => {\n const directoryPath = localPath(filePath);\n fs.writeFileSync(directoryPath, content, { encoding: 'utf-8' });\n};\n\nexport const removeFile = (filePath: string) => {\n const directoryPath = localPath(filePath);\n if (fs.existsSync(directoryPath)) {\n fs.rmSync(directoryPath);\n }\n};\n\nexport const moveFile = (file: string, fromPath: string, toPath: string) => {\n const from = path.join(appRootDir, `${fromPath}${file}`);\n const to = path.join(appRootDir, `${toPath}${file}`);\n if (fs.existsSync(from)) {\n checkDir(toPath);\n // if (!fs.existsSync(toPath)) fs.mkdirSync(toPath, { recursive: true });\n\n fs.rename(from, to, err => {\n if (err)\n console.error(\n `Could not rename file \"${file}\" from: ${fromPath} to: ${toPath}`,\n err\n );\n console.info(`Renamed file \"${file}\" from: ${fromPath} to: ${toPath}`);\n });\n } else {\n console.error(\n `Could not rename file \"${file}\" from: ${fromPath} to: ${toPath}\\nFile does not exist!`\n );\n }\n};\n\nexport const checkDir = (filePath: string) => {\n const directoryPath = path.dirname(localPath(filePath));\n if (!fs.existsSync(directoryPath))\n fs.mkdirSync(directoryPath, { recursive: true });\n};\n\nexport const localPath = (filePath: string) =>\n path.isAbsolute(filePath) ? filePath : path.join(appRootDir, filePath);\n\nexport const cwdPath = (filePath: string) =>\n path.isAbsolute(filePath) ? filePath : path.join(process.cwd(), filePath);\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAe;AACf,gBAAwB;AACxB,kBAAiB;AACjB,kBAAyB;AAEzB,MAAM,kBAAc,mBAAQ;AAErB,MAAM,aACX,QAAQ,IAAI,sBAAsB,SAC9B,QAAQ,IAAI,IACZ,YAAAA,QAAK,KAAK,aAAa,aAAa;AAEnC,MAAM,eAAe,CAAI,aAAqB;AACnD,QAAM,gBAAgB,QAAQ,QAAQ;AACtC,QAAM,OAAO,SAAS,aAAa;AACnC,MAAI;AAAM,eAAO,sBAAS,IAAI;AAC9B,SAAO;AACT;AACO,MAAM,WAAW,CAAC,aAAqB;AAC5C,QAAM,gBAAgB,UAAU,QAAQ;AACxC,MAAI,UAAAC,QAAG,WAAW,aAAa,GAAG;AAChC,UAAM,OAAO,UAAAA,QAAG,aAAa,eAAe,MAAM;AAClD,WAAO;AAAA,EACT,OAAO;AACL,WAAO;AAAA,EACT;AACF;AAEO,MAAM,YAAY,CAAC,WAAmB,kBAAkB,SAAS;AACtE,QAAM,gBAAgB,UAAU,SAAS;AACzC,MAAI,UAAAA,QAAG,WAAW,aAAa,GAAG;AAChC,UAAM,QAAQ,UAAAA,QAAG,YAAY,aAAa;AAC1C,WAAO;AAAA,EACT,WAAW,iBAAiB;AAC1B,cAAAA,QAAG,UAAU,eAAe,EAAE,WAAW,KAAK,CAAC;AAC/C,WAAO,CAAC;AAAA,EACV,OAAO;AACL,UAAM,IAAI,MAAM,oCAAoC,eAAe;AAAA,EAErE;AACF;AAEO,MAAM,YAAY,CAAC,UAAkB,YAAoB;AAC9D,QAAM,gBAAgB,UAAU,QAAQ;AACxC,YAAAA,QAAG,cAAc,eAAe,SAAS,EAAE,UAAU,QAAQ,CAAC;AAChE;AAEO,MAAM,aAAa,CAAC,aAAqB;AAC9C,QAAM,gBAAgB,UAAU,QAAQ;AACxC,MAAI,UAAAA,QAAG,WAAW,aAAa,GAAG;AAChC,cAAAA,QAAG,OAAO,aAAa;AAAA,EACzB;AACF;AAEO,MAAM,WAAW,CAAC,MAAc,UAAkB,WAAmB;AAC1E,QAAM,OAAO,YAAAD,QAAK,KAAK,YAAY,GAAG,WAAW,MAAM;AACvD,QAAM,KAAK,YAAAA,QAAK,KAAK,YAAY,GAAG,SAAS,MAAM;AACnD,MAAI,UAAAC,QAAG,WAAW,IAAI,GAAG;AACvB,aAAS,MAAM;AAGf,cAAAA,QAAG,OAAO,MAAM,IAAI,SAAO;AACzB,UAAI;AACF,gBAAQ;AAAA,UACN,0BAA0B,eAAe,gBAAgB;AAAA,UACzD;AAAA,QACF;AACF,cAAQ,KAAK,iBAAiB,eAAe,gBAAgB,QAAQ;AAAA,IACvE,CAAC;AAAA,EACH,OAAO;AACL,YAAQ;AAAA,MACN,0BAA0B,eAAe,gBAAgB;AAAA;AAAA,IAC3D;AAAA,EACF;AACF;AAEO,MAAM,WAAW,CAAC,aAAqB;AAC5C,QAAM,gBAAgB,YAAAD,QAAK,QAAQ,UAAU,QAAQ,CAAC;AACtD,MAAI,CAAC,UAAAC,QAAG,WAAW,aAAa;AAC9B,cAAAA,QAAG,UAAU,eAAe,EAAE,WAAW,KAAK,CAAC;AACnD;AAEO,MAAM,YAAY,CAAC,aACxB,YAAAD,QAAK,WAAW,QAAQ,IAAI,WAAW,YAAAA,QAAK,KAAK,YAAY,QAAQ;AAEhE,MAAM,UAAU,CAAC,aACtB,YAAAA,QAAK,WAAW,QAAQ,IAAI,WAAW,YAAAA,QAAK,KAAK,QAAQ,IAAI,GAAG,QAAQ;",
+ "sourcesContent": ["import fs from 'fs';\nimport { homedir } from 'os';\nimport path from 'path';\nimport { tryParse } from '~/util';\nimport { csvToJson, detectCsv } from '~/util/csv.formatter';\nimport { unflattenObject } from '~/util/json.formatter';\nimport { Logger } from '~/util/logger';\nimport { xmlToJson } from '~/util/xml.formatter';\n\nconst userHomeDir = homedir();\n\nexport const appRootDir =\n process.env.CONTAINER_CONTEXT === 'true'\n ? process.cwd()\n : path.join(userHomeDir, '.contensis/');\n\nexport const readFile = (filePath: string) => {\n const directoryPath = appPath(filePath);\n if (fs.existsSync(directoryPath)) {\n const file = fs.readFileSync(directoryPath, 'utf8');\n return file;\n } else {\n return undefined;\n }\n};\n\nexport const readFiles = (directory: string, createDirectory = true) => {\n const directoryPath = appPath(directory);\n if (fs.existsSync(directoryPath)) {\n const files = fs.readdirSync(directoryPath);\n return files;\n } else if (createDirectory) {\n fs.mkdirSync(directoryPath, { recursive: true });\n return [];\n } else {\n throw new Error(`ENOENT: Directory does not exist ${directoryPath}`);\n // return undefined;\n }\n};\n\nexport const writeFile = (filePath: string, content: string) => {\n const directoryPath = appPath(filePath);\n fs.writeFileSync(directoryPath, content, { encoding: 'utf-8' });\n};\n\nexport const removeFile = (filePath: string) => {\n const directoryPath = appPath(filePath);\n if (fs.existsSync(directoryPath)) {\n fs.rmSync(directoryPath);\n }\n};\n\nexport const moveFile = (file: string, fromPath: string, toPath: string) => {\n const from = path.join(appRootDir, `${fromPath}${file}`);\n const to = path.join(appRootDir, `${toPath}${file}`);\n if (fs.existsSync(from)) {\n checkDir(toPath);\n // if (!fs.existsSync(toPath)) fs.mkdirSync(toPath, { recursive: true });\n\n fs.rename(from, to, err => {\n if (err)\n console.error(\n `Could not rename file \"${file}\" from: ${fromPath} to: ${toPath}`,\n err\n );\n console.info(`Renamed file \"${file}\" from: ${fromPath} to: ${toPath}`);\n });\n } else {\n console.error(\n `Could not rename file \"${file}\" from: ${fromPath} to: ${toPath}\\nFile does not exist!`\n );\n }\n};\n\nexport const checkDir = (filePath: string) => {\n const directoryPath = path.dirname(appPath(filePath));\n if (!fs.existsSync(directoryPath))\n fs.mkdirSync(directoryPath, { recursive: true });\n};\n\nexport const appPath = (filePath: string) =>\n path.isAbsolute(filePath) ? filePath : path.join(appRootDir, filePath);\n\nexport const cwdPath = (filePath: string) =>\n path.isAbsolute(filePath) ? 
filePath : path.join(process.cwd(), filePath);\n\ntype DetectedFileType =\n | { type: 'json'; contents: any }\n | { type: 'xml' | 'csv'; contents: string };\n\nconst detectFileType = (\n fromFile: string\n): DetectedFileType | undefined => {\n try {\n const fileData = readFile(fromFile);\n if (fileData) {\n // if XML\n if (fileData.startsWith('<')) return { contents: fileData, type: 'xml' };\n\n // if JSON\n const jsonData = tryParse(fileData);\n if (jsonData) return { contents: jsonData, type: 'json' };\n\n // if CSV\n const csv = detectCsv(fileData);\n if (csv) return { contents: fileData, type: 'csv' };\n }\n } catch (ex) {\n Logger.error(`Problem detecting file type ${fromFile}`, ex);\n }\n};\n\nexport const readFileAsJSON = async <T = any>(\n fromFile: string\n): Promise<T | undefined> => {\n const detectedFile = detectFileType(cwdPath(fromFile));\n if (!detectedFile) return undefined;\n try {\n switch (detectedFile.type) {\n case 'csv': {\n const flatJson = csvToJson(detectedFile.contents);\n const unflattenedJson = flatJson.map(record => unflattenObject(record));\n return unflattenedJson as T;\n }\n case 'xml':\n return (await xmlToJson(detectedFile.contents)) as T;\n\n case 'json':\n default:\n return detectedFile.contents;\n }\n } catch (ex) {\n Logger.error(`Problem converting file from ${detectedFile.type}`, ex);\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAe;AACf,gBAAwB;AACxB,kBAAiB;AACjB,kBAAyB;AACzB,iBAAqC;AACrC,kBAAgC;AAChC,oBAAuB;AACvB,iBAA0B;AAE1B,MAAM,kBAAc,mBAAQ;AAErB,MAAM,aACX,QAAQ,IAAI,sBAAsB,SAC9B,QAAQ,IAAI,IACZ,YAAAA,QAAK,KAAK,aAAa,aAAa;AAEnC,MAAM,WAAW,CAAC,aAAqB;AAC5C,QAAM,gBAAgB,QAAQ,QAAQ;AACtC,MAAI,UAAAC,QAAG,WAAW,aAAa,GAAG;AAChC,UAAM,OAAO,UAAAA,QAAG,aAAa,eAAe,MAAM;AAClD,WAAO;AAAA,EACT,OAAO;AACL,WAAO;AAAA,EACT;AACF;AAEO,MAAM,YAAY,CAAC,WAAmB,kBAAkB,SAAS;AACtE,QAAM,gBAAgB,QAAQ,SAAS;AACvC,MAAI,UAAAA,QAAG,WAAW,aAAa,GAAG;AAChC,UAAM,QAAQ,UAAAA,QAAG,YAAY,aAAa;AAC1C,WAAO;AAAA,EACT,WAAW,iBAAiB;AAC1B,cAAAA,QAAG,UAAU,eAAe,EAAE,WAAW,KAAK,CAAC;AAC/C,WAAO,CAAC;AAAA,EACV,OAAO;AACL,UAAM,IAAI,MAAM,oCAAoC,eAAe;AAAA,EAErE;AACF;AAEO,MAAM,YAAY,CAAC,UAAkB,YAAoB;AAC9D,QAAM,gBAAgB,QAAQ,QAAQ;AACtC,YAAAA,QAAG,cAAc,eAAe,SAAS,EAAE,UAAU,QAAQ,CAAC;AAChE;AAEO,MAAM,aAAa,CAAC,aAAqB;AAC9C,QAAM,gBAAgB,QAAQ,QAAQ;AACtC,MAAI,UAAAA,QAAG,WAAW,aAAa,GAAG;AAChC,cAAAA,QAAG,OAAO,aAAa;AAAA,EACzB;AACF;AAEO,MAAM,WAAW,CAAC,MAAc,UAAkB,WAAmB;AAC1E,QAAM,OAAO,YAAAD,QAAK,KAAK,YAAY,GAAG,WAAW,MAAM;AACvD,QAAM,KAAK,YAAAA,QAAK,KAAK,YAAY,GAAG,SAAS,MAAM;AACnD,MAAI,UAAAC,QAAG,WAAW,IAAI,GAAG;AACvB,aAAS,MAAM;AAGf,cAAAA,QAAG,OAAO,MAAM,IAAI,SAAO;AACzB,UAAI;AACF,gBAAQ;AAAA,UACN,0BAA0B,eAAe,gBAAgB;AAAA,UACzD;AAAA,QACF;AACF,cAAQ,KAAK,iBAAiB,eAAe,gBAAgB,QAAQ;AAAA,IACvE,CAAC;AAAA,EACH,OAAO;AACL,YAAQ;AAAA,MACN,0BAA0B,eAAe,gBAAgB;AAAA;AAAA,IAC3D;AAAA,EACF;AACF;AAEO,MAAM,WAAW,CAAC,aAAqB;AAC5C,QAAM,gBAAgB,YAAAD,QAAK,QAAQ,QAAQ,QAAQ,CAAC;AACpD,MAAI,CAAC,UAAAC,QAAG,WAAW,aAAa;AAC9B,cAAAA,QAAG,UAAU,eAAe,EAAE,WAAW,KAAK,CAAC;AACnD;AAEO,MAAM,UAAU,CAAC,aACtB,YAAAD,QAAK,WAAW,QAAQ,IAAI,WAAW,YAAAA,QAAK,KAAK,YAAY,QAAQ;AAEhE,MAAM,UAAU,CAAC,aACtB,YAAAA,QAAK,WAAW,QAAQ,IAAI,WAAW,YAAAA,QAAK,KAAK,QAAQ,IAAI,GAAG,QAAQ;AAM1E,MAAM,iBAAiB,CACrB,aACiC;AACjC,MAAI;AACF,UAAM,WAAW,SAAS,QAAQ;AAClC,QAAI,UAAU;AAEZ,UAAI,SAAS,WAAW,GAAG;AAAG,eAAO,EAAE,UAAU,UAAU,MAAM,MAAM;AAGvE,YAAM,eAAW,sBAAS,QAAQ;AAClC,UAAI;AAAU,eAAO,EAAE,UAAU,UAAU,MAAM,OAAO;AAGxD,YAAM,UAAM,sBAAU,QAAQ;AAC9B,UAAI;AAAK,eAAO,EAAE,UAAU,UAAU,MAAM,MAAM;AAAA,IACpD;AAAA,EACF,SAAS,IAAP;AACA,yBAAO,MAAM,+BAA+B,YAAY,EAAE;AAAA,EAC5D;AACF;AAEO,MAAM,iBAAiB,OAC5B,aAC2B;AAC3B,QAAM,eAAe,eAAe,QAAQ,QAAQ,CAAC;AACrD,MAAI,CAAC;AAAc,WAAO;AAC1B,MAAI;AACF,YAAQ,aAAa;AAAA,WACd,OAAO;AACV,cAAM,eAAW,sBAAU,aAAa,QAAQ;AAChD,cAAM,kBAAkB,SAAS,IAAI,gBAAU,6BAAgB,MAAM,CAAC;AACtE,eAAO;AAAA,MACT;AAAA,WACK;AACH,eAAQ,UAAM,sBAAU,aAAa,QAAQ;AAAA,WAE1C;AAAA;AAEH,eAAO,aAAa;AAAA;AAAA,EAE1B,SAAS,IAAP;AACA,yBAAO,MAAM,gCAAgC,aAAa,QAAQ,EAAE;AAAA,EACtE;AACF;",
  "names": ["path", "fs"]
  }
@@ -235,7 +235,7 @@ class ContensisCli {
  }) => {
  var _a, _b, _c, _d, _e, _f;
  const source = fromFile ? "file" : "contensis";
- const fileData = fromFile ? (0, import_file_provider.readJsonFile)(fromFile) || [] : [];
+ const fileData = fromFile ? await (0, import_file_provider.readFileAsJSON)(fromFile) || [] : [];
  if (typeof fileData === "string")
  throw new Error(`Import file format must be of type JSON`);
  const { contensisOpts, currentEnv, env, log, messages, sourceAlias } = this;
@@ -986,7 +986,7 @@ class ContensisCli {
  fromFile
  }) => {
  const { currentProject, log, messages } = this;
- const fileData = fromFile ? (0, import_file_provider.readJsonFile)(fromFile) || [] : [];
+ const fileData = fromFile ? await (0, import_file_provider.readFileAsJSON)(fromFile) || [] : [];
  if (typeof fileData === "string")
  throw new Error(`Import file format must be of type JSON`);
  const contensis = await this.ConnectContensisImport({
@@ -1119,7 +1119,7 @@ Components:`));
  fromFile
  }, contentTypeIds = []) => {
  const { currentProject, log, messages } = this;
- let fileData = fromFile ? (0, import_file_provider.readJsonFile)(fromFile) || [] : [];
+ let fileData = fromFile ? await (0, import_file_provider.readFileAsJSON)(fromFile) || [] : [];
  if (typeof fileData === "string")
  throw new Error(`Import file format must be of type JSON`);
  if (!Array.isArray(fileData))
@@ -1153,7 +1153,7 @@ Components:`));
  fromFile
  }, modelIds = []) => {
  const { log } = this;
- let fileData = fromFile ? (0, import_file_provider.readJsonFile)(fromFile) || [] : [];
+ let fileData = fromFile ? await (0, import_file_provider.readFileAsJSON)(fromFile) || [] : [];
  if (typeof fileData === "string")
  throw new Error(`Import file format must be of type JSON`);
  if (!Array.isArray(fileData))
@@ -1268,7 +1268,7 @@ Components:`));
  fromFile
  }, componentIds = []) => {
  const { currentProject, log, messages } = this;
- let fileData = fromFile ? (0, import_file_provider.readJsonFile)(fromFile) || [] : [];
+ let fileData = fromFile ? await (0, import_file_provider.readFileAsJSON)(fromFile) || [] : [];
  if (typeof fileData === "string")
  throw new Error(`Import file format must be of type JSON`);
  if (!Array.isArray(fileData))
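Each of the import call sites above now awaits readFileAsJSON (the old readJsonFile was synchronous, so no await was needed before). The shared pattern, shown here with the bundler's (0, import_file_provider.x) indirection removed for clarity:

    const fileData = fromFile ? (await readFileAsJSON(fromFile)) || [] : [];
    if (typeof fileData === 'string')
      throw new Error(`Import file format must be of type JSON`);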