@diplodoc/cli 4.1.0 → 4.2.1

This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/build/app.client.css +2678 -41
  2. package/build/app.client.js +1 -1
  3. package/build/index.js +376 -267
  4. package/build/index.js.map +3 -3
  5. package/build/linter.js +54 -46
  6. package/build/linter.js.map +2 -2
  7. package/package.json +12 -11
  8. package/src/cmd/build/index.ts +29 -15
  9. package/src/cmd/publish/index.ts +2 -7
  10. package/src/cmd/publish/upload.ts +24 -18
  11. package/src/cmd/translate/index.ts +55 -41
  12. package/src/cmd/xliff/compose.ts +17 -8
  13. package/src/cmd/xliff/extract.ts +20 -11
  14. package/src/cmd/xliff/index.ts +4 -1
  15. package/src/constants.ts +4 -4
  16. package/src/index.ts +3 -3
  17. package/src/models.ts +13 -8
  18. package/src/resolvers/lintPage.ts +2 -8
  19. package/src/resolvers/md2html.ts +26 -18
  20. package/src/resolvers/md2md.ts +15 -15
  21. package/src/services/authors.ts +4 -2
  22. package/src/services/contributors.ts +26 -12
  23. package/src/services/includers/batteries/common.ts +14 -2
  24. package/src/services/includers/batteries/generic.ts +29 -11
  25. package/src/services/includers/batteries/sourcedocs.ts +4 -1
  26. package/src/services/includers/batteries/unarchive.ts +11 -7
  27. package/src/services/includers/index.ts +7 -5
  28. package/src/services/leading.ts +23 -26
  29. package/src/services/metadata.ts +37 -12
  30. package/src/services/plugins.ts +2 -2
  31. package/src/services/preset.ts +2 -2
  32. package/src/services/tocs.ts +31 -26
  33. package/src/services/utils.ts +13 -3
  34. package/src/steps/processAssets.ts +2 -8
  35. package/src/steps/processExcludedFiles.ts +12 -21
  36. package/src/steps/processLinter.ts +11 -10
  37. package/src/steps/processLogs.ts +1 -6
  38. package/src/steps/processMapFile.ts +8 -11
  39. package/src/steps/processPages.ts +102 -72
  40. package/src/steps/processServiceFiles.ts +4 -2
  41. package/src/steps/publishFilesToS3.ts +17 -13
  42. package/src/utils/file.ts +5 -1
  43. package/src/utils/glob.ts +1 -3
  44. package/src/utils/markup.ts +30 -15
  45. package/src/utils/singlePage.ts +11 -10
  46. package/src/utils/toc.ts +21 -8
  47. package/src/utils/worker.ts +1 -1
  48. package/src/validator.ts +25 -17
  49. package/src/vcs-connector/client/github.ts +17 -9
  50. package/src/vcs-connector/connector-validator.ts +12 -6
  51. package/src/vcs-connector/github.ts +38 -11
  52. package/src/vcs-connector/index.ts +2 -2
  53. package/src/workers/linter/index.ts +1 -5
  54. package/CHANGELOG.md +0 -793
package/build/index.js CHANGED
@@ -208,36 +208,39 @@ function requiredValueValidator(value) {
  return Boolean(value);
  }
  var validators = {
- "storageEndpoint": {
+ storageEndpoint: {
  errorMessage: "Endpoint of S3 storage must be provided when publishes.",
  validateFn: notEmptyStringValidator
  },
- "storageBucket": {
+ storageBucket: {
  errorMessage: "Bucket name of S3 storage must be provided when publishes.",
  validateFn: notEmptyStringValidator
  },
- "storageKeyId": {
+ storageKeyId: {
  errorMessage: "Key Id of S3 storage must be provided when publishes.",
  validateFn: notEmptyStringValidator,
  defaultValue: process.env.YFM_STORAGE_KEY_ID
  },
- "storageSecretKey": {
+ storageSecretKey: {
  errorMessage: "Secret key of S3 storage must be provided when publishes.",
  validateFn: notEmptyStringValidator,
  defaultValue: process.env.YFM_STORAGE_SECRET_KEY
  },
- "storageRegion": {
+ storageRegion: {
  errorMessage: "Region of S3 storage must be provided when publishes.",
  validateFn: notEmptyStringValidator,
  defaultValue: "eu-central-1"
  }
  };
  function validateRedirects(redirectsConfig, pathToRedirects) {
- const redirects = Object.keys(redirectsConfig).reduce((res, redirectSectionName) => {
- const sectionRedirects = redirectsConfig[redirectSectionName];
- res.push(...sectionRedirects);
- return res;
- }, []);
+ const redirects = Object.keys(redirectsConfig).reduce(
+ (res, redirectSectionName) => {
+ const sectionRedirects = redirectsConfig[redirectSectionName];
+ res.push(...sectionRedirects);
+ return res;
+ },
+ []
+ );
  const getContext = (from, to) => ` [Context:
  - from: ${from}
  - to: ${to} ]`
@@ -245,10 +248,14 @@ function validateRedirects(redirectsConfig, pathToRedirects) {
  redirects.forEach((redirect) => {
  const { from, to } = redirect;
  if (!from || !to) {
- throw new Error(formatMessage("One of the two parameters is missing", pathToRedirects, from, to));
+ throw new Error(
+ formatMessage("One of the two parameters is missing", pathToRedirects, from, to)
+ );
  }
  if (from === to) {
- throw new Error(formatMessage("Parameters must be different", pathToRedirects, from, to));
+ throw new Error(
+ formatMessage("Parameters must be different", pathToRedirects, from, to)
+ );
  }
  });
  }
@@ -379,7 +386,9 @@ function isExternalHref(href) {
  var HEADERS_SELECTOR = "h1, h2, h3, h4, h5, h6";
  function getNewNode(options) {
  const { rawTagName, innerHTML, attrEntries } = options;
- const nodeNew = (0, import_node_html_parser.parse)(`<html><${rawTagName}></${rawTagName}></html>`).querySelector(`${rawTagName}`);
+ const nodeNew = (0, import_node_html_parser.parse)(`<html><${rawTagName}></${rawTagName}></html>`).querySelector(
+ `${rawTagName}`
+ );
  if (!nodeNew) {
  return null;
  }
@@ -473,7 +482,9 @@ function addPagePrefixToAnchors(rootEl, options) {
  });
  const mainHeader = rootEl.querySelector("h1");
  if (mainHeader) {
- const anchor = (0, import_node_html_parser.parse)(`<a class="yfm-anchor" aria-hidden="true" href="${pageIdAnchor}" id="${pageId}"></a>`);
+ const anchor = (0, import_node_html_parser.parse)(
+ `<a class="yfm-anchor" aria-hidden="true" href="${pageIdAnchor}" id="${pageId}"></a>`
+ );
  if (!anchor) {
  return;
  }
@@ -560,7 +571,9 @@ function generateStaticMarkup(props, pathToBundle) {
  window.STATIC_CONTENT = ${staticContent}
  window.__DATA__ = ${JSON.stringify(props)};
  </script>
- <script type="application/javascript" src="${import_client2.default.bundle.js(pathToBundle)}"></script>
+ <script type="application/javascript" src="${import_client2.default.bundle.js(
+ pathToBundle
+ )}"></script>
  </body>
  </html>
  `;
@@ -579,7 +592,9 @@ function getMetadata(metadata) {
  if (!metadata) {
  return "";
  }
- const metaEntries = Object.entries(metadata).filter(([key]) => !Object.keys(ResourceType).includes(key));
+ const metaEntries = Object.entries(metadata).filter(
+ ([key]) => !Object.keys(ResourceType).includes(key)
+ );
  return metaEntries.map(([name4, content]) => {
  return `<meta name="${name4}" content="${content}">`;
  }).join("\n");
@@ -587,21 +602,23 @@ function getMetadata(metadata) {
  function getResources({ style, script }) {
  const resourcesTags = [];
  if (style) {
- style.forEach((el, id) => resourcesTags.push(
- `<link rel="stylesheet" type="text/css" href="${el}" ${id === 0 && `id="${CUSTOM_STYLE}"`}>`
- ));
+ style.forEach(
+ (el, id) => resourcesTags.push(
+ `<link rel="stylesheet" type="text/css" href="${el}" ${id === 0 && `id="${CUSTOM_STYLE}"`}>`
+ )
+ );
  }
  if (script) {
- script.forEach((el) => resourcesTags.push(
- `<script src="${el}"></script>`
- ));
+ script.forEach((el) => resourcesTags.push(`<script src="${el}"></script>`));
  }
  return resourcesTags.join("\n");
  }
  var \u0441arriage = import_process.platform === "win32" /* WINDOWS */ ? "\r\n" : "\n";
  function joinSinglePageResults(singlePageResults2, root, tocDir) {
  const delimeter = `${\u0441arriage}${\u0441arriage}<hr class="yfm-page__delimeter">${\u0441arriage}${\u0441arriage}`;
- return singlePageResults2.filter(({ content }) => content).map(({ content, path, title }) => preprocessPageHtmlForSinglePage(content, { root, path, tocDir, title })).join(delimeter);
+ return singlePageResults2.filter(({ content }) => content).map(
+ ({ content, path, title }) => preprocessPageHtmlForSinglePage(content, { root, path, tocDir, title })
+ ).join(delimeter);
  }
  function replaceDoubleToSingleQuotes(str) {
  return str.replace(/"/g, "'");
@@ -726,9 +743,14 @@ function transformToc(toc, pathToFileDirectory) {
  }
  const localToc = JSON.parse(JSON.stringify(toc));
  if (localToc.items) {
- localToc.items = filterFiles(localToc.items, "items", {}, {
- removeHiddenTocItems: true
- });
+ localToc.items = filterFiles(
+ localToc.items,
+ "items",
+ {},
+ {
+ removeHiddenTocItems: true
+ }
+ );
  }
  const baseTocPath = localToc.base || "";
  const navigationItemQueue = [localToc];
@@ -761,9 +783,14 @@ function transformTocForSinglePage(toc, options) {
  }
  const localToc = JSON.parse(JSON.stringify(toc));
  if (localToc.items) {
- localToc.items = filterFiles(localToc.items, "items", {}, {
- removeHiddenTocItems: true
- });
+ localToc.items = filterFiles(
+ localToc.items,
+ "items",
+ {},
+ {
+ removeHiddenTocItems: true
+ }
+ );
  }
  function processItems(items) {
  items.forEach((item) => {
@@ -797,11 +824,7 @@ function getVarsPerRelativeFile(filePath) {
  var import_glob = __toESM(require("glob"));
  var glob = (pattern, options) => __async(void 0, null, function* () {
  return new Promise((res, rej) => {
- const state = (0, import_glob.default)(
- pattern,
- options,
- (err) => err ? rej(err) : res({ state })
- );
+ const state = (0, import_glob.default)(pattern, options, (err) => err ? rej(err) : res({ state }));
  });
  });

@@ -884,7 +907,9 @@ function getFileContributorsString(fileData, vcsConnector) {
  vcsConnector.addNestedContributorsForPath(relativeFilePath, nestedContributors);
  }
  const fileContributorsWithContributorsIncludedFiles = __spreadValues(__spreadValues({}, fileContributors.contributors), nestedContributors);
- const contributorsArray = Object.entries(fileContributorsWithContributorsIncludedFiles).map(([, contributor]) => contributor);
+ const contributorsArray = Object.entries(
+ fileContributorsWithContributorsIncludedFiles
+ ).map(([, contributor]) => contributor);
  return replaceDoubleToSingleQuotes(JSON.stringify(contributorsArray));
  });
  }
@@ -896,7 +921,10 @@ function getContributorsForNestedFiles(fileData, vcsConnector) {
  return {};
  }
  const includesContributors = [];
- const relativeIncludeFilePaths = getRelativeIncludeFilePaths(fileData, includeContents);
+ const relativeIncludeFilePaths = getRelativeIncludeFilePaths(
+ fileData,
+ includeContents
+ );
  for (const relativeIncludeFilePath of relativeIncludeFilePaths.values()) {
  const relativeFilePath = relativeIncludeFilePath.substring(inputFolderPathLength);
  const includeContributors = yield vcsConnector.getContributorsByPath(relativeFilePath);
@@ -931,7 +959,10 @@ function getRelativeIncludeFilePaths(fileData, includeContents) {
  const relativeIncludeFilePath = includeContent.match(REGEXP_INCLUDE_FILE_PATH);
  if (relativeIncludeFilePath && relativeIncludeFilePath.length !== 0) {
  const relativeIncludeFilePathWithoutHash = relativeIncludeFilePath[0].split("#");
- const includeFilePath = (0, import_path8.join)((0, import_path8.dirname)(tmpInputFilePath), relativeIncludeFilePathWithoutHash[0]);
+ const includeFilePath = (0, import_path8.join)(
+ (0, import_path8.dirname)(tmpInputFilePath),
+ relativeIncludeFilePathWithoutHash[0]
+ );
  relativeIncludeFilePaths.add(includeFilePath);
  }
  });
@@ -1009,13 +1040,21 @@ function getContentWithUpdatedDynamicMetadata(fileContent, options) {
  const matchAuthor = fileMetadata.match(REGEXP_AUTHOR);
  if (matchAuthor) {
  const matchedAuthor = matchAuthor[0];
- authorMetadata = yield updateAuthorMetadataStringByAuthorLogin(matchedAuthor, options.vcsConnector);
+ authorMetadata = yield updateAuthorMetadataStringByAuthorLogin(
+ matchedAuthor,
+ options.vcsConnector
+ );
  }
  }
  if (!authorMetadata) {
- const { fileData: { tmpInputFilePath, inputFolderPathLength } } = options;
+ const {
+ fileData: { tmpInputFilePath, inputFolderPathLength }
+ } = options;
  const relativeFilePath = tmpInputFilePath.substring(inputFolderPathLength);
- authorMetadata = yield updateAuthorMetadataStringByFilePath(relativeFilePath, options.vcsConnector);
+ authorMetadata = yield updateAuthorMetadataStringByFilePath(
+ relativeFilePath,
+ options.vcsConnector
+ );
  }
  if (authorMetadata) {
  newMetadatas.push(`author: ${authorMetadata}`);
@@ -1139,7 +1178,14 @@ var MD_GLOB = "**/*.md";
  function includerFunction(params) {
  return __async(this, null, function* () {
  var _a, _b;
- const { readBasePath, writeBasePath, tocPath, item, passedParams: { input, leadingPage }, index } = params;
+ const {
+ readBasePath,
+ writeBasePath,
+ tocPath,
+ item,
+ passedParams: { input, leadingPage },
+ index
+ } = params;
  if (!(input == null ? void 0 : input.length) || !((_a = item.include) == null ? void 0 : _a.path)) {
  throw new GenericIncluderError("provide includer with input parameter", tocPath);
  }
@@ -1149,7 +1195,9 @@ function includerFunction(params) {
  const contentPath = index === 0 ? (0, import_path10.join)(writeBasePath, tocDirPath, input) : (0, import_path10.join)(readBasePath, tocDirPath, input);
  let cache = {};
  let found = [];
- ({ state: { found, cache } } = yield glob(MD_GLOB, {
+ ({
+ state: { found, cache }
+ } = yield glob(MD_GLOB, {
  cwd: contentPath,
  nosort: true,
  nocase: true,
@@ -1181,9 +1229,14 @@ function createGraphFromPaths(paths) {
  continue;
  }
  const file = chunks.pop();
- (0, import_lodash2.updateWith)(graph, chunks, (old) => {
- return old ? { files: [...old.files, file] } : { files: [file] };
- }, Object);
+ (0, import_lodash2.updateWith)(
+ graph,
+ chunks,
+ (old) => {
+ return old ? { files: [...old.files, file] } : { files: [file] };
+ },
+ Object
+ );
  }
  return graph;
  }
@@ -1225,8 +1278,11 @@ var usage = `include:
  `;
  function includerFunction2(params) {
  return __async(this, null, function* () {
- logger.warn(params.tocPath, `sourcedocs inlcuder is getting depricated in favor of generic includer
- ${usage}`);
+ logger.warn(
+ params.tocPath,
+ `sourcedocs inlcuder is getting depricated in favor of generic includer
+ ${usage}`
+ );
  yield generic_default.includerFunction(params);
  });
  }
@@ -1285,7 +1341,13 @@ function pipeline(readPath, writeBasePath) {
  }
  function includerFunction3(params) {
  return __async(this, null, function* () {
- const { readBasePath, writeBasePath, tocPath, passedParams: { input, output }, index } = params;
+ const {
+ readBasePath,
+ writeBasePath,
+ tocPath,
+ passedParams: { input, output },
+ index
+ } = params;
  if (!(input == null ? void 0 : input.length) || !(output == null ? void 0 : output.length)) {
  throw new UnarchiveIncluderError("provide includer with input parameter", tocPath);
  }
@@ -1427,11 +1489,9 @@ function add(path) {
  }
  const combinedVars = __spreadValues(__spreadValues({}, preset_default.get(pathToDir)), vars);
  if (parsedToc.title) {
- parsedToc.title = firstFilterTextItems(
- parsedToc.title,
- combinedVars,
- { resolveConditions: true }
- );
+ parsedToc.title = firstFilterTextItems(parsedToc.title, combinedVars, {
+ resolveConditions: true
+ });
  }
  if (typeof parsedToc.title === "string") {
  parsedToc.title = liquidField(parsedToc.title, combinedVars, path);
@@ -1456,10 +1516,7 @@ function add(path) {
  }
  function processTocItems(path, items, tocDir, sourcesDir, vars) {
  return __async(this, null, function* () {
- const {
- resolveConditions,
- removeHiddenTocItems
- } = argv_default.getConfig();
+ const { resolveConditions, removeHiddenTocItems } = argv_default.getConfig();
  let preparedItems = items;
  if (resolveConditions || removeHiddenTocItems) {
  try {
@@ -1602,9 +1659,19 @@ function _replaceIncludes(path, items, tocDir, sourcesDir, vars) {
  addIncludeTocPath(includeTocPath);
  let includedTocItems = (item.items || []).concat(includeToc.items);
  const baseTocDir = mode === "link" /* LINK */ ? includeTocDir : tocDir;
- includedTocItems = yield processTocItems(path, includedTocItems, baseTocDir, sourcesDir, vars);
+ includedTocItems = yield processTocItems(
+ path,
+ includedTocItems,
+ baseTocDir,
+ sourcesDir,
+ vars
+ );
  if (mode === "link" /* LINK */) {
- includedTocItems = _replaceIncludesHrefs(includedTocItems, includeTocDir, tocDir);
+ includedTocItems = _replaceIncludesHrefs(
+ includedTocItems,
+ includeTocDir,
+ tocDir
+ );
  }
  if (item.name) {
  item.items = includedTocItems;
@@ -1612,7 +1679,9 @@ function _replaceIncludes(path, items, tocDir, sourcesDir, vars) {
  includedInlineItems = includedTocItems;
  }
  } catch (err) {
- const message = `Error while including toc: ${(0, import_chalk2.bold)(includeTocPath)} to ${(0, import_chalk2.bold)((0, import_path13.join)(tocDir, "toc.yaml"))}`;
+ const message = `Error while including toc: ${(0, import_chalk2.bold)(includeTocPath)} to ${(0, import_chalk2.bold)(
+ (0, import_path13.join)(tocDir, "toc.yaml")
+ )}`;
  import_log3.default.error(message);
  continue;
  } finally {
@@ -1723,34 +1792,25 @@ var import_js_yaml5 = require("js-yaml");
  var import_log4 = __toESM(require("@diplodoc/transform/lib/log"));
  function filterFile(path) {
  var _a, _b;
- const {
- input: inputFolderPath,
- vars
- } = argv_default.getConfig();
+ const { input: inputFolderPath, vars } = argv_default.getConfig();
  const pathToDir = (0, import_path16.dirname)(path);
  const filePath = (0, import_path16.resolve)(inputFolderPath, path);
  const content = (0, import_fs6.readFileSync)(filePath, "utf8");
  const parsedIndex = (0, import_js_yaml5.load)(content);
  const combinedVars = __spreadValues(__spreadValues({}, preset_default.get(pathToDir)), vars);
  try {
- const title = firstFilterTextItems(
- parsedIndex.title,
- combinedVars,
- { resolveConditions: true }
- );
+ const title = firstFilterTextItems(parsedIndex.title, combinedVars, {
+ resolveConditions: true
+ });
  parsedIndex.title = liquidField(title, combinedVars, path);
- const description6 = filterTextItems(
- parsedIndex.description,
- combinedVars,
- { resolveConditions: true }
- );
+ const description6 = filterTextItems(parsedIndex.description, combinedVars, {
+ resolveConditions: true
+ });
  parsedIndex.description = liquidFields(description6, combinedVars, path);
  if ((_a = parsedIndex.meta) == null ? void 0 : _a.title) {
- const metaTitle = firstFilterTextItems(
- parsedIndex.meta.title,
- combinedVars,
- { resolveConditions: true }
- );
+ const metaTitle = firstFilterTextItems(parsedIndex.meta.title, combinedVars, {
+ resolveConditions: true
+ });
  parsedIndex.meta.title = liquidField(metaTitle, combinedVars, path);
  }
  if ((_b = parsedIndex.meta) == null ? void 0 : _b.description) {
@@ -1762,14 +1822,14 @@ function filterFile(path) {
  parsedIndex.meta.description = liquidField(metaDescription, combinedVars, path);
  }
  if (parsedIndex.nav) {
- const navTitle = firstFilterTextItems(
- parsedIndex.nav.title,
- combinedVars,
- { resolveConditions: true }
- );
+ const navTitle = firstFilterTextItems(parsedIndex.nav.title, combinedVars, {
+ resolveConditions: true
+ });
  parsedIndex.nav.title = liquidField(navTitle, combinedVars, path);
  }
- parsedIndex.links = filterFiles(parsedIndex.links, "links", combinedVars, { resolveConditions: true });
+ parsedIndex.links = filterFiles(parsedIndex.links, "links", combinedVars, {
+ resolveConditions: true
+ });
  parsedIndex.links.forEach((link) => {
  if (link.title) {
  link.title = liquidField(link.title, combinedVars, path);
@@ -1864,17 +1924,11 @@ var import_walk_sync2 = __toESM(require("walk-sync"));
  var import_shelljs3 = __toESM(require("shelljs"));
  var import_client3 = __toESM(require_client());
  function processAssets(outputBundlePath) {
- const {
- input: inputFolderPath,
- output: outputFolderPath
- } = argv_default.getConfig();
+ const { input: inputFolderPath, output: outputFolderPath } = argv_default.getConfig();
  const assetFilePath = (0, import_walk_sync2.default)(inputFolderPath, {
  directories: false,
  includeBasePath: false,
- ignore: [
- "**/*.yaml",
- "**/*.md"
- ]
+ ignore: ["**/*.yaml", "**/*.md"]
  });
  copyFiles(inputFolderPath, outputFolderPath, assetFilePath);
  import_shelljs3.default.mkdir("-p", outputBundlePath);
@@ -1888,23 +1942,17 @@ var import_path17 = require("path");
  var import_walk_sync3 = __toESM(require("walk-sync"));
  var import_shelljs4 = __toESM(require("shelljs"));
  function processExcludedFiles() {
- const {
- input: inputFolderPath,
- output: outputFolderPath,
- ignore
- } = argv_default.getConfig();
+ const { input: inputFolderPath, output: outputFolderPath, ignore } = argv_default.getConfig();
  const allContentFiles = (0, import_walk_sync3.default)(inputFolderPath, {
  directories: false,
  includeBasePath: true,
- globs: [
- "**/*.md",
- "**/index.yaml",
- ...ignore
- ],
+ globs: ["**/*.md", "**/index.yaml", ...ignore],
  // Ignores service directories like "_includes", "_templates" and etc.
  ignore: ["**/_*/**/*"]
  });
- const navigationPaths2 = tocs_default.getNavigationPaths().map((filePath) => convertBackSlashToSlash((0, import_path17.resolve)(inputFolderPath, filePath)));
+ const navigationPaths2 = tocs_default.getNavigationPaths().map(
+ (filePath) => convertBackSlashToSlash((0, import_path17.resolve)(inputFolderPath, filePath))
+ );
  const tocSpecifiedFiles = new Set(navigationPaths2);
  const excludedFiles = allContentFiles.filter((filePath) => !tocSpecifiedFiles.has(filePath));
  import_shelljs4.default.rm("-f", excludedFiles);
@@ -1922,15 +1970,7 @@ var import_uniq = __toESM(require("lodash/uniq"));
  function processLogs(inputFolder) {
  const replacementRegExp = new RegExp(inputFolder, "ig");
  const { info, warn, error } = import_log5.default.get();
- const outputLogs = (0, import_uniq.default)([
- "",
- ...info,
- "",
- ...warn,
- "",
- ...error,
- ""
- ]);
+ const outputLogs = (0, import_uniq.default)(["", ...info, "", ...warn, "", ...error, ""]);
  for (const outputLog of outputLogs) {
  const preparedLog = outputLog.replace(replacementRegExp, "");
  console.log(preparedLog);
@@ -1988,15 +2028,21 @@ function resolveMd2Md(options) {
  changesName = Math.trunc(new Date(changesDate).getTime() / 1e3);
  }
  if (!changesName) {
- changesName = `name-${mdFilename}-${String(changelogs.length - index).padStart(3, "0")}`;
+ changesName = `name-${mdFilename}-${String(changelogs.length - index).padStart(
+ 3,
+ "0"
+ )}`;
  }
  const changesPath = (0, import_path18.join)(outputDir, `changes-${changesName}.json`);
  if ((0, import_fs7.existsSync)(changesPath)) {
  throw new Error(`Changelog ${changesPath} already exists!`);
  }
- (0, import_fs7.writeFileSync)(changesPath, JSON.stringify(__spreadProps(__spreadValues({}, changes), {
- source: mdFilename
- })));
+ (0, import_fs7.writeFileSync)(
+ changesPath,
+ JSON.stringify(__spreadProps(__spreadValues({}, changes), {
+ source: mdFilename
+ }))
+ );
  });
  }
  logger.info(inputPath, PROCESSING_FINISHED);
@@ -2014,11 +2060,7 @@ function copyFile(targetPath, targetDestPath, options) {
  }
  }
  function liquidMd2Md(input, vars, path) {
- const {
- applyPresets,
- resolveConditions,
- conditionsInCode
- } = argv_default.getConfig();
+ const { applyPresets, resolveConditions, conditionsInCode } = argv_default.getConfig();
  return (0, import_liquid3.default)(input, vars, path, {
  conditions: resolveConditions,
  substitutions: applyPresets,
@@ -2028,9 +2070,7 @@ function liquidMd2Md(input, vars, path) {
  });
  }
  function transformMd2Md(input, options) {
- const {
- disableLiquid
- } = argv_default.getConfig();
+ const { disableLiquid } = argv_default.getConfig();
  const {
  vars = {},
  path,
@@ -2204,17 +2244,7 @@ function lintPage(options) {
  }
  }
  function MdFileLinter(content, lintOptions) {
- const _a = argv_default.getConfig(), {
- input,
- lintConfig,
- disableLiquid,
- outputFormat
- } = _a, options = __objRest(_a, [
- "input",
- "lintConfig",
- "disableLiquid",
- "outputFormat"
- ]);
+ const _a = argv_default.getConfig(), { input, lintConfig, disableLiquid, outputFormat } = _a, options = __objRest(_a, ["input", "lintConfig", "disableLiquid", "outputFormat"]);
  const { path: filePath } = lintOptions;
  const plugins2 = outputFormat === "md" ? [] : plugins_exports.getPlugins();
  const vars = getVarsPerFile(filePath);
@@ -2297,7 +2327,7 @@ var githubConnectorValidator = {
  }
  };
  var connectorValidator = {
- "type": {
+ type: {
  warnMessage: "'type' must be provided for repo.",
  validateFn: notEmptyValue
  },
@@ -2386,14 +2416,19 @@ function getRepoCommitByHash(httpClientByToken, hashCommit) {
  return null;
  }
  try {
- const commit = yield httpClientByToken.request("GET /repos/{owner}/{repo}/commits/{commit_sha}", {
- owner: validatedFileds["owner" /* OWNER */],
- repo: validatedFileds["repo" /* REPO */],
- commit_sha: hashCommit
- });
+ const commit = yield httpClientByToken.request(
+ "GET /repos/{owner}/{repo}/commits/{commit_sha}",
+ {
+ owner: validatedFileds["owner" /* OWNER */],
+ repo: validatedFileds["repo" /* REPO */],
+ commit_sha: hashCommit
+ }
+ );
  return commit.data;
  } catch (error) {
- import_log10.default.warn(`Getting commit by sha has been failed for GitHub. SHA commit: ${hashCommit}. Error: ${error}`);
+ import_log10.default.warn(
+ `Getting commit by sha has been failed for GitHub. SHA commit: ${hashCommit}. Error: ${error}`
+ );
  return null;
  }
  });
@@ -2464,7 +2499,14 @@ function getAllContributorsTocFiles(httpClientByToken) {
  const masterDir = "./_yfm-master";
  const tmpMasterBranch = "yfm-tmp-master";
  try {
- yield (0, import_simple_git.default)(options).raw("worktree", "add", "-b", tmpMasterBranch, masterDir, "origin/master");
+ yield (0, import_simple_git.default)(options).raw(
+ "worktree",
+ "add",
+ "-b",
+ tmpMasterBranch,
+ masterDir,
+ "origin/master"
+ );
  const fullRepoLogString = yield (0, import_simple_git.default)({
  baseDir: (0, import_path21.join)(rootInput, masterDir)
  }).raw(
@@ -2511,7 +2553,10 @@ function matchContributionsForEachPath(repoLogs, httpClientByToken) {
  let contributorDataByHash;
  if (hasContributorData === void 0) {
  logger.info("Contributors: Getting data for", email);
- contributorDataByHash = yield getContributorDataByHashCommit(httpClientByToken, hashCommit);
+ contributorDataByHash = yield getContributorDataByHashCommit(
+ httpClientByToken,
+ hashCommit
+ );
  if (contributorDataByHash) {
  const paths = dataArray.splice(1);
  addContributorForPath(paths, {
@@ -2691,12 +2736,31 @@ function processPages(outputBundlePath) {
  plugins_exports.setPlugins();
  const navigationPaths2 = tocs_default.getNavigationPaths();
  const concurrency = 500;
- yield (0, import_async.mapLimit)(navigationPaths2, concurrency, (0, import_async.asyncify)((pathToFile) => __async(this, null, function* () {
- const pathData = getPathData(pathToFile, inputFolderPath, outputFolderPath, outputFormat, outputBundlePath);
- logger.proc(pathToFile);
- const metaDataOptions = getMetaDataOptions(pathData, inputFolderPath.length, vcsConnector);
- yield preparingPagesByOutputFormat(pathData, metaDataOptions, resolveConditions, singlePage);
- })));
+ yield (0, import_async.mapLimit)(
+ navigationPaths2,
+ concurrency,
+ (0, import_async.asyncify)((pathToFile) => __async(this, null, function* () {
+ const pathData = getPathData(
+ pathToFile,
+ inputFolderPath,
+ outputFolderPath,
+ outputFormat,
+ outputBundlePath
+ );
+ logger.proc(pathToFile);
+ const metaDataOptions = getMetaDataOptions(
+ pathData,
+ inputFolderPath.length,
+ vcsConnector
+ );
+ yield preparingPagesByOutputFormat(
+ pathData,
+ metaDataOptions,
+ resolveConditions,
+ singlePage
+ );
+ }))
+ );
  if (singlePage) {
  yield saveSinglePages(outputBundlePath);
  }
@@ -2737,38 +2801,44 @@ function saveSinglePages(outputBundlePath) {
  resources
  } = argv_default.getConfig();
  try {
- yield Promise.all(Object.keys(singlePageResults).map((tocDir) => __async(this, null, function* () {
- if (!singlePageResults[tocDir].length) {
- return;
- }
- const singlePageBody = joinSinglePageResults(singlePageResults[tocDir], inputFolderPath, tocDir);
- const tocPath = (0, import_path22.join)((0, import_path22.relative)(inputFolderPath, tocDir), "toc.yaml");
- const toc = tocs_default.getForPath(tocPath) || null;
- const preparedToc = transformTocForSinglePage(toc, {
- root: inputFolderPath,
- currentPath: (0, import_path22.join)(tocDir, SINGLE_PAGE_FILENAME)
- });
- const pageData = {
- data: {
- leading: false,
- html: singlePageBody,
- headings: [],
- meta: resources || {},
- toc: preparedToc
- },
- router: {
- pathname: SINGLE_PAGE_FILENAME
- },
- lang: lang || "ru" /* RU */
- };
- const outputTocDir = (0, import_path22.resolve)(outputFolderPath, (0, import_path22.relative)(inputFolderPath, tocDir));
- const relativeOutputBundlePath = (0, import_path22.relative)(outputTocDir, outputBundlePath);
- const singlePageFn = (0, import_path22.join)(tocDir, SINGLE_PAGE_FILENAME);
- const singlePageDataFn = (0, import_path22.join)(tocDir, SINGLE_PAGE_DATA_FILENAME);
- const singlePageContent = generateStaticMarkup(pageData, relativeOutputBundlePath);
- (0, import_fs10.writeFileSync)(singlePageFn, singlePageContent);
- (0, import_fs10.writeFileSync)(singlePageDataFn, JSON.stringify(pageData));
- })));
+ yield Promise.all(
+ Object.keys(singlePageResults).map((tocDir) => __async(this, null, function* () {
+ if (!singlePageResults[tocDir].length) {
+ return;
+ }
+ const singlePageBody = joinSinglePageResults(
+ singlePageResults[tocDir],
+ inputFolderPath,
+ tocDir
+ );
+ const tocPath = (0, import_path22.join)((0, import_path22.relative)(inputFolderPath, tocDir), "toc.yaml");
+ const toc = tocs_default.getForPath(tocPath) || null;
+ const preparedToc = transformTocForSinglePage(toc, {
+ root: inputFolderPath,
+ currentPath: (0, import_path22.join)(tocDir, SINGLE_PAGE_FILENAME)
+ });
+ const pageData = {
+ data: {
+ leading: false,
+ html: singlePageBody,
+ headings: [],
+ meta: resources || {},
+ toc: preparedToc
+ },
+ router: {
+ pathname: SINGLE_PAGE_FILENAME
+ },
+ lang: lang || "ru" /* RU */
+ };
+ const outputTocDir = (0, import_path22.resolve)(outputFolderPath, (0, import_path22.relative)(inputFolderPath, tocDir));
+ const relativeOutputBundlePath = (0, import_path22.relative)(outputTocDir, outputBundlePath);
+ const singlePageFn = (0, import_path22.join)(tocDir, SINGLE_PAGE_FILENAME);
+ const singlePageDataFn = (0, import_path22.join)(tocDir, SINGLE_PAGE_DATA_FILENAME);
+ const singlePageContent = generateStaticMarkup(pageData, relativeOutputBundlePath);
+ (0, import_fs10.writeFileSync)(singlePageFn, singlePageContent);
+ (0, import_fs10.writeFileSync)(singlePageDataFn, JSON.stringify(pageData));
+ }))
+ );
  } catch (error) {
  console.log(error);
  }
@@ -2886,13 +2956,7 @@ function processingFileToMd(path, metaDataOptions) {
  }
  function processingFileToHtml(path, metaDataOptions) {
  return __async(this, null, function* () {
- const {
- outputBundlePath,
- filename,
- fileExtension,
- outputPath,
- pathToFile
- } = path;
+ const { outputBundlePath, filename, fileExtension, outputPath, pathToFile } = path;
  return resolveMd2HTML({
  inputPath: pathToFile,
  outputBundlePath,
@@ -2969,9 +3033,11 @@ function initLinterWorkers() {
  }
  navigationPathsChunks = splitOnChunks(navigationPaths2, chunkSize).filter((arr) => arr.length);
  const workersCount = navigationPathsChunks.length;
- processLinterWorkers = yield Promise.all(new Array(workersCount).fill(null).map(() => {
- return (0, import_threads.spawn)(new import_threads.Worker("./linter"), { timeout: 6e4 });
- }));
+ processLinterWorkers = yield Promise.all(
+ new Array(workersCount).fill(null).map(() => {
+ return (0, import_threads.spawn)(new import_threads.Worker("./linter"), { timeout: 6e4 });
+ })
+ );
  });
  }
  function getChunkSize(arr) {
@@ -3071,9 +3137,7 @@ function preparingTocFiles(getFilePathsByGlobals) {
  var import_fs12 = require("fs");
  var import_path25 = require("path");
  function prepareMapFile() {
- const {
- output: outputFolderPath
- } = argv_default.getConfig();
+ const { output: outputFolderPath } = argv_default.getConfig();
  const navigationPathsWithoutExtensions = tocs_default.getNavigationPaths().map((path) => {
  let preparedPath = convertBackSlashToSlash(path.replace((0, import_path25.extname)(path), ""));
  if (preparedPath.endsWith("/index")) {
@@ -3119,20 +3183,26 @@ function upload(props) {
  includeBasePath: false,
  ignore
  });
- yield (0, import_async2.mapLimit)(filesToPublish, 100, (0, import_async2.asyncify)((pathToFile) => __async(this, null, function* () {
- const mimeType = import_mime_types.default.lookup(pathToFile);
- logger.upload(pathToFile);
- try {
- yield s3Client.send(new import_client_s3.PutObjectCommand({
- ContentType: mimeType ? mimeType : void 0,
- Bucket: bucket,
- Key: convertBackSlashToSlash((0, import_path26.join)(prefix, pathToFile)),
- Body: (0, import_fs13.createReadStream)((0, import_path26.resolve)(input, pathToFile))
- }));
- } catch (error) {
- logger.error(pathToFile, error.message);
- }
- })));
+ yield (0, import_async2.mapLimit)(
+ filesToPublish,
+ 100,
+ (0, import_async2.asyncify)((pathToFile) => __async(this, null, function* () {
+ const mimeType = import_mime_types.default.lookup(pathToFile);
+ logger.upload(pathToFile);
+ try {
+ yield s3Client.send(
+ new import_client_s3.PutObjectCommand({
+ ContentType: mimeType ? mimeType : void 0,
+ Bucket: bucket,
+ Key: convertBackSlashToSlash((0, import_path26.join)(prefix, pathToFile)),
+ Body: (0, import_fs13.createReadStream)((0, import_path26.resolve)(input, pathToFile))
+ })
+ );
+ } catch (error) {
+ logger.error(pathToFile, error.message);
+ }
+ }))
+ );
  });
  }

@@ -3252,7 +3322,10 @@ function builder(argv) {
  describe: "Include static content in the page",
  type: "boolean",
  group: "Build options:"
- }).check(argvValidator).example("yfm -i ./input -o ./output", "").demandOption(["input", "output"], "Please provide input and output arguments to work with this tool");
+ }).check(argvValidator).example("yfm -i ./input -o ./output", "").demandOption(
+ ["input", "output"],
+ "Please provide input and output arguments to work with this tool"
+ );
  }
  function handler(args) {
  return __async(this, null, function* () {
@@ -3306,16 +3379,24 @@ function handler(args) {
  import_shelljs8.default.cp((0, import_path27.resolve)(pathToLintConfig), tmpOutputFolder);
  if (resources && allowCustomResources) {
  const resourcePaths = [];
- Object.keys(resources).forEach((type) => {
- var _a2;
- return (_a2 = resources[type]) == null ? void 0 : _a2.forEach((path) => resourcePaths.push(path));
- });
+ Object.keys(resources).forEach(
+ (type) => {
+ var _a2;
+ return (_a2 = resources[type]) == null ? void 0 : _a2.forEach(
+ (path) => resourcePaths.push(path)
+ );
+ }
+ );
  copyFiles(args.input, tmpOutputFolder, resourcePaths);
  }
  break;
  }
  }
- import_shelljs8.default.cp("-r", [(0, import_path27.join)(tmpOutputFolder, "*"), (0, import_path27.join)(tmpOutputFolder, ".*")], userOutputFolder);
+ import_shelljs8.default.cp(
+ "-r",
+ [(0, import_path27.join)(tmpOutputFolder, "*"), (0, import_path27.join)(tmpOutputFolder, ".*")],
+ userOutputFolder
+ );
  if (publish2) {
  const DEFAULT_PREFIX = (_a = process.env.YFM_STORAGE_PREFIX) != null ? _a : "";
  const {
@@ -3353,15 +3434,16 @@ function preparingTemporaryFolders(userOutputFolder) {
  import_shelljs8.default.rm("-rf", args.input, args.output);
  import_shelljs8.default.mkdir(args.input, args.output);
  import_shelljs8.default.chmod("-R", "u+w", args.input);
- copyFiles(args.rootInput, args.input, import_glob3.default.sync("**", {
- cwd: args.rootInput,
- nodir: true,
- follow: true,
- ignore: [
- "node_modules/**",
- "*/node_modules/**"
- ]
- }));
+ copyFiles(
+ args.rootInput,
+ args.input,
+ import_glob3.default.sync("**", {
+ cwd: args.rootInput,
+ nodir: true,
+ follow: true,
+ ignore: ["node_modules/**", "*/node_modules/**"]
+ })
+ );
  }

  // src/cmd/publish/index.ts
@@ -3418,12 +3500,7 @@ function builder2(argv) {
  function handler2(args) {
  return __async(this, null, function* () {
  argv_default.init(__spreadValues({}, args));
- const {
- input,
- endpoint,
- bucket,
- prefix
- } = argv_default.getConfig();
+ const { input, endpoint, bucket, prefix } = argv_default.getConfig();
  logger.info("", `Upload artifacts from ${input} to ${(0, import_path28.join)(endpoint, bucket, prefix)}`);
  try {
  yield upload(argv_default.getConfig());
@@ -3437,7 +3514,9 @@ function handler2(args) {
  var import_path29 = require("path");
  var import_markdown_translation = __toESM(require("@diplodoc/markdown-translation"));
  var import_async3 = require("async");
- var { promises: { readFile: readFile3, writeFile: writeFile2, mkdir: mkdir2 } } = require("fs");
+ var {
+ promises: { readFile: readFile3, writeFile: writeFile2, mkdir: mkdir2 }
+ } = require("fs");
  var command2 = "extract";
  var description2 = "extract xliff and skeleton from yfm documentation";
  var extract2 = { command: command2, description: description2, handler: handler3, builder: builder3 };
@@ -3471,7 +3550,10 @@ function builder3(argv) {
  alias: "o",
  describe: "output folder to store xliff and skeleton files",
  type: "string"
- }).demandOption(["source-language-locale", "target-language-locale", "input", "output"], USAGE);
+ }).demandOption(
+ ["source-language-locale", "target-language-locale", "input", "output"],
+ USAGE
+ );
  }
  function handler3(args) {
  return __async(this, null, function* () {
@@ -3490,7 +3572,9 @@ function handler3(args) {
  let cache = {};
  let found = [];
  try {
- ({ state: { found, cache } } = yield glob((0, import_path29.join)(input, MD_GLOB2), {
+ ({
+ state: { found, cache }
+ } = yield glob((0, import_path29.join)(input, MD_GLOB2), {
  nosort: true,
  cache
  }));
@@ -3591,7 +3675,9 @@ function writer(params) {
  var import_path30 = require("path");
  var import_markdown_translation2 = __toESM(require("@diplodoc/markdown-translation"));
  var import_async4 = require("async");
- var { promises: { readFile: readFile4, writeFile: writeFile3, mkdir: mkdir3 } } = require("fs");
+ var {
+ promises: { readFile: readFile4, writeFile: writeFile3, mkdir: mkdir3 }
+ } = require("fs");
  var command3 = "compose";
  var description3 = "compose xliff and skeleton into documentation";
  var compose = { command: command3, description: description3, handler: handler4, builder: builder4 };
@@ -3624,11 +3710,15 @@ function handler4(args) {
  let skeletonPaths = [];
  let xliffPaths = [];
  try {
- ({ state: { found: skeletonPaths, cache } } = yield glob((0, import_path30.join)(input, SKL_MD_GLOB), {
+ ({
+ state: { found: skeletonPaths, cache }
+ } = yield glob((0, import_path30.join)(input, SKL_MD_GLOB), {
  nosort: false,
  cache
  }));
- ({ state: { found: xliffPaths, cache } } = yield glob((0, import_path30.join)(input, XLF_GLOB), {
+ ({
+ state: { found: xliffPaths, cache }
+ } = yield glob((0, import_path30.join)(input, XLF_GLOB), {
  nosort: false,
  cache
  }));
@@ -3748,7 +3838,10 @@ var xliff = {
  builder: builder5
  };
  function builder5(argv) {
- return argv.command(extract2).command(compose).demandCommand(1, `provide one of the folowing ${command4} commands: ${extract2.command}, ${compose.command}`);
+ return argv.command(extract2).command(compose).demandCommand(
+ 1,
+ `provide one of the folowing ${command4} commands: ${extract2.command}, ${compose.command}`
+ );
  }

  // src/cmd/translate/index.ts
@@ -3759,6 +3852,7 @@ var import_fast_xml_parser = require("fast-xml-parser");
  var import_session = require("@yandex-cloud/nodejs-sdk/dist/session");
  var import_service_clients = require("@yandex-cloud/nodejs-sdk/dist/generated/yandex/cloud/service_clients");
  var import_translation_service = require("@yandex-cloud/nodejs-sdk/dist/generated/yandex/cloud/ai/translate/v2/translation_service");
+ var import_markdown_translation3 = __toESM(require("@diplodoc/markdown-translation"));

  // src/packages/credentials/yandex-oauth.ts
  var import_promises3 = require("fs/promises");
@@ -3792,15 +3886,6 @@ function getYandexOAuthTokenFromHomeDir() {
  }

  // src/cmd/translate/index.ts
- var yfm2xliff = require("@doc-tools/yfm2xliff/lib/cjs");
- var composer2 = (xliff2, skeleton) => __async(void 0, null, function* () {
- return new Promise((res, rej) => yfm2xliff.compose(xliff2, skeleton, (err, composed) => {
- if (err) {
- rej(err);
- }
- return res(composed);
- }));
- });
  var command5 = "translate";
  var description5 = "translate documentation with Yandex.Cloud Translator API";
  var translate = {
@@ -3844,10 +3929,15 @@ function handler5(args) {
  sl: sourceLanguage,
  tl: targetLanguage
  } = args;
- logger.info(input, `translating documentation from ${sourceLanguage} to ${targetLanguage} language`);
+ logger.info(
+ input,
+ `translating documentation from ${sourceLanguage} to ${targetLanguage} language`
+ );
  try {
  let found = [];
- ({ state: { found } } = yield glob((0, import_path32.join)(input, MD_GLOB3), {
+ ({
+ state: { found }
+ } = yield glob((0, import_path32.join)(input, MD_GLOB3), {
  nosort: true
  }));
  const oauthToken = yield getYandexOAuthToken();
@@ -3869,7 +3959,10 @@ function handler5(args) {
  logger.error(file, message);
  }
  }
- logger.info(output, `translated documentation from ${sourceLanguage} to ${targetLanguage} language`);
+ logger.info(
+ output,
+ `translated documentation from ${sourceLanguage} to ${targetLanguage} language`
+ );
  });
  }
  function translator(params) {
@@ -3888,15 +3981,20 @@ function translator(params) {
  try {
  logger.info(mdPath, "translating");
  const md = yield (0, import_promises4.readFile)((0, import_path32.resolve)(mdPath), { encoding: "utf-8" });
- const extracted = yield yfm2xliff.extract({
- md,
- mdPath,
- source: sourceLanguage,
- target: targetLanguage,
- sklPath: "",
- xlfPath: ""
+ const { xlf, skeleton } = import_markdown_translation3.default.extract({
+ source: {
+ language: sourceLanguage,
+ locale: sourceLanguage.toUpperCase()
+ },
+ target: {
+ language: targetLanguage,
+ locale: targetLanguage.toUpperCase()
+ },
+ markdown: md,
+ markdownPath: mdPath,
+ skeletonPath: ""
  });
- const texts = parseSourcesFromXLIFF(extracted.xliff);
+ const texts = parseSourcesFromXLIFF(xlf);
  const machineTranslateParams = import_translation_service.TranslateRequest.fromPartial({
  texts,
  folderId,
@@ -3909,13 +4007,21 @@ function translator(params) {
  },
  format: import_translation_service.TranslateRequest_Format.PLAIN_TEXT
  });
- const translations = yield (0, import_async5.retry)({ times: RETRY_LIMIT, interval: (count) => {
- return (1 << count) * 1e3;
- } }, (0, import_async5.asyncify)(
- () => __async(this, null, function* () {
- return yield client3.translate(machineTranslateParams).then((results) => results.translations.map(({ text }) => text));
- })
- ));
+ const translations = yield (0, import_async5.retry)(
+ {
+ times: RETRY_LIMIT,
+ interval: (count) => {
+ return (1 << count) * 1e3;
+ }
+ },
+ (0, import_async5.asyncify)(
+ () => __async(this, null, function* () {
+ return yield client3.translate(machineTranslateParams).then(
+ (results) => results.translations.map(({ text }) => text)
+ );
+ })
+ )
+ );
  const createXLIFFDocumentParams = {
  sourceLanguage: sourceLanguage + "-" + MTRANS_LOCALE,
  targetLanguage: targetLanguage + "-" + MTRANS_LOCALE,
@@ -3923,7 +4029,10 @@ function translator(params) {
  targets: translations
  };
  const translatedXLIFF = createXLIFFDocument(createXLIFFDocumentParams);
- const composed = yield composer2(translatedXLIFF, extracted.skeleton);
+ const composed = yield import_markdown_translation3.default.compose({
+ xlf: translatedXLIFF,
+ skeleton
+ });
  const outputPath = mdPath.replace(input, output);
  yield (0, import_promises4.mkdir)((0, import_path32.dirname)(outputPath), { recursive: true });
  yield (0, import_promises4.writeFile)(outputPath, composed);
@@ -3977,7 +4086,7 @@ import_yargs.default.command(build).command(publish).command(xliff).command(tran
  default: false,
  describe: "Run in quiet mode. Don't write logs to stdout",
  type: "boolean"
- }).group(["config", "strict", "quiet", "help", "version"], "Common options:").version(true ? "4.1.0" : "").help().parse((0, import_helpers.hideBin)(process.argv), {}, (err, { strict }, output) => {
+ }).group(["config", "strict", "quiet", "help", "version"], "Common options:").version(true ? "4.2.1" : "").help().parse((0, import_helpers.hideBin)(process.argv), {}, (err, { strict }, output) => {
  console.timeEnd(MAIN_TIMER_ID);
  if (err) {
  console.error(err);