@diplodoc/cli 4.0.0 → 4.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/build/app.client.css +2678 -41
  2. package/build/app.client.js +1 -1
  3. package/build/index.js +418 -309
  4. package/build/index.js.map +3 -3
  5. package/build/linter.js +89 -81
  6. package/build/linter.js.map +2 -2
  7. package/package.json +12 -11
  8. package/src/cmd/build/index.ts +29 -15
  9. package/src/cmd/publish/index.ts +1 -6
  10. package/src/cmd/publish/upload.ts +22 -16
  11. package/src/cmd/translate/index.ts +51 -37
  12. package/src/cmd/xliff/compose.ts +16 -7
  13. package/src/cmd/xliff/extract.ts +18 -9
  14. package/src/cmd/xliff/index.ts +4 -1
  15. package/src/constants.ts +24 -24
  16. package/src/index.ts +3 -3
  17. package/src/models.ts +15 -10
  18. package/src/resolvers/lintPage.ts +3 -9
  19. package/src/resolvers/md2html.ts +27 -19
  20. package/src/resolvers/md2md.ts +16 -16
  21. package/src/services/authors.ts +4 -2
  22. package/src/services/contributors.ts +26 -12
  23. package/src/services/includers/batteries/common.ts +14 -2
  24. package/src/services/includers/batteries/generic.ts +27 -9
  25. package/src/services/includers/batteries/sourcedocs.ts +4 -1
  26. package/src/services/includers/batteries/unarchive.ts +8 -4
  27. package/src/services/includers/index.ts +6 -4
  28. package/src/services/leading.ts +24 -27
  29. package/src/services/metadata.ts +35 -10
  30. package/src/services/plugins.ts +1 -1
  31. package/src/services/preset.ts +2 -2
  32. package/src/services/tocs.ts +28 -23
  33. package/src/services/utils.ts +15 -5
  34. package/src/steps/processAssets.ts +2 -8
  35. package/src/steps/processExcludedFiles.ts +11 -20
  36. package/src/steps/processLinter.ts +9 -8
  37. package/src/steps/processLogs.ts +2 -7
  38. package/src/steps/processMapFile.ts +8 -11
  39. package/src/steps/processPages.ts +97 -67
  40. package/src/steps/processServiceFiles.ts +4 -2
  41. package/src/steps/publishFilesToS3.ts +16 -12
  42. package/src/utils/file.ts +5 -1
  43. package/src/utils/glob.ts +1 -3
  44. package/src/utils/logger.ts +1 -1
  45. package/src/utils/markup.ts +28 -13
  46. package/src/utils/singlePage.ts +11 -10
  47. package/src/utils/toc.ts +20 -7
  48. package/src/utils/worker.ts +1 -1
  49. package/src/validator.ts +25 -17
  50. package/src/vcs-connector/client/github.ts +16 -8
  51. package/src/vcs-connector/connector-validator.ts +13 -7
  52. package/src/vcs-connector/github.ts +38 -11
  53. package/src/vcs-connector/index.ts +1 -1
  54. package/src/workers/linter/index.ts +2 -6
  55. package/CHANGELOG.md +0 -785
  56. package/build/lib.js +0 -3374
  57. package/build/lib.js.map +0 -7
@@ -12,8 +12,11 @@ export interface TitleMeta {
12
12
  }
13
13
  export type Meta = TitleMeta & Resources;
14
14
 
15
- export function generateStaticMarkup(props: DocInnerProps<DocPageData>, pathToBundle: string): string {
16
- const {title: metaTitle, style, script} = props.data.meta as Meta || {};
15
+ export function generateStaticMarkup(
16
+ props: DocInnerProps<DocPageData>,
17
+ pathToBundle: string,
18
+ ): string {
19
+ const {title: metaTitle, style, script} = (props.data.meta as Meta) || {};
17
20
  const {title: tocTitle} = props.data.toc;
18
21
  const {title: pageTitle} = props.data;
19
22
 
@@ -51,7 +54,9 @@ export function generateStaticMarkup(props: DocInnerProps<DocPageData>, pathToBu
51
54
  window.STATIC_CONTENT = ${staticContent}
52
55
  window.__DATA__ = ${JSON.stringify(props)};
53
56
  </script>
54
- <script type="application/javascript" src="${client.bundle.js(pathToBundle)}"></script>
57
+ <script type="application/javascript" src="${client.bundle.js(
58
+ pathToBundle,
59
+ )}"></script>
55
60
  </body>
56
61
  </html>
57
62
  `;
@@ -83,7 +88,9 @@ function getMetadata(metadata: Record<string, string>): string {
83
88
  }
84
89
 
85
90
  // Exclude resources from meta, proceed them separately
86
- const metaEntries = Object.entries(metadata).filter(([key]) => !Object.keys(ResourceType).includes(key));
91
+ const metaEntries = Object.entries(metadata).filter(
92
+ ([key]) => !Object.keys(ResourceType).includes(key),
93
+ );
87
94
 
88
95
  return metaEntries
89
96
  .map(([name, content]) => {
@@ -96,15 +103,17 @@ function getResources({style, script}: Resources) {
96
103
  const resourcesTags: string[] = [];
97
104
 
98
105
  if (style) {
99
- style.forEach((el, id) => resourcesTags.push(
100
- `<link rel="stylesheet" type="text/css" href="${el}" ${id === 0 && `id="${CUSTOM_STYLE}"`}>`,
101
- ));
106
+ style.forEach((el, id) =>
107
+ resourcesTags.push(
108
+ `<link rel="stylesheet" type="text/css" href="${el}" ${
109
+ id === 0 && `id="${CUSTOM_STYLE}"`
110
+ }>`,
111
+ ),
112
+ );
102
113
  }
103
114
 
104
115
  if (script) {
105
- script.forEach((el) => resourcesTags.push(
106
- `<script src="${el}"></script>`,
107
- ));
116
+ script.forEach((el) => resourcesTags.push(`<script src="${el}"></script>`));
108
117
  }
109
118
 
110
119
  return resourcesTags.join('\n');
@@ -112,14 +121,20 @@ function getResources({style, script}: Resources) {
112
121
 
113
122
  export const сarriage = platform === Platforms.WINDOWS ? '\r\n' : '\n';
114
123
 
115
- export function joinSinglePageResults(singlePageResults: SinglePageResult[], root: string, tocDir: string): string {
124
+ export function joinSinglePageResults(
125
+ singlePageResults: SinglePageResult[],
126
+ root: string,
127
+ tocDir: string,
128
+ ): string {
116
129
  const delimeter = `${сarriage}${сarriage}<hr class="yfm-page__delimeter">${сarriage}${сarriage}`;
117
130
  return singlePageResults
118
131
  .filter(({content}) => content)
119
- .map(({content, path, title}) => preprocessPageHtmlForSinglePage(content, {root, path, tocDir, title}))
132
+ .map(({content, path, title}) =>
133
+ preprocessPageHtmlForSinglePage(content, {root, path, tocDir, title}),
134
+ )
120
135
  .join(delimeter);
121
136
  }
122
137
 
123
138
  export function replaceDoubleToSingleQuotes(str: string): string {
124
- return str.replace(/"/g, '\'');
139
+ return str.replace(/"/g, "'");
125
140
  }
@@ -1,7 +1,7 @@
1
1
  import HTMLElement from 'node-html-parser/dist/nodes/html';
2
2
  import {parse} from 'node-html-parser';
3
3
  import {resolve, sep, relative} from 'path';
4
- import {resolveRelativePath} from '@doc-tools/transform/lib/utilsFS';
4
+ import {resolveRelativePath} from '@diplodoc/transform/lib/utilsFS';
5
5
  import url from 'url';
6
6
  import _ from 'lodash';
7
7
 
@@ -25,8 +25,9 @@ const HEADERS_SELECTOR = 'h1, h2, h3, h4, h5, h6';
25
25
  function getNewNode(options: ModifyNode): HTMLElement | null {
26
26
  const {rawTagName, innerHTML, attrEntries} = options;
27
27
 
28
- const nodeNew = parse(`<html><${rawTagName}></${rawTagName}></html>`)
29
- .querySelector(`${rawTagName}`);
28
+ const nodeNew = parse(`<html><${rawTagName}></${rawTagName}></html>`).querySelector(
29
+ `${rawTagName}`,
30
+ );
30
31
 
31
32
  if (!nodeNew) {
32
33
  return null;
@@ -63,7 +64,6 @@ export function tryFixFirstPageHeader(root: HTMLElement) {
63
64
  firstPageHeader.rawTagName = 'h1';
64
65
  }
65
66
 
66
-
67
67
  export function replaceLinks(rootEl: HTMLElement, options: PreprocessSinglePageOptions) {
68
68
  const {root, path, tocDir} = options;
69
69
 
@@ -74,7 +74,6 @@ export function replaceLinks(rootEl: HTMLElement, options: PreprocessSinglePageO
74
74
  const linkFullPath = resolveRelativePath(resolvedPath, href);
75
75
  const isLinkOutOfToc = !linkFullPath.startsWith(tocDir);
76
76
 
77
-
78
77
  let preparedHref = href;
79
78
 
80
79
  if (isLinkOutOfToc) {
@@ -93,7 +92,6 @@ export function replaceLinks(rootEl: HTMLElement, options: PreprocessSinglePageO
93
92
  }
94
93
  }
95
94
 
96
-
97
95
  node.setAttribute('href', preparedHref);
98
96
  });
99
97
  }
@@ -112,12 +110,10 @@ export function replaceImages(rootEl: HTMLElement, options: PreprocessSinglePage
112
110
  const linkFullPath = resolveRelativePath(resolvedPath, href);
113
111
  const preparedHref = relative(tocDir, linkFullPath);
114
112
 
115
-
116
113
  node.setAttribute('src', preparedHref);
117
114
  });
118
115
  }
119
116
 
120
-
121
117
  function prepareAnchorAttr(name: string, value: string, pageId: string) {
122
118
  switch (name) {
123
119
  case 'href':
@@ -153,7 +149,9 @@ export function addPagePrefixToAnchors(rootEl: HTMLElement, options: PreprocessS
153
149
 
154
150
  const mainHeader = rootEl.querySelector('h1');
155
151
  if (mainHeader) {
156
- const anchor = parse(`<a class="yfm-anchor" aria-hidden="true" href="${pageIdAnchor}" id="${pageId}"></a>`);
152
+ const anchor = parse(
153
+ `<a class="yfm-anchor" aria-hidden="true" href="${pageIdAnchor}" id="${pageId}"></a>`,
154
+ );
157
155
  if (!anchor) {
158
156
  return;
159
157
  }
@@ -214,7 +212,10 @@ export function transformLinkToOriginalArticle(opts: {root: string; currentPath:
214
212
  return currentPath.replace(root, '').replace(/\.(md|ya?ml|html)$/i, '');
215
213
  }
216
214
 
217
- export function preprocessPageHtmlForSinglePage(content: string, options: PreprocessSinglePageOptions) {
215
+ export function preprocessPageHtmlForSinglePage(
216
+ content: string,
217
+ options: PreprocessSinglePageOptions,
218
+ ) {
218
219
  const root = parse(content);
219
220
 
220
221
  addMainTitle(root, options);
package/src/utils/toc.ts CHANGED
@@ -13,9 +13,14 @@ export function transformToc(toc: YfmToc | null, pathToFileDirectory: string): Y
13
13
  const localToc: YfmToc = JSON.parse(JSON.stringify(toc));
14
14
 
15
15
  if (localToc.items) {
16
- localToc.items = filterFiles(localToc.items, 'items', {}, {
17
- removeHiddenTocItems: true,
18
- });
16
+ localToc.items = filterFiles(
17
+ localToc.items,
18
+ 'items',
19
+ {},
20
+ {
21
+ removeHiddenTocItems: true,
22
+ },
23
+ );
19
24
  }
20
25
 
21
26
  const baseTocPath: string = localToc.base || '';
@@ -52,7 +57,10 @@ export function transformToc(toc: YfmToc | null, pathToFileDirectory: string): Y
52
57
  return localToc;
53
58
  }
54
59
 
55
- export function transformTocForSinglePage(toc: YfmToc | null, options: {root: string; currentPath: string}) {
60
+ export function transformTocForSinglePage(
61
+ toc: YfmToc | null,
62
+ options: {root: string; currentPath: string},
63
+ ) {
56
64
  const {root, currentPath} = options;
57
65
 
58
66
  if (!toc) {
@@ -62,9 +70,14 @@ export function transformTocForSinglePage(toc: YfmToc | null, options: {root: st
62
70
  const localToc: YfmToc = JSON.parse(JSON.stringify(toc));
63
71
 
64
72
  if (localToc.items) {
65
- localToc.items = filterFiles(localToc.items, 'items', {}, {
66
- removeHiddenTocItems: true,
67
- });
73
+ localToc.items = filterFiles(
74
+ localToc.items,
75
+ 'items',
76
+ {},
77
+ {
78
+ removeHiddenTocItems: true,
79
+ },
80
+ );
68
81
  }
69
82
 
70
83
  function processItems(items: YfmToc[]) {
@@ -1,5 +1,5 @@
1
1
  export function splitOnChunks<T>(array: T[], chunkSize = 1000) {
2
- const chunks: (T[])[] = [];
2
+ const chunks: T[][] = [];
3
3
 
4
4
  for (let i = 0, j = array.length; i < j; i += chunkSize) {
5
5
  const chunk: T[] = array.slice(i, i + chunkSize);
package/src/validator.ts CHANGED
@@ -3,7 +3,7 @@ import {join, resolve} from 'path';
3
3
  import {readFileSync} from 'fs';
4
4
  import {load} from 'js-yaml';
5
5
  import merge from 'lodash/merge';
6
- import log from '@doc-tools/transform/lib/log';
6
+ import log from '@diplodoc/transform/lib/log';
7
7
  import {REDIRECTS_FILENAME, LINT_CONFIG_FILENAME, YFM_CONFIG_FILENAME} from './constants';
8
8
  import {ConnectorValidatorProps} from './vcs-connector/connector-models';
9
9
 
@@ -20,25 +20,25 @@ function requiredValueValidator(value: unknown): Boolean {
20
20
  }
21
21
 
22
22
  const validators: Record<string, ConnectorValidatorProps> = {
23
- 'storageEndpoint': {
23
+ storageEndpoint: {
24
24
  errorMessage: 'Endpoint of S3 storage must be provided when publishes.',
25
25
  validateFn: notEmptyStringValidator,
26
26
  },
27
- 'storageBucket': {
27
+ storageBucket: {
28
28
  errorMessage: 'Bucket name of S3 storage must be provided when publishes.',
29
29
  validateFn: notEmptyStringValidator,
30
30
  },
31
- 'storageKeyId': {
31
+ storageKeyId: {
32
32
  errorMessage: 'Key Id of S3 storage must be provided when publishes.',
33
33
  validateFn: notEmptyStringValidator,
34
34
  defaultValue: process.env.YFM_STORAGE_KEY_ID,
35
35
  },
36
- 'storageSecretKey': {
36
+ storageSecretKey: {
37
37
  errorMessage: 'Secret key of S3 storage must be provided when publishes.',
38
38
  validateFn: notEmptyStringValidator,
39
39
  defaultValue: process.env.YFM_STORAGE_SECRET_KEY,
40
40
  },
41
- 'storageRegion': {
41
+ storageRegion: {
42
42
  errorMessage: 'Region of S3 storage must be provided when publishes.',
43
43
  validateFn: notEmptyStringValidator,
44
44
  defaultValue: 'eu-central-1',
@@ -56,26 +56,32 @@ interface RedirectsConfig {
56
56
  }
57
57
 
58
58
  function validateRedirects(redirectsConfig: RedirectsConfig, pathToRedirects: string) {
59
- const redirects: Redirect[] = Object.keys(redirectsConfig).reduce((res, redirectSectionName) => {
60
- const sectionRedirects = redirectsConfig[redirectSectionName];
61
- res.push(...sectionRedirects);
62
- return res;
63
- }, [] as Redirect[]);
59
+ const redirects: Redirect[] = Object.keys(redirectsConfig).reduce(
60
+ (res, redirectSectionName) => {
61
+ const sectionRedirects = redirectsConfig[redirectSectionName];
62
+ res.push(...sectionRedirects);
63
+ return res;
64
+ },
65
+ [] as Redirect[],
66
+ );
64
67
 
65
68
  const getContext = (from: string, to: string) => ` [Context: \n- from: ${from}\n- to: ${to} ]`;
66
- const formatMessage = (message: string, pathname: string, from: string, to: string) => (
67
- `${pathname}: ${message} ${getContext(from, to)}`
68
- );
69
+ const formatMessage = (message: string, pathname: string, from: string, to: string) =>
70
+ `${pathname}: ${message} ${getContext(from, to)}`;
69
71
 
70
72
  redirects.forEach((redirect) => {
71
73
  const {from, to} = redirect;
72
74
 
73
75
  if (!from || !to) {
74
- throw new Error(formatMessage('One of the two parameters is missing', pathToRedirects, from, to));
76
+ throw new Error(
77
+ formatMessage('One of the two parameters is missing', pathToRedirects, from, to),
78
+ );
75
79
  }
76
80
 
77
81
  if (from === to) {
78
- throw new Error(formatMessage('Parameters must be different', pathToRedirects, from, to));
82
+ throw new Error(
83
+ formatMessage('Parameters must be different', pathToRedirects, from, to),
84
+ );
79
85
  }
80
86
  });
81
87
  }
@@ -83,7 +89,9 @@ function validateRedirects(redirectsConfig: RedirectsConfig, pathToRedirects: st
83
89
  export function argvValidator(argv: Arguments<Object>): Boolean {
84
90
  try {
85
91
  // Combine passed argv and properties from configuration file.
86
- const pathToConfig = argv.config ? String(argv.config) : join(String(argv.input), YFM_CONFIG_FILENAME);
92
+ const pathToConfig = argv.config
93
+ ? String(argv.config)
94
+ : join(String(argv.input), YFM_CONFIG_FILENAME);
87
95
  const content = readFileSync(resolve(pathToConfig), 'utf8');
88
96
  Object.assign(argv, load(content) || {});
89
97
  } catch (error) {
@@ -1,4 +1,4 @@
1
- import log from '@doc-tools/transform/lib/log';
1
+ import log from '@diplodoc/transform/lib/log';
2
2
  import {Octokit} from '@octokit/core';
3
3
  import {ArgvService} from '../../services';
4
4
  import {
@@ -22,7 +22,10 @@ async function getRepoUser(octokit: Octokit, username: string): Promise<GithubUs
22
22
  }
23
23
  }
24
24
 
25
- async function getRepoCommitByHash(httpClientByToken: Octokit, hashCommit: string): Promise<GithubCommitDTO | null> {
25
+ async function getRepoCommitByHash(
26
+ httpClientByToken: Octokit,
27
+ hashCommit: string,
28
+ ): Promise<GithubCommitDTO | null> {
26
29
  const {connector} = ArgvService.getConfig();
27
30
 
28
31
  const neededProperties = [GitHubConnectorFields.OWNER, GitHubConnectorFields.REPO];
@@ -33,15 +36,20 @@ async function getRepoCommitByHash(httpClientByToken: Octokit, hashCommit: strin
33
36
  }
34
37
 
35
38
  try {
36
- const commit = await httpClientByToken.request('GET /repos/{owner}/{repo}/commits/{commit_sha}', {
37
- owner: validatedFileds[GitHubConnectorFields.OWNER] as string,
38
- repo: validatedFileds[GitHubConnectorFields.REPO] as string,
39
- commit_sha: hashCommit,
40
- });
39
+ const commit = await httpClientByToken.request(
40
+ 'GET /repos/{owner}/{repo}/commits/{commit_sha}',
41
+ {
42
+ owner: validatedFileds[GitHubConnectorFields.OWNER] as string,
43
+ repo: validatedFileds[GitHubConnectorFields.REPO] as string,
44
+ commit_sha: hashCommit,
45
+ },
46
+ );
41
47
 
42
48
  return commit.data;
43
49
  } catch (error) {
44
- log.warn(`Getting commit by sha has been failed for GitHub. SHA commit: ${hashCommit}. Error: ${error}`);
50
+ log.warn(
51
+ `Getting commit by sha has been failed for GitHub. SHA commit: ${hashCommit}. Error: ${error}`,
52
+ );
45
53
  return null;
46
54
  }
47
55
  }
@@ -1,5 +1,10 @@
1
- import log from '@doc-tools/transform/lib/log';
2
- import {ConnectorValidatorProps, GitHubConnectorFields, SourceType, VCSConnectorConfig} from './connector-models';
1
+ import log from '@diplodoc/transform/lib/log';
2
+ import {
3
+ ConnectorValidatorProps,
4
+ GitHubConnectorFields,
5
+ SourceType,
6
+ VCSConnectorConfig,
7
+ } from './connector-models';
3
8
  import {getMsgСonfigurationMustBeProvided} from '../constants';
4
9
 
5
10
  const githubConnectorValidator: Record<string, ConnectorValidatorProps> = {
@@ -26,8 +31,8 @@ const githubConnectorValidator: Record<string, ConnectorValidatorProps> = {
26
31
  };
27
32
 
28
33
  const connectorValidator: Record<string, ConnectorValidatorProps> = {
29
- 'type': {
30
- warnMessage: '\'type\' must be provided for repo.',
34
+ type: {
35
+ warnMessage: "'type' must be provided for repo.",
31
36
  validateFn: notEmptyValue,
32
37
  },
33
38
  [SourceType.GITHUB]: {
@@ -56,8 +61,8 @@ function notEmptyValue(value?: unknown): boolean {
56
61
  export function validateConnectorFields(
57
62
  sourceType: SourceType,
58
63
  fieldNames: GitHubConnectorFields[],
59
- repoProperties?: VCSConnectorConfig): Record<string, unknown> {
60
-
64
+ repoProperties?: VCSConnectorConfig,
65
+ ): Record<string, unknown> {
61
66
  const repoValidator = connectorValidator[sourceType];
62
67
 
63
68
  if (!repoValidator) {
@@ -83,7 +88,8 @@ export function validateConnectorFields(
83
88
  continue;
84
89
  }
85
90
 
86
- const propertyValue = propertyValidator.defaultValue || repoProperties[sourceType]?.[property];
91
+ const propertyValue =
92
+ propertyValidator.defaultValue || repoProperties[sourceType]?.[property];
87
93
 
88
94
  if (!propertyValidator.validateFn(propertyValue)) {
89
95
  createLog(propertyValidator);
@@ -41,9 +41,11 @@ async function getGitHubVCSConnector(): Promise<VCSConnector | undefined> {
41
41
  return undefined;
42
42
  }
43
43
 
44
- let addNestedContributorsForPath: NestedContributorsForPathFunction = () => { };
45
- let getContributorsByPath: ContributorsByPathFunction = () => Promise.resolve({} as FileContributors);
46
- const getExternalAuthorByPath: ExternalAuthorByPathFunction = (path: string) => authorByPath.get(path) ?? null;
44
+ let addNestedContributorsForPath: NestedContributorsForPathFunction = () => {};
45
+ let getContributorsByPath: ContributorsByPathFunction = () =>
46
+ Promise.resolve({} as FileContributors);
47
+ const getExternalAuthorByPath: ExternalAuthorByPathFunction = (path: string) =>
48
+ authorByPath.get(path) ?? null;
47
49
 
48
50
  if (contributors) {
49
51
  await getAllContributorsTocFiles(httpClientByToken);
@@ -95,7 +97,14 @@ async function getAllContributorsTocFiles(httpClientByToken: Octokit): Promise<v
95
97
  const tmpMasterBranch = 'yfm-tmp-master';
96
98
 
97
99
  try {
98
- await simpleGit(options).raw('worktree', 'add', '-b', tmpMasterBranch, masterDir, 'origin/master');
100
+ await simpleGit(options).raw(
101
+ 'worktree',
102
+ 'add',
103
+ '-b',
104
+ tmpMasterBranch,
105
+ masterDir,
106
+ 'origin/master',
107
+ );
99
108
  const fullRepoLogString = await simpleGit({
100
109
  baseDir: join(rootInput, masterDir),
101
110
  }).raw(
@@ -127,8 +136,10 @@ async function getAllContributorsTocFiles(httpClientByToken: Octokit): Promise<v
127
136
  logger.info('', ALL_CONTRIBUTORS_RECEIVED);
128
137
  }
129
138
 
130
- async function matchContributionsForEachPath(repoLogs: string[], httpClientByToken: Octokit): Promise<void> {
131
-
139
+ async function matchContributionsForEachPath(
140
+ repoLogs: string[],
141
+ httpClientByToken: Octokit,
142
+ ): Promise<void> {
132
143
  for (const repoLog of repoLogs) {
133
144
  if (!repoLog) {
134
145
  continue;
@@ -149,7 +160,10 @@ async function matchContributionsForEachPath(repoLogs: string[], httpClientByTok
149
160
  if (hasContributorData === undefined) {
150
161
  logger.info('Contributors: Getting data for', email);
151
162
 
152
- contributorDataByHash = await getContributorDataByHashCommit(httpClientByToken, hashCommit);
163
+ contributorDataByHash = await getContributorDataByHashCommit(
164
+ httpClientByToken,
165
+ hashCommit,
166
+ );
153
167
 
154
168
  if (contributorDataByHash) {
155
169
  const paths = dataArray.splice(1);
@@ -185,7 +199,9 @@ async function matchAuthorsForEachPath(authorRepoLogs: string[], httpClientByTok
185
199
  }
186
200
  }
187
201
 
188
- async function getContributorDataByHashCommit(httpClientByToken: Octokit, hashCommit: string,
202
+ async function getContributorDataByHashCommit(
203
+ httpClientByToken: Octokit,
204
+ hashCommit: string,
189
205
  ): Promise<Contributor | null> {
190
206
  const repoCommit = await github.getRepoCommitByHash(httpClientByToken, hashCommit);
191
207
 
@@ -210,7 +226,11 @@ async function getContributorDataByHashCommit(httpClientByToken: Octokit, hashCo
210
226
  };
211
227
  }
212
228
 
213
- async function getAuthorByPaths(commitInfo: CommitInfo, paths: string[], httpClientByToken: Octokit) {
229
+ async function getAuthorByPaths(
230
+ commitInfo: CommitInfo,
231
+ paths: string[],
232
+ httpClientByToken: Octokit,
233
+ ) {
214
234
  for (const path of paths) {
215
235
  if (!path) {
216
236
  continue;
@@ -274,11 +294,18 @@ async function getUserByLogin(octokit: Octokit, userLogin: string): Promise<Cont
274
294
  };
275
295
  }
276
296
 
277
- function addNestedContributorsForPathFunction(path: string, nestedContributors: Contributors): void {
297
+ function addNestedContributorsForPathFunction(
298
+ path: string,
299
+ nestedContributors: Contributors,
300
+ ): void {
278
301
  addContributorForPath([path], nestedContributors, true);
279
302
  }
280
303
 
281
- function addContributorForPath(paths: string[], newContributor: Contributors, hasIncludes = false): void {
304
+ function addContributorForPath(
305
+ paths: string[],
306
+ newContributor: Contributors,
307
+ hasIncludes = false,
308
+ ): void {
282
309
  paths.forEach((path: string) => {
283
310
  const normalizePath = normalize(addSlashPrefix(path));
284
311
 
@@ -4,7 +4,7 @@ import {VCSConnector, SourceType} from './connector-models';
4
4
 
5
5
  export async function getVCSConnector(): Promise<VCSConnector | undefined> {
6
6
  const {connector} = ArgvService.getConfig();
7
- const connectorType = process.env.VCS_CONNECTOR_TYPE || connector && connector.type;
7
+ const connectorType = process.env.VCS_CONNECTOR_TYPE || (connector && connector.type);
8
8
 
9
9
  switch (connectorType) {
10
10
  case SourceType.GITHUB:
@@ -1,4 +1,4 @@
1
- import log from '@doc-tools/transform/lib/log';
1
+ import log from '@diplodoc/transform/lib/log';
2
2
  import {extname} from 'path';
3
3
  import {Observable, Subject} from 'threads/observable';
4
4
  import {expose} from 'threads';
@@ -17,11 +17,7 @@ interface ProcessLinterWorkerOptions {
17
17
  presetStorage: PresetStorage;
18
18
  }
19
19
 
20
- async function run({
21
- argvConfig,
22
- presetStorage,
23
- navigationPaths,
24
- }: ProcessLinterWorkerOptions) {
20
+ async function run({argvConfig, presetStorage, navigationPaths}: ProcessLinterWorkerOptions) {
25
21
  ArgvService.set(argvConfig);
26
22
  PresetService.setPresetStorage(presetStorage);
27
23
  TocService.setNavigationPaths(navigationPaths);