@diplodoc/cli 4.0.0 → 4.2.0
This diff shows the changes between publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
- package/build/app.client.css +2678 -41
- package/build/app.client.js +1 -1
- package/build/index.js +418 -309
- package/build/index.js.map +3 -3
- package/build/linter.js +89 -81
- package/build/linter.js.map +2 -2
- package/package.json +12 -11
- package/src/cmd/build/index.ts +29 -15
- package/src/cmd/publish/index.ts +1 -6
- package/src/cmd/publish/upload.ts +22 -16
- package/src/cmd/translate/index.ts +51 -37
- package/src/cmd/xliff/compose.ts +16 -7
- package/src/cmd/xliff/extract.ts +18 -9
- package/src/cmd/xliff/index.ts +4 -1
- package/src/constants.ts +24 -24
- package/src/index.ts +3 -3
- package/src/models.ts +15 -10
- package/src/resolvers/lintPage.ts +3 -9
- package/src/resolvers/md2html.ts +27 -19
- package/src/resolvers/md2md.ts +16 -16
- package/src/services/authors.ts +4 -2
- package/src/services/contributors.ts +26 -12
- package/src/services/includers/batteries/common.ts +14 -2
- package/src/services/includers/batteries/generic.ts +27 -9
- package/src/services/includers/batteries/sourcedocs.ts +4 -1
- package/src/services/includers/batteries/unarchive.ts +8 -4
- package/src/services/includers/index.ts +6 -4
- package/src/services/leading.ts +24 -27
- package/src/services/metadata.ts +35 -10
- package/src/services/plugins.ts +1 -1
- package/src/services/preset.ts +2 -2
- package/src/services/tocs.ts +28 -23
- package/src/services/utils.ts +15 -5
- package/src/steps/processAssets.ts +2 -8
- package/src/steps/processExcludedFiles.ts +11 -20
- package/src/steps/processLinter.ts +9 -8
- package/src/steps/processLogs.ts +2 -7
- package/src/steps/processMapFile.ts +8 -11
- package/src/steps/processPages.ts +97 -67
- package/src/steps/processServiceFiles.ts +4 -2
- package/src/steps/publishFilesToS3.ts +16 -12
- package/src/utils/file.ts +5 -1
- package/src/utils/glob.ts +1 -3
- package/src/utils/logger.ts +1 -1
- package/src/utils/markup.ts +28 -13
- package/src/utils/singlePage.ts +11 -10
- package/src/utils/toc.ts +20 -7
- package/src/utils/worker.ts +1 -1
- package/src/validator.ts +25 -17
- package/src/vcs-connector/client/github.ts +16 -8
- package/src/vcs-connector/connector-validator.ts +13 -7
- package/src/vcs-connector/github.ts +38 -11
- package/src/vcs-connector/index.ts +1 -1
- package/src/workers/linter/index.ts +2 -6
- package/CHANGELOG.md +0 -785
- package/build/lib.js +0 -3374
- package/build/lib.js.map +0 -7
package/src/utils/markup.ts
CHANGED
@@ -12,8 +12,11 @@ export interface TitleMeta {
 }
 export type Meta = TitleMeta & Resources;

-export function generateStaticMarkup(
-…
+export function generateStaticMarkup(
+    props: DocInnerProps<DocPageData>,
+    pathToBundle: string,
+): string {
+    const {title: metaTitle, style, script} = (props.data.meta as Meta) || {};
     const {title: tocTitle} = props.data.toc;
     const {title: pageTitle} = props.data;

@@ -51,7 +54,9 @@ export function generateStaticMarkup(props: DocInnerProps<DocPageData>, pathToBu
                window.STATIC_CONTENT = ${staticContent}
                window.__DATA__ = ${JSON.stringify(props)};
            </script>
-            <script type="application/javascript" src="${client.bundle.js(
+            <script type="application/javascript" src="${client.bundle.js(
+                pathToBundle,
+            )}"></script>
        </body>
    </html>
    `;

@@ -83,7 +88,9 @@ function getMetadata(metadata: Record<string, string>): string {
     }

     // Exclude resources from meta, proceed them separately
-    const metaEntries = Object.entries(metadata).filter(
+    const metaEntries = Object.entries(metadata).filter(
+        ([key]) => !Object.keys(ResourceType).includes(key),
+    );

     return metaEntries
         .map(([name, content]) => {

@@ -96,15 +103,17 @@ function getResources({style, script}: Resources) {
     const resourcesTags: string[] = [];

     if (style) {
-        style.forEach((el, id) =>
-…
-…
+        style.forEach((el, id) =>
+            resourcesTags.push(
+                `<link rel="stylesheet" type="text/css" href="${el}" ${
+                    id === 0 && `id="${CUSTOM_STYLE}"`
+                }>`,
+            ),
+        );
     }

     if (script) {
-        script.forEach((el) => resourcesTags.push(
-            `<script src="${el}"></script>`,
-        ));
+        script.forEach((el) => resourcesTags.push(`<script src="${el}"></script>`));
     }

     return resourcesTags.join('\n');

@@ -112,14 +121,20 @@ function getResources({style, script}: Resources) {

 export const сarriage = platform === Platforms.WINDOWS ? '\r\n' : '\n';

-export function joinSinglePageResults(
+export function joinSinglePageResults(
+    singlePageResults: SinglePageResult[],
+    root: string,
+    tocDir: string,
+): string {
     const delimeter = `${сarriage}${сarriage}<hr class="yfm-page__delimeter">${сarriage}${сarriage}`;
     return singlePageResults
         .filter(({content}) => content)
-        .map(({content, path, title}) =>
+        .map(({content, path, title}) =>
+            preprocessPageHtmlForSinglePage(content, {root, path, tocDir, title}),
+        )
         .join(delimeter);
 }

 export function replaceDoubleToSingleQuotes(str: string): string {
-    return str.replace(/"/g, '
+    return str.replace(/"/g, "'");
 }
package/src/utils/singlePage.ts
CHANGED
@@ -1,7 +1,7 @@
 import HTMLElement from 'node-html-parser/dist/nodes/html';
 import {parse} from 'node-html-parser';
 import {resolve, sep, relative} from 'path';
-import {resolveRelativePath} from '@
+import {resolveRelativePath} from '@diplodoc/transform/lib/utilsFS';
 import url from 'url';
 import _ from 'lodash';

@@ -25,8 +25,9 @@ const HEADERS_SELECTOR = 'h1, h2, h3, h4, h5, h6';
 function getNewNode(options: ModifyNode): HTMLElement | null {
     const {rawTagName, innerHTML, attrEntries} = options;

-    const nodeNew = parse(`<html><${rawTagName}></${rawTagName}></html>`)
-…
+    const nodeNew = parse(`<html><${rawTagName}></${rawTagName}></html>`).querySelector(
+        `${rawTagName}`,
+    );

     if (!nodeNew) {
         return null;

@@ -63,7 +64,6 @@ export function tryFixFirstPageHeader(root: HTMLElement) {
     firstPageHeader.rawTagName = 'h1';
 }

-
 export function replaceLinks(rootEl: HTMLElement, options: PreprocessSinglePageOptions) {
     const {root, path, tocDir} = options;

@@ -74,7 +74,6 @@ export function replaceLinks(rootEl: HTMLElement, options: PreprocessSinglePageO
         const linkFullPath = resolveRelativePath(resolvedPath, href);
         const isLinkOutOfToc = !linkFullPath.startsWith(tocDir);

-
         let preparedHref = href;

         if (isLinkOutOfToc) {

@@ -93,7 +92,6 @@ export function replaceLinks(rootEl: HTMLElement, options: PreprocessSinglePageO
             }
         }

-
         node.setAttribute('href', preparedHref);
     });
 }

@@ -112,12 +110,10 @@ export function replaceImages(rootEl: HTMLElement, options: PreprocessSinglePage
         const linkFullPath = resolveRelativePath(resolvedPath, href);
         const preparedHref = relative(tocDir, linkFullPath);

-
         node.setAttribute('src', preparedHref);
     });
 }

-
 function prepareAnchorAttr(name: string, value: string, pageId: string) {
     switch (name) {
         case 'href':

@@ -153,7 +149,9 @@ export function addPagePrefixToAnchors(rootEl: HTMLElement, options: PreprocessS

     const mainHeader = rootEl.querySelector('h1');
     if (mainHeader) {
-        const anchor = parse(
+        const anchor = parse(
+            `<a class="yfm-anchor" aria-hidden="true" href="${pageIdAnchor}" id="${pageId}"></a>`,
+        );
         if (!anchor) {
             return;
         }

@@ -214,7 +212,10 @@ export function transformLinkToOriginalArticle(opts: {root: string; currentPath:
     return currentPath.replace(root, '').replace(/\.(md|ya?ml|html)$/i, '');
 }

-export function preprocessPageHtmlForSinglePage(
+export function preprocessPageHtmlForSinglePage(
+    content: string,
+    options: PreprocessSinglePageOptions,
+) {
     const root = parse(content);

     addMainTitle(root, options);
package/src/utils/toc.ts
CHANGED
@@ -13,9 +13,14 @@ export function transformToc(toc: YfmToc | null, pathToFileDirectory: string): Y
     const localToc: YfmToc = JSON.parse(JSON.stringify(toc));

     if (localToc.items) {
-        localToc.items = filterFiles(
-…
-…
+        localToc.items = filterFiles(
+            localToc.items,
+            'items',
+            {},
+            {
+                removeHiddenTocItems: true,
+            },
+        );
     }

     const baseTocPath: string = localToc.base || '';

@@ -52,7 +57,10 @@ export function transformToc(toc: YfmToc | null, pathToFileDirectory: string): Y
     return localToc;
 }

-export function transformTocForSinglePage(
+export function transformTocForSinglePage(
+    toc: YfmToc | null,
+    options: {root: string; currentPath: string},
+) {
     const {root, currentPath} = options;

     if (!toc) {

@@ -62,9 +70,14 @@ export function transformTocForSinglePage(toc: YfmToc | null, options: {root: st
     const localToc: YfmToc = JSON.parse(JSON.stringify(toc));

     if (localToc.items) {
-        localToc.items = filterFiles(
-…
-…
+        localToc.items = filterFiles(
+            localToc.items,
+            'items',
+            {},
+            {
+                removeHiddenTocItems: true,
+            },
+        );
     }

     function processItems(items: YfmToc[]) {
package/src/utils/worker.ts
CHANGED
package/src/validator.ts
CHANGED
@@ -3,7 +3,7 @@ import {join, resolve} from 'path';
 import {readFileSync} from 'fs';
 import {load} from 'js-yaml';
 import merge from 'lodash/merge';
-import log from '@
+import log from '@diplodoc/transform/lib/log';
 import {REDIRECTS_FILENAME, LINT_CONFIG_FILENAME, YFM_CONFIG_FILENAME} from './constants';
 import {ConnectorValidatorProps} from './vcs-connector/connector-models';

@@ -20,25 +20,25 @@ function requiredValueValidator(value: unknown): Boolean {
 }

 const validators: Record<string, ConnectorValidatorProps> = {
-…
+    storageEndpoint: {
         errorMessage: 'Endpoint of S3 storage must be provided when publishes.',
         validateFn: notEmptyStringValidator,
     },
-…
+    storageBucket: {
         errorMessage: 'Bucket name of S3 storage must be provided when publishes.',
         validateFn: notEmptyStringValidator,
     },
-…
+    storageKeyId: {
         errorMessage: 'Key Id of S3 storage must be provided when publishes.',
         validateFn: notEmptyStringValidator,
         defaultValue: process.env.YFM_STORAGE_KEY_ID,
     },
-…
+    storageSecretKey: {
         errorMessage: 'Secret key of S3 storage must be provided when publishes.',
         validateFn: notEmptyStringValidator,
         defaultValue: process.env.YFM_STORAGE_SECRET_KEY,
     },
-…
+    storageRegion: {
         errorMessage: 'Region of S3 storage must be provided when publishes.',
         validateFn: notEmptyStringValidator,
         defaultValue: 'eu-central-1',

@@ -56,26 +56,32 @@ interface RedirectsConfig {
 }

 function validateRedirects(redirectsConfig: RedirectsConfig, pathToRedirects: string) {
-    const redirects: Redirect[] = Object.keys(redirectsConfig).reduce(
-…
-…
-…
-…
+    const redirects: Redirect[] = Object.keys(redirectsConfig).reduce(
+        (res, redirectSectionName) => {
+            const sectionRedirects = redirectsConfig[redirectSectionName];
+            res.push(...sectionRedirects);
+            return res;
+        },
+        [] as Redirect[],
+    );

     const getContext = (from: string, to: string) => ` [Context: \n- from: ${from}\n- to: ${to} ]`;
-    const formatMessage = (message: string, pathname: string, from: string, to: string) =>
-        `${pathname}: ${message} ${getContext(from, to)}
-        );
+    const formatMessage = (message: string, pathname: string, from: string, to: string) =>
+        `${pathname}: ${message} ${getContext(from, to)}`;

     redirects.forEach((redirect) => {
         const {from, to} = redirect;

         if (!from || !to) {
-            throw new Error(
+            throw new Error(
+                formatMessage('One of the two parameters is missing', pathToRedirects, from, to),
+            );
         }

         if (from === to) {
-            throw new Error(
+            throw new Error(
+                formatMessage('Parameters must be different', pathToRedirects, from, to),
+            );
         }
     });
 }

@@ -83,7 +89,9 @@ function validateRedirects(redirectsConfig: RedirectsConfig, pathToRedirects: st
 export function argvValidator(argv: Arguments<Object>): Boolean {
     try {
         // Combine passed argv and properties from configuration file.
-        const pathToConfig = argv.config
+        const pathToConfig = argv.config
+            ? String(argv.config)
+            : join(String(argv.input), YFM_CONFIG_FILENAME);
         const content = readFileSync(resolve(pathToConfig), 'utf8');
         Object.assign(argv, load(content) || {});
     } catch (error) {
package/src/vcs-connector/client/github.ts
CHANGED
@@ -1,4 +1,4 @@
-import log from '@
+import log from '@diplodoc/transform/lib/log';
 import {Octokit} from '@octokit/core';
 import {ArgvService} from '../../services';
 import {

@@ -22,7 +22,10 @@ async function getRepoUser(octokit: Octokit, username: string): Promise<GithubUs
     }
 }

-async function getRepoCommitByHash(
+async function getRepoCommitByHash(
+    httpClientByToken: Octokit,
+    hashCommit: string,
+): Promise<GithubCommitDTO | null> {
     const {connector} = ArgvService.getConfig();

     const neededProperties = [GitHubConnectorFields.OWNER, GitHubConnectorFields.REPO];

@@ -33,15 +36,20 @@ async function getRepoCommitByHash(httpClientByToken: Octokit, hashCommit: strin
     }

     try {
-        const commit = await httpClientByToken.request(
-            owner
-…
-…
-…
+        const commit = await httpClientByToken.request(
+            'GET /repos/{owner}/{repo}/commits/{commit_sha}',
+            {
+                owner: validatedFileds[GitHubConnectorFields.OWNER] as string,
+                repo: validatedFileds[GitHubConnectorFields.REPO] as string,
+                commit_sha: hashCommit,
+            },
+        );

         return commit.data;
     } catch (error) {
-        log.warn(
+        log.warn(
+            `Getting commit by sha has been failed for GitHub. SHA commit: ${hashCommit}. Error: ${error}`,
+        );
         return null;
     }
 }
package/src/vcs-connector/connector-validator.ts
CHANGED
@@ -1,5 +1,10 @@
-import log from '@
-import {
+import log from '@diplodoc/transform/lib/log';
+import {
+    ConnectorValidatorProps,
+    GitHubConnectorFields,
+    SourceType,
+    VCSConnectorConfig,
+} from './connector-models';
 import {getMsgСonfigurationMustBeProvided} from '../constants';

 const githubConnectorValidator: Record<string, ConnectorValidatorProps> = {

@@ -26,8 +31,8 @@ const githubConnectorValidator: Record<string, ConnectorValidatorProps> = {
 };

 const connectorValidator: Record<string, ConnectorValidatorProps> = {
-…
-        warnMessage: '
+    type: {
+        warnMessage: "'type' must be provided for repo.",
         validateFn: notEmptyValue,
     },
     [SourceType.GITHUB]: {

@@ -56,8 +61,8 @@ function notEmptyValue(value?: unknown): boolean {
 export function validateConnectorFields(
     sourceType: SourceType,
     fieldNames: GitHubConnectorFields[],
-    repoProperties?: VCSConnectorConfig
-…
+    repoProperties?: VCSConnectorConfig,
+): Record<string, unknown> {
     const repoValidator = connectorValidator[sourceType];

     if (!repoValidator) {

@@ -83,7 +88,8 @@ export function validateConnectorFields(
             continue;
         }

-        const propertyValue =
+        const propertyValue =
+            propertyValidator.defaultValue || repoProperties[sourceType]?.[property];

         if (!propertyValidator.validateFn(propertyValue)) {
             createLog(propertyValidator);
package/src/vcs-connector/github.ts
CHANGED
@@ -41,9 +41,11 @@ async function getGitHubVCSConnector(): Promise<VCSConnector | undefined> {
         return undefined;
     }

-    let addNestedContributorsForPath: NestedContributorsForPathFunction = () => {
-    let getContributorsByPath: ContributorsByPathFunction = () =>
-…
+    let addNestedContributorsForPath: NestedContributorsForPathFunction = () => {};
+    let getContributorsByPath: ContributorsByPathFunction = () =>
+        Promise.resolve({} as FileContributors);
+    const getExternalAuthorByPath: ExternalAuthorByPathFunction = (path: string) =>
+        authorByPath.get(path) ?? null;

     if (contributors) {
         await getAllContributorsTocFiles(httpClientByToken);

@@ -95,7 +97,14 @@ async function getAllContributorsTocFiles(httpClientByToken: Octokit): Promise<v
     const tmpMasterBranch = 'yfm-tmp-master';

     try {
-        await simpleGit(options).raw(
+        await simpleGit(options).raw(
+            'worktree',
+            'add',
+            '-b',
+            tmpMasterBranch,
+            masterDir,
+            'origin/master',
+        );
         const fullRepoLogString = await simpleGit({
             baseDir: join(rootInput, masterDir),
         }).raw(

@@ -127,8 +136,10 @@ async function getAllContributorsTocFiles(httpClientByToken: Octokit): Promise<v
     logger.info('', ALL_CONTRIBUTORS_RECEIVED);
 }

-async function matchContributionsForEachPath(
-…
+async function matchContributionsForEachPath(
+    repoLogs: string[],
+    httpClientByToken: Octokit,
+): Promise<void> {
     for (const repoLog of repoLogs) {
         if (!repoLog) {
             continue;

@@ -149,7 +160,10 @@ async function matchContributionsForEachPath(repoLogs: string[], httpClientByTok
         if (hasContributorData === undefined) {
             logger.info('Contributors: Getting data for', email);

-            contributorDataByHash = await getContributorDataByHashCommit(
+            contributorDataByHash = await getContributorDataByHashCommit(
+                httpClientByToken,
+                hashCommit,
+            );

             if (contributorDataByHash) {
                 const paths = dataArray.splice(1);

@@ -185,7 +199,9 @@ async function matchAuthorsForEachPath(authorRepoLogs: string[], httpClientByTok
     }
 }

-async function getContributorDataByHashCommit(
+async function getContributorDataByHashCommit(
+    httpClientByToken: Octokit,
+    hashCommit: string,
 ): Promise<Contributor | null> {
     const repoCommit = await github.getRepoCommitByHash(httpClientByToken, hashCommit);

@@ -210,7 +226,11 @@ async function getContributorDataByHashCommit(httpClientByToken: Octokit, hashCo
     };
 }

-async function getAuthorByPaths(
+async function getAuthorByPaths(
+    commitInfo: CommitInfo,
+    paths: string[],
+    httpClientByToken: Octokit,
+) {
     for (const path of paths) {
         if (!path) {
             continue;

@@ -274,11 +294,18 @@ async function getUserByLogin(octokit: Octokit, userLogin: string): Promise<Cont
     };
 }

-function addNestedContributorsForPathFunction(
+function addNestedContributorsForPathFunction(
+    path: string,
+    nestedContributors: Contributors,
+): void {
     addContributorForPath([path], nestedContributors, true);
 }

-function addContributorForPath(
+function addContributorForPath(
+    paths: string[],
+    newContributor: Contributors,
+    hasIncludes = false,
+): void {
     paths.forEach((path: string) => {
         const normalizePath = normalize(addSlashPrefix(path));

package/src/vcs-connector/index.ts
CHANGED
@@ -4,7 +4,7 @@ import {VCSConnector, SourceType} from './connector-models';

 export async function getVCSConnector(): Promise<VCSConnector | undefined> {
     const {connector} = ArgvService.getConfig();
-    const connectorType = process.env.VCS_CONNECTOR_TYPE || connector && connector.type;
+    const connectorType = process.env.VCS_CONNECTOR_TYPE || (connector && connector.type);

     switch (connectorType) {
         case SourceType.GITHUB:
package/src/workers/linter/index.ts
CHANGED
@@ -1,4 +1,4 @@
-import log from '@
+import log from '@diplodoc/transform/lib/log';
 import {extname} from 'path';
 import {Observable, Subject} from 'threads/observable';
 import {expose} from 'threads';

@@ -17,11 +17,7 @@ interface ProcessLinterWorkerOptions {
     presetStorage: PresetStorage;
 }

-async function run({
-    argvConfig,
-    presetStorage,
-    navigationPaths,
-}: ProcessLinterWorkerOptions) {
+async function run({argvConfig, presetStorage, navigationPaths}: ProcessLinterWorkerOptions) {
     ArgvService.set(argvConfig);
     PresetService.setPresetStorage(presetStorage);
     TocService.setNavigationPaths(navigationPaths);