@diplodoc/cli 4.0.0
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in the public registry.
- package/CHANGELOG.md +785 -0
- package/LICENSE +21 -0
- package/README.md +62 -0
- package/README.ru.md +63 -0
- package/build/app.client.css +47 -0
- package/build/app.client.js +3 -0
- package/build/index.js +3993 -0
- package/build/index.js.map +7 -0
- package/build/lib.js +3374 -0
- package/build/lib.js.map +7 -0
- package/build/linter.js +1265 -0
- package/build/linter.js.map +7 -0
- package/package.json +126 -0
- package/src/cmd/build/index.ts +304 -0
- package/src/cmd/index.ts +4 -0
- package/src/cmd/publish/index.ts +92 -0
- package/src/cmd/publish/upload.ts +61 -0
- package/src/cmd/translate/index.ts +261 -0
- package/src/cmd/xliff/compose.ts +222 -0
- package/src/cmd/xliff/extract.ts +237 -0
- package/src/cmd/xliff/index.ts +27 -0
- package/src/constants.ts +122 -0
- package/src/globals.d.ts +1 -0
- package/src/index.ts +54 -0
- package/src/models.ts +249 -0
- package/src/packages/credentials/index.ts +1 -0
- package/src/packages/credentials/yandex-oauth.ts +42 -0
- package/src/resolvers/index.ts +3 -0
- package/src/resolvers/lintPage.ts +119 -0
- package/src/resolvers/md2html.ts +142 -0
- package/src/resolvers/md2md.ts +147 -0
- package/src/services/argv.ts +38 -0
- package/src/services/authors.ts +64 -0
- package/src/services/contributors.ts +104 -0
- package/src/services/includers/batteries/common.ts +34 -0
- package/src/services/includers/batteries/generic.ts +130 -0
- package/src/services/includers/batteries/index.ts +3 -0
- package/src/services/includers/batteries/sourcedocs.ts +33 -0
- package/src/services/includers/batteries/unarchive.ts +97 -0
- package/src/services/includers/index.ts +157 -0
- package/src/services/index.ts +6 -0
- package/src/services/leading.ts +88 -0
- package/src/services/metadata.ts +249 -0
- package/src/services/plugins.ts +76 -0
- package/src/services/preset.ts +55 -0
- package/src/services/tocs.ts +401 -0
- package/src/services/utils.ts +151 -0
- package/src/steps/index.ts +6 -0
- package/src/steps/processAssets.ts +36 -0
- package/src/steps/processExcludedFiles.ts +47 -0
- package/src/steps/processLinter.ts +100 -0
- package/src/steps/processLogs.ts +18 -0
- package/src/steps/processMapFile.ts +35 -0
- package/src/steps/processPages.ts +312 -0
- package/src/steps/processServiceFiles.ts +95 -0
- package/src/steps/publishFilesToS3.ts +47 -0
- package/src/utils/file.ts +17 -0
- package/src/utils/glob.ts +14 -0
- package/src/utils/index.ts +8 -0
- package/src/utils/logger.ts +42 -0
- package/src/utils/markup.ts +125 -0
- package/src/utils/path.ts +24 -0
- package/src/utils/presets.ts +20 -0
- package/src/utils/singlePage.ts +228 -0
- package/src/utils/toc.ts +87 -0
- package/src/utils/url.ts +3 -0
- package/src/utils/worker.ts +10 -0
- package/src/validator.ts +150 -0
- package/src/vcs-connector/client/github.ts +52 -0
- package/src/vcs-connector/connector-models.ts +76 -0
- package/src/vcs-connector/connector-validator.ts +114 -0
- package/src/vcs-connector/github.ts +333 -0
- package/src/vcs-connector/index.ts +15 -0
- package/src/workers/linter/index.ts +62 -0
--- /dev/null
+++ package/src/vcs-connector/client/github.ts
@@ -0,0 +1,52 @@
+import log from '@doc-tools/transform/lib/log';
+import {Octokit} from '@octokit/core';
+import {ArgvService} from '../../services';
+import {
+    GithubCommitDTO,
+    GitHubConnectorFields,
+    SourceType,
+    GithubUserDTO,
+} from '../connector-models';
+import {validateConnectorFields} from '../connector-validator';
+
+async function getRepoUser(octokit: Octokit, username: string): Promise<GithubUserDTO | null> {
+    try {
+        const user = await octokit.request('GET /users/{username}', {
+            username,
+        });
+
+        return user.data as GithubUserDTO;
+    } catch (error) {
+        log.warn(`Getting user for GitHub has been failed. Username: ${username}. Error: ${error}`);
+        return null;
+    }
+}
+
+async function getRepoCommitByHash(httpClientByToken: Octokit, hashCommit: string): Promise<GithubCommitDTO | null> {
+    const {connector} = ArgvService.getConfig();
+
+    const neededProperties = [GitHubConnectorFields.OWNER, GitHubConnectorFields.REPO];
+    const validatedFileds = validateConnectorFields(SourceType.GITHUB, neededProperties, connector);
+
+    if (Object.keys(validatedFileds).length === 0) {
+        return null;
+    }
+
+    try {
+        const commit = await httpClientByToken.request('GET /repos/{owner}/{repo}/commits/{commit_sha}', {
+            owner: validatedFileds[GitHubConnectorFields.OWNER] as string,
+            repo: validatedFileds[GitHubConnectorFields.REPO] as string,
+            commit_sha: hashCommit,
+        });
+
+        return commit.data;
+    } catch (error) {
+        log.warn(`Getting commit by sha has been failed for GitHub. SHA commit: ${hashCommit}. Error: ${error}`);
+        return null;
+    }
+}
+
+export default {
+    getRepoUser,
+    getRepoCommitByHash,
+};
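For orientation, here is a minimal usage sketch of the client module above (not part of the package). It assumes an Octokit instance authenticated via GITHUB_TOKEN and a relative import from the src/vcs-connector directory; 'octocat' is a placeholder login.

```ts
import {Octokit} from '@octokit/core';
import github from './client/github'; // assumed relative path from src/vcs-connector

async function example(): Promise<void> {
    const octokit = new Octokit({auth: process.env.GITHUB_TOKEN});

    // Resolves to a GithubUserDTO, or null if the request fails (a warning is logged).
    const user = await github.getRepoUser(octokit, 'octocat');
    console.log(user?.login, user?.html_url);
}
```

Note that getRepoCommitByHash additionally reads the connector settings through ArgvService, so it only works once the CLI configuration has been initialized.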
--- /dev/null
+++ package/src/vcs-connector/connector-models.ts
@@ -0,0 +1,76 @@
+import {
+    Contributors,
+    ContributorsByPathFunction,
+    ExternalAuthorByPathFunction,
+    NestedContributorsForPathFunction,
+    UserByLoginFunction,
+} from '../models';
+
+/* eslint-disable camelcase */
+export interface ConnectorValidatorProps {
+    validateFn: (value: unknown) => Boolean;
+    defaultValue?: unknown;
+    errorMessage?: string;
+    warnMessage?: string;
+    relatedValidator?: Record<string, ConnectorValidatorProps>;
+}
+
+export enum SourceType {
+    GITHUB = 'github',
+}
+
+export enum GitHubConnectorFields {
+    OWNER = 'owner',
+    REPO = 'repo',
+    TOKEN = 'token',
+    ENDPOINT = 'endpoint',
+}
+
+export interface VCSConnector {
+    getExternalAuthorByPath: ExternalAuthorByPathFunction;
+    addNestedContributorsForPath: NestedContributorsForPathFunction;
+    getContributorsByPath: ContributorsByPathFunction;
+    getUserByLogin: UserByLoginFunction;
+}
+
+export interface VCSConnectorConfig {
+    type: string;
+    [SourceType.GITHUB]?: {
+        endpoint: string;
+        token: string;
+        owner: string;
+        repo: string;
+    };
+}
+
+export interface FileContributors {
+    contributors: Contributors;
+    hasIncludes: boolean;
+}
+
+export interface GithubCommitDTO {
+    commit: {
+        author: {
+            name: string;
+            email: string;
+        };
+    };
+    author: {
+        login: string;
+        avatar_url: string;
+        html_url: string;
+    };
+}
+
+export interface GithubUserDTO {
+    avatar_url: string;
+    html_url: string;
+    email: string;
+    login: string;
+    name: string;
+}
+
+export interface GitLogsDTO {
+    author_email: string;
+    author_name: string;
+}
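The VCSConnectorConfig interface above maps onto the connector section of the CLI configuration (it is read via ArgvService.getConfig().connector in the files below). A minimal sketch of a conforming value, assuming the types are imported from this module; the owner and repo values are placeholders.

```ts
import {SourceType, VCSConnectorConfig} from './connector-models';

// Hypothetical connector configuration matching the shape declared above.
const connector: VCSConnectorConfig = {
    type: SourceType.GITHUB,
    [SourceType.GITHUB]: {
        endpoint: 'https://api.github.com', // or a GitHub Enterprise API endpoint
        token: process.env.GITHUB_TOKEN ?? '',
        owner: 'example-org', // placeholder
        repo: 'example-docs', // placeholder
    },
};
```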
--- /dev/null
+++ package/src/vcs-connector/connector-validator.ts
@@ -0,0 +1,114 @@
+import log from '@doc-tools/transform/lib/log';
+import {ConnectorValidatorProps, GitHubConnectorFields, SourceType, VCSConnectorConfig} from './connector-models';
+import {getMsgСonfigurationMustBeProvided} from '../constants';
+
+const githubConnectorValidator: Record<string, ConnectorValidatorProps> = {
+    [GitHubConnectorFields.ENDPOINT]: {
+        warnMessage: `'${GitHubConnectorFields.ENDPOINT}' must be provided for GitHub repo.`,
+        validateFn: notEmptyValue,
+        defaultValue: process.env.GITHUB_BASE_URL,
+    },
+    [GitHubConnectorFields.TOKEN]: {
+        warnMessage: `'${GitHubConnectorFields.TOKEN}' must be provided for GitHub repo.`,
+        validateFn: notEmptyValue,
+        defaultValue: process.env.GITHUB_TOKEN,
+    },
+    [GitHubConnectorFields.OWNER]: {
+        warnMessage: `'${GitHubConnectorFields.OWNER}' must be provided for GitHub repo.`,
+        validateFn: notEmptyValue,
+        defaultValue: process.env.GITHUB_OWNER,
+    },
+    [GitHubConnectorFields.REPO]: {
+        warnMessage: `'${GitHubConnectorFields.REPO}' must be provided for GitHub repo.`,
+        validateFn: notEmptyValue,
+        defaultValue: process.env.GITHUB_REPO,
+    },
+};
+
+const connectorValidator: Record<string, ConnectorValidatorProps> = {
+    'type': {
+        warnMessage: '\'type\' must be provided for repo.',
+        validateFn: notEmptyValue,
+    },
+    [SourceType.GITHUB]: {
+        warnMessage: `'${SourceType.GITHUB}' object must be filled needed fields.`,
+        validateFn: notEmptyObject,
+        relatedValidator: githubConnectorValidator,
+    },
+};
+
+function notEmptyObject(filed?: unknown): boolean {
+    if (typeof filed === 'object') {
+        return Boolean(filed && Object.getOwnPropertyNames(filed).length);
+    }
+
+    return false;
+}
+
+function notEmptyValue(value?: unknown): boolean {
+    if (typeof value === 'string') {
+        return Boolean(value);
+    }
+
+    return false;
+}
+
+export function validateConnectorFields(
+    sourceType: SourceType,
+    fieldNames: GitHubConnectorFields[],
+    repoProperties?: VCSConnectorConfig): Record<string, unknown> {
+
+    const repoValidator = connectorValidator[sourceType];
+
+    if (!repoValidator) {
+        log.error(`Invalid repo type: ${repoValidator}`);
+        return {};
+    }
+
+    const isValidRepo = repoValidator.validateFn(repoProperties && repoProperties[sourceType]);
+    const relatedRepoValidator = repoValidator.relatedValidator;
+    if (!repoProperties || !isValidRepo || !relatedRepoValidator) {
+        createLog(repoValidator);
+        return {};
+    }
+
+    let isValidProperties = true;
+    const validatedFields: Record<string, unknown> = {};
+
+    for (const property of fieldNames) {
+        const propertyValidator = relatedRepoValidator[property];
+
+        if (!propertyValidator) {
+            log.warn(`The property '${property}' doesn't exist in ${sourceType} repo.`);
+            continue;
+        }
+
+        const propertyValue = propertyValidator.defaultValue || repoProperties[sourceType]?.[property];
+
+        if (!propertyValidator.validateFn(propertyValue)) {
+            createLog(propertyValidator);
+            isValidProperties = false;
+        }
+
+        validatedFields[property] = propertyValue;
+    }
+
+    if (isValidProperties) {
+        return validatedFields;
+    }
+
+    log.warn(getMsgСonfigurationMustBeProvided(sourceType));
+    return {};
+}
+
+function createLog(validator: ConnectorValidatorProps): void {
+    if (validator.errorMessage) {
+        return log.error(validator.errorMessage);
+    }
+
+    if (validator.warnMessage) {
+        return log.warn(validator.warnMessage);
+    }
+
+    throw new Error(`Invalid validator: ${JSON.stringify(validator)}.`);
+}
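A hedged sketch of how validateConnectorFields above can be called, reusing a config object like the one sketched after connector-models.ts. An empty result means validation failed and the corresponding warnings were already written to the transform log.

```ts
import {validateConnectorFields} from './connector-validator';
import {GitHubConnectorFields, SourceType, VCSConnectorConfig} from './connector-models';

// Hypothetical config; in the package it comes from ArgvService.getConfig().connector.
const connector: VCSConnectorConfig = {
    type: SourceType.GITHUB,
    [SourceType.GITHUB]: {
        endpoint: 'https://api.github.com',
        token: process.env.GITHUB_TOKEN ?? '',
        owner: 'example-org',
        repo: 'example-docs',
    },
};

const fields = validateConnectorFields(
    SourceType.GITHUB,
    [GitHubConnectorFields.OWNER, GitHubConnectorFields.REPO],
    connector,
);

if (Object.keys(fields).length === 0) {
    // Validation failed; the validator already logged the relevant warnings.
}
```

Note that the defaults come from environment variables (GITHUB_BASE_URL, GITHUB_TOKEN, GITHUB_OWNER, GITHUB_REPO) and, as the validator is written, a set environment variable takes precedence over the value from the config object.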
--- /dev/null
+++ package/src/vcs-connector/github.ts
@@ -0,0 +1,333 @@
+import {Octokit} from '@octokit/core';
+import {join, normalize} from 'path';
+import simpleGit, {SimpleGitOptions} from 'simple-git';
+import {minimatch} from 'minimatch';
+
+import github from './client/github';
+import {ArgvService} from '../services';
+import {
+    CommitInfo,
+    Contributor,
+    Contributors,
+    ContributorsByPathFunction,
+    ExternalAuthorByPathFunction,
+    NestedContributorsForPathFunction,
+} from '../models';
+import {
+    FileContributors,
+    GitHubConnectorFields,
+    SourceType,
+    VCSConnector,
+} from './connector-models';
+import {
+    ALL_CONTRIBUTORS_RECEIVED,
+    FIRST_COMMIT_FROM_ROBOT_IN_GITHUB,
+    GETTING_ALL_CONTRIBUTORS,
+} from '../constants';
+import {addSlashPrefix, logger} from '../utils';
+import {validateConnectorFields} from './connector-validator';
+import process from 'process';
+
+const authorByGitEmail: Map<string, Contributor | null> = new Map();
+const authorByPath: Map<string, Contributor | null> = new Map();
+const contributorsByPath: Map<string, FileContributors> = new Map();
+const contributorsData: Map<string, Contributor | null> = new Map();
+
+async function getGitHubVCSConnector(): Promise<VCSConnector | undefined> {
+    const {contributors} = ArgvService.getConfig();
+
+    const httpClientByToken = getHttpClientByToken();
+    if (!httpClientByToken) {
+        return undefined;
+    }
+
+    let addNestedContributorsForPath: NestedContributorsForPathFunction = () => { };
+    let getContributorsByPath: ContributorsByPathFunction = () => Promise.resolve({} as FileContributors);
+    const getExternalAuthorByPath: ExternalAuthorByPathFunction = (path: string) => authorByPath.get(path) ?? null;
+
+    if (contributors) {
+        await getAllContributorsTocFiles(httpClientByToken);
+        addNestedContributorsForPath = (path: string, nestedContributors: Contributors) =>
+            addNestedContributorsForPathFunction(path, nestedContributors);
+        getContributorsByPath = async (path: string) => getFileContributorsByPath(path);
+    }
+
+    return {
+        getExternalAuthorByPath,
+        addNestedContributorsForPath,
+        getContributorsByPath,
+        getUserByLogin: (login: string) => getUserByLogin(httpClientByToken, login),
+    };
+}
+
+function getHttpClientByToken(): Octokit | null {
+    const {connector, contributors} = ArgvService.getConfig();
+
+    if (!contributors) {
+        return null;
+    }
+
+    const neededProperties = [GitHubConnectorFields.TOKEN, GitHubConnectorFields.ENDPOINT];
+    const validatedFileds = validateConnectorFields(SourceType.GITHUB, neededProperties, connector);
+
+    if (Object.keys(validatedFileds).length === 0) {
+        return null;
+    }
+
+    const octokit = new Octokit({
+        auth: validatedFileds[GitHubConnectorFields.TOKEN] as string,
+        baseUrl: validatedFileds[GitHubConnectorFields.ENDPOINT] as string,
+    });
+
+    return octokit;
+}
+
+async function getAllContributorsTocFiles(httpClientByToken: Octokit): Promise<void> {
+    const {rootInput} = ArgvService.getConfig();
+
+    const options: Partial<SimpleGitOptions> = {
+        baseDir: rootInput,
+    };
+
+    logger.info('', GETTING_ALL_CONTRIBUTORS);
+
+    const masterDir = './_yfm-master';
+    const tmpMasterBranch = 'yfm-tmp-master';
+
+    try {
+        await simpleGit(options).raw('worktree', 'add', '-b', tmpMasterBranch, masterDir, 'origin/master');
+        const fullRepoLogString = await simpleGit({
+            baseDir: join(rootInput, masterDir),
+        }).raw(
+            'log',
+            `${FIRST_COMMIT_FROM_ROBOT_IN_GITHUB}..HEAD`,
+            '--pretty=format:%ae, %an, %H',
+            '--name-only',
+        );
+        const repoLogs = fullRepoLogString.split('\n\n');
+        if (process.env.ENABLE_EXPERIMANTAL_AUTHORS) {
+            const fullAuthorRepoLogString = await simpleGit({
+                baseDir: join(rootInput, masterDir),
+            }).raw(
+                'log',
+                `${FIRST_COMMIT_FROM_ROBOT_IN_GITHUB}..HEAD`,
+                '--diff-filter=A',
+                '--pretty=format:%ae;%an;%H',
+                '--name-only',
+            );
+            const authorRepoLog = fullAuthorRepoLogString.split('\n\n');
+            await matchAuthorsForEachPath(authorRepoLog, httpClientByToken);
+        }
+        await matchContributionsForEachPath(repoLogs, httpClientByToken);
+    } finally {
+        await simpleGit(options).raw('worktree', 'remove', masterDir);
+        await simpleGit(options).raw('branch', '-d', tmpMasterBranch);
+    }
+
+    logger.info('', ALL_CONTRIBUTORS_RECEIVED);
+}
+
+async function matchContributionsForEachPath(repoLogs: string[], httpClientByToken: Octokit): Promise<void> {
+
+    for (const repoLog of repoLogs) {
+        if (!repoLog) {
+            continue;
+        }
+
+        const dataArray = repoLog.split('\n');
+        const userData = dataArray[0];
+        const [email, name, hashCommit] = userData.split(', ');
+
+        if (shouldAuthorBeIgnored({email, name})) {
+            continue;
+        }
+
+        const hasContributorData = contributorsData.get(email);
+
+        let contributorDataByHash;
+
+        if (hasContributorData === undefined) {
+            logger.info('Contributors: Getting data for', email);
+
+            contributorDataByHash = await getContributorDataByHashCommit(httpClientByToken, hashCommit);
+
+            if (contributorDataByHash) {
+                const paths = dataArray.splice(1);
+                addContributorForPath(paths, {
+                    [email]: contributorDataByHash,
+                });
+            }
+            contributorsData.set(email, contributorDataByHash);
+        } else if (hasContributorData) {
+            const paths = dataArray.splice(1);
+            addContributorForPath(paths, {
+                [email]: hasContributorData,
+            });
+        }
+    }
+}
+
+async function matchAuthorsForEachPath(authorRepoLogs: string[], httpClientByToken: Octokit) {
+    for (const repoLog of authorRepoLogs) {
+        if (!repoLog) {
+            continue;
+        }
+
+        const dataArray = repoLog.split('\n');
+        const [userData, ...paths] = dataArray;
+        const [email, name, hashCommit] = userData.split(';');
+
+        if (shouldAuthorBeIgnored({email, name})) {
+            continue;
+        }
+
+        await getAuthorByPaths({email, hashCommit}, paths, httpClientByToken);
+    }
+}
+
+async function getContributorDataByHashCommit(httpClientByToken: Octokit, hashCommit: string,
+): Promise<Contributor | null> {
+    const repoCommit = await github.getRepoCommitByHash(httpClientByToken, hashCommit);
+
+    if (!repoCommit) {
+        return null;
+    }
+
+    const {author, commit} = repoCommit;
+
+    if (!author) {
+        return null;
+    }
+
+    const {avatar_url: avatar, html_url: url, login} = author;
+
+    return {
+        avatar,
+        email: commit.author.email,
+        login,
+        name: commit.author.name,
+        url,
+    };
+}
+
+async function getAuthorByPaths(commitInfo: CommitInfo, paths: string[], httpClientByToken: Octokit) {
+    for (const path of paths) {
+        if (!path) {
+            continue;
+        }
+        const normalizePath = normalize(addSlashPrefix(path));
+        const {email, hashCommit} = commitInfo;
+
+        let authorToReturn = authorByGitEmail.get(email) || null;
+
+        if (!authorToReturn) {
+            logger.info('Authors: Getting data for', email);
+
+            const repoCommit = await github.getRepoCommitByHash(httpClientByToken, hashCommit);
+            if (!repoCommit) {
+                continue;
+            }
+
+            const {author, commit} = repoCommit;
+            if (!author) {
+                continue;
+            }
+
+            const {avatar_url: avatar, html_url: url, login} = author;
+            authorToReturn = {
+                avatar,
+                email: commit.author.email,
+                login,
+                name: commit.author.name,
+                url,
+            };
+            authorByGitEmail.set(email, authorToReturn);
+        }
+
+        authorByPath.set(normalizePath, authorToReturn);
+    }
+}
+
+async function getFileContributorsByPath(path: string): Promise<FileContributors> {
+    if (contributorsData.size === 0 || !contributorsByPath.has(path)) {
+        return {} as FileContributors;
+    }
+
+    return contributorsByPath.get(path) as FileContributors;
+}
+
+async function getUserByLogin(octokit: Octokit, userLogin: string): Promise<Contributor | null> {
+    const user = await github.getRepoUser(octokit, userLogin);
+
+    if (!user) {
+        return null;
+    }
+
+    const {avatar_url: avatar, html_url: url, email, login, name} = user;
+
+    return {
+        avatar,
+        email,
+        login,
+        name,
+        url,
+    };
+}
+
+function addNestedContributorsForPathFunction(path: string, nestedContributors: Contributors): void {
+    addContributorForPath([path], nestedContributors, true);
+}
+
+function addContributorForPath(paths: string[], newContributor: Contributors, hasIncludes = false): void {
+    paths.forEach((path: string) => {
+        const normalizePath = normalize(addSlashPrefix(path));
+
+        if (!contributorsByPath.has(normalizePath)) {
+            contributorsByPath.set(normalizePath, {
+                contributors: newContributor,
+                hasIncludes,
+            });
+            return;
+        }
+
+        const oldContributors = contributorsByPath.get(normalizePath);
+
+        contributorsByPath.set(normalizePath, {
+            contributors: {
+                ...oldContributors?.contributors,
+                ...newContributor,
+            },
+            hasIncludes,
+        });
+    });
+}
+
+type ShouldAuthorBeIgnoredArgs = {
+    email?: string;
+    name?: string;
+};
+
+function shouldAuthorBeIgnored({email, name}: ShouldAuthorBeIgnoredArgs) {
+    if (!(email || name)) {
+        return false;
+    }
+
+    const {ignoreAuthorPatterns} = ArgvService.getConfig();
+    if (!ignoreAuthorPatterns) {
+        return false;
+    }
+
+    for (const pattern of ignoreAuthorPatterns) {
+        if (email && minimatch(email, pattern)) {
+            return true;
+        }
+
+        if (name && minimatch(name, pattern)) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+export default getGitHubVCSConnector;
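One detail worth calling out from github.ts above: shouldAuthorBeIgnored matches ignoreAuthorPatterns from the CLI config against both the commit author's email and name using minimatch globs. A small hedged illustration with made-up patterns:

```ts
import {minimatch} from 'minimatch';

// Hypothetical patterns; in the package they come from ArgvService.getConfig().ignoreAuthorPatterns.
const ignoreAuthorPatterns = ['*@robots.example.com', 'ci-bot*'];

const ignoredByEmail = minimatch('deploy@robots.example.com', ignoreAuthorPatterns[0]); // true -> author skipped
const ignoredByName = minimatch('Jane Doe', ignoreAuthorPatterns[1]); // false -> author kept
```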
--- /dev/null
+++ package/src/vcs-connector/index.ts
@@ -0,0 +1,15 @@
+import {ArgvService} from '../services';
+import getGitHubVCSConnector from './github';
+import {VCSConnector, SourceType} from './connector-models';
+
+export async function getVCSConnector(): Promise<VCSConnector | undefined> {
+    const {connector} = ArgvService.getConfig();
+    const connectorType = process.env.VCS_CONNECTOR_TYPE || connector && connector.type;
+
+    switch (connectorType) {
+        case SourceType.GITHUB:
+            return getGitHubVCSConnector();
+        default:
+            return undefined;
+    }
+}
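A hedged sketch of consuming the factory above. It assumes ArgvService has already been initialized with the CLI arguments so that getConfig() exposes the connector and contributors settings; the wrapper function and its arguments are hypothetical.

```ts
import {getVCSConnector} from './vcs-connector';

async function printVCSInfo(login: string, filePath: string): Promise<void> {
    const vcsConnector = await getVCSConnector();
    if (!vcsConnector) {
        // Contributors are disabled or the connector configuration is incomplete.
        return;
    }

    const user = await vcsConnector.getUserByLogin(login);
    console.log('user:', user?.name, user?.url);

    const author = vcsConnector.getExternalAuthorByPath(filePath);
    console.log('author of', filePath, ':', author?.name);
}
```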
--- /dev/null
+++ package/src/workers/linter/index.ts
@@ -0,0 +1,62 @@
+import log from '@doc-tools/transform/lib/log';
+import {extname} from 'path';
+import {Observable, Subject} from 'threads/observable';
+import {expose} from 'threads';
+
+import {ArgvService, PluginService, PresetService, TocService} from '../../services';
+import {TocServiceData} from '../../services/tocs';
+import {PresetStorage} from '../../services/preset';
+import {YfmArgv} from '../../models';
+import {lintPage} from '../../resolvers';
+
+let processedPages = new Subject();
+
+interface ProcessLinterWorkerOptions {
+    argvConfig: YfmArgv;
+    navigationPaths: TocServiceData['navigationPaths'];
+    presetStorage: PresetStorage;
+}
+
+async function run({
+    argvConfig,
+    presetStorage,
+    navigationPaths,
+}: ProcessLinterWorkerOptions) {
+    ArgvService.set(argvConfig);
+    PresetService.setPresetStorage(presetStorage);
+    TocService.setNavigationPaths(navigationPaths);
+    PluginService.setPlugins();
+
+    TocService.getNavigationPaths().forEach((pathToFile) => {
+        lintPage({
+            inputPath: pathToFile,
+            fileExtension: extname(pathToFile),
+            onFinish: () => {
+                processedPages.next(pathToFile);
+            },
+        });
+    });
+}
+
+async function finish() {
+    processedPages.complete();
+    processedPages = new Subject();
+
+    return log.get();
+}
+
+function getProcessedPages() {
+    return Observable.from(processedPages);
+}
+
+export type ProcessLinterWorker = {
+    run: typeof run;
+    finish: typeof finish;
+    getProcessedPages: typeof getProcessedPages;
+};
+
+expose({
+    run,
+    finish,
+    getProcessedPages,
+});
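Finally, a hedged sketch of driving the linter worker above with the threads library. The package's own processLinter step is not part of this excerpt, so the worker path and the way the options object is assembled are assumptions; only the exposed run/finish/getProcessedPages API is taken from the file above.

```ts
import {spawn, Thread, Worker} from 'threads';
import type {ProcessLinterWorker} from './workers/linter';

// The options must carry argvConfig, navigationPaths and presetStorage produced by
// the ArgvService / TocService / PresetService initialization steps (not shown here).
async function lintInWorker(options: Parameters<ProcessLinterWorker['run']>[0]) {
    const linter = await spawn<ProcessLinterWorker>(new Worker('./workers/linter'));

    // Each linted page path is emitted through the worker's Subject.
    linter.getProcessedPages().subscribe((pathToFile) => console.log('linted:', pathToFile));

    await linter.run(options);
    const logs = await linter.finish(); // collected @doc-tools/transform log records

    await Thread.terminate(linter);
    return logs;
}
```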