@openinc/parse-server-opendash 3.21.0 → 3.23.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/features/documentation/core/Cleanup.d.ts +2 -1
- package/dist/features/documentation/core/Cleanup.js +33 -0
- package/dist/features/documentation/core/Importer.d.ts +1 -1
- package/dist/features/documentation/core/Importer.js +6 -6
- package/dist/features/documentation/core/Organizer.js +3 -3
- package/dist/features/documentation/functions/importDocs.js +7 -1
- package/dist/features/documentation/services/ConfigApplier.d.ts +1 -1
- package/dist/features/documentation/services/ConfigApplier.js +5 -5
- package/dist/features/documentation/services/ContentLoader.d.ts +1 -1
- package/dist/features/documentation/services/ContentLoader.js +2 -2
- package/dist/features/documentation/services/GitHubClient.d.ts +21 -9
- package/dist/features/documentation/services/GitHubClient.js +63 -15
- package/dist/features/documentation/services/LinkResolver.d.ts +13 -0
- package/dist/features/documentation/services/LinkResolver.js +137 -0
- package/dist/features/documentation/services/MetadataEnricher.d.ts +1 -1
- package/dist/features/documentation/services/MetadataEnricher.js +2 -2
- package/package.json +1 -1
package/dist/features/documentation/core/Cleanup.d.ts

@@ -1,4 +1,4 @@
-import { _User, Tenant } from "../../../types";
+import { _User, Tenant, Assets } from "../../../types";
 import { DocumentationStructure } from "../types";
 /**
  * Service for cleaning up old documentation that no longer exists in the repository
@@ -11,6 +11,7 @@ export declare class DocumentationCleanup {
         deletedDocuments: number;
         deletedCategories: number;
     }>;
+    static findExistingAssets(user: _User | undefined, tenant: Tenant | undefined): Promise<Assets[]>;
     /**
     * Collect all git paths from the documentation structure
     */
package/dist/features/documentation/core/Cleanup.js

@@ -18,6 +18,7 @@ class DocumentationCleanup {
         // Find all existing default documentation for this user/tenant
         const existingDocuments = await this.findExistingDefaultDocuments(user, tenant);
         const existingCategories = await this.findExistingDefaultCategories(user, tenant);
+        const existingAssets = await this.findExistingAssets(user, tenant);
         console.log(`[DocumentationCleanup] Found ${existingDocuments.length} existing documents and ${existingCategories.length} existing categories`);
         // Identify documents to delete
         const documentsToDelete = existingDocuments.filter((doc) => {
@@ -60,9 +61,41 @@
                 console.error(`[DocumentationCleanup] Failed to delete category ${cat.id}:`, error);
             }
         }
+        // delete old assets
+        let deletedAssets = 0;
+        for (const asset of existingAssets) {
+            const meta = asset.get("meta") || {};
+            if (meta.importedByDocumentation) {
+                try {
+                    console.log(`[DocumentationCleanup] Deleting asset: ${asset.get("description")} (id: ${asset.id})`);
+                    await asset.destroy({ useMasterKey: true });
+                    deletedAssets++;
+                }
+                catch (error) {
+                    console.error(`[DocumentationCleanup] Failed to delete asset ${asset.id}:`, error);
+                }
+            }
+        }
+        if (deletedAssets > 0) {
+            console.log(`[DocumentationCleanup] Deleted ${deletedAssets} old assets imported by documentation`);
+        }
         console.log(`[DocumentationCleanup] Cleanup complete: deleted ${deletedDocuments} documents and ${deletedCategories} categories`);
         return { deletedDocuments, deletedCategories };
     }
+    static async findExistingAssets(user, tenant) {
+        const query = new Parse.Query(types_1.Assets);
+        if (tenant) {
+            query.equalTo("tenant", tenant);
+        }
+        if (user) {
+            query.equalTo("user", user);
+        }
+        const assets = await query
+            // @ts-expect-error
+            .exists("meta.importedByDocumentation")
+            .findAll({ useMasterKey: true });
+        return assets;
+    }
     /**
     * Collect all git paths from the documentation structure
     */
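The cleanup pass now also removes the Parse `Assets` rows that the new link resolver creates, identified by the `meta.importedByDocumentation` flag. A minimal standalone sketch of that query-and-destroy pattern with the Parse JS SDK, using a string class name instead of the package's typed `Assets` class (the helper name is hypothetical):

```ts
import Parse from "parse/node";

// Find and delete every asset the documentation importer flagged, scoped to
// user/tenant — mirrors findExistingAssets plus the destroy loop above.
async function deleteImportedAssets(
  user?: Parse.User,
  tenant?: Parse.Object
): Promise<number> {
  const query = new Parse.Query("Assets");
  if (tenant) query.equalTo("tenant", tenant);
  if (user) query.equalTo("user", user);
  query.exists("meta.importedByDocumentation"); // nested key, as in the diff

  const assets = await query.findAll({ useMasterKey: true });
  for (const asset of assets) {
    await asset.destroy({ useMasterKey: true });
  }
  return assets.length;
}
```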
package/dist/features/documentation/core/Importer.js

@@ -19,13 +19,13 @@ class DocumentationImporter {
         // Validate token
         const gitUser = await this.githubClient.validateToken();
         // Get repository info
-        const repo = await this.githubClient.getRepository(
+        const repo = await this.githubClient.getRepository();
         // Determine branch to use
         const branch = options.branch || repo.default_branch;
         // Validate branch exists
         await this.validateBranch(options.organization, options.repository, branch);
         // Get branch info (commit SHA)
-        const branchInfo = await this.githubClient.getBranch(
+        const branchInfo = await this.githubClient.getBranch();
         const latestCommitSha = branchInfo.commit.sha;
         // Attempt to load previously imported commit SHA from Parse
         let previousCommitSha;
@@ -59,7 +59,7 @@
                 },
             };
         }
-        const treeData = await this.githubClient.getTree(
+        const treeData = await this.githubClient.getTree(latestCommitSha);
         // Organize structure with filtering and root path
         const structure = await Organizer_1.DocumentationOrganizer.organizeRepositoryStructure(treeData, options.fileFilter, options.rootPath, this.githubClient, options.organization, options.repository, branch, options.defaultFolderConfig);
         // Clean up old documentation that is no longer in the repository
@@ -113,7 +113,7 @@
     */
    async validateBranch(owner, repo, branch) {
        try {
-            const branches = await this.githubClient.getBranches(
+            const branches = await this.githubClient.getBranches();
            const branchNames = branches.map((b) => b.name);
            if (!branchNames.includes(branch)) {
                throw new Error(`Branch '${branch}' not found. Available branches: ${branchNames.join(", ")}`);
@@ -126,8 +126,8 @@
    /**
     * Get file content from GitHub
     */
-    async getFileContent(
-        return this.githubClient.getFileContent(
+    async getFileContent(path) {
+        return this.githubClient.getFileContent(path);
    }
 }
 exports.DocumentationImporter = DocumentationImporter;
package/dist/features/documentation/core/Organizer.js

@@ -20,7 +20,7 @@ class DocumentationOrganizer {
         // 2. Build folder structure & identify config.json files
         const { root: builtRoot, configFiles } = StructureBuilder_1.StructureBuilder.build(normalizedFiles, rootPath, defaultFolderConfig);
         // 3. Apply configs
-        await ConfigApplier_1.ConfigApplier.apply(builtRoot, configFiles, githubClient,
+        await ConfigApplier_1.ConfigApplier.apply(builtRoot, configFiles, githubClient, branch, rootPath);
         // 4. Apply feature filtering
         console.log(`[DocumentationOrganizer] Applying feature-based filtering...`);
         const preFilterFileCount = normalizedFiles.length;
@@ -35,9 +35,9 @@
         }
         // 5. Enrich commit metadata
         if (githubClient && owner && repo) {
-            await MetadataEnricher_1.MetadataEnricher.enrichCommits(filteredFiles, githubClient,
+            await MetadataEnricher_1.MetadataEnricher.enrichCommits(filteredFiles, githubClient, rootPath);
             // 6. Load content
-            await ContentLoader_1.ContentLoader.populate(filteredFiles, githubClient,
+            await ContentLoader_1.ContentLoader.populate(filteredFiles, githubClient, branch, rootPath);
         }
         // Build extension map
         const filesByExtension = new Map();
package/dist/features/documentation/functions/importDocs.js

@@ -6,6 +6,7 @@ const core_1 = require("../core");
 const config_1 = require("../config");
 const Converter_1 = require("../core/Converter");
 const types_1 = require("../../../types");
+const LinkResolver_1 = require("../services/LinkResolver");
 const GITHUB_TOKEN = process.env.OI_DOCUMENTATION_GITHUB_ACCESS_TOKEN;
 async function importDocs() {
     try {
@@ -23,7 +24,9 @@
             .equalTo("objectId", userId)
             .include("tenant")
             .first({ useMasterKey: true })
-        :
+        : await new Parse.Query(Parse.User)
+            .ascending("createdAt")
+            .first({ useMasterKey: true });
         const tenantId = process.env.OI_DOCUMENTATION_TENANT_ID;
         const tenant = tenantId
             ? new types_1.Tenant({ objectId: tenantId })
@@ -31,6 +34,9 @@
         const result = await importer.importFromRepository(config, user, tenant);
         if (result.metadata.skipped)
             return;
+        // resolve links in all files and fetch their content into
+        const linkResolver = new LinkResolver_1.LinkResolver(GITHUB_TOKEN, result.structure?.allFiles);
+        await linkResolver.resolveLinks(user, tenant);
         if (result.structure)
             await new Converter_1.DocumentationConverter(result).convert(user, tenant);
     }
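The notable behavioral change here: when `OI_DOCUMENTATION_USER_ID` is unset, the import job now falls back to the oldest user account instead of the removed branch (which is truncated in this rendering). Isolated as a sketch, the new lookup reads:

```ts
import Parse from "parse/node";

// Sketch of the user lookup added in importDocs: explicit user by ID when
// configured, otherwise the oldest account in the database.
const userId = process.env.OI_DOCUMENTATION_USER_ID;
const user = userId
  ? await new Parse.Query(Parse.User)
      .equalTo("objectId", userId)
      .include("tenant")
      .first({ useMasterKey: true })
  : await new Parse.Query(Parse.User)
      .ascending("createdAt") // oldest account first
      .first({ useMasterKey: true });
```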
package/dist/features/documentation/services/ConfigApplier.d.ts

@@ -1,7 +1,7 @@
 import { DocumentationFolder, DocumentationFile } from "../types";
 import { GitHubClient } from "./GitHubClient";
 export declare class ConfigApplier {
-    static apply(root: DocumentationFolder, configFiles: DocumentationFile[], githubClient?: GitHubClient,
+    static apply(root: DocumentationFolder, configFiles: DocumentationFile[], githubClient?: GitHubClient, branch?: string, rootPath?: string): Promise<void>;
     /** Compare raw vs sanitized folder config and log warnings for dropped / invalid entries */
     private static diffAndWarnFolderConfig;
     private static diffAndWarnFileConfig;
package/dist/features/documentation/services/ConfigApplier.js

@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.ConfigApplier = void 0;
 const config_1 = require("../config");
 class ConfigApplier {
-    static async apply(root, configFiles, githubClient,
+    static async apply(root, configFiles, githubClient, branch, rootPath) {
         if (!configFiles.length)
             return;
         for (const cfgFile of configFiles) {
@@ -16,8 +16,8 @@
             if (!folder)
                 continue;
             try {
-                if (githubClient
-                    const customRaw = await this.fetchConfigFromGitHub(cfgFile, githubClient,
+                if (githubClient) {
+                    const customRaw = await this.fetchConfigFromGitHub(cfgFile, githubClient, branch, rootPath);
                     const custom = (0, config_1.sanitizeFolderConfig)(customRaw);
                     this.diffAndWarnFolderConfig(customRaw, custom, cfgFile.path);
                     folder.config = { ...folder.config, ...custom };
@@ -74,12 +74,12 @@
             console.warn(`[ConfigApplier] Invalid type for 'locations' (expected string[]) in file '${fileBase}' (${sourcePath})`);
         }
     }
-    static async fetchConfigFromGitHub(cfgFile, githubClient,
+    static async fetchConfigFromGitHub(cfgFile, githubClient, branch, rootPath) {
         const normalizedRoot = rootPath?.replace(/^\/+/g, "").replace(/\/+$/g, "");
         const fullPath = normalizedRoot
             ? `${normalizedRoot}/${cfgFile.path}`
             : cfgFile.path;
-        const fileContent = await githubClient.getFileContent(
+        const fileContent = await githubClient.getFileContent(fullPath);
         const json = Buffer.from(fileContent.content, "base64").toString("utf-8");
         return JSON.parse(json);
     }
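`ConfigApplier`, `ContentLoader`, and `MetadataEnricher` now all take `rootPath` explicitly and share the same normalization: strip leading and trailing slashes from the configured root, then prefix relative file paths with it. Extracted as a sketch (the helper name is mine):

```ts
// Same normalization as fetchConfigFromGitHub / ContentLoader.populate above.
function resolveFullPath(rootPath: string | undefined, filePath: string): string {
  const normalizedRoot = rootPath?.replace(/^\/+/g, "").replace(/\/+$/g, "");
  return normalizedRoot ? `${normalizedRoot}/${filePath}` : filePath;
}

resolveFullPath("/docs/", "guide/config.json"); // => "docs/guide/config.json"
resolveFullPath(undefined, "guide/config.json"); // => "guide/config.json"
```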
package/dist/features/documentation/services/ContentLoader.d.ts

@@ -2,5 +2,5 @@ import { DocumentationFile } from "../types";
 import { GitHubClient } from "./GitHubClient";
 export declare class ContentLoader {
     private static readonly TEXT_EXTENSIONS;
-    static populate(files: DocumentationFile[], githubClient: GitHubClient,
+    static populate(files: DocumentationFile[], githubClient: GitHubClient, branch?: string, rootPath?: string): Promise<void>;
 }
package/dist/features/documentation/services/ContentLoader.js

@@ -2,7 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.ContentLoader = void 0;
 class ContentLoader {
-    static async populate(files, githubClient,
+    static async populate(files, githubClient, branch, rootPath) {
         const candidates = files.filter((f) => this.TEXT_EXTENSIONS.has(f.extension));
         if (!candidates.length)
             return;
@@ -13,7 +13,7 @@ class ContentLoader {
         const fullPath = normalizedRoot
             ? `${normalizedRoot}/${file.path}`
             : file.path;
-        const ghContent = await githubClient.getFileContent(
+        const ghContent = await githubClient.getFileContent(fullPath);
         if (ghContent?.content) {
             file.content = Buffer.from(ghContent.content, "base64").toString("utf-8");
             fetched++;
package/dist/features/documentation/services/GitHubClient.d.ts

@@ -1,11 +1,16 @@
-import { GitHubTreeResponse, GitHubBranch, GitHubRepository } from "../types";
+import { GitHubTreeResponse, GitHubBranch, GitHubRepository, ImportOptions } from "../types";
 /**
  * GitHub API Client for fetching repository data
  */
 export declare class GitHubClient {
     private readonly baseUrl;
     private readonly headers;
-
+    private readonly importConfig;
+    private branch;
+    private repository;
+    private tree;
+    private sha;
+    constructor(token: string, importConfig?: Partial<ImportOptions>);
     /**
     * Validate the GitHub token
     */
@@ -15,29 +20,36 @@ export declare class GitHubClient {
     /**
     * Get repository information
     */
-    getRepository(
+    getRepository(): Promise<GitHubRepository>;
     /**
     * Get all branches for a repository
     */
-    getBranches(
+    getBranches(): Promise<GitHubBranch[]>;
     /**
     * Get a specific branch
     */
-    getBranch(
+    getBranch(): Promise<GitHubBranch>;
     /**
     * Get repository tree (file structure)
     */
-    getTree(
+    getTree(sha: string, recursive?: boolean): Promise<GitHubTreeResponse>;
     /**
     * Get file content
     */
-    getFileContent(
+    getFileContent(path: string): Promise<any>;
     /**
     * Get the last commit information for a specific file
     */
-    getFileLastCommit(
+    getFileLastCommit(path: string, ref?: string): Promise<any>;
     /**
     * Get commit information for multiple files in batch
     */
-    getMultipleFileCommits(
+    getMultipleFileCommits(paths: string[]): Promise<Map<string, any>>;
+    /**
+     * Fetches the content of each linked file from GitHub.
+     * @param ref Branch or commit SHA
+     * @param paths Array of relative file paths (from repo root)
+     * @returns Map of path to file content (decoded as string if possible)
+     */
+    private fetchLinkedFiles;
 }
package/dist/features/documentation/services/GitHubClient.js

@@ -1,17 +1,23 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.GitHubClient = void 0;
+const config_1 = require("../config");
 /**
  * GitHub API Client for fetching repository data
  */
 class GitHubClient {
-    constructor(token) {
+    constructor(token, importConfig = config_1.DEFAULT_IMPORT_CONFIG) {
         this.baseUrl = "https://api.github.com";
+        this.branch = null;
+        this.repository = null;
+        this.tree = null;
+        this.sha = null;
         this.headers = {
             Authorization: `Bearer ${token}`,
             Accept: "application/vnd.github.v3+json",
             "User-Agent": "OpenInc-Documentation-Importer",
         };
+        this.importConfig = importConfig;
     }
     /**
     * Validate the GitHub token
@@ -29,8 +35,9 @@ class GitHubClient {
     /**
     * Get repository information
     */
-    async getRepository(
-        const
+    async getRepository() {
+        const { organization, repository } = this.importConfig;
+        const response = await fetch(`${this.baseUrl}/repos/${organization}/${repository}`, {
             headers: this.headers,
         });
         if (!response.ok) {
@@ -42,8 +49,9 @@
     /**
     * Get all branches for a repository
     */
-    async getBranches(
-        const
+    async getBranches() {
+        const { organization, repository } = this.importConfig;
+        const response = await fetch(`${this.baseUrl}/repos/${organization}/${repository}/branches`, {
             headers: this.headers,
         });
         if (!response.ok) {
@@ -54,8 +62,9 @@
     /**
     * Get a specific branch
     */
-    async getBranch(
-        const
+    async getBranch() {
+        const { organization, repository, branch } = this.importConfig;
+        const response = await fetch(`${this.baseUrl}/repos/${organization}/${repository}/branches/${branch}`, {
             headers: this.headers,
         });
         if (!response.ok) {
@@ -67,8 +76,9 @@
     /**
     * Get repository tree (file structure)
     */
-    async getTree(
-        const
+    async getTree(sha, recursive = true) {
+        const { organization, repository } = this.importConfig;
+        const url = `${this.baseUrl}/repos/${organization}/${repository}/git/trees/${sha}${recursive ? "?recursive=1" : ""}`;
         const response = await fetch(url, {
             headers: this.headers,
         });
@@ -80,8 +90,9 @@
     /**
     * Get file content
     */
-    async getFileContent(
-        const
+    async getFileContent(path) {
+        const { organization, repository, branch } = this.importConfig;
+        const url = `${this.baseUrl}/repos/${organization}/${repository}/contents/${path}${`?ref=${branch}`}`;
         const response = await fetch(url, {
             headers: this.headers,
         });
@@ -93,8 +104,9 @@
     /**
     * Get the last commit information for a specific file
     */
-    async getFileLastCommit(
-        const
+    async getFileLastCommit(path, ref) {
+        const { organization, repository } = this.importConfig;
+        const url = `${this.baseUrl}/repos/${organization}/${repository}/commits?path=${encodeURIComponent(path)}&per_page=1${ref ? `&sha=${ref}` : ""}`;
         const response = await fetch(url, {
             headers: this.headers,
         });
@@ -107,7 +119,7 @@
     /**
     * Get commit information for multiple files in batch
     */
-    async getMultipleFileCommits(
+    async getMultipleFileCommits(paths) {
         const commitMap = new Map();
         // Process files in batches to avoid rate limiting
         const batchSize = 5;
@@ -115,7 +127,7 @@
             const batch = paths.slice(i, i + batchSize);
             const promises = batch.map(async (path) => {
                 try {
-                    const commit = await this.getFileLastCommit(
+                    const commit = await this.getFileLastCommit(path);
                     return { path, commit };
                 }
                 catch (error) {
@@ -136,5 +148,41 @@
         }
         return commitMap;
     }
+    /**
+     * Fetches the content of each linked file from GitHub.
+     * @param ref Branch or commit SHA
+     * @param paths Array of relative file paths (from repo root)
+     * @returns Map of path to file content (decoded as string if possible)
+     */
+    async fetchLinkedFiles(ref, paths) {
+        const result = new Map();
+        for (const path of paths) {
+            try {
+                const file = await this.getFileContent(path);
+                // GitHub returns content as base64 for binary/text files
+                if (file && file.content && file.encoding === "base64") {
+                    const buffer = Buffer.from(file.content, "base64");
+                    // Try to decode as UTF-8 string, fallback to Buffer for binary
+                    const asString = buffer.toString("utf8");
+                    // Heuristic: treat as string if no replacement chars
+                    if (!asString.includes("\uFFFD")) {
+                        result.set(path, asString);
+                    }
+                    else {
+                        result.set(path, buffer);
+                    }
+                }
+                else {
+                    // For raw text files, just use the content
+                    result.set(path, file.content ?? "");
+                }
+            }
+            catch (err) {
+                console.warn(`⚠️ Could not fetch ${path}:`, err);
+                result.set(path, "");
+            }
+        }
+        return result;
+    }
 }
 exports.GitHubClient = GitHubClient;
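The refactor threads the import configuration through the constructor once, so the per-call owner/repo/branch parameters (truncated in this rendering) could be dropped. The new request URLs destructure `organization`, `repository`, and `branch` from the config, so usage would look roughly like the following sketch (config values are illustrative):

```ts
import { GitHubClient } from "./GitHubClient";

// Construct once with the import config instead of repeating owner/repo/branch.
const client = new GitHubClient(
  process.env.OI_DOCUMENTATION_GITHUB_ACCESS_TOKEN ?? "",
  {
    organization: "example-org", // hypothetical values
    repository: "example-docs",
    branch: "main",
  }
);

const repo = await client.getRepository();                 // was getRepository(owner, repo)
const branchInfo = await client.getBranch();               // branch now comes from the config
const tree = await client.getTree(branchInfo.commit.sha);  // the tree SHA is still explicit
```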
package/dist/features/documentation/services/LinkResolver.d.ts

@@ -0,0 +1,13 @@
+import { _User, Tenant } from "../../../types";
+import { DocumentationFile } from "../types";
+/**
+ * Service to resolve and upload linked local files/images in documentation content
+ */
+export declare class LinkResolver {
+    private gitHubClient;
+    private files;
+    constructor(gitHubToken: string, files?: DocumentationFile[]);
+    resolveLinks(user?: _User, tenant?: Tenant): Promise<void>;
+    private scanLinksWithPositions;
+    private replaceLinksInContent;
+}
package/dist/features/documentation/services/LinkResolver.js

@@ -0,0 +1,137 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.LinkResolver = void 0;
+const types_1 = require("../../../types");
+const GitHubClient_1 = require("./GitHubClient");
+const path_1 = __importDefault(require("path"));
+/**
+ * Service to resolve and upload linked local files/images in documentation content
+ */
+class LinkResolver {
+    constructor(gitHubToken, files = []) {
+        this.files = [];
+        this.gitHubClient = new GitHubClient_1.GitHubClient(gitHubToken);
+        this.files = files;
+    }
+    // Returns a map of unique local file/image links to their GitHub content
+    async resolveLinks(user, tenant) {
+        console.log("[LinkResolver] Resolving links in documentation files...");
+        const linkMap = this.scanLinksWithPositions();
+        // first we build an array with unique absoluteLinks.
+        const uniqueAbsoluteLinks = Array.from(new Set(Array.from(linkMap.values()).flatMap((occs) => occs.map((o) => o.absoluteLink))));
+        console.log(`[LinkResolver] Found ${uniqueAbsoluteLinks.length} unique local links to resolve.`);
+        // we can now fetch all files for absoluteLinks
+        const fetchedFiles = new Map();
+        await Promise.all(uniqueAbsoluteLinks.map(async (filePath) => {
+            try {
+                const file = await this.gitHubClient.getFileContent(filePath);
+                if (file && file.content && file.encoding === "base64") {
+                    const buffer = Buffer.from(file.content, "base64");
+                    // Try to decode as UTF-8 string, fallback to Buffer for binary
+                    const asString = buffer.toString("utf8");
+                    if (!asString.includes("\uFFFD")) {
+                        fetchedFiles.set(filePath, asString);
+                    }
+                    else {
+                        fetchedFiles.set(filePath, buffer);
+                    }
+                }
+                else {
+                    fetchedFiles.set(filePath, file.content ?? "");
+                }
+            }
+            catch (err) {
+                console.warn(`[LinkResolver] ⚠️ Could not fetch ${filePath}:`, err);
+                fetchedFiles.set(filePath, "");
+            }
+        }));
+        // old absoluteLink => new file link
+        const newLinkMap = new Map();
+        // now we can build a map of absoluteLink => new Asset URL
+        for (const [absoluteLink, content] of fetchedFiles) {
+            const fileName = absoluteLink.split("/").pop();
+            // map the data to a Parse File and save as Asset
+            const data = typeof content === "string"
+                ? { base64: Buffer.from(content, "utf8").toString("base64") }
+                : { base64: content.toString("base64") };
+            const parseFile = await new Parse.File(fileName || "file", data).save({
+                useMasterKey: true,
+            });
+            const asset = new types_1.Assets({
+                file: parseFile,
+                context: "documentation",
+                tenant,
+                user,
+                description: fileName,
+                meta: {
+                    importedByDocumentation: true,
+                },
+            });
+            const savedAsset = await asset.save(null, { useMasterKey: true });
+            const newURL = savedAsset.file?.url() || "";
+            newLinkMap.set(absoluteLink, newURL);
+        }
+        console.log(`[LinkResolver] Resolved and uploaded ${newLinkMap.size} linked assets.`);
+        // Finally, replace links in each file's content
+        for (const file of this.files) {
+            const occurrences = linkMap.get(file.path);
+            if (occurrences && occurrences.length) {
+                file.content = this.replaceLinksInContent(occurrences, file.content ?? "", newLinkMap);
+            }
+        }
+        console.log("[LinkResolver] Link resolution completed.");
+    }
+    // Scan all files and return a map: filePath -> array of link occurrences
+    scanLinksWithPositions() {
+        const regex = /(!?\[[^\]]*\]\(([^)]+)\))/g;
+        const result = new Map();
+        for (const file of this.files) {
+            const occurrences = [];
+            const content = file.content || "";
+            let match;
+            while ((match = regex.exec(content)) !== null) {
+                const markdown = match[1];
+                const link = match[2].trim();
+                // Skip URLs
+                if (/^https?:\/\//i.test(link) || /^mailto:/i.test(link))
+                    continue;
+                const baseDir = path_1.default.posix.dirname(file.path);
+                const absoluteLink = path_1.default.posix.normalize(path_1.default.posix.join("./docs", baseDir, link));
+                occurrences.push({
+                    filePath: file.path,
+                    link,
+                    absoluteLink,
+                    start: match.index,
+                    end: match.index + markdown.length,
+                    markdown,
+                });
+            }
+            if (occurrences.length)
+                result.set(file.path, occurrences);
+        }
+        return result;
+    }
+    // Replace links in content with new links using positions
+    replaceLinksInContent(occurrences, content, assetLinkMap) {
+        let offset = 0; // track content length changes
+        let newContent = content;
+        for (const occ of occurrences) {
+            const newLink = assetLinkMap.get(occ.absoluteLink);
+            if (!newLink)
+                continue;
+            // Replace only the link part inside markdown
+            const before = newContent.slice(0, occ.start + offset);
+            const matchText = newContent.slice(occ.start + offset, occ.end + offset);
+            const after = newContent.slice(occ.end + offset);
+            // Replace link inside [text](link)
+            const replaced = matchText.replace(occ.link, newLink);
+            newContent = before + replaced + after;
+            offset += replaced.length - matchText.length;
+        }
+        return newContent;
+    }
+}
+exports.LinkResolver = LinkResolver;
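The scanner's regex matches both markdown links and images and keeps only repository-local targets, skipping `http(s)://` and `mailto:` links. A small reproduction of that filtering step (the wrapper function name is mine):

```ts
// Same pattern as scanLinksWithPositions: capture group 1 is the whole
// markdown link/image, group 2 is the target inside the parentheses.
const LINK_RE = /(!?\[[^\]]*\]\(([^)]+)\))/g;

function extractLocalLinks(content: string): string[] {
  const links: string[] = [];
  let match: RegExpExecArray | null;
  while ((match = LINK_RE.exec(content)) !== null) {
    const link = match[2].trim();
    // Skip external and mailto links, exactly as the scanner does.
    if (/^https?:\/\//i.test(link) || /^mailto:/i.test(link)) continue;
    links.push(link);
  }
  return links;
}

extractLocalLinks("See [guide](./guide.md) and ![logo](https://example.com/x.png)");
// => ["./guide.md"]
```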
package/dist/features/documentation/services/MetadataEnricher.d.ts

@@ -1,5 +1,5 @@
 import { DocumentationFile } from "../types";
 import { GitHubClient } from "./GitHubClient";
 export declare class MetadataEnricher {
-    static enrichCommits(files: DocumentationFile[], githubClient: GitHubClient,
+    static enrichCommits(files: DocumentationFile[], githubClient: GitHubClient, rootPath?: string): Promise<void>;
 }
package/dist/features/documentation/services/MetadataEnricher.js

@@ -2,11 +2,11 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.MetadataEnricher = void 0;
 class MetadataEnricher {
-    static async enrichCommits(files, githubClient,
+    static async enrichCommits(files, githubClient, rootPath) {
         const normalizedRoot = rootPath?.replace(/^\/+/g, "").replace(/\/+$/g, "");
         const fullPaths = files.map((f) => normalizedRoot ? `${normalizedRoot}/${f.path}` : f.path);
         try {
-            const commitMap = await githubClient.getMultipleFileCommits(
+            const commitMap = await githubClient.getMultipleFileCommits(fullPaths);
             let enriched = 0;
             files.forEach((file, idx) => {
                 const commit = commitMap.get(fullPaths[idx]);