@larkiny/astro-github-loader 0.11.3 → 0.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +28 -55
- package/dist/github.assets.d.ts +70 -0
- package/dist/github.assets.js +253 -0
- package/dist/github.auth.js +13 -9
- package/dist/github.cleanup.d.ts +3 -2
- package/dist/github.cleanup.js +30 -23
- package/dist/github.constants.d.ts +0 -16
- package/dist/github.constants.js +0 -16
- package/dist/github.content.d.ts +5 -131
- package/dist/github.content.js +152 -794
- package/dist/github.dryrun.d.ts +9 -5
- package/dist/github.dryrun.js +46 -25
- package/dist/github.link-transform.d.ts +2 -2
- package/dist/github.link-transform.js +65 -57
- package/dist/github.loader.js +30 -46
- package/dist/github.logger.d.ts +2 -2
- package/dist/github.logger.js +33 -24
- package/dist/github.paths.d.ts +76 -0
- package/dist/github.paths.js +190 -0
- package/dist/github.storage.d.ts +15 -0
- package/dist/github.storage.js +109 -0
- package/dist/github.types.d.ts +34 -4
- package/dist/index.d.ts +8 -6
- package/dist/index.js +3 -6
- package/dist/test-helpers.d.ts +130 -0
- package/dist/test-helpers.js +194 -0
- package/package.json +3 -1
- package/src/github.assets.spec.ts +717 -0
- package/src/github.assets.ts +365 -0
- package/src/github.auth.spec.ts +245 -0
- package/src/github.auth.ts +24 -10
- package/src/github.cleanup.spec.ts +380 -0
- package/src/github.cleanup.ts +91 -47
- package/src/github.constants.ts +0 -17
- package/src/github.content.spec.ts +305 -454
- package/src/github.content.ts +259 -957
- package/src/github.dryrun.spec.ts +586 -0
- package/src/github.dryrun.ts +105 -54
- package/src/github.link-transform.spec.ts +1345 -0
- package/src/github.link-transform.ts +174 -95
- package/src/github.loader.spec.ts +75 -50
- package/src/github.loader.ts +101 -76
- package/src/github.logger.spec.ts +795 -0
- package/src/github.logger.ts +77 -35
- package/src/github.paths.spec.ts +523 -0
- package/src/github.paths.ts +259 -0
- package/src/github.storage.spec.ts +367 -0
- package/src/github.storage.ts +127 -0
- package/src/github.types.ts +48 -9
- package/src/index.ts +43 -6
- package/src/test-helpers.ts +215 -0
package/dist/github.dryrun.d.ts
CHANGED
@@ -1,3 +1,5 @@
+import { Octokit } from "octokit";
+import type { Logger } from "./github.logger.js";
 import type { ImportOptions, LoaderContext } from "./github.types.js";
 /**
  * Represents the state of a single import configuration
@@ -49,11 +51,11 @@ export declare function createConfigId(config: ImportOptions): string;
 /**
  * Loads the import state from the state file
  */
-export declare function loadImportState(workingDir: string): Promise<StateFile>;
+export declare function loadImportState(workingDir: string, logger?: Logger): Promise<StateFile>;
 /**
  * Gets the latest commit information for a repository path
  */
-export declare function getLatestCommitInfo(octokit:
+export declare function getLatestCommitInfo(octokit: Octokit, config: ImportOptions, signal?: AbortSignal): Promise<{
     sha: string;
     message: string;
     date: string;
@@ -61,12 +63,14 @@ export declare function getLatestCommitInfo(octokit: any, config: ImportOptions,
 /**
  * Updates the import state after a successful import
  */
-export declare function updateImportState(workingDir: string, config: ImportOptions, commitSha?: string): Promise<void>;
+export declare function updateImportState(workingDir: string, config: ImportOptions, commitSha?: string, logger?: Logger): Promise<void>;
 /**
  * Performs a dry run check on all configured repositories
  */
-export declare function performDryRun(configs: ImportOptions[], context: LoaderContext, octokit:
+export declare function performDryRun(configs: ImportOptions[], context: LoaderContext, octokit: Octokit, workingDir?: string, signal?: AbortSignal): Promise<RepositoryChangeInfo[]>;
 /**
  * Formats and displays the dry run results
  */
-export declare function displayDryRunResults(results: RepositoryChangeInfo[], logger:
+export declare function displayDryRunResults(results: RepositoryChangeInfo[], logger: {
+    info: (msg: string) => void;
+}): void;
package/dist/github.dryrun.js
CHANGED
@@ -1,46 +1,61 @@
 import { promises as fs } from "node:fs";
 import { existsSync } from "node:fs";
 import { join } from "node:path";
-const STATE_FILENAME =
+const STATE_FILENAME = ".github-import-state.json";
 /**
  * Creates a unique identifier for an import configuration
  */
 export function createConfigId(config) {
-    return `${config.owner}/${config.repo}@${config.ref ||
+    return `${config.owner}/${config.repo}@${config.ref || "main"}`;
 }
 /**
  * Loads the import state from the state file
  */
-export async function loadImportState(workingDir) {
+export async function loadImportState(workingDir, logger) {
     const statePath = join(workingDir, STATE_FILENAME);
     if (!existsSync(statePath)) {
         return {
             imports: {},
-            lastChecked: new Date().toISOString()
+            lastChecked: new Date().toISOString(),
         };
     }
     try {
-        const content = await fs.readFile(statePath,
-
+        const content = await fs.readFile(statePath, "utf-8");
+        const parsed = JSON.parse(content);
+        // Validate the parsed state has the expected shape
+        if (!parsed ||
+            typeof parsed !== "object" ||
+            !("imports" in parsed) ||
+            typeof parsed.imports !== "object") {
+            const msg = `Malformed state file at ${statePath}, starting fresh`;
+            // eslint-disable-next-line no-console -- fallback when no logger provided
+            logger ? logger.warn(msg) : console.warn(msg);
+            return { imports: {}, lastChecked: new Date().toISOString() };
+        }
+        return parsed;
     }
     catch (error) {
-
+        const msg = `Failed to load import state from ${statePath}, starting fresh: ${error}`;
+        // eslint-disable-next-line no-console -- fallback when no logger provided
+        logger ? logger.warn(msg) : console.warn(msg);
         return {
             imports: {},
-            lastChecked: new Date().toISOString()
+            lastChecked: new Date().toISOString(),
         };
     }
 }
 /**
  * Saves the import state to the state file
  */
-async function saveImportState(workingDir, state) {
+async function saveImportState(workingDir, state, logger) {
     const statePath = join(workingDir, STATE_FILENAME);
     try {
-        await fs.writeFile(statePath, JSON.stringify(state, null, 2),
+        await fs.writeFile(statePath, JSON.stringify(state, null, 2), "utf-8");
     }
     catch (error) {
-
+        const msg = `Failed to save import state to ${statePath}: ${error}`;
+        // eslint-disable-next-line no-console -- fallback when no logger provided
+        logger ? logger.warn(msg) : console.warn(msg);
     }
 }
 /**
@@ -55,7 +70,7 @@ export async function getLatestCommitInfo(octokit, config, signal) {
         repo,
         sha: ref,
         per_page: 1,
-        request: { signal }
+        request: { signal },
     });
     if (data.length === 0) {
         return null;
@@ -63,12 +78,17 @@ export async function getLatestCommitInfo(octokit, config, signal) {
     const latestCommit = data[0];
     return {
         sha: latestCommit.sha,
-        message: latestCommit.commit.message.split(
-        date: latestCommit.commit.committer?.date ||
+        message: latestCommit.commit.message.split("\n")[0], // First line only
+        date: latestCommit.commit.committer?.date ||
+            latestCommit.commit.author?.date ||
+            new Date().toISOString(),
     };
 }
 catch (error) {
-    if (error
+    if (typeof error === "object" &&
+        error !== null &&
+        "status" in error &&
+        error.status === 404) {
         throw new Error(`Repository not found: ${owner}/${repo}`);
     }
     throw error;
@@ -86,17 +106,18 @@ async function checkRepositoryForChanges(octokit, config, currentState, signal)
             config,
             state: currentState,
             needsReimport: false,
-            error: "No commits found in repository"
+            error: "No commits found in repository",
         };
     }
-    const needsReimport = !currentState.lastCommitSha ||
+    const needsReimport = !currentState.lastCommitSha ||
+        currentState.lastCommitSha !== latestCommit.sha;
     return {
         config,
         state: currentState,
         needsReimport,
        latestCommitSha: latestCommit.sha,
        latestCommitMessage: latestCommit.message,
-        latestCommitDate: latestCommit.date
+        latestCommitDate: latestCommit.date,
     };
 }
 catch (error) {
@@ -104,15 +125,15 @@ async function checkRepositoryForChanges(octokit, config, currentState, signal)
             config,
             state: currentState,
             needsReimport: false,
-            error: error.message
+            error: error instanceof Error ? error.message : String(error),
         };
     }
 }
 /**
  * Updates the import state after a successful import
  */
-export async function updateImportState(workingDir, config, commitSha) {
-    const state = await loadImportState(workingDir);
+export async function updateImportState(workingDir, config, commitSha, logger) {
+    const state = await loadImportState(workingDir, logger);
     const configId = createConfigId(config);
     const configName = config.name || `${config.owner}/${config.repo}`;
     state.imports[configId] = {
@@ -120,9 +141,9 @@ export async function updateImportState(workingDir, config, commitSha) {
         repoId: configId,
         lastCommitSha: commitSha,
         lastImported: new Date().toISOString(),
-        ref: config.ref ||
+        ref: config.ref || "main",
     };
-    await saveImportState(workingDir, state);
+    await saveImportState(workingDir, state, logger);
 }
 /**
  * Performs a dry run check on all configured repositories
@@ -145,7 +166,7 @@ export async function performDryRun(configs, context, octokit, workingDir = proc
     const currentState = state.imports[configId] || {
         name: configName,
         repoId: configId,
-        ref: config.ref ||
+        ref: config.ref || "main",
     };
     logger.debug(`Checking ${configName}...`);
     try {
@@ -159,7 +180,7 @@ export async function performDryRun(configs, context, octokit, workingDir = proc
             config,
             state: currentState,
             needsReimport: false,
-            error: `Failed to check repository: ${error.message}
+            error: `Failed to check repository: ${error instanceof Error ? error.message : String(error)}`,
         });
     }
 }
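For reference, the object shape the new validation accepts, reconstructed from the fields `updateImportState` writes; the repository name, SHA, and timestamps are placeholders:

    // Contents of .github-import-state.json, as written by saveImportState
    const exampleState = {
      imports: {
        // key comes from createConfigId: `${owner}/${repo}@${ref || "main"}`
        "octocat/hello-world@main": {
          name: "octocat/hello-world",
          repoId: "octocat/hello-world@main",
          lastCommitSha: "0000000000000000000000000000000000000000", // placeholder SHA
          lastImported: "2025-01-01T00:00:00.000Z",
          ref: "main",
        },
      },
      lastChecked: "2025-01-01T00:00:00.000Z",
    };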
package/dist/github.link-transform.d.ts
CHANGED
@@ -1,5 +1,5 @@
-import type { LinkMapping, LinkTransformContext, IncludePattern } from
-import type { Logger } from
+import type { LinkMapping, LinkTransformContext, IncludePattern } from "./github.types.js";
+import type { Logger } from "./github.logger.js";
 /**
  * Represents an imported file with its content and metadata
  */
package/dist/github.link-transform.js
CHANGED
@@ -1,12 +1,12 @@
-import { slug } from
-import path from
+import { slug } from "github-slugger";
+import path from "node:path";
 /**
  * Extract anchor fragment from a link
  */
 function extractAnchor(link) {
     const anchorMatch = link.match(/#.*$/);
-    const anchor = anchorMatch ? anchorMatch[0] :
-    const path = link.replace(/#.*$/,
+    const anchor = anchorMatch ? anchorMatch[0] : "";
+    const path = link.replace(/#.*$/, "");
     return { path, anchor };
 }
 /**
@@ -22,9 +22,9 @@ function isExternalLink(link) {
         /^ftp:/.test(link) ||
         /^ftps:\/\//.test(link) ||
         // Any protocol with ://
-        link.includes(
+        link.includes("://") ||
         // Anchor-only links (same page)
-        link.startsWith(
+        link.startsWith("#") ||
         // Data URLs
         /^data:/.test(link) ||
         // File protocol
@@ -37,7 +37,9 @@ function normalizePath(linkPath, currentFilePath, logger) {
     logger?.debug(`[normalizePath] BEFORE: linkPath="${linkPath}", currentFilePath="${currentFilePath}"`);
     // Handle relative paths (including simple relative paths without ./ prefix)
     // A link is relative if it doesn't start with / or contain a protocol
-    const isAbsoluteOrExternal = linkPath.startsWith(
+    const isAbsoluteOrExternal = linkPath.startsWith("/") ||
+        linkPath.includes("://") ||
+        linkPath.startsWith("#");
     if (!isAbsoluteOrExternal) {
         const currentDir = path.dirname(currentFilePath);
         const resolved = path.posix.normalize(path.posix.join(currentDir, linkPath));
@@ -63,20 +65,20 @@ function applyLinkMappings(linkUrl, linkMappings, context) {
         // Handle relative links automatically if enabled
         if (mapping.relativeLinks && context.currentFile.linkContext) {
             // Check if this is a relative link (doesn't start with /, http, etc.)
-            if (!linkPath.startsWith(
+            if (!linkPath.startsWith("/") && !isExternalLink(linkPath)) {
                 // Check if the link points to a known directory structure
-                const knownPaths = [
-                const isKnownPath = knownPaths.some(p => linkPath.startsWith(p));
+                const knownPaths = ["modules/", "classes/", "interfaces/", "enums/"];
+                const isKnownPath = knownPaths.some((p) => linkPath.startsWith(p));
                 if (isKnownPath) {
                     // Strip .md extension from the link path
-                    const cleanLinkPath = linkPath.replace(/\.md$/,
+                    const cleanLinkPath = linkPath.replace(/\.md$/, "");
                     // Convert relative path to absolute path using the target base
                     const targetBase = generateSiteUrl(context.currentFile.linkContext.basePath, context.global.stripPrefixes);
                     // Construct final URL with proper Starlight formatting
-                    let finalUrl = targetBase.replace(/\/$/,
+                    let finalUrl = targetBase.replace(/\/$/, "") + "/" + cleanLinkPath;
                     // Add trailing slash if it doesn't end with one and isn't empty
-                    if (finalUrl && !finalUrl.endsWith(
-                        finalUrl +=
+                    if (finalUrl && !finalUrl.endsWith("/")) {
+                        finalUrl += "/";
                     }
                     transformedPath = finalUrl;
                     return transformedPath + anchor;
@@ -84,16 +86,17 @@ function applyLinkMappings(linkUrl, linkMappings, context) {
             }
         }
         let matched = false;
-        let replacement =
-        if (typeof mapping.pattern ===
+        let replacement = "";
+        if (typeof mapping.pattern === "string") {
             // String pattern - exact match or contains
             if (transformedPath.includes(mapping.pattern)) {
                 matched = true;
-                if (typeof mapping.replacement ===
+                if (typeof mapping.replacement === "string") {
                     replacement = transformedPath.replace(mapping.pattern, mapping.replacement);
                 }
                 else {
-
+                    const linkTransformContext = context.currentFile.linkContext ?? {};
+                    replacement = mapping.replacement(transformedPath, anchor, linkTransformContext);
                 }
             }
         }
@@ -102,11 +105,12 @@ function applyLinkMappings(linkUrl, linkMappings, context) {
             const match = transformedPath.match(mapping.pattern);
             if (match) {
                 matched = true;
-                if (typeof mapping.replacement ===
+                if (typeof mapping.replacement === "string") {
                     replacement = transformedPath.replace(mapping.pattern, mapping.replacement);
                 }
                 else {
-
+                    const linkTransformContext = context.currentFile.linkContext ?? {};
+                    replacement = mapping.replacement(transformedPath, anchor, linkTransformContext);
                 }
             }
         }
@@ -131,29 +135,31 @@ function generateSiteUrl(targetPath, stripPrefixes) {
         }
     }
     // Remove leading slash if present
-    url = url.replace(/^\//,
+    url = url.replace(/^\//, "");
     // Remove file extension
-    url = url.replace(/\.(md|mdx)$/i,
+    url = url.replace(/\.(md|mdx)$/i, "");
     // Handle index files - they should resolve to parent directory
-    if (url.endsWith(
-        url = url.replace(
+    if (url.endsWith("/index")) {
+        url = url.replace("/index", "");
     }
-    else if (url ===
-        url =
+    else if (url === "index") {
+        url = "";
     }
     // Split path into segments and slugify each
-    const segments = url
+    const segments = url
+        .split("/")
+        .map((segment) => (segment ? slug(segment) : ""));
     // Reconstruct URL
-    url = segments.filter(s => s).join(
+    url = segments.filter((s) => s).join("/");
     // Ensure leading slash
-    if (url && !url.startsWith(
-        url =
+    if (url && !url.startsWith("/")) {
+        url = "/" + url;
     }
     // Add trailing slash for non-empty paths
-    if (url && !url.endsWith(
-        url = url +
+    if (url && !url.endsWith("/")) {
+        url = url + "/";
     }
-    return url ||
+    return url || "/";
 }
 /**
  * Transform a single markdown link
@@ -177,7 +183,7 @@ function transformLink(linkText, linkUrl, context) {
     // Apply global path mappings to the normalized path
     let processedNormalizedPath = normalizedPath;
     if (context.global.linkMappings) {
-        const globalMappings = context.global.linkMappings.filter(m => m.global);
+        const globalMappings = context.global.linkMappings.filter((m) => m.global);
         if (globalMappings.length > 0) {
             processedNormalizedPath = applyLinkMappings(normalizedPath + anchor, globalMappings, context);
             // Extract path again after global mappings
@@ -188,8 +194,8 @@ function transformLink(linkText, linkUrl, context) {
     // Check if this links to an imported file
     let targetPath = context.global.sourceToTargetMap.get(normalizedPath);
     // If not found and path ends with /, try looking for index.md
-    if (!targetPath && normalizedPath.endsWith(
-        targetPath = context.global.sourceToTargetMap.get(normalizedPath +
+    if (!targetPath && normalizedPath.endsWith("/")) {
+        targetPath = context.global.sourceToTargetMap.get(normalizedPath + "index.md");
     }
     if (targetPath) {
         // This is an internal link to an imported file
@@ -198,10 +204,10 @@ function transformLink(linkText, linkUrl, context) {
     }
     // Apply non-global path mappings to unresolved links
     if (context.global.linkMappings) {
-        const nonGlobalMappings = context.global.linkMappings.filter(m => !m.global);
+        const nonGlobalMappings = context.global.linkMappings.filter((m) => !m.global);
         if (nonGlobalMappings.length > 0) {
             const mappedUrl = applyLinkMappings(processedNormalizedPath + anchor, nonGlobalMappings, context);
-            if (mappedUrl !==
+            if (mappedUrl !== processedNormalizedPath + anchor) {
                 return `[${linkText}](${mappedUrl})`;
             }
         }
@@ -218,7 +224,7 @@ function transformLink(linkText, linkUrl, context) {
     }
     // No transformation matched - strip .md extension from unresolved internal links
     // This handles links to files that weren't imported but should still use Starlight routing
-    const cleanPath = processedNormalizedPath.replace(/\.md$/i,
+    const cleanPath = processedNormalizedPath.replace(/\.md$/i, "");
     return `[${linkText}](${cleanPath + anchor})`;
 }
 /**
@@ -243,7 +249,7 @@ export function globalLinkTransform(importedFiles, options) {
     };
     // Transform links in all files
     const markdownLinkRegex = /\[([^\]]*)\]\(([^)]+)\)/g;
-    return importedFiles.map(file => ({
+    return importedFiles.map((file) => ({
         ...file,
         content: file.content.replace(markdownLinkRegex, (match, linkText, linkUrl) => {
             const linkContext = {
@@ -262,9 +268,7 @@ export function globalLinkTransform(importedFiles, options) {
  * @returns Inferred cross-section path (e.g., '/reference/api')
  */
 function inferCrossSectionPath(basePath) {
-    return basePath
-        .replace(/^src\/content\/docs/, '')
-        .replace(/\/$/, '') || '/';
+    return basePath.replace(/^src\/content\/docs/, "").replace(/\/$/, "") || "/";
 }
 /**
  * Generate link mappings automatically from pathMappings in include patterns
@@ -280,25 +284,29 @@ export function generateAutoLinkMappings(includes, stripPrefixes = []) {
     const inferredCrossSection = inferCrossSectionPath(includePattern.basePath);
     for (const [sourcePath, mappingValue] of Object.entries(includePattern.pathMappings)) {
         // Handle both string and enhanced object formats
-        const targetPath = typeof mappingValue ===
-        const crossSectionPath = typeof mappingValue ===
+        const targetPath = typeof mappingValue === "string" ? mappingValue : mappingValue.target;
+        const crossSectionPath = typeof mappingValue === "object" && mappingValue.crossSectionPath
             ? mappingValue.crossSectionPath
             : inferredCrossSection;
-        if (sourcePath.endsWith(
+        if (sourcePath.endsWith("/")) {
            // Folder mapping - use regex with capture group
-            const sourcePattern = sourcePath.replace(/[.*+?^${}()|[\]\\]/g,
+            const sourcePattern = sourcePath.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
            linkMappings.push({
                pattern: new RegExp(`^${sourcePattern}(.+)$`),
-                replacement: (transformedPath,
-                    const relativePath = transformedPath.replace(new RegExp(`^${sourcePattern}`),
+                replacement: (transformedPath, _anchor, _context) => {
+                    const relativePath = transformedPath.replace(new RegExp(`^${sourcePattern}`), "");
                    let finalPath;
-                    if (crossSectionPath && crossSectionPath !==
-                        finalPath =
-
-
+                    if (crossSectionPath && crossSectionPath !== "/") {
+                        finalPath =
+                            targetPath === ""
+                                ? `${crossSectionPath}/${relativePath}`
+                                : `${crossSectionPath}/${targetPath}${relativePath}`;
                    }
                    else {
-                        finalPath =
+                        finalPath =
+                            targetPath === ""
+                                ? relativePath
+                                : `${targetPath}${relativePath}`;
                    }
                    return generateSiteUrl(finalPath, stripPrefixes);
                },
@@ -307,11 +315,11 @@ export function generateAutoLinkMappings(includes, stripPrefixes = []) {
        }
        else {
            // File mapping - exact string match
-            const sourcePattern = sourcePath.replace(/[.*+?^${}()|[\]\\]/g,
+            const sourcePattern = sourcePath.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
            linkMappings.push({
                pattern: new RegExp(`^${sourcePattern}$`),
-                replacement: (
-                const finalPath = crossSectionPath && crossSectionPath !==
+                replacement: (_transformedPath, _anchor, _context) => {
+                    const finalPath = crossSectionPath && crossSectionPath !== "/"
                        ? `${crossSectionPath}/${targetPath}`
                        : targetPath;
                    return generateSiteUrl(finalPath, stripPrefixes);
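To make the auto-mapping behavior above concrete, here is a hypothetical include pattern and the URL a matching link would now resolve to; the repo layout and names are invented for illustration, and the root re-export of `generateAutoLinkMappings` is an assumption:

    import { generateAutoLinkMappings } from "@larkiny/astro-github-loader"; // assumed export path

    const includes = [
      {
        basePath: "src/content/docs/reference", // inferCrossSectionPath yields "/reference"
        pathMappings: {
          "docs/api/": "api/",          // folder mapping: becomes new RegExp("^docs/api/(.+)$")
          "docs/README.md": "index.md", // file mapping: exact-match regex
        },
      },
    ];
    const mappings = generateAutoLinkMappings(includes);
    // A link "docs/api/Overview.md" matches the folder mapping, yielding
    // "/reference/api/Overview.md", which generateSiteUrl normalizes to
    // "/reference/api/overview/": extension stripped, segments slugified,
    // leading and trailing slashes enforced.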
package/dist/github.loader.js
CHANGED
@@ -1,33 +1,7 @@
 import { toCollectionEntry } from "./github.content.js";
 import { performSelectiveCleanup } from "./github.cleanup.js";
-import { performDryRun, displayDryRunResults, updateImportState, loadImportState, createConfigId, getLatestCommitInfo } from "./github.dryrun.js";
-import { createLogger } from "./github.logger.js";
-/**
- * Performs selective cleanup for configurations with basePath
- * @param configs - Array of configuration objects
- * @param context - Loader context
- * @param octokit - GitHub API client
- * @internal
- */
-async function performSelectiveCleanups(configs, context, octokit) {
-    const results = [];
-    // Process each config sequentially to avoid overwhelming Astro's file watcher
-    for (const config of configs) {
-        if (config.enabled === false) {
-            context.logger.debug(`Skipping disabled config: ${config.name || `${config.owner}/${config.repo}`}`);
-            continue;
-        }
-        try {
-            const stats = await performSelectiveCleanup(config, context, octokit);
-            results.push(stats);
-        }
-        catch (error) {
-            context.logger.error(`Selective cleanup failed for ${config.name || `${config.owner}/${config.repo}`}: ${error}`);
-            // Continue with other configs even if one fails
-        }
-    }
-    return results;
-}
+import { performDryRun, displayDryRunResults, updateImportState, loadImportState, createConfigId, getLatestCommitInfo, } from "./github.dryrun.js";
+import { createLogger, } from "./github.logger.js";
 /**
  * Loads data from GitHub repositories based on the provided configurations and options.
  *
@@ -45,7 +19,7 @@ export function githubLoader({ octokit, configs, fetchOptions = {}, clear = fals
         name: "github-loader",
         load: async (context) => {
             // Create global logger with specified level or default
-            const globalLogger = createLogger(logLevel ||
+            const globalLogger = createLogger(logLevel || "default");
             if (dryRun) {
                 globalLogger.info("🔍 Dry run mode enabled - checking for changes only");
                 try {
@@ -56,14 +30,16 @@ export function githubLoader({ octokit, configs, fetchOptions = {}, clear = fals
                     return; // Exit without importing
                 }
                 catch (error) {
-                    globalLogger.error(`Dry run failed: ${error.message}`);
+                    globalLogger.error(`Dry run failed: ${error instanceof Error ? error.message : String(error)}`);
                     throw error;
                 }
            }
            globalLogger.debug(`Loading data from ${configs.length} sources`);
            // Log clear mode status - actual clearing happens per-entry in toCollectionEntry
            // to avoid breaking Astro's content collection by emptying the store all at once
-            globalLogger.info(clear
+            globalLogger.info(clear
+                ? "Processing with selective entry replacement"
+                : "Processing without entry replacement");
            // Process each config sequentially to avoid overwhelming GitHub API/CDN
            for (let i = 0; i < configs.length; i++) {
                const config = configs[i];
@@ -73,12 +49,15 @@ export function githubLoader({ octokit, configs, fetchOptions = {}, clear = fals
                }
                // Add small delay between configs to be gentler on GitHub's CDN
                if (i > 0) {
-                    await new Promise(resolve => setTimeout(resolve, 1000));
+                    await new Promise((resolve) => setTimeout(resolve, 1000));
                }
                // Determine the effective log level for this config
-                const effectiveLogLevel = logLevel || config.logLevel ||
+                const effectiveLogLevel = logLevel || config.logLevel || "default";
                const configLogger = createLogger(effectiveLogLevel);
-                const
+                const langSuffix = config.language ? ` (${config.language})` : "";
+                const configName = config.name
+                    ? `${config.name}${langSuffix}`
+                    : `${config.owner}/${config.repo}${langSuffix}`;
                const repository = `${config.owner}/${config.repo}`;
                let summary = {
                    configName,
@@ -88,7 +67,7 @@ export function githubLoader({ octokit, configs, fetchOptions = {}, clear = fals
                    filesUpdated: 0,
                    filesUnchanged: 0,
                    duration: 0,
-                    status:
+                    status: "error",
                };
                const startTime = Date.now();
                try {
@@ -96,24 +75,25 @@ export function githubLoader({ octokit, configs, fetchOptions = {}, clear = fals
                    const configId = createConfigId(config);
                    if (!force) {
                        try {
-                            const state = await loadImportState(process.cwd());
+                            const state = await loadImportState(process.cwd(), configLogger);
                            const currentState = state.imports[configId];
                            if (currentState && currentState.lastCommitSha) {
                                configLogger.debug(`🔍 Checking repository changes for ${configName}...`);
                                const latestCommit = await getLatestCommitInfo(octokit, config);
-                                if (latestCommit &&
+                                if (latestCommit &&
+                                    currentState.lastCommitSha === latestCommit.sha) {
                                    configLogger.info(`✅ Repository ${configName} unchanged (${latestCommit.sha.slice(0, 7)}) - skipping import`);
                                    // Update summary for unchanged repository
                                    summary.duration = Date.now() - startTime;
                                    summary.filesProcessed = 0;
                                    summary.filesUpdated = 0;
                                    summary.filesUnchanged = 0;
-                                    summary.status =
+                                    summary.status = "success";
                                    configLogger.logImportSummary(summary);
                                    continue; // Skip to next config
                                }
                                else if (latestCommit) {
-                                    configLogger.info(`🔄 Repository ${configName} changed (${currentState.lastCommitSha?.slice(0, 7) ||
+                                    configLogger.info(`🔄 Repository ${configName} changed (${currentState.lastCommitSha?.slice(0, 7) || "unknown"} -> ${latestCommit.sha.slice(0, 7)}) - proceeding with import`);
                                }
                            }
                            else {
@@ -142,7 +122,10 @@ export function githubLoader({ octokit, configs, fetchOptions = {}, clear = fals
                    }
                    // Perform the import with spinner
                    const stats = await globalLogger.withSpinner(`🔄 Importing ${configName}...`, () => toCollectionEntry({
-                        context: {
+                        context: {
+                            ...context,
+                            logger: configLogger,
+                        },
                        octokit,
                        options: config,
                        fetchOptions,
@@ -155,7 +138,7 @@ export function githubLoader({ octokit, configs, fetchOptions = {}, clear = fals
                    summary.filesUnchanged = stats?.unchanged || 0;
                    summary.assetsDownloaded = stats?.assetsDownloaded || 0;
                    summary.assetsCached = stats?.assetsCached || 0;
-                    summary.status =
+                    summary.status = "success";
                    // Log structured summary
                    configLogger.logImportSummary(summary);
                    // Update state tracking for future dry runs
@@ -164,11 +147,11 @@ export function githubLoader({ octokit, configs, fetchOptions = {}, clear = fals
                    const { data } = await octokit.rest.repos.listCommits({
                        owner: config.owner,
                        repo: config.repo,
-                        sha: config.ref ||
-                        per_page: 1
+                        sha: config.ref || "main",
+                        per_page: 1,
                    });
                    if (data.length > 0) {
-                        await updateImportState(process.cwd(), config, data[0].sha);
+                        await updateImportState(process.cwd(), config, data[0].sha, configLogger);
                    }
                }
                catch (error) {
@@ -178,8 +161,9 @@ export function githubLoader({ octokit, configs, fetchOptions = {}, clear = fals
                }
                catch (error) {
                    summary.duration = Date.now() - startTime;
-                    summary.status =
-                    summary.error =
+                    summary.status = "error";
+                    summary.error =
+                        error instanceof Error ? error.message : String(error);
                    configLogger.logImportSummary(summary);
                    // Continue with other configs even if one fails
                }