@larkiny/astro-github-loader 0.11.2 → 0.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +69 -61
- package/dist/github.assets.d.ts +70 -0
- package/dist/github.assets.js +253 -0
- package/dist/github.auth.js +13 -9
- package/dist/github.cleanup.d.ts +3 -2
- package/dist/github.cleanup.js +30 -23
- package/dist/github.constants.d.ts +0 -16
- package/dist/github.constants.js +0 -16
- package/dist/github.content.d.ts +6 -132
- package/dist/github.content.js +154 -789
- package/dist/github.dryrun.d.ts +9 -5
- package/dist/github.dryrun.js +46 -25
- package/dist/github.link-transform.d.ts +2 -2
- package/dist/github.link-transform.js +65 -57
- package/dist/github.loader.js +45 -51
- package/dist/github.logger.d.ts +2 -2
- package/dist/github.logger.js +33 -24
- package/dist/github.paths.d.ts +76 -0
- package/dist/github.paths.js +190 -0
- package/dist/github.storage.d.ts +15 -0
- package/dist/github.storage.js +109 -0
- package/dist/github.types.d.ts +41 -4
- package/dist/index.d.ts +8 -6
- package/dist/index.js +3 -6
- package/dist/test-helpers.d.ts +130 -0
- package/dist/test-helpers.js +194 -0
- package/package.json +3 -1
- package/src/github.assets.spec.ts +717 -0
- package/src/github.assets.ts +365 -0
- package/src/github.auth.spec.ts +245 -0
- package/src/github.auth.ts +24 -10
- package/src/github.cleanup.spec.ts +380 -0
- package/src/github.cleanup.ts +91 -47
- package/src/github.constants.ts +0 -17
- package/src/github.content.spec.ts +305 -454
- package/src/github.content.ts +261 -950
- package/src/github.dryrun.spec.ts +586 -0
- package/src/github.dryrun.ts +105 -54
- package/src/github.link-transform.spec.ts +1345 -0
- package/src/github.link-transform.ts +174 -95
- package/src/github.loader.spec.ts +75 -50
- package/src/github.loader.ts +113 -78
- package/src/github.logger.spec.ts +795 -0
- package/src/github.logger.ts +77 -35
- package/src/github.paths.spec.ts +523 -0
- package/src/github.paths.ts +259 -0
- package/src/github.storage.spec.ts +367 -0
- package/src/github.storage.ts +127 -0
- package/src/github.types.ts +55 -9
- package/src/index.ts +43 -6
- package/src/test-helpers.ts +215 -0
|
@@ -0,0 +1,259 @@
|
|
|
1
|
+
import path, { join, basename } from "node:path";
|
|
2
|
+
import picomatch from "picomatch";
|
|
3
|
+
import type {
|
|
4
|
+
ExtendedLoaderContext,
|
|
5
|
+
ImportOptions,
|
|
6
|
+
MatchedPattern,
|
|
7
|
+
} from "./github.types.js";
|
|
8
|
+
|
|
9
|
+
/**
 * Counters summarizing the outcome of an import run.
 * @internal
 */
export interface ImportStats {
  /** Number of files processed during the run. */
  processed: number;
  /** Number of files whose stored content was updated. */
  updated: number;
  /** Number of files left as-is because nothing changed. */
  unchanged: number;
  // NOTE(review): the two asset counters are optional — presumably only
  // populated when asset handling runs; confirm against the callers.
  /** Number of assets downloaded from the remote. */
  assetsDownloaded?: number;
  /** Number of assets served from the local cache. */
  assetsCached?: number;
}
|
|
16
|
+
|
|
17
|
+
/**
|
|
18
|
+
* Generates a unique identifier from a file path by removing the extension
|
|
19
|
+
* @param filePath - The file path to generate ID from
|
|
20
|
+
* @return {string} The generated identifier as a string with extension removed
|
|
21
|
+
* @internal
|
|
22
|
+
*/
|
|
23
|
+
export function generateId(filePath: string): string {
|
|
24
|
+
let id = filePath;
|
|
25
|
+
|
|
26
|
+
// Remove file extension for ID generation
|
|
27
|
+
const lastDotIndex = id.lastIndexOf(".");
|
|
28
|
+
if (lastDotIndex > 0) {
|
|
29
|
+
id = id.substring(0, lastDotIndex);
|
|
30
|
+
}
|
|
31
|
+
return id;
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
/**
|
|
35
|
+
* Applies path mapping logic to get the final filename for a file
|
|
36
|
+
*
|
|
37
|
+
* Supports two types of path mappings:
|
|
38
|
+
* - **File mapping**: Exact file path match (e.g., 'docs/README.md' -> 'docs/overview.md')
|
|
39
|
+
* - **Folder mapping**: Folder path with trailing slash (e.g., 'docs/capabilities/' -> 'docs/')
|
|
40
|
+
*
|
|
41
|
+
* @param filePath - Original source file path
|
|
42
|
+
* @param matchedPattern - The pattern that matched this file
|
|
43
|
+
* @param options - Import options containing path mappings
|
|
44
|
+
* @returns Final filename after applying path mapping logic
|
|
45
|
+
* @internal
|
|
46
|
+
*/
|
|
47
|
+
export function applyRename(
|
|
48
|
+
filePath: string,
|
|
49
|
+
matchedPattern?: MatchedPattern | null,
|
|
50
|
+
options?: ImportOptions,
|
|
51
|
+
): string {
|
|
52
|
+
if (
|
|
53
|
+
options?.includes &&
|
|
54
|
+
matchedPattern &&
|
|
55
|
+
matchedPattern.index < options.includes.length
|
|
56
|
+
) {
|
|
57
|
+
const includePattern = options.includes[matchedPattern.index];
|
|
58
|
+
|
|
59
|
+
if (includePattern.pathMappings) {
|
|
60
|
+
// First check for exact file match (current behavior - backwards compatible)
|
|
61
|
+
if (includePattern.pathMappings[filePath]) {
|
|
62
|
+
const mappingValue = includePattern.pathMappings[filePath];
|
|
63
|
+
return typeof mappingValue === "string"
|
|
64
|
+
? mappingValue
|
|
65
|
+
: mappingValue.target;
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
// Then check for folder-to-folder mappings
|
|
69
|
+
for (const [sourceFolder, mappingValue] of Object.entries(
|
|
70
|
+
includePattern.pathMappings,
|
|
71
|
+
)) {
|
|
72
|
+
// Check if this is a folder mapping (ends with /) and file is within it
|
|
73
|
+
if (sourceFolder.endsWith("/") && filePath.startsWith(sourceFolder)) {
|
|
74
|
+
// Replace the source folder path with target folder path
|
|
75
|
+
const targetFolder =
|
|
76
|
+
typeof mappingValue === "string"
|
|
77
|
+
? mappingValue
|
|
78
|
+
: mappingValue.target;
|
|
79
|
+
const relativePath = filePath.slice(sourceFolder.length);
|
|
80
|
+
return path.posix.join(targetFolder, relativePath);
|
|
81
|
+
}
|
|
82
|
+
}
|
|
83
|
+
}
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
// Return original filename if no path mapping found
|
|
87
|
+
return basename(filePath);
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
/**
|
|
91
|
+
* Generates a local file path based on the matched pattern and file path
|
|
92
|
+
* @param filePath - The original file path from the repository
|
|
93
|
+
* @param matchedPattern - The pattern that matched this file (or null if no includes specified)
|
|
94
|
+
* @param options - Import options containing includes patterns for path mapping lookups
|
|
95
|
+
* @return {string} The local file path where this content should be stored
|
|
96
|
+
* @internal
|
|
97
|
+
*/
|
|
98
|
+
export function generatePath(
|
|
99
|
+
filePath: string,
|
|
100
|
+
matchedPattern?: MatchedPattern | null,
|
|
101
|
+
options?: ImportOptions,
|
|
102
|
+
): string {
|
|
103
|
+
if (matchedPattern) {
|
|
104
|
+
// Extract the directory part from the pattern (before any glob wildcards)
|
|
105
|
+
const pattern = matchedPattern.pattern;
|
|
106
|
+
const beforeGlob = pattern.split(/[*?{]/)[0];
|
|
107
|
+
|
|
108
|
+
// Remove the pattern prefix from the file path to get the relative path
|
|
109
|
+
let relativePath = filePath;
|
|
110
|
+
if (beforeGlob && filePath.startsWith(beforeGlob)) {
|
|
111
|
+
relativePath = filePath.substring(beforeGlob.length);
|
|
112
|
+
// Remove leading slash if present
|
|
113
|
+
if (relativePath.startsWith("/")) {
|
|
114
|
+
relativePath = relativePath.substring(1);
|
|
115
|
+
}
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
// If no relative path remains, use just the filename
|
|
119
|
+
if (!relativePath) {
|
|
120
|
+
relativePath = basename(filePath);
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
// Apply path mapping logic
|
|
124
|
+
const finalFilename = applyRename(filePath, matchedPattern, options);
|
|
125
|
+
// Always apply path mapping if applyRename returned something different from the original basename
|
|
126
|
+
// OR if there are pathMappings configured (since empty string mappings might return same basename)
|
|
127
|
+
const hasPathMappings =
|
|
128
|
+
options?.includes?.[matchedPattern.index]?.pathMappings &&
|
|
129
|
+
Object.keys(options.includes[matchedPattern.index].pathMappings!).length >
|
|
130
|
+
0;
|
|
131
|
+
if (finalFilename !== basename(filePath) || hasPathMappings) {
|
|
132
|
+
// Check if applyRename returned a full path (contains path separators) or just a filename
|
|
133
|
+
if (finalFilename.includes("/") || finalFilename.includes("\\")) {
|
|
134
|
+
// applyRename returned a full relative path - need to extract relative part
|
|
135
|
+
// Remove the pattern prefix to get the relative path within the pattern context
|
|
136
|
+
const beforeGlob = pattern.split(/[*?{]/)[0];
|
|
137
|
+
if (beforeGlob && finalFilename.startsWith(beforeGlob)) {
|
|
138
|
+
relativePath = finalFilename.substring(beforeGlob.length);
|
|
139
|
+
// Remove leading slash if present
|
|
140
|
+
if (relativePath.startsWith("/")) {
|
|
141
|
+
relativePath = relativePath.substring(1);
|
|
142
|
+
}
|
|
143
|
+
} else {
|
|
144
|
+
relativePath = finalFilename;
|
|
145
|
+
}
|
|
146
|
+
} else {
|
|
147
|
+
// applyRename returned just a filename
|
|
148
|
+
// If the filename is different due to pathMapping, use it directly
|
|
149
|
+
// This handles cases where pathMappings flatten directory structures
|
|
150
|
+
relativePath = finalFilename;
|
|
151
|
+
}
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
return join(matchedPattern.basePath, relativePath);
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
// Should not happen since we always use includes
|
|
158
|
+
throw new Error("No matched pattern provided - includes are required");
|
|
159
|
+
}
|
|
160
|
+
|
|
161
|
+
/**
|
|
162
|
+
* Checks if a file path should be included and returns the matching pattern
|
|
163
|
+
* @param filePath - The file path to check (relative to the repository root)
|
|
164
|
+
* @param options - Import options containing includes patterns
|
|
165
|
+
* @returns Object with include status and matched pattern, or null if not included
|
|
166
|
+
* @internal
|
|
167
|
+
*/
|
|
168
|
+
export function shouldIncludeFile(
|
|
169
|
+
filePath: string,
|
|
170
|
+
options: ImportOptions,
|
|
171
|
+
):
|
|
172
|
+
| { included: true; matchedPattern: MatchedPattern | null }
|
|
173
|
+
| { included: false; matchedPattern: null } {
|
|
174
|
+
const { includes } = options;
|
|
175
|
+
|
|
176
|
+
// If no include patterns specified, include all files
|
|
177
|
+
if (!includes || includes.length === 0) {
|
|
178
|
+
return { included: true, matchedPattern: null };
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
// Check each include pattern to find a match
|
|
182
|
+
for (let i = 0; i < includes.length; i++) {
|
|
183
|
+
const includePattern = includes[i];
|
|
184
|
+
const matcher = picomatch(includePattern.pattern);
|
|
185
|
+
|
|
186
|
+
if (matcher(filePath)) {
|
|
187
|
+
return {
|
|
188
|
+
included: true,
|
|
189
|
+
matchedPattern: {
|
|
190
|
+
pattern: includePattern.pattern,
|
|
191
|
+
basePath: includePattern.basePath,
|
|
192
|
+
index: i,
|
|
193
|
+
},
|
|
194
|
+
};
|
|
195
|
+
}
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
// No patterns matched
|
|
199
|
+
return { included: false, matchedPattern: null };
|
|
200
|
+
}
|
|
201
|
+
|
|
202
|
+
/**
|
|
203
|
+
* Get the headers needed to make a conditional request.
|
|
204
|
+
* Uses the etag and last-modified values from the meta store.
|
|
205
|
+
* @internal
|
|
206
|
+
*/
|
|
207
|
+
export function getHeaders({
|
|
208
|
+
init,
|
|
209
|
+
meta,
|
|
210
|
+
id,
|
|
211
|
+
}: {
|
|
212
|
+
/** Initial headers to include */
|
|
213
|
+
init?: RequestInit["headers"];
|
|
214
|
+
/** Meta store to get etag and last-modified values from */
|
|
215
|
+
meta: ExtendedLoaderContext["meta"];
|
|
216
|
+
id: string;
|
|
217
|
+
}): Headers {
|
|
218
|
+
const tag = `${id}-etag`;
|
|
219
|
+
const lastModifiedTag = `${id}-last-modified`;
|
|
220
|
+
const etag = meta.get(tag);
|
|
221
|
+
const lastModified = meta.get(lastModifiedTag);
|
|
222
|
+
const headers = new Headers(init);
|
|
223
|
+
|
|
224
|
+
if (etag) {
|
|
225
|
+
headers.set("If-None-Match", etag);
|
|
226
|
+
} else if (lastModified) {
|
|
227
|
+
headers.set("If-Modified-Since", lastModified);
|
|
228
|
+
}
|
|
229
|
+
return headers;
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
/**
|
|
233
|
+
* Store the etag or last-modified headers from a response in the meta store.
|
|
234
|
+
* @internal
|
|
235
|
+
*/
|
|
236
|
+
export function syncHeaders({
|
|
237
|
+
headers,
|
|
238
|
+
meta,
|
|
239
|
+
id,
|
|
240
|
+
}: {
|
|
241
|
+
/** Headers from the response */
|
|
242
|
+
headers: Headers;
|
|
243
|
+
/** Meta store to store etag and last-modified values in */
|
|
244
|
+
meta: ExtendedLoaderContext["meta"];
|
|
245
|
+
/** id string */
|
|
246
|
+
id: string;
|
|
247
|
+
}) {
|
|
248
|
+
const etag = headers.get("etag");
|
|
249
|
+
const lastModified = headers.get("last-modified");
|
|
250
|
+
const tag = `${id}-etag`;
|
|
251
|
+
const lastModifiedTag = `${id}-last-modified`;
|
|
252
|
+
meta.delete(tag);
|
|
253
|
+
meta.delete(lastModifiedTag);
|
|
254
|
+
if (etag) {
|
|
255
|
+
meta.set(tag, etag);
|
|
256
|
+
} else if (lastModified) {
|
|
257
|
+
meta.set(lastModifiedTag, lastModified);
|
|
258
|
+
}
|
|
259
|
+
}
|
|
@@ -0,0 +1,367 @@
|
|
|
1
|
+
import { beforeEach, describe, it, expect, vi } from "vitest";
|
|
2
|
+
import { fileURLToPath, pathToFileURL } from "node:url";
|
|
3
|
+
import { syncFile, storeProcessedFile } from "./github.storage.js";
|
|
4
|
+
import { createMockContext } from "./test-helpers.js";
|
|
5
|
+
|
|
6
|
+
// Replace node:fs so the storage code under test exercises spies instead of
// touching the real filesystem. vitest hoists vi.mock above the imports, so
// this factory runs before github.storage.js resolves node:fs.
vi.mock("node:fs", async () => {
  const actual = await vi.importActual("node:fs");
  return {
    ...actual,
    // Default: pretend nothing exists on disk; individual tests override this.
    existsSync: vi.fn(() => false),
    promises: {
      mkdir: vi.fn(),
      writeFile: vi.fn(),
    },
  };
});

// Import the mocked fs after vi.mock so we can control return values per test
import { existsSync, promises as fs } from "node:fs";

// Typed handles onto the mock functions declared in the factory above.
const mockedExistsSync = existsSync as ReturnType<typeof vi.fn>;
const mockedMkdir = fs.mkdir as ReturnType<typeof vi.fn>;
const mockedWriteFile = fs.writeFile as ReturnType<typeof vi.fn>;

// Shared fixture: a minimal processed file as produced by the content pipeline.
const mockFile = {
  sourcePath: "docs/guide.md",
  targetPath: "src/content/docs/guide.md",
  content: "# Guide\nContent here",
  id: "docs/guide",
};
|
|
31
|
+
|
|
32
|
+
describe("syncFile", () => {
  beforeEach(() => {
    // Reset spies and restore the default "nothing on disk" behavior.
    vi.restoreAllMocks();
    mockedExistsSync.mockReturnValue(false);
    mockedMkdir.mockResolvedValue(undefined);
    mockedWriteFile.mockResolvedValue(undefined);
  });

  it("creates directory and writes file when directory does not exist", async () => {
    await syncFile("some/nested/dir/file.md", "content");

    // The parent directory is probed, created recursively, then the file
    // itself is written as UTF-8.
    expect(mockedExistsSync).toHaveBeenCalledWith("some/nested/dir");
    expect(mockedMkdir).toHaveBeenCalledWith("some/nested/dir", {
      recursive: true,
    });
    expect(mockedWriteFile).toHaveBeenCalledWith(
      "some/nested/dir/file.md",
      "content",
      "utf-8",
    );
  });

  it("skips mkdir when directory already exists", async () => {
    mockedExistsSync.mockReturnValue(true);

    await syncFile("existing/dir/file.md", "content");

    expect(mockedExistsSync).toHaveBeenCalledWith("existing/dir");
    expect(mockedMkdir).not.toHaveBeenCalled();
    expect(mockedWriteFile).toHaveBeenCalledWith(
      "existing/dir/file.md",
      "content",
      "utf-8",
    );
  });

  it("skips mkdir when path has no directory component", async () => {
    await syncFile("file.md", "content");

    // dir is "" which is falsy, so existsSync should not be called for dir check
    expect(mockedMkdir).not.toHaveBeenCalled();
    expect(mockedWriteFile).toHaveBeenCalledWith(
      "file.md",
      "content",
      "utf-8",
    );
  });

  it("writes content to the specified path", async () => {
    // Multi-line markdown content should be passed through verbatim.
    const longContent = "# Title\n\nParagraph with **bold** and _italic_.";
    await syncFile("output/test.md", longContent);

    expect(mockedWriteFile).toHaveBeenCalledWith(
      "output/test.md",
      longContent,
      "utf-8",
    );
  });
});
|
|
91
|
+
|
|
92
|
+
describe("storeProcessedFile", () => {
  beforeEach(() => {
    // Reset spies and restore the default "nothing on disk" behavior.
    vi.restoreAllMocks();
    mockedExistsSync.mockReturnValue(false);
    mockedMkdir.mockResolvedValue(undefined);
    mockedWriteFile.mockResolvedValue(undefined);
  });

  it("stores a basic entry without getRenderFunction or contentModuleTypes", async () => {
    const ctx = createMockContext();

    const result = await storeProcessedFile(mockFile, ctx as any, false);

    // The stored entry mirrors the input file; digest defaults to the
    // content length (see the mock context's generateDigest).
    const stored = ctx._store.get("docs/guide");
    expect(stored).toBeDefined();
    expect(stored.id).toBe("docs/guide");
    expect(stored.body).toBe(mockFile.content);
    expect(stored.filePath).toBe(mockFile.targetPath);
    expect(stored.digest).toBe(String(mockFile.content.length));
    expect(stored.rendered).toBeUndefined();
    expect(stored.deferredRender).toBeUndefined();
    expect(result).toEqual({
      id: "docs/guide",
      filePath: "src/content/docs/guide.md",
    });
  });

  it("stores entry with rendered content when getRenderFunction is present", async () => {
    const ctx = createMockContext();
    // Entry type whose render function resolves to fixed HTML.
    ctx.entryTypes.set(".md", {
      getEntryInfo: async ({ contents }: any) => ({
        body: contents,
        data: {},
      }),
      getRenderFunction: async () => async () => ({
        html: "<p>rendered</p>",
      }),
    });

    await storeProcessedFile(mockFile, ctx as any, false);

    const stored = ctx._store.get("docs/guide");
    expect(stored.rendered).toEqual({ html: "<p>rendered</p>" });
  });

  it("logs error and stores undefined rendered when getRenderFunction throws", async () => {
    const ctx = createMockContext();
    ctx.entryTypes.set(".md", {
      getEntryInfo: async ({ contents }: any) => ({
        body: contents,
        data: {},
      }),
      // Render function that always fails — the error must be logged, not thrown.
      getRenderFunction: async () => async () => {
        throw new Error("render failed");
      },
    });

    await storeProcessedFile(mockFile, ctx as any, false);

    expect(ctx.logger.error).toHaveBeenCalledWith(
      expect.stringContaining("render failed"),
    );
    // The entry is still stored, just without rendered output.
    const stored = ctx._store.get("docs/guide");
    expect(stored).toBeDefined();
    expect(stored.rendered).toBeUndefined();
  });

  it("sets deferredRender when entry type has contentModuleTypes", async () => {
    const ctx = createMockContext();
    ctx.entryTypes.set(".md", {
      getEntryInfo: async ({ contents }: any) => ({
        body: contents,
        data: {},
      }),
      // Presence of contentModuleTypes signals deferred rendering.
      contentModuleTypes: "some types",
    });

    await storeProcessedFile(mockFile, ctx as any, false);

    const stored = ctx._store.get("docs/guide");
    expect(stored.deferredRender).toBe(true);
    expect(stored.rendered).toBeUndefined();
  });

  it("deletes existing entry before setting when clear is true", async () => {
    const ctx = createMockContext();
    // Pre-populate the store with an existing entry
    ctx.store.set({
      id: "docs/guide",
      data: {},
      body: "old content",
      filePath: "old/path.md",
      digest: "0",
    });

    const deleteSpy = vi.spyOn(ctx.store, "delete");

    await storeProcessedFile(mockFile, ctx as any, true);

    expect(deleteSpy).toHaveBeenCalledWith("docs/guide");
    const stored = ctx._store.get("docs/guide");
    expect(stored.body).toBe(mockFile.content);
  });

  it("does not call delete when clear is true but entry does not exist", async () => {
    const ctx = createMockContext();
    const deleteSpy = vi.spyOn(ctx.store, "delete");

    await storeProcessedFile(mockFile, ctx as any, true);

    expect(deleteSpy).not.toHaveBeenCalled();
    const stored = ctx._store.get("docs/guide");
    expect(stored).toBeDefined();
  });

  it("skips writing file to disk when file already exists", async () => {
    const ctx = createMockContext();
    // Make existsSync report true only for the target file's resolved path.
    const expectedFileUrl = pathToFileURL(mockFile.targetPath);
    mockedExistsSync.mockImplementation((path: string) => {
      return path === fileURLToPath(expectedFileUrl);
    });

    await storeProcessedFile(mockFile, ctx as any, false);

    expect(mockedWriteFile).not.toHaveBeenCalled();
    // Entry should still be stored
    expect(ctx._store.get("docs/guide")).toBeDefined();
  });

  it("writes file to disk when file does not exist", async () => {
    const ctx = createMockContext();
    mockedExistsSync.mockReturnValue(false);

    await storeProcessedFile(mockFile, ctx as any, false);

    expect(mockedWriteFile).toHaveBeenCalled();
    expect(ctx.logger.verbose).toHaveBeenCalledWith(
      expect.stringContaining("Writing docs/guide"),
    );
  });

  it("throws when no entry type is found for the file extension", async () => {
    const ctx = createMockContext();
    // .yaml has no registered entry type in the mock context.
    const unknownFile = {
      ...mockFile,
      sourcePath: "docs/data.yaml",
    };

    await expect(
      storeProcessedFile(unknownFile, ctx as any, false),
    ).rejects.toThrow("No entry type found");
  });

  it("warns and throws when source path has no extension", async () => {
    const ctx = createMockContext();
    const noExtFile = {
      ...mockFile,
      sourcePath: "Makefile",
    };

    // "Makefile" split by "." yields ["Makefile"], .at(-1) = "Makefile"
    // entryTypes won't have ".Makefile", so it throws
    await expect(
      storeProcessedFile(noExtFile, ctx as any, false),
    ).rejects.toThrow("No entry type found");
  });

  it("returns id and filePath on success", async () => {
    const ctx = createMockContext();

    const result = await storeProcessedFile(mockFile, ctx as any, false);

    expect(result).toEqual({
      id: "docs/guide",
      filePath: "src/content/docs/guide.md",
    });
  });

  it("falls back to tmp.md when sourcePath is empty", async () => {
    const ctx = createMockContext();
    const fileWithoutSource = {
      ...mockFile,
      sourcePath: "",
    };

    // Empty sourcePath falls back to "tmp.md", which resolves to .md entry type
    const result = await storeProcessedFile(
      fileWithoutSource,
      ctx as any,
      false,
    );

    expect(result).toEqual({
      id: "docs/guide",
      filePath: "src/content/docs/guide.md",
    });
  });

  it("uses file content for digest generation", async () => {
    const ctx = createMockContext();
    // Override the context's digest function to observe its input.
    const digestSpy = vi.fn((content: string) => `digest-${content.length}`);
    ctx.generateDigest = digestSpy;

    await storeProcessedFile(mockFile, ctx as any, false);

    expect(digestSpy).toHaveBeenCalledWith(mockFile.content);
    const stored = ctx._store.get("docs/guide");
    expect(stored.digest).toBe(`digest-${mockFile.content.length}`);
  });

  it("logs debug message for existing entry update", async () => {
    const ctx = createMockContext();
    // Pre-populate so it's an update
    ctx.store.set({
      id: "docs/guide",
      data: {},
      body: "old",
      filePath: "old.md",
      digest: "0",
    });

    await storeProcessedFile(mockFile, ctx as any, false);

    expect(ctx.logger.debug).toHaveBeenCalledWith(
      expect.stringContaining("updating"),
    );
  });

  it("logs debug message for new entry addition", async () => {
    const ctx = createMockContext();

    await storeProcessedFile(mockFile, ctx as any, false);

    expect(ctx.logger.debug).toHaveBeenCalledWith(
      expect.stringContaining("adding"),
    );
  });

  it("passes file content and fileUrl to getEntryInfo", async () => {
    const ctx = createMockContext();
    const getEntryInfoSpy = vi.fn(async ({ contents }: any) => ({
      body: contents,
      data: { title: "Test" },
    }));
    ctx.entryTypes.set(".md", { getEntryInfo: getEntryInfoSpy });

    await storeProcessedFile(mockFile, ctx as any, false);

    // getEntryInfo receives the raw content plus the target path as a file URL.
    expect(getEntryInfoSpy).toHaveBeenCalledWith({
      contents: mockFile.content,
      fileUrl: pathToFileURL(mockFile.targetPath),
    });
    const stored = ctx._store.get("docs/guide");
    expect(stored.body).toBe(mockFile.content);
  });

  it("passes config to getRenderFunction", async () => {
    const ctx = createMockContext();
    const mockConfig = { root: "/project" };
    ctx.config = mockConfig;
    const getRenderFunctionSpy = vi
      .fn()
      .mockResolvedValue(async () => ({ html: "<p>ok</p>" }));
    ctx.entryTypes.set(".md", {
      getEntryInfo: async ({ contents }: any) => ({
        body: contents,
        data: {},
      }),
      getRenderFunction: getRenderFunctionSpy,
    });

    await storeProcessedFile(mockFile, ctx as any, false);

    // The Astro config object must be forwarded to the render-function factory.
    expect(getRenderFunctionSpy).toHaveBeenCalledWith(mockConfig);
  });
});
|