@larkiny/astro-github-loader 0.11.3 → 0.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +28 -55
- package/dist/github.assets.d.ts +70 -0
- package/dist/github.assets.js +253 -0
- package/dist/github.auth.js +13 -9
- package/dist/github.cleanup.d.ts +3 -2
- package/dist/github.cleanup.js +30 -23
- package/dist/github.constants.d.ts +0 -16
- package/dist/github.constants.js +0 -16
- package/dist/github.content.d.ts +5 -131
- package/dist/github.content.js +152 -794
- package/dist/github.dryrun.d.ts +9 -5
- package/dist/github.dryrun.js +46 -25
- package/dist/github.link-transform.d.ts +2 -2
- package/dist/github.link-transform.js +65 -57
- package/dist/github.loader.js +30 -46
- package/dist/github.logger.d.ts +2 -2
- package/dist/github.logger.js +33 -24
- package/dist/github.paths.d.ts +76 -0
- package/dist/github.paths.js +190 -0
- package/dist/github.storage.d.ts +15 -0
- package/dist/github.storage.js +109 -0
- package/dist/github.types.d.ts +34 -4
- package/dist/index.d.ts +8 -6
- package/dist/index.js +3 -6
- package/dist/test-helpers.d.ts +130 -0
- package/dist/test-helpers.js +194 -0
- package/package.json +3 -1
- package/src/github.assets.spec.ts +717 -0
- package/src/github.assets.ts +365 -0
- package/src/github.auth.spec.ts +245 -0
- package/src/github.auth.ts +24 -10
- package/src/github.cleanup.spec.ts +380 -0
- package/src/github.cleanup.ts +91 -47
- package/src/github.constants.ts +0 -17
- package/src/github.content.spec.ts +305 -454
- package/src/github.content.ts +259 -957
- package/src/github.dryrun.spec.ts +586 -0
- package/src/github.dryrun.ts +105 -54
- package/src/github.link-transform.spec.ts +1345 -0
- package/src/github.link-transform.ts +174 -95
- package/src/github.loader.spec.ts +75 -50
- package/src/github.loader.ts +101 -76
- package/src/github.logger.spec.ts +795 -0
- package/src/github.logger.ts +77 -35
- package/src/github.paths.spec.ts +523 -0
- package/src/github.paths.ts +259 -0
- package/src/github.storage.spec.ts +367 -0
- package/src/github.storage.ts +127 -0
- package/src/github.types.ts +48 -9
- package/src/index.ts +43 -6
- package/src/test-helpers.ts +215 -0
package/src/github.storage.ts
ADDED
@@ -0,0 +1,127 @@
+import { existsSync, promises as fs } from "node:fs";
+import { fileURLToPath, pathToFileURL } from "node:url";
+import type { ImportedFile } from "./github.link-transform.js";
+import type { ExtendedLoaderContext } from "./github.types.js";
+
+/**
+ * Ensures directory exists and writes file to disk.
+ * @internal
+ */
+export async function syncFile(path: string, content: string) {
+  const dir = path.substring(0, path.lastIndexOf("/"));
+  if (dir && !existsSync(dir)) {
+    await fs.mkdir(dir, { recursive: true });
+  }
+  await fs.writeFile(path, content, "utf-8");
+}
+
+/**
+ * Stores a processed file in Astro's content store
+ * @internal
+ */
+export async function storeProcessedFile(
+  file: ImportedFile,
+  context: ExtendedLoaderContext,
+  clear: boolean,
+) {
+  const { store, generateDigest, entryTypes, logger, parseData, config } =
+    context;
+
+  function configForFile(filePath: string) {
+    const ext = filePath.split(".").at(-1);
+    if (!ext) {
+      logger.warn(`No extension found for ${filePath}`);
+      return;
+    }
+    return entryTypes?.get(`.${ext}`);
+  }
+
+  const entryType = configForFile(file.sourcePath || "tmp.md");
+  if (!entryType) throw new Error("No entry type found");
+
+  const fileUrl = pathToFileURL(file.targetPath);
+  const { body, data } = await entryType.getEntryInfo({
+    contents: file.content,
+    fileUrl: fileUrl,
+  });
+
+  // Generate digest for storage (repository-level caching handles change detection)
+  const digest = generateDigest(file.content);
+  const existingEntry = store.get(file.id);
+
+  if (existingEntry) {
+    logger.debug(`🔄 File ${file.id} - updating`);
+  } else {
+    logger.debug(`📄 File ${file.id} - adding`);
+  }
+
+  // Write file to disk
+  if (!existsSync(fileURLToPath(fileUrl))) {
+    logger.verbose(`Writing ${file.id} to ${fileUrl}`);
+    await syncFile(fileURLToPath(fileUrl), file.content);
+  }
+
+  const parsedData = await parseData({
+    id: file.id,
+    data,
+    filePath: fileUrl.toString(),
+  });
+
+  // When clear mode is enabled, delete the existing entry before setting the new one.
+  // This provides atomic replacement without breaking Astro's content collection,
+  // as opposed to calling store.clear() which empties everything at once.
+  if (clear && existingEntry) {
+    logger.debug(`🗑️ Clearing existing entry before replacement: ${file.id}`);
+    store.delete(file.id);
+  }
+
+  // Store in content store
+  if (entryType.getRenderFunction) {
+    logger.verbose(`Rendering ${file.id}`);
+    const render = await entryType.getRenderFunction(config);
+    let rendered = undefined;
+    try {
+      rendered = await render?.({
+        id: file.id,
+        data,
+        body,
+        filePath: fileUrl.toString(),
+        digest,
+      });
+    } catch (error: unknown) {
+      logger.error(
+        `Error rendering ${file.id}: ${error instanceof Error ? error.message : String(error)}`,
+      );
+    }
+    logger.debug(
+      `🔍 Storing collection entry: ${file.id} (${file.sourcePath} -> ${file.targetPath})`,
+    );
+    store.set({
+      id: file.id,
+      data: parsedData,
+      body,
+      filePath: file.targetPath,
+      digest,
+      rendered,
+    });
+  } else if ("contentModuleTypes" in entryType) {
+    store.set({
+      id: file.id,
+      data: parsedData,
+      body,
+      filePath: file.targetPath,
+      digest,
+      deferredRender: true,
+    });
+  } else {
+    store.set({
+      id: file.id,
+      data: parsedData,
+      body,
+      filePath: file.targetPath,
+      digest,
+    });
+  }
+
+  return { id: file.id, filePath: file.targetPath };
+}
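The storage helpers above are marked @internal, but the write path is easy to read off `syncFile`: create the parent directory if it is missing, then write the file as UTF-8; `storeProcessedFile` resolves to `{ id, filePath }` for the stored entry. A minimal sketch of the write step as it might be exercised from inside the package (the target path is illustrative, and the relative import only makes sense within the package source):

```ts
// Sketch only: syncFile is @internal and not re-exported from the package root.
import { syncFile } from "./github.storage.js";

// Creates src/content/docs/imported/ if it does not exist, then writes the file.
await syncFile(
  "src/content/docs/imported/getting-started.md",
  "# Getting Started\n",
);
```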
package/src/github.types.ts
CHANGED
@@ -3,12 +3,12 @@ import type {
   LoaderContext as AstroLoaderContext,
 } from "astro/loaders";
 import type { ContentEntryType } from "astro";
-import type {MarkdownHeading} from "@astrojs/markdown-remark";
-import {Octokit} from "octokit";
+import type { MarkdownHeading } from "@astrojs/markdown-remark";
+import { Octokit } from "octokit";
 
 // Import link transformation types from the dedicated module
 import type { LinkHandler } from "./github.link-transform.js";
-import type { LogLevel } from "./github.logger.js";
+import type { LogLevel, Logger } from "./github.logger.js";
 
 /**
  * Context information for link transformations
@@ -33,7 +33,13 @@ export interface LinkMapping {
   /** Pattern to match (string or regex) */
   pattern: string | RegExp;
   /** Replacement string or function */
-  replacement:
+  replacement:
+    | string
+    | ((
+        match: string,
+        anchor: string,
+        context: LinkTransformContext,
+      ) => string);
   /** Apply to all links, not just unresolved internal links (default: false) */
   global?: boolean;
   /** Function to determine if this mapping should apply to the current file context */
@@ -86,7 +92,10 @@ export interface TransformContext {
  * @param context - Context information about the file being processed
  * @returns The transformed content
  */
-export type TransformFunction = (
+export type TransformFunction = (
+  content: string,
+  context: TransformContext,
+) => string;
 
 /**
  * Enhanced path mapping configuration that supports cross-section linking
@@ -139,7 +148,6 @@ export interface IncludePattern {
   pathMappings?: Record<string, PathMappingValue>;
 }
 
-
 export type GithubLoaderOptions = {
   octokit: Octokit;
   configs: Array<ImportOptions>;
@@ -179,7 +187,7 @@ export type CollectionEntryOptions = {
    * The LoaderContext may contain properties and methods that offer
    * control or inspection over the loading behavior.
    */
-  context:
+  context: ExtendedLoaderContext;
   /**
    * An instance of the Octokit library, which provides a way to interact
    * with GitHub's REST API. This variable allows you to access and perform
@@ -244,6 +252,17 @@ export interface RenderedContent {
   };
 }
 
+/**
+ * Represents a version of a library variant to display in the devportal's version picker.
+ * Versions are manually curated in the import config — no auto-discovery.
+ */
+export interface VersionConfig {
+  /** URL segment for this version (e.g., "latest", "v8.0.0") */
+  slug: string;
+  /** Display name for this version (e.g., "Latest", "v8.0.0") */
+  label: string;
+}
+
 /**
  * Represents configuration options for importing content from GitHub repositories.
  */
@@ -305,6 +324,17 @@ export type ImportOptions = {
    * @default 'default'
    */
   logLevel?: LogLevel;
+  /**
+   * Language for this import variant (e.g., "TypeScript", "Python", "Go").
+   * Used for logging and passed through to the devportal for UI display.
+   */
+  language?: string;
+  /**
+   * Versions to display in the devportal's version picker.
+   * Informational — tells the loader which version folders exist in the source content.
+   * The loader imports content as-is; the version folder structure carries through from source to destination.
+   */
+  versions?: VersionConfig[];
 };
 
 export type FetchOptions = RequestInit & {
@@ -313,13 +343,23 @@ export type FetchOptions = RequestInit & {
 };
 
 /**
- *
+ * Astro loader context extended with optional entry type support.
+ * Use this type when calling `.load(context as LoaderContext)` in multi-loader patterns.
  */
 export interface LoaderContext extends AstroLoaderContext {
   /** @internal */
   entryTypes?: Map<string, ContentEntryType>;
 }
 
+/**
+ * LoaderContext with Astro's logger replaced by our Logger class.
+ * Used by internal functions that need verbose/logFileProcessing/etc.
+ * @internal
+ */
+export type ExtendedLoaderContext = Omit<LoaderContext, "logger"> & {
+  logger: Logger;
+};
+
 /**
  * @internal
  */
@@ -328,7 +368,6 @@ export interface Loader extends AstroLoader {
   load: (context: LoaderContext) => Promise<void>;
 }
 
-
 /**
  * Statistics for a sync operation
  */
package/src/index.ts
CHANGED
@@ -1,6 +1,43 @@
-
-export
-export
-
-
-
+// Public API — functions
+export { githubLoader } from "./github.loader.js";
+export {
+  createAuthenticatedOctokit,
+  createOctokitFromEnv,
+} from "./github.auth.js";
+
+// Public API — types: loader config
+export type {
+  GithubLoaderOptions,
+  ImportOptions,
+  FetchOptions,
+  IncludePattern,
+  PathMappingValue,
+  EnhancedPathMapping,
+  VersionConfig,
+  LoaderContext,
+} from "./github.types.js";
+
+// Public API — types: transforms
+export type {
+  TransformFunction,
+  TransformContext,
+  MatchedPattern,
+} from "./github.types.js";
+
+// Public API — types: link transforms
+export type {
+  LinkMapping,
+  LinkTransformContext,
+  ImportLinkTransformOptions,
+} from "./github.types.js";
+export type { LinkHandler } from "./github.link-transform.js";
+
+// Public API — types: auth
+export type {
+  GitHubAuthConfig,
+  GitHubAppAuthConfig,
+  GitHubPATAuthConfig,
+} from "./github.auth.js";
+
+// Public API — types: logging
+export type { LogLevel } from "./github.logger.js";
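With the public surface now spelled out in `src/index.ts`, a consumer only imports from the package root. A minimal sketch of wiring the loader into an Astro content collection, assuming Astro 5's content layer, that `githubLoader` accepts a `GithubLoaderOptions` object, and that `octokit` and `configs` are the only options needed (the exported types suggest this, but the loader's signature is not part of this diff); the collection name and token variable are placeholders:

```ts
// src/content.config.ts (sketch)
import { defineCollection } from "astro:content";
import { Octokit } from "octokit";
import { githubLoader, type ImportOptions } from "@larkiny/astro-github-loader";

const configs: Array<ImportOptions> = [
  // one entry per imported repository/variant (fields omitted here)
];

const docs = defineCollection({
  loader: githubLoader({
    octokit: new Octokit({ auth: process.env.GITHUB_TOKEN }),
    configs,
  }),
});

export const collections = { docs };
```

The package also exports `createAuthenticatedOctokit` and `createOctokitFromEnv` for building the Octokit instance from GitHub App or PAT credentials, as the `GitHubAppAuthConfig`/`GitHubPATAuthConfig` types indicate.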
package/src/test-helpers.ts
ADDED
@@ -0,0 +1,215 @@
+/**
+ * Shared test helpers for astro-github-loader test suite.
+ * Provides factory functions for creating mock Astro loader contexts,
+ * Octokit instances with pre-configured spies, and common fixtures.
+ */
+import { vi } from "vitest";
+import { Octokit } from "octokit";
+import type { ImportOptions } from "./github.types.js";
+
+/**
+ * Creates a mock Astro LoaderContext with all required properties.
+ * The returned store is a real Map wrapped in the store interface,
+ * so tests can inspect stored entries directly.
+ */
+export function createMockContext() {
+  const store = new Map<string, any>();
+  const meta = new Map<string, string>();
+
+  return {
+    store: {
+      set: (entry: any) => {
+        store.set(entry.id, entry);
+        return entry;
+      },
+      get: (id: string) => store.get(id),
+      delete: (id: string) => store.delete(id),
+      clear: () => store.clear(),
+      entries: () => store.entries(),
+      keys: () => store.keys(),
+      values: () => store.values(),
+    },
+    meta,
+    logger: {
+      info: vi.fn(),
+      warn: vi.fn(),
+      error: vi.fn(),
+      debug: vi.fn(),
+      verbose: vi.fn(),
+      logFileProcessing: vi.fn(),
+      logImportSummary: vi.fn(),
+      logAssetProcessing: vi.fn(),
+      withSpinner: async (_msg: string, fn: () => Promise<any>) => await fn(),
+      getLevel: () => "default" as const,
+    },
+    config: {},
+    entryTypes: new Map([
+      [
+        ".md",
+        {
+          getEntryInfo: async ({
+            contents,
+          }: {
+            contents: string;
+            fileUrl: URL;
+          }) => ({
+            body: contents,
+            data: {},
+          }),
+        },
+      ],
+    ]),
+    generateDigest: (content: string) => String(content.length),
+    parseData: async (data: any) => data,
+    /** Direct access to the underlying store Map for assertions */
+    _store: store,
+    /** Direct access to the underlying meta Map for assertions */
+    _meta: meta,
+  };
+}
+
+/** Standard mock commit used across tests */
+export const MOCK_COMMIT = {
+  sha: "abc123def456",
+  commit: {
+    tree: { sha: "tree123abc456" },
+    message: "Test commit",
+    author: {
+      name: "Test Author",
+      email: "test@example.com",
+      date: "2024-01-01T00:00:00Z",
+    },
+    committer: {
+      name: "Test Committer",
+      email: "test@example.com",
+      date: "2024-01-01T00:00:00Z",
+    },
+  },
+};
+
+/** Mock tree data representing a typical repository structure */
+export const MOCK_TREE_DATA = {
+  sha: "tree123abc456",
+  url: "https://api.github.com/repos/test/repo/git/trees/tree123abc456",
+  tree: [
+    {
+      path: "docs/algokit.md",
+      mode: "100644",
+      type: "blob",
+      sha: "file1sha",
+      size: 1234,
+      url: "https://api.github.com/repos/test/repo/git/blobs/file1sha",
+    },
+    {
+      path: "docs/features",
+      mode: "040000",
+      type: "tree",
+      sha: "dir1sha",
+      url: "https://api.github.com/repos/test/repo/git/trees/dir1sha",
+    },
+    {
+      path: "docs/features/accounts.md",
+      mode: "100644",
+      type: "blob",
+      sha: "file2sha",
+      size: 2345,
+      url: "https://api.github.com/repos/test/repo/git/blobs/file2sha",
+    },
+    {
+      path: "docs/features/tasks.md",
+      mode: "100644",
+      type: "blob",
+      sha: "file3sha",
+      size: 3456,
+      url: "https://api.github.com/repos/test/repo/git/blobs/file3sha",
+    },
+    {
+      path: "docs/features/generate.md",
+      mode: "100644",
+      type: "blob",
+      sha: "file4sha",
+      size: 4567,
+      url: "https://api.github.com/repos/test/repo/git/blobs/file4sha",
+    },
+    {
+      path: "docs/cli/index.md",
+      mode: "100644",
+      type: "blob",
+      sha: "file5sha",
+      size: 5678,
+      url: "https://api.github.com/repos/test/repo/git/blobs/file5sha",
+    },
+    {
+      path: "README.md",
+      mode: "100644",
+      type: "blob",
+      sha: "file6sha",
+      size: 678,
+      url: "https://api.github.com/repos/test/repo/git/blobs/file6sha",
+    },
+    {
+      path: "package.json",
+      mode: "100644",
+      type: "blob",
+      sha: "file7sha",
+      size: 789,
+      url: "https://api.github.com/repos/test/repo/git/blobs/file7sha",
+    },
+  ],
+  truncated: false,
+};
+
+/**
+ * Creates an Octokit instance with mocked API methods for listCommits and getTree.
+ * Returns both the instance and the spies for assertions.
+ */
+export function createMockOctokit(options?: {
+  treeData?: typeof MOCK_TREE_DATA;
+  commitData?: typeof MOCK_COMMIT;
+}) {
+  const octokit = new Octokit({ auth: "mock-token" });
+  const commit = options?.commitData ?? MOCK_COMMIT;
+  const tree = options?.treeData ?? MOCK_TREE_DATA;
+
+  const listCommitsSpy = vi
+    .spyOn(octokit.rest.repos, "listCommits")
+    .mockResolvedValue({
+      data: [commit],
+      status: 200,
+      url: "",
+      headers: {},
+    } as any);
+
+  const getTreeSpy = vi.spyOn(octokit.rest.git, "getTree").mockResolvedValue({
+    data: tree,
+    status: 200,
+    url: "",
+    headers: {},
+  } as any);
+
+  const getContentSpy = vi
+    .spyOn(octokit.rest.repos, "getContent")
+    .mockResolvedValue({ data: [], status: 200, url: "", headers: {} } as any);
+
+  return {
+    octokit,
+    spies: { listCommitsSpy, getTreeSpy, getContentSpy },
+  };
+}
+
+/**
+ * Sets up a global fetch mock that returns markdown content.
+ * Returns the mock for assertions.
+ */
+export function mockFetch(
+  content: string = "# Test Content\n\nThis is test markdown content.",
+) {
+  const fetchMock = vi.fn().mockResolvedValue({
+    ok: true,
+    status: 200,
+    headers: new Headers(),
+    text: async () => content,
+  } as any);
+  global.fetch = fetchMock;
+  return fetchMock;
+}
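These helpers back the new spec files listed at the top of the diff. A small vitest sketch of how they compose; the loader call a real spec would exercise is left out, since its signature is not part of this diff:

```ts
import { describe, expect, it, vi } from "vitest";
import { createMockContext, createMockOctokit, mockFetch } from "./test-helpers.js";

describe("test-helpers wiring", () => {
  it("exposes canned GitHub data and a direct view of the store", async () => {
    const context = createMockContext();
    const { octokit, spies } = createMockOctokit();
    mockFetch("# Hello");

    // The spied Octokit methods resolve the MOCK_COMMIT / MOCK_TREE_DATA fixtures.
    const commits = await octokit.rest.repos.listCommits({ owner: "test", repo: "repo" });
    expect(commits.data[0].sha).toBe("abc123def456");
    expect(spies.listCommitsSpy).toHaveBeenCalledTimes(1);

    // Entries written through the store facade land in the backing Map (_store).
    context.store.set({ id: "docs/algokit.md", data: {}, body: "# Hello" });
    expect(context._store.get("docs/algokit.md")?.body).toBe("# Hello");

    vi.restoreAllMocks();
  });
});
```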