@larkiny/astro-github-loader 0.11.3 → 0.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +35 -55
- package/dist/github.assets.d.ts +70 -0
- package/dist/github.assets.js +253 -0
- package/dist/github.auth.js +13 -9
- package/dist/github.cleanup.d.ts +3 -2
- package/dist/github.cleanup.js +30 -23
- package/dist/github.constants.d.ts +0 -16
- package/dist/github.constants.js +0 -16
- package/dist/github.content.d.ts +5 -131
- package/dist/github.content.js +152 -794
- package/dist/github.dryrun.d.ts +9 -5
- package/dist/github.dryrun.js +49 -25
- package/dist/github.link-transform.d.ts +2 -2
- package/dist/github.link-transform.js +68 -57
- package/dist/github.loader.js +30 -46
- package/dist/github.logger.d.ts +2 -2
- package/dist/github.logger.js +33 -24
- package/dist/github.paths.d.ts +76 -0
- package/dist/github.paths.js +190 -0
- package/dist/github.storage.d.ts +16 -0
- package/dist/github.storage.js +115 -0
- package/dist/github.types.d.ts +40 -4
- package/dist/index.d.ts +8 -6
- package/dist/index.js +3 -6
- package/dist/test-helpers.d.ts +130 -0
- package/dist/test-helpers.js +194 -0
- package/package.json +3 -1
- package/src/github.assets.spec.ts +717 -0
- package/src/github.assets.ts +365 -0
- package/src/github.auth.spec.ts +245 -0
- package/src/github.auth.ts +24 -10
- package/src/github.cleanup.spec.ts +380 -0
- package/src/github.cleanup.ts +91 -47
- package/src/github.constants.ts +0 -17
- package/src/github.content.spec.ts +305 -454
- package/src/github.content.ts +259 -957
- package/src/github.dryrun.spec.ts +598 -0
- package/src/github.dryrun.ts +108 -54
- package/src/github.link-transform.spec.ts +1345 -0
- package/src/github.link-transform.ts +177 -95
- package/src/github.loader.spec.ts +75 -50
- package/src/github.loader.ts +101 -76
- package/src/github.logger.spec.ts +795 -0
- package/src/github.logger.ts +77 -35
- package/src/github.paths.spec.ts +523 -0
- package/src/github.paths.ts +259 -0
- package/src/github.storage.spec.ts +377 -0
- package/src/github.storage.ts +135 -0
- package/src/github.types.ts +54 -9
- package/src/index.ts +43 -6
- package/src/test-helpers.ts +215 -0
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
import { existsSync, promises as fs } from "node:fs";
|
|
2
|
+
import { resolve } from "node:path";
|
|
3
|
+
import { fileURLToPath, pathToFileURL } from "node:url";
|
|
4
|
+
/**
 * Ensures the parent directory exists and writes a file to disk.
 * Validates that the resolved path stays within the project root
 * (process.cwd()), rejecting both traversal like "../outside.txt" and
 * sibling-prefix escapes like "/project-evil" next to root "/project".
 * @param {string} filePath - Target path, absolute or relative to cwd.
 * @param {string} content - File contents, written as UTF-8.
 * @throws {Error} If the resolved path escapes the project root.
 * @internal
 */
export async function syncFile(filePath, content) {
    const resolved = resolve(filePath);
    const root = process.cwd();
    // A bare startsWith(root) check would wrongly accept sibling paths such
    // as "/project-evil" when root is "/project"; require the root itself
    // or a path-separator boundary immediately after it.
    const insideRoot = resolved === root ||
        resolved.startsWith(root + "/") ||
        resolved.startsWith(root + "\\");
    if (!insideRoot) {
        throw new Error(`syncFile: path "${filePath}" resolves outside project root`);
    }
    // resolve(resolved, "..") yields the parent directory portably; slicing
    // at lastIndexOf("/") would break on Windows backslash paths.
    const dir = resolve(resolved, "..");
    if (!existsSync(dir)) {
        await fs.mkdir(dir, { recursive: true });
    }
    await fs.writeFile(resolved, content, "utf-8");
}
|
|
20
|
+
/**
 * Persists a processed file into Astro's content store, rendering it via
 * the entry type's render function when one exists, and writing the file
 * to disk first if it is not already present.
 * @internal
 */
export async function storeProcessedFile(file, context, clear) {
    const { store, generateDigest, entryTypes, logger, parseData, config } = context;
    // Resolve the Astro entry type from the file's extension.
    const lookupEntryType = (filePath) => {
        const extension = filePath.split(".").at(-1);
        if (!extension) {
            logger.warn(`No extension found for ${filePath}`);
            return;
        }
        return entryTypes?.get(`.${extension}`);
    };
    const entryType = lookupEntryType(file.sourcePath || "tmp.md");
    if (!entryType)
        throw new Error("No entry type found");
    const fileUrl = pathToFileURL(file.targetPath);
    const { body, data } = await entryType.getEntryInfo({
        contents: file.content,
        fileUrl: fileUrl,
    });
    // Digest for storage (repository-level caching handles change detection).
    const digest = generateDigest(file.content);
    const existingEntry = store.get(file.id);
    logger.debug(existingEntry
        ? `🔄 File ${file.id} - updating`
        : `📄 File ${file.id} - adding`);
    // Materialize the file on disk only when it does not exist yet.
    const diskPath = fileURLToPath(fileUrl);
    if (!existsSync(diskPath)) {
        logger.verbose(`Writing ${file.id} to ${fileUrl}`);
        await syncFile(diskPath, file.content);
    }
    const parsedData = await parseData({
        id: file.id,
        data,
        filePath: fileUrl.toString(),
    });
    // When clear mode is enabled, delete the existing entry before setting
    // the new one: atomic replacement without breaking the collection, as
    // opposed to store.clear() which would empty everything at once.
    if (clear && existingEntry) {
        logger.debug(`🗑️ Clearing existing entry before replacement: ${file.id}`);
        store.delete(file.id);
    }
    // Fields shared by every store.set variant below.
    const baseEntry = {
        id: file.id,
        data: parsedData,
        body,
        filePath: file.targetPath,
        digest,
    };
    if (entryType.getRenderFunction) {
        logger.verbose(`Rendering ${file.id}`);
        const render = await entryType.getRenderFunction(config);
        let rendered = undefined;
        try {
            rendered = await render?.({
                id: file.id,
                data,
                body,
                filePath: fileUrl.toString(),
                digest,
            });
        }
        catch (error) {
            logger.error(`Error rendering ${file.id}: ${error instanceof Error ? error.message : String(error)}`);
        }
        logger.debug(`🔍 Storing collection entry: ${file.id} (${file.sourcePath} -> ${file.targetPath})`);
        store.set({ ...baseEntry, rendered });
    }
    else if ("contentModuleTypes" in entryType) {
        store.set({ ...baseEntry, deferredRender: true });
    }
    else {
        store.set(baseEntry);
    }
    return { id: file.id, filePath: file.targetPath };
}
|
package/dist/github.types.d.ts
CHANGED
|
@@ -3,7 +3,7 @@ import type { ContentEntryType } from "astro";
|
|
|
3
3
|
import type { MarkdownHeading } from "@astrojs/markdown-remark";
|
|
4
4
|
import { Octokit } from "octokit";
|
|
5
5
|
import type { LinkHandler } from "./github.link-transform.js";
|
|
6
|
-
import type { LogLevel } from "./github.logger.js";
|
|
6
|
+
import type { LogLevel, Logger } from "./github.logger.js";
|
|
7
7
|
/**
|
|
8
8
|
* Context information for link transformations
|
|
9
9
|
*/
|
|
@@ -26,7 +26,7 @@ export interface LinkMapping {
|
|
|
26
26
|
/** Pattern to match (string or regex) */
|
|
27
27
|
pattern: string | RegExp;
|
|
28
28
|
/** Replacement string or function */
|
|
29
|
-
replacement: string | ((match: string, anchor: string, context:
|
|
29
|
+
replacement: string | ((match: string, anchor: string, context: LinkTransformContext) => string);
|
|
30
30
|
/** Apply to all links, not just unresolved internal links (default: false) */
|
|
31
31
|
global?: boolean;
|
|
32
32
|
/** Function to determine if this mapping should apply to the current file context */
|
|
@@ -162,7 +162,7 @@ export type CollectionEntryOptions = {
|
|
|
162
162
|
* The LoaderContext may contain properties and methods that offer
|
|
163
163
|
* control or inspection over the loading behavior.
|
|
164
164
|
*/
|
|
165
|
-
context:
|
|
165
|
+
context: ExtendedLoaderContext;
|
|
166
166
|
/**
|
|
167
167
|
* An instance of the Octokit library, which provides a way to interact
|
|
168
168
|
* with GitHub's REST API. This variable allows you to access and perform
|
|
@@ -225,6 +225,16 @@ export interface RenderedContent {
|
|
|
225
225
|
[key: string]: unknown;
|
|
226
226
|
};
|
|
227
227
|
}
|
|
228
|
+
/**
|
|
229
|
+
* Represents a version of a library variant to display in the devportal's version picker.
|
|
230
|
+
* Versions are manually curated in the import config — no auto-discovery.
|
|
231
|
+
*/
|
|
232
|
+
export interface VersionConfig {
|
|
233
|
+
/** URL segment for this version (e.g., "latest", "v8.0.0") */
|
|
234
|
+
slug: string;
|
|
235
|
+
/** Display name for this version (e.g., "Latest", "v8.0.0") */
|
|
236
|
+
label: string;
|
|
237
|
+
}
|
|
228
238
|
/**
|
|
229
239
|
* Represents configuration options for importing content from GitHub repositories.
|
|
230
240
|
*/
|
|
@@ -233,6 +243,12 @@ export type ImportOptions = {
|
|
|
233
243
|
* Display name for this configuration (used in logging)
|
|
234
244
|
*/
|
|
235
245
|
name?: string;
|
|
246
|
+
/**
|
|
247
|
+
* Custom state key for import tracking. When provided, overrides the default
|
|
248
|
+
* `owner/repo@ref` key used to track import state. This allows the same repo
|
|
249
|
+
* to be imported independently to multiple locations.
|
|
250
|
+
*/
|
|
251
|
+
stateKey?: string;
|
|
236
252
|
/**
|
|
237
253
|
* Repository owner
|
|
238
254
|
*/
|
|
@@ -286,18 +302,38 @@ export type ImportOptions = {
|
|
|
286
302
|
* @default 'default'
|
|
287
303
|
*/
|
|
288
304
|
logLevel?: LogLevel;
|
|
305
|
+
/**
|
|
306
|
+
* Language for this import variant (e.g., "TypeScript", "Python", "Go").
|
|
307
|
+
* Used for logging and passed through to the devportal for UI display.
|
|
308
|
+
*/
|
|
309
|
+
language?: string;
|
|
310
|
+
/**
|
|
311
|
+
* Versions to display in the devportal's version picker.
|
|
312
|
+
* Informational — tells the loader which version folders exist in the source content.
|
|
313
|
+
* The loader imports content as-is; the version folder structure carries through from source to destination.
|
|
314
|
+
*/
|
|
315
|
+
versions?: VersionConfig[];
|
|
289
316
|
};
|
|
290
317
|
export type FetchOptions = RequestInit & {
|
|
291
318
|
signal?: AbortSignal;
|
|
292
319
|
concurrency?: number;
|
|
293
320
|
};
|
|
294
321
|
/**
|
|
295
|
-
*
|
|
322
|
+
* Astro loader context extended with optional entry type support.
|
|
323
|
+
* Use this type when calling `.load(context as LoaderContext)` in multi-loader patterns.
|
|
296
324
|
*/
|
|
297
325
|
export interface LoaderContext extends AstroLoaderContext {
|
|
298
326
|
/** @internal */
|
|
299
327
|
entryTypes?: Map<string, ContentEntryType>;
|
|
300
328
|
}
|
|
329
|
+
/**
|
|
330
|
+
* LoaderContext with Astro's logger replaced by our Logger class.
|
|
331
|
+
* Used by internal functions that need verbose/logFileProcessing/etc.
|
|
332
|
+
* @internal
|
|
333
|
+
*/
|
|
334
|
+
export type ExtendedLoaderContext = Omit<LoaderContext, "logger"> & {
|
|
335
|
+
logger: Logger;
|
|
336
|
+
};
|
|
301
337
|
/**
|
|
302
338
|
* @internal
|
|
303
339
|
*/
|
package/dist/index.d.ts
CHANGED
|
@@ -1,6 +1,8 @@
|
|
|
1
|
-
export
|
|
2
|
-
export
|
|
3
|
-
export
|
|
4
|
-
export
|
|
5
|
-
export
|
|
6
|
-
export
|
|
1
|
+
export { githubLoader } from "./github.loader.js";
|
|
2
|
+
export { createAuthenticatedOctokit, createOctokitFromEnv, } from "./github.auth.js";
|
|
3
|
+
export type { GithubLoaderOptions, ImportOptions, FetchOptions, IncludePattern, PathMappingValue, EnhancedPathMapping, VersionConfig, LoaderContext, } from "./github.types.js";
|
|
4
|
+
export type { TransformFunction, TransformContext, MatchedPattern, } from "./github.types.js";
|
|
5
|
+
export type { LinkMapping, LinkTransformContext, ImportLinkTransformOptions, } from "./github.types.js";
|
|
6
|
+
export type { LinkHandler } from "./github.link-transform.js";
|
|
7
|
+
export type { GitHubAuthConfig, GitHubAppAuthConfig, GitHubPATAuthConfig, } from "./github.auth.js";
|
|
8
|
+
export type { LogLevel } from "./github.logger.js";
|
package/dist/index.js
CHANGED
|
@@ -1,6 +1,3 @@
|
|
|
1
|
-
|
|
2
|
-
export
|
|
3
|
-
export
|
|
4
|
-
export * from './github.loader.js';
|
|
5
|
-
export * from './github.types.js';
|
|
6
|
-
export * from './github.link-transform.js';
|
|
1
|
+
// Public API — functions
// (Type-only exports are declared in the accompanying index.d.ts.)
export { githubLoader } from "./github.loader.js";
export { createAuthenticatedOctokit, createOctokitFromEnv, } from "./github.auth.js";
|
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Creates a mock Astro LoaderContext with all required properties.
|
|
3
|
+
* The returned store is a real Map wrapped in the store interface,
|
|
4
|
+
* so tests can inspect stored entries directly.
|
|
5
|
+
*/
|
|
6
|
+
export declare function createMockContext(): {
|
|
7
|
+
store: {
|
|
8
|
+
set: (entry: any) => any;
|
|
9
|
+
get: (id: string) => any;
|
|
10
|
+
delete: (id: string) => boolean;
|
|
11
|
+
clear: () => void;
|
|
12
|
+
entries: () => MapIterator<[string, any]>;
|
|
13
|
+
keys: () => MapIterator<string>;
|
|
14
|
+
values: () => MapIterator<any>;
|
|
15
|
+
};
|
|
16
|
+
meta: Map<string, string>;
|
|
17
|
+
logger: {
|
|
18
|
+
info: import("vitest").Mock<(...args: any[]) => any>;
|
|
19
|
+
warn: import("vitest").Mock<(...args: any[]) => any>;
|
|
20
|
+
error: import("vitest").Mock<(...args: any[]) => any>;
|
|
21
|
+
debug: import("vitest").Mock<(...args: any[]) => any>;
|
|
22
|
+
verbose: import("vitest").Mock<(...args: any[]) => any>;
|
|
23
|
+
logFileProcessing: import("vitest").Mock<(...args: any[]) => any>;
|
|
24
|
+
logImportSummary: import("vitest").Mock<(...args: any[]) => any>;
|
|
25
|
+
logAssetProcessing: import("vitest").Mock<(...args: any[]) => any>;
|
|
26
|
+
withSpinner: (_msg: string, fn: () => Promise<any>) => Promise<any>;
|
|
27
|
+
getLevel: () => "default";
|
|
28
|
+
};
|
|
29
|
+
config: {};
|
|
30
|
+
entryTypes: Map<string, {
|
|
31
|
+
getEntryInfo: ({ contents, }: {
|
|
32
|
+
contents: string;
|
|
33
|
+
fileUrl: URL;
|
|
34
|
+
}) => Promise<{
|
|
35
|
+
body: string;
|
|
36
|
+
data: {};
|
|
37
|
+
}>;
|
|
38
|
+
}>;
|
|
39
|
+
generateDigest: (content: string) => string;
|
|
40
|
+
parseData: (data: any) => Promise<any>;
|
|
41
|
+
/** Direct access to the underlying store Map for assertions */
|
|
42
|
+
_store: Map<string, any>;
|
|
43
|
+
/** Direct access to the underlying meta Map for assertions */
|
|
44
|
+
_meta: Map<string, string>;
|
|
45
|
+
};
|
|
46
|
+
/** Standard mock commit used across tests */
|
|
47
|
+
export declare const MOCK_COMMIT: {
|
|
48
|
+
sha: string;
|
|
49
|
+
commit: {
|
|
50
|
+
tree: {
|
|
51
|
+
sha: string;
|
|
52
|
+
};
|
|
53
|
+
message: string;
|
|
54
|
+
author: {
|
|
55
|
+
name: string;
|
|
56
|
+
email: string;
|
|
57
|
+
date: string;
|
|
58
|
+
};
|
|
59
|
+
committer: {
|
|
60
|
+
name: string;
|
|
61
|
+
email: string;
|
|
62
|
+
date: string;
|
|
63
|
+
};
|
|
64
|
+
};
|
|
65
|
+
};
|
|
66
|
+
/** Mock tree data representing a typical repository structure */
|
|
67
|
+
export declare const MOCK_TREE_DATA: {
|
|
68
|
+
sha: string;
|
|
69
|
+
url: string;
|
|
70
|
+
tree: ({
|
|
71
|
+
path: string;
|
|
72
|
+
mode: string;
|
|
73
|
+
type: string;
|
|
74
|
+
sha: string;
|
|
75
|
+
size: number;
|
|
76
|
+
url: string;
|
|
77
|
+
} | {
|
|
78
|
+
path: string;
|
|
79
|
+
mode: string;
|
|
80
|
+
type: string;
|
|
81
|
+
sha: string;
|
|
82
|
+
url: string;
|
|
83
|
+
size?: undefined;
|
|
84
|
+
})[];
|
|
85
|
+
truncated: boolean;
|
|
86
|
+
};
|
|
87
|
+
/**
|
|
88
|
+
* Creates an Octokit instance with mocked API methods for listCommits and getTree.
|
|
89
|
+
* Returns both the instance and the spies for assertions.
|
|
90
|
+
*/
|
|
91
|
+
export declare function createMockOctokit(options?: {
|
|
92
|
+
treeData?: typeof MOCK_TREE_DATA;
|
|
93
|
+
commitData?: typeof MOCK_COMMIT;
|
|
94
|
+
}): {
|
|
95
|
+
octokit: import("@octokit/core").Octokit & {
|
|
96
|
+
paginate: import("@octokit/plugin-paginate-rest").PaginateInterface;
|
|
97
|
+
} & import("@octokit/plugin-paginate-graphql").paginateGraphQLInterface & import("@octokit/plugin-rest-endpoint-methods").Api & {
|
|
98
|
+
retry: {
|
|
99
|
+
retryRequest: (error: import("octokit").RequestError, retries: number, retryAfter: number) => import("octokit").RequestError;
|
|
100
|
+
};
|
|
101
|
+
};
|
|
102
|
+
spies: {
|
|
103
|
+
listCommitsSpy: import("vitest").MockInstance<{
|
|
104
|
+
(params?: import("@octokit/plugin-rest-endpoint-methods").RestEndpointMethodTypes["repos"]["listCommits"]["parameters"]): Promise<import("@octokit/plugin-rest-endpoint-methods").RestEndpointMethodTypes["repos"]["listCommits"]["response"]>;
|
|
105
|
+
defaults: import("@octokit/types").RequestInterface["defaults"];
|
|
106
|
+
endpoint: import("@octokit/types").EndpointInterface<{
|
|
107
|
+
url: string;
|
|
108
|
+
}>;
|
|
109
|
+
}>;
|
|
110
|
+
getTreeSpy: import("vitest").MockInstance<{
|
|
111
|
+
(params?: import("@octokit/plugin-rest-endpoint-methods").RestEndpointMethodTypes["git"]["getTree"]["parameters"]): Promise<import("@octokit/plugin-rest-endpoint-methods").RestEndpointMethodTypes["git"]["getTree"]["response"]>;
|
|
112
|
+
defaults: import("@octokit/types").RequestInterface["defaults"];
|
|
113
|
+
endpoint: import("@octokit/types").EndpointInterface<{
|
|
114
|
+
url: string;
|
|
115
|
+
}>;
|
|
116
|
+
}>;
|
|
117
|
+
getContentSpy: import("vitest").MockInstance<{
|
|
118
|
+
(params?: import("@octokit/plugin-rest-endpoint-methods").RestEndpointMethodTypes["repos"]["getContent"]["parameters"]): Promise<import("@octokit/plugin-rest-endpoint-methods").RestEndpointMethodTypes["repos"]["getContent"]["response"]>;
|
|
119
|
+
defaults: import("@octokit/types").RequestInterface["defaults"];
|
|
120
|
+
endpoint: import("@octokit/types").EndpointInterface<{
|
|
121
|
+
url: string;
|
|
122
|
+
}>;
|
|
123
|
+
}>;
|
|
124
|
+
};
|
|
125
|
+
};
|
|
126
|
+
/**
|
|
127
|
+
* Sets up a global fetch mock that returns markdown content.
|
|
128
|
+
* Returns the mock for assertions.
|
|
129
|
+
*/
|
|
130
|
+
export declare function mockFetch(content?: string): import("vitest").Mock<(...args: any[]) => any>;
|
|
@@ -0,0 +1,194 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Shared test helpers for astro-github-loader test suite.
|
|
3
|
+
* Provides factory functions for creating mock Astro loader contexts,
|
|
4
|
+
* Octokit instances with pre-configured spies, and common fixtures.
|
|
5
|
+
*/
|
|
6
|
+
import { vi } from "vitest";
|
|
7
|
+
import { Octokit } from "octokit";
|
|
8
|
+
/**
 * Creates a mock Astro LoaderContext with all required properties.
 * The returned store is a real Map wrapped in the store interface,
 * so tests can inspect stored entries directly.
 */
export function createMockContext() {
    const backingStore = new Map();
    const backingMeta = new Map();
    // Store facade backed by the plain Map so assertions can read through.
    const storeFacade = {
        set: (entry) => {
            backingStore.set(entry.id, entry);
            return entry;
        },
        get: (id) => backingStore.get(id),
        delete: (id) => backingStore.delete(id),
        clear: () => backingStore.clear(),
        entries: () => backingStore.entries(),
        keys: () => backingStore.keys(),
        values: () => backingStore.values(),
    };
    // Logger where every channel is a vitest spy; withSpinner just runs fn.
    const mockLogger = {
        info: vi.fn(),
        warn: vi.fn(),
        error: vi.fn(),
        debug: vi.fn(),
        verbose: vi.fn(),
        logFileProcessing: vi.fn(),
        logImportSummary: vi.fn(),
        logAssetProcessing: vi.fn(),
        withSpinner: async (_msg, fn) => await fn(),
        getLevel: () => "default",
    };
    // Minimal markdown entry type: body is the raw contents, data is empty.
    const markdownEntryType = {
        getEntryInfo: async ({ contents, }) => ({
            body: contents,
            data: {},
        }),
    };
    return {
        store: storeFacade,
        meta: backingMeta,
        logger: mockLogger,
        config: {},
        entryTypes: new Map([[".md", markdownEntryType]]),
        generateDigest: (content) => String(content.length),
        parseData: async (data) => data,
        /** Direct access to the underlying store Map for assertions */
        _store: backingStore,
        /** Direct access to the underlying meta Map for assertions */
        _meta: backingMeta,
    };
}
|
|
62
|
+
/** Standard mock commit used across tests */
export const MOCK_COMMIT = {
    sha: "abc123def456",
    commit: {
        // Tree sha matches MOCK_TREE_DATA.sha so the two fixtures line up.
        tree: { sha: "tree123abc456" },
        message: "Test commit",
        author: {
            name: "Test Author",
            email: "test@example.com",
            date: "2024-01-01T00:00:00Z",
        },
        committer: {
            name: "Test Committer",
            email: "test@example.com",
            date: "2024-01-01T00:00:00Z",
        },
    },
};
|
|
80
|
+
/** Mock tree data representing a typical repository structure */
export const MOCK_TREE_DATA = {
    // Matches MOCK_COMMIT.commit.tree.sha so the fixtures are consistent.
    sha: "tree123abc456",
    url: "https://api.github.com/repos/test/repo/git/trees/tree123abc456",
    tree: [
        // Markdown file at the top of docs/.
        {
            path: "docs/algokit.md",
            mode: "100644",
            type: "blob",
            sha: "file1sha",
            size: 1234,
            url: "https://api.github.com/repos/test/repo/git/blobs/file1sha",
        },
        // Directory entry (type "tree") — has no size, unlike blobs.
        {
            path: "docs/features",
            mode: "040000",
            type: "tree",
            sha: "dir1sha",
            url: "https://api.github.com/repos/test/repo/git/trees/dir1sha",
        },
        // Markdown files nested under docs/features/.
        {
            path: "docs/features/accounts.md",
            mode: "100644",
            type: "blob",
            sha: "file2sha",
            size: 2345,
            url: "https://api.github.com/repos/test/repo/git/blobs/file2sha",
        },
        {
            path: "docs/features/tasks.md",
            mode: "100644",
            type: "blob",
            sha: "file3sha",
            size: 3456,
            url: "https://api.github.com/repos/test/repo/git/blobs/file3sha",
        },
        {
            path: "docs/features/generate.md",
            mode: "100644",
            type: "blob",
            sha: "file4sha",
            size: 4567,
            url: "https://api.github.com/repos/test/repo/git/blobs/file4sha",
        },
        {
            path: "docs/cli/index.md",
            mode: "100644",
            type: "blob",
            sha: "file5sha",
            size: 5678,
            url: "https://api.github.com/repos/test/repo/git/blobs/file5sha",
        },
        // Repository-root files outside docs/ (useful for include filters).
        {
            path: "README.md",
            mode: "100644",
            type: "blob",
            sha: "file6sha",
            size: 678,
            url: "https://api.github.com/repos/test/repo/git/blobs/file6sha",
        },
        {
            path: "package.json",
            mode: "100644",
            type: "blob",
            sha: "file7sha",
            size: 789,
            url: "https://api.github.com/repos/test/repo/git/blobs/file7sha",
        },
    ],
    truncated: false,
};
|
|
151
|
+
/**
 * Creates an Octokit instance with mocked API methods for listCommits and getTree.
 * Returns both the instance and the spies for assertions.
 */
export function createMockOctokit(options) {
    const client = new Octokit({ auth: "mock-token" });
    const commitFixture = options?.commitData ?? MOCK_COMMIT;
    const treeFixture = options?.treeData ?? MOCK_TREE_DATA;
    // All three spies resolve with a minimal successful REST envelope.
    const listCommitsSpy = vi
        .spyOn(client.rest.repos, "listCommits")
        .mockResolvedValue({ data: [commitFixture], status: 200, url: "", headers: {} });
    const getTreeSpy = vi
        .spyOn(client.rest.git, "getTree")
        .mockResolvedValue({ data: treeFixture, status: 200, url: "", headers: {} });
    const getContentSpy = vi
        .spyOn(client.rest.repos, "getContent")
        .mockResolvedValue({ data: [], status: 200, url: "", headers: {} });
    return {
        octokit: client,
        spies: { listCommitsSpy, getTreeSpy, getContentSpy },
    };
}
|
|
181
|
+
/**
 * Sets up a global fetch mock that returns markdown content.
 * Returns the mock for assertions.
 */
export function mockFetch(content = "# Test Content\n\nThis is test markdown content.") {
    // Every call resolves with the same successful 200 text response.
    const successResponse = {
        ok: true,
        status: 200,
        headers: new Headers(),
        text: async () => content,
    };
    const fetchMock = vi.fn().mockResolvedValue(successResponse);
    global.fetch = fetchMock;
    return fetchMock;
}
|
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@larkiny/astro-github-loader",
|
|
3
3
|
"type": "module",
|
|
4
|
-
"version": "0.
|
|
4
|
+
"version": "0.13.0",
|
|
5
5
|
"description": "Load content from GitHub repositories into Astro content collections with asset management and content transformations",
|
|
6
6
|
"keywords": [
|
|
7
7
|
"astro",
|
|
@@ -38,6 +38,7 @@
|
|
|
38
38
|
"build": "tsc",
|
|
39
39
|
"test": "vitest run",
|
|
40
40
|
"test:watch": "vitest",
|
|
41
|
+
"test:coverage": "vitest run --coverage",
|
|
41
42
|
"lint": "eslint .",
|
|
42
43
|
"prettier": "prettier --check .",
|
|
43
44
|
"preview": "astro preview",
|
|
@@ -57,6 +58,7 @@
|
|
|
57
58
|
"@types/node": "^22.14.0",
|
|
58
59
|
"@types/picomatch": "^4.0.0",
|
|
59
60
|
"@typescript-eslint/parser": "^8.29.0",
|
|
61
|
+
"@vitest/coverage-v8": "^3.2.4",
|
|
60
62
|
"eslint": "^9.24.0",
|
|
61
63
|
"eslint-plugin-astro": "^1.3.1",
|
|
62
64
|
"prettier": "3.5.3",
|