@larkiny/astro-github-loader 0.12.0 → 0.14.0

This diff reflects the changes between publicly released package versions as they appear in their public registries and is provided for informational purposes only.
package/README.md CHANGED
@@ -1,5 +1,12 @@
 # Astro GitHub Loader
 
+[![CI](https://github.com/larkiny/starlight-github-loader/actions/workflows/ci.yml/badge.svg)](https://github.com/larkiny/starlight-github-loader/actions/workflows/ci.yml)
+[![npm version](https://img.shields.io/npm/v/@larkiny/astro-github-loader?style=flat-square)](https://www.npmjs.com/package/@larkiny/astro-github-loader)
+[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg?style=flat-square)](https://opensource.org/licenses/MIT)
+[![Built with Astro](https://img.shields.io/badge/Astro-BC52EE?style=flat-square&logo=astro&logoColor=white)](https://astro.build)
+[![Built with Starlight](https://img.shields.io/badge/Starlight-FFC517?style=flat-square&logo=astro&logoColor=black)](https://starlight.astro.build)
+[![TypeScript](https://img.shields.io/badge/TypeScript-3178C6?style=flat-square&logo=typescript&logoColor=white)](https://www.typescriptlang.org/)
+
 Load content from GitHub repositories into Astro content collections with flexible pattern-based import, asset management, content transformations, and intelligent change detection.
 
 ## Features
@@ -6,6 +6,9 @@ const STATE_FILENAME = ".github-import-state.json";
  * Creates a unique identifier for an import configuration
  */
 export function createConfigId(config) {
+  if (config.stateKey) {
+    return config.stateKey;
+  }
   return `${config.owner}/${config.repo}@${config.ref || "main"}`;
 }
 /**
@@ -87,6 +87,11 @@ function applyLinkMappings(linkUrl, linkMappings, context) {
     }
     let matched = false;
     let replacement = "";
+    const getLinkTransformContext = () => context.currentFile.linkContext ?? {
+      sourcePath: context.currentFile.sourcePath,
+      targetPath: context.currentFile.targetPath,
+      basePath: "",
+    };
     if (typeof mapping.pattern === "string") {
       // String pattern - exact match or contains
       if (transformedPath.includes(mapping.pattern)) {
@@ -95,8 +100,7 @@ function applyLinkMappings(linkUrl, linkMappings, context) {
           replacement = transformedPath.replace(mapping.pattern, mapping.replacement);
         }
         else {
-          const linkTransformContext = context.currentFile.linkContext ?? {};
-          replacement = mapping.replacement(transformedPath, anchor, linkTransformContext);
+          replacement = mapping.replacement(transformedPath, anchor, getLinkTransformContext());
         }
       }
     }
@@ -109,8 +113,7 @@ function applyLinkMappings(linkUrl, linkMappings, context) {
           replacement = transformedPath.replace(mapping.pattern, mapping.replacement);
         }
         else {
-          const linkTransformContext = context.currentFile.linkContext ?? {};
-          replacement = mapping.replacement(transformedPath, anchor, linkTransformContext);
+          replacement = mapping.replacement(transformedPath, anchor, getLinkTransformContext());
         }
       }
     }
@@ -2,9 +2,10 @@ import type { ImportedFile } from "./github.link-transform.js";
 import type { ExtendedLoaderContext } from "./github.types.js";
 /**
  * Ensures directory exists and writes file to disk.
+ * Validates that the resolved path stays within the project root.
  * @internal
  */
-export declare function syncFile(path: string, content: string): Promise<void>;
+export declare function syncFile(filePath: string, content: string): Promise<void>;
 /**
  * Stores a processed file in Astro's content store
  * @internal
@@ -1,15 +1,21 @@
 import { existsSync, promises as fs } from "node:fs";
+import { resolve } from "node:path";
 import { fileURLToPath, pathToFileURL } from "node:url";
 /**
  * Ensures directory exists and writes file to disk.
+ * Validates that the resolved path stays within the project root.
  * @internal
  */
-export async function syncFile(path, content) {
-  const dir = path.substring(0, path.lastIndexOf("/"));
+export async function syncFile(filePath, content) {
+  const resolved = resolve(filePath);
+  if (!resolved.startsWith(process.cwd())) {
+    throw new Error(`syncFile: path "${filePath}" resolves outside project root`);
+  }
+  const dir = resolved.substring(0, resolved.lastIndexOf("/"));
   if (dir && !existsSync(dir)) {
     await fs.mkdir(dir, { recursive: true });
   }
-  await fs.writeFile(path, content, "utf-8");
+  await fs.writeFile(resolved, content, "utf-8");
 }
 /**
 * Stores a processed file in Astro's content store
@@ -234,6 +234,8 @@ export interface VersionConfig {
   slug: string;
   /** Display name for this version (e.g., "Latest", "v8.0.0") */
   label: string;
+  /** Git ref this version was sourced from (e.g., "main", "release/v7"). Optional metadata — not used by the loader. */
+  ref?: string;
 }
 /**
  * Represents configuration options for importing content from GitHub repositories.
@@ -243,6 +245,12 @@ export type ImportOptions = {
    * Display name for this configuration (used in logging)
    */
   name?: string;
+  /**
+   * Custom state key for import tracking. When provided, overrides the default
+   * `owner/repo@ref` key used to track import state. This allows the same repo
+   * to be imported independently to multiple locations.
+   */
+  stateKey?: string;
   /**
    * Repository owner
    */
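
To make the new option concrete, here is a minimal sketch (not taken from the package docs) of two configurations that import the same repository and ref into separate collections while tracking state independently. Only `name`, `owner`, `repo`, `ref`, `stateKey`, and `includes` appear in this diff; the import specifier and the empty `includes` arrays are placeholder assumptions.

```ts
import type { ImportOptions } from "@larkiny/astro-github-loader";

// Without stateKey, both configs would share the default
// "algorandfoundation/puya@devportal" state key and overwrite
// each other's change-detection state.
const legacyGuides: ImportOptions = {
  name: "Puya legacy guides",
  owner: "algorandfoundation",
  repo: "puya",
  ref: "devportal",
  stateKey: "puya-legacy-guides",
  includes: [], // placeholder: patterns for the first import location
};

const legacyReference: ImportOptions = {
  name: "Puya legacy reference",
  owner: "algorandfoundation",
  repo: "puya",
  ref: "devportal",
  stateKey: "puya-legacy-reference",
  includes: [], // placeholder: patterns for the second import location
};
```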
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@larkiny/astro-github-loader",
   "type": "module",
-  "version": "0.12.0",
+  "version": "0.14.0",
   "description": "Load content from GitHub repositories into Astro content collections with asset management and content transformations",
   "keywords": [
     "astro",
@@ -51,6 +51,18 @@ describe("github.dryrun", () => {
     expect(id).toBe("algorand/docs@main");
   });
 
+  it("should use custom stateKey when provided", () => {
+    const config: ImportOptions = {
+      owner: "algorandfoundation",
+      repo: "puya",
+      ref: "devportal",
+      stateKey: "puya-legacy-guides",
+      includes: [],
+    };
+
+    expect(createConfigId(config)).toBe("puya-legacy-guides");
+  });
+
   it("should handle different refs correctly", () => {
     const config: ImportOptions = {
       name: "Test Repo",
@@ -57,6 +57,9 @@ export interface RepositoryChangeInfo {
  * Creates a unique identifier for an import configuration
  */
 export function createConfigId(config: ImportOptions): string {
+  if (config.stateKey) {
+    return config.stateKey;
+  }
   return `${config.owner}/${config.repo}@${config.ref || "main"}`;
 }
 
@@ -189,6 +189,13 @@ function applyLinkMappings(
     let matched = false;
     let replacement = "";
 
+    const getLinkTransformContext = (): LinkTransformContext =>
+      context.currentFile.linkContext ?? {
+        sourcePath: context.currentFile.sourcePath,
+        targetPath: context.currentFile.targetPath,
+        basePath: "",
+      };
+
     if (typeof mapping.pattern === "string") {
       // String pattern - exact match or contains
       if (transformedPath.includes(mapping.pattern)) {
@@ -199,12 +206,10 @@ function applyLinkMappings(
             mapping.replacement,
           );
         } else {
-          const linkTransformContext =
-            context.currentFile.linkContext ?? ({} as LinkTransformContext);
           replacement = mapping.replacement(
             transformedPath,
             anchor,
-            linkTransformContext,
+            getLinkTransformContext(),
           );
         }
       }
@@ -219,12 +224,10 @@ function applyLinkMappings(
             mapping.replacement,
           );
         } else {
-          const linkTransformContext =
-            context.currentFile.linkContext ?? ({} as LinkTransformContext);
           replacement = mapping.replacement(
             transformedPath,
             anchor,
-            linkTransformContext,
+            getLinkTransformContext(),
          );
         }
       }
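
For orientation, a small sketch of a function-valued link mapping as it would be invoked by `applyLinkMappings` above. The mapping object shape, the `LinkTransformContext` import, and the assumption that the returned string should include the anchor are all read off this diff rather than documented API, and the rewrite rule itself is purely illustrative.

```ts
import type { LinkTransformContext } from "@larkiny/astro-github-loader";

// The replacement function now always receives a populated context: the
// file's own linkContext when present, otherwise a fallback built from
// sourcePath/targetPath with an empty basePath.
const guideLinkMapping = {
  pattern: /^\.\.\/guides\//,
  replacement: (path: string, anchor: string, ctx: LinkTransformContext) => {
    // Illustrative rewrite: point relative guide links at the imported location.
    const rewritten = path.replace(/^\.\.\/guides\//, `${ctx.basePath}/guides/`);
    // Assumption: the caller uses the returned string as-is, so append the anchor here.
    return `${rewritten}${anchor}`;
  },
};
```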
@@ -1,4 +1,5 @@
 import { beforeEach, describe, it, expect, vi } from "vitest";
+import { resolve } from "node:path";
 import { fileURLToPath, pathToFileURL } from "node:url";
 import { syncFile, storeProcessedFile } from "./github.storage.js";
 import { createMockContext } from "./test-helpers.js";
@@ -40,12 +41,14 @@ describe("syncFile", () => {
   it("creates directory and writes file when directory does not exist", async () => {
     await syncFile("some/nested/dir/file.md", "content");
 
-    expect(mockedExistsSync).toHaveBeenCalledWith("some/nested/dir");
-    expect(mockedMkdir).toHaveBeenCalledWith("some/nested/dir", {
+    const resolved = resolve("some/nested/dir/file.md");
+    const resolvedDir = resolved.substring(0, resolved.lastIndexOf("/"));
+    expect(mockedExistsSync).toHaveBeenCalledWith(resolvedDir);
+    expect(mockedMkdir).toHaveBeenCalledWith(resolvedDir, {
       recursive: true,
     });
     expect(mockedWriteFile).toHaveBeenCalledWith(
-      "some/nested/dir/file.md",
+      resolved,
       "content",
       "utf-8",
     );
@@ -56,10 +59,12 @@ describe("syncFile", () => {
 
     await syncFile("existing/dir/file.md", "content");
 
-    expect(mockedExistsSync).toHaveBeenCalledWith("existing/dir");
+    const resolved = resolve("existing/dir/file.md");
+    const resolvedDir = resolved.substring(0, resolved.lastIndexOf("/"));
+    expect(mockedExistsSync).toHaveBeenCalledWith(resolvedDir);
     expect(mockedMkdir).not.toHaveBeenCalled();
     expect(mockedWriteFile).toHaveBeenCalledWith(
-      "existing/dir/file.md",
+      resolved,
       "content",
       "utf-8",
     );
@@ -68,10 +73,9 @@ describe("syncFile", () => {
   it("skips mkdir when path has no directory component", async () => {
     await syncFile("file.md", "content");
 
-    // dir is "" which is falsy, so existsSync should not be called for dir check
-    expect(mockedMkdir).not.toHaveBeenCalled();
+    // resolved path still has a directory (cwd), but it exists
     expect(mockedWriteFile).toHaveBeenCalledWith(
-      "file.md",
+      resolve("file.md"),
       "content",
       "utf-8",
     );
@@ -82,11 +86,17 @@ describe("syncFile", () => {
     await syncFile("output/test.md", longContent);
 
     expect(mockedWriteFile).toHaveBeenCalledWith(
-      "output/test.md",
+      resolve("output/test.md"),
       longContent,
       "utf-8",
     );
   });
+
+  it("rejects paths that escape project root", async () => {
+    await expect(
+      syncFile("../../etc/passwd", "malicious"),
+    ).rejects.toThrow("resolves outside project root");
+  });
 });
 
 describe("storeProcessedFile", () => {
@@ -1,18 +1,26 @@
 import { existsSync, promises as fs } from "node:fs";
+import { resolve } from "node:path";
 import { fileURLToPath, pathToFileURL } from "node:url";
 import type { ImportedFile } from "./github.link-transform.js";
 import type { ExtendedLoaderContext } from "./github.types.js";
 
 /**
  * Ensures directory exists and writes file to disk.
+ * Validates that the resolved path stays within the project root.
  * @internal
  */
-export async function syncFile(path: string, content: string) {
-  const dir = path.substring(0, path.lastIndexOf("/"));
+export async function syncFile(filePath: string, content: string) {
+  const resolved = resolve(filePath);
+  if (!resolved.startsWith(process.cwd())) {
+    throw new Error(
+      `syncFile: path "${filePath}" resolves outside project root`,
+    );
+  }
+  const dir = resolved.substring(0, resolved.lastIndexOf("/"));
   if (dir && !existsSync(dir)) {
     await fs.mkdir(dir, { recursive: true });
   }
-  await fs.writeFile(path, content, "utf-8");
+  await fs.writeFile(resolved, content, "utf-8");
 }
 
 /**
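
As a quick illustration of what the new guard checks (a standalone sketch, not part of the package): relative paths are resolved against the current working directory, and anything that resolves outside it is rejected before any write occurs.

```ts
import { resolve } from "node:path";

// Mirrors the containment rule used by syncFile above.
const inside = resolve("src/content/docs/page.md");
const outside = resolve("../../etc/passwd");

console.log(inside.startsWith(process.cwd()));  // true  -> file would be written
console.log(outside.startsWith(process.cwd())); // false -> syncFile throws
```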
@@ -261,6 +261,8 @@ export interface VersionConfig {
   slug: string;
   /** Display name for this version (e.g., "Latest", "v8.0.0") */
   label: string;
+  /** Git ref this version was sourced from (e.g., "main", "release/v7"). Optional metadata — not used by the loader. */
+  ref?: string;
 }
 
 /**
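
A short sketch of how the new field might appear in a version entry; only `slug`, `label`, and `ref` are shown in this diff, so the other `VersionConfig` fields are omitted via `Pick`, and the import specifier is an assumption.

```ts
import type { VersionConfig } from "@larkiny/astro-github-loader";

// ref is informational metadata only; the loader does not read it.
const v7: Pick<VersionConfig, "slug" | "label" | "ref"> = {
  slug: "v7",
  label: "v7.x",
  ref: "release/v7",
};
```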
@@ -271,6 +273,12 @@ export type ImportOptions = {
    * Display name for this configuration (used in logging)
    */
   name?: string;
+  /**
+   * Custom state key for import tracking. When provided, overrides the default
+   * `owner/repo@ref` key used to track import state. This allows the same repo
+   * to be imported independently to multiple locations.
+   */
+  stateKey?: string;
   /**
    * Repository owner
    */