@larkiny/astro-github-loader 0.11.2 → 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/README.md +69 -61
  2. package/dist/github.assets.d.ts +70 -0
  3. package/dist/github.assets.js +253 -0
  4. package/dist/github.auth.js +13 -9
  5. package/dist/github.cleanup.d.ts +3 -2
  6. package/dist/github.cleanup.js +30 -23
  7. package/dist/github.constants.d.ts +0 -16
  8. package/dist/github.constants.js +0 -16
  9. package/dist/github.content.d.ts +6 -132
  10. package/dist/github.content.js +154 -789
  11. package/dist/github.dryrun.d.ts +9 -5
  12. package/dist/github.dryrun.js +46 -25
  13. package/dist/github.link-transform.d.ts +2 -2
  14. package/dist/github.link-transform.js +65 -57
  15. package/dist/github.loader.js +45 -51
  16. package/dist/github.logger.d.ts +2 -2
  17. package/dist/github.logger.js +33 -24
  18. package/dist/github.paths.d.ts +76 -0
  19. package/dist/github.paths.js +190 -0
  20. package/dist/github.storage.d.ts +15 -0
  21. package/dist/github.storage.js +109 -0
  22. package/dist/github.types.d.ts +41 -4
  23. package/dist/index.d.ts +8 -6
  24. package/dist/index.js +3 -6
  25. package/dist/test-helpers.d.ts +130 -0
  26. package/dist/test-helpers.js +194 -0
  27. package/package.json +3 -1
  28. package/src/github.assets.spec.ts +717 -0
  29. package/src/github.assets.ts +365 -0
  30. package/src/github.auth.spec.ts +245 -0
  31. package/src/github.auth.ts +24 -10
  32. package/src/github.cleanup.spec.ts +380 -0
  33. package/src/github.cleanup.ts +91 -47
  34. package/src/github.constants.ts +0 -17
  35. package/src/github.content.spec.ts +305 -454
  36. package/src/github.content.ts +261 -950
  37. package/src/github.dryrun.spec.ts +586 -0
  38. package/src/github.dryrun.ts +105 -54
  39. package/src/github.link-transform.spec.ts +1345 -0
  40. package/src/github.link-transform.ts +174 -95
  41. package/src/github.loader.spec.ts +75 -50
  42. package/src/github.loader.ts +113 -78
  43. package/src/github.logger.spec.ts +795 -0
  44. package/src/github.logger.ts +77 -35
  45. package/src/github.paths.spec.ts +523 -0
  46. package/src/github.paths.ts +259 -0
  47. package/src/github.storage.spec.ts +367 -0
  48. package/src/github.storage.ts +127 -0
  49. package/src/github.types.ts +55 -9
  50. package/src/index.ts +43 -6
  51. package/src/test-helpers.ts +215 -0
@@ -1,460 +1,162 @@
- import { existsSync, promises as fs } from "node:fs";
+ import { existsSync } from "node:fs";
  import { fileURLToPath, pathToFileURL } from "node:url";
- import path, { join, dirname, basename, extname } from "node:path";
- import picomatch from "picomatch";
- import { globalLinkTransform, generateAutoLinkMappings } from "./github.link-transform.js";
- import { INVALID_SERVICE_RESPONSE, INVALID_STRING_ERROR, INVALID_URL_ERROR, } from "./github.constants.js";
+ import path from "node:path";
+ import { globalLinkTransform, generateAutoLinkMappings, } from "./github.link-transform.js";
+ import { INVALID_STRING_ERROR } from "./github.constants.js";
+ // Decomposed modules
+ import { generateId, generatePath, shouldIncludeFile, getHeaders, } from "./github.paths.js";
+ import { resolveAssetConfig, processAssets } from "./github.assets.js";
+ import { storeProcessedFile } from "./github.storage.js";
+ // Re-export items that used to live in this module so existing internal
+ // consumers can migrate gradually (cleanup.ts, spec files, etc.).
+ export { generateId, generatePath, shouldIncludeFile, applyRename, getHeaders, syncHeaders, } from "./github.paths.js";
+ export { syncFile } from "./github.storage.js";
+ export { resolveAssetConfig, detectAssets, downloadAsset, transformAssetReferences, } from "./github.assets.js";
  /**
- * Generates a unique identifier from a file path by removing the extension
- * @param filePath - The file path to generate ID from
- * @return {string} The generated identifier as a string with extension removed
+ * Validates that a basePath is relative and does not escape the project root.
  * @internal
  */
- export function generateId(filePath) {
- let id = filePath;
- // Remove file extension for ID generation
- const lastDotIndex = id.lastIndexOf('.');
- if (lastDotIndex > 0) {
- id = id.substring(0, lastDotIndex);
+ function validateBasePath(basePath, projectRoot) {
+ if (path.isAbsolute(basePath)) {
+ throw new Error(`basePath must be relative, got absolute path: ${basePath}`);
  }
- return id;
- }
- /**
- * Applies path mapping logic to get the final filename for a file
- *
- * Supports two types of path mappings:
- * - **File mapping**: Exact file path match (e.g., 'docs/README.md' -> 'docs/overview.md')
- * - **Folder mapping**: Folder path with trailing slash (e.g., 'docs/capabilities/' -> 'docs/')
- *
- * @param filePath - Original source file path
- * @param matchedPattern - The pattern that matched this file
- * @param options - Import options containing path mappings
- * @returns Final filename after applying path mapping logic
- * @internal
- */
- export function applyRename(filePath, matchedPattern, options) {
- if (options?.includes && matchedPattern && matchedPattern.index < options.includes.length) {
- const includePattern = options.includes[matchedPattern.index];
- if (includePattern.pathMappings) {
- // First check for exact file match (current behavior - backwards compatible)
- if (includePattern.pathMappings[filePath]) {
- const mappingValue = includePattern.pathMappings[filePath];
- return typeof mappingValue === 'string' ? mappingValue : mappingValue.target;
- }
- // Then check for folder-to-folder mappings
- for (const [sourceFolder, mappingValue] of Object.entries(includePattern.pathMappings)) {
- // Check if this is a folder mapping (ends with /) and file is within it
- if (sourceFolder.endsWith('/') && filePath.startsWith(sourceFolder)) {
- // Replace the source folder path with target folder path
- const targetFolder = typeof mappingValue === 'string' ? mappingValue : mappingValue.target;
- const relativePath = filePath.slice(sourceFolder.length);
- return path.posix.join(targetFolder, relativePath);
- }
- }
- }
- }
- // Return original filename if no path mapping found
- return basename(filePath);
- }
- /**
- * Generates a local file path based on the matched pattern and file path
- * @param filePath - The original file path from the repository
- * @param matchedPattern - The pattern that matched this file (or null if no includes specified)
- * @param options - Import options containing includes patterns for path mapping lookups
- * @return {string} The local file path where this content should be stored
- * @internal
- */
- export function generatePath(filePath, matchedPattern, options) {
- if (matchedPattern) {
- // Extract the directory part from the pattern (before any glob wildcards)
- const pattern = matchedPattern.pattern;
- const beforeGlob = pattern.split(/[*?{]/)[0];
- // Remove the pattern prefix from the file path to get the relative path
- let relativePath = filePath;
- if (beforeGlob && filePath.startsWith(beforeGlob)) {
- relativePath = filePath.substring(beforeGlob.length);
- // Remove leading slash if present
- if (relativePath.startsWith('/')) {
- relativePath = relativePath.substring(1);
- }
- }
- // If no relative path remains, use just the filename
- if (!relativePath) {
- relativePath = basename(filePath);
- }
- // Apply path mapping logic
- const finalFilename = applyRename(filePath, matchedPattern, options);
- // Always apply path mapping if applyRename returned something different from the original basename
- // OR if there are pathMappings configured (since empty string mappings might return same basename)
- const hasPathMappings = options?.includes?.[matchedPattern.index]?.pathMappings &&
- Object.keys(options.includes[matchedPattern.index].pathMappings).length > 0;
- if (finalFilename !== basename(filePath) || hasPathMappings) {
- // Check if applyRename returned a full path (contains path separators) or just a filename
- if (finalFilename.includes('/') || finalFilename.includes('\\')) {
- // applyRename returned a full relative path - need to extract relative part
- // Remove the pattern prefix to get the relative path within the pattern context
- const beforeGlob = pattern.split(/[*?{]/)[0];
- if (beforeGlob && finalFilename.startsWith(beforeGlob)) {
- relativePath = finalFilename.substring(beforeGlob.length);
- // Remove leading slash if present
- if (relativePath.startsWith('/')) {
- relativePath = relativePath.substring(1);
- }
- }
- else {
- relativePath = finalFilename;
- }
- }
- else {
- // applyRename returned just a filename
- // If the filename is different due to pathMapping, use it directly
- // This handles cases where pathMappings flatten directory structures
- relativePath = finalFilename;
- }
- }
- return join(matchedPattern.basePath, relativePath);
- }
- // Should not happen since we always use includes
- throw new Error("No matched pattern provided - includes are required");
- }
- /**
- * Synchronizes a file by ensuring the target directory exists and then writing the specified content to the file at the given path.
- *
- * @param {string} path - The path of the file to synchronize, including its directory and filename.
- * @param {string} content - The content to write into the file.
- * @return {Promise<void>} - A promise that resolves when the file has been successfully written.
- * @internal
- */
- export async function syncFile(path, content) {
- const dir = path.substring(0, path.lastIndexOf("/"));
- // Ensure the directory exists
- if (dir && !existsSync(dir)) {
- await fs.mkdir(dir, { recursive: true });
- }
- // Write the file to the filesystem and store
- await fs.writeFile(path, content, "utf-8");
- }
- /**
- * Default asset patterns for common image and media file types
- * @internal
- */
- const DEFAULT_ASSET_PATTERNS = ['.png', '.jpg', '.jpeg', '.gif', '.svg', '.webp', '.ico', '.bmp'];
- /**
- * Checks if a file path should be included and returns the matching pattern
- * @param filePath - The file path to check (relative to the repository root)
- * @param options - Import options containing includes patterns
- * @returns Object with include status and matched pattern, or null if not included
- * @internal
- */
- export function shouldIncludeFile(filePath, options) {
- const { includes } = options;
- // If no include patterns specified, include all files
- if (!includes || includes.length === 0) {
- return { included: true, matchedPattern: null };
- }
- // Check each include pattern to find a match
- for (let i = 0; i < includes.length; i++) {
- const includePattern = includes[i];
- const matcher = picomatch(includePattern.pattern);
- if (matcher(filePath)) {
- return {
- included: true,
- matchedPattern: {
- pattern: includePattern.pattern,
- basePath: includePattern.basePath,
- index: i
- }
- };
- }
+ const resolved = path.resolve(projectRoot, basePath);
+ const normalized = path.normalize(resolved);
+ if (!normalized.startsWith(path.normalize(projectRoot))) {
+ throw new Error(`basePath "${basePath}" resolves outside project root`);
  }
- // No patterns matched
- return { included: false, matchedPattern: null };
  }
+ const GITHUB_IDENTIFIER_RE = /^[a-zA-Z0-9._-]+$/;
+ const GITHUB_REF_RE = /^[a-zA-Z0-9._\-/]+$/;
  /**
- * Detects asset references in markdown content using regex patterns
- * @param content - The markdown content to parse
- * @param assetPatterns - File extensions to treat as assets
- * @returns Array of detected asset paths
+ * Validates a GitHub owner or repo identifier.
  * @internal
  */
- export function detectAssets(content, assetPatterns = DEFAULT_ASSET_PATTERNS) {
- const assets = [];
- const patterns = assetPatterns.map(ext => ext.toLowerCase());
- // Match markdown images: ![alt](path)
- const imageRegex = /!\[[^\]]*\]\(([^)]+)\)/g;
- let match;
- while ((match = imageRegex.exec(content)) !== null) {
- const assetPath = match[1];
- // Only include relative paths and assets matching our patterns
- if (assetPath.startsWith('./') || assetPath.startsWith('../') || !assetPath.includes('://')) {
- const ext = extname(assetPath).toLowerCase();
- if (patterns.includes(ext)) {
- assets.push(assetPath);
- }
- }
+ function validateGitHubIdentifier(value, name) {
+ if (!value || value.length > 100) {
+ throw new Error(`Invalid ${name}: must be 1-100 characters`);
  }
- // Match HTML img tags: <img src="path">
- const htmlImgRegex = /<img[^>]+src\s*=\s*["']([^"']+)["'][^>]*>/gi;
- while ((match = htmlImgRegex.exec(content)) !== null) {
- const assetPath = match[1];
- if (assetPath.startsWith('./') || assetPath.startsWith('../') || !assetPath.includes('://')) {
- const ext = extname(assetPath).toLowerCase();
- if (patterns.includes(ext)) {
- assets.push(assetPath);
- }
- }
+ if (!GITHUB_IDENTIFIER_RE.test(value)) {
+ throw new Error(`Invalid ${name}: "${value}" contains disallowed characters`);
  }
- return [...new Set(assets)]; // Remove duplicates
  }
  /**
- * Downloads an asset from GitHub and saves it locally
- * @param octokit - GitHub API client
- * @param owner - Repository owner
- * @param repo - Repository name
- * @param ref - Git reference
- * @param assetPath - Path to the asset in the repository
- * @param localPath - Local path where the asset should be saved
- * @param signal - Abort signal for cancellation
- * @returns Promise that resolves when the asset is downloaded
+ * Validates a GitHub ref (branch/tag name). More permissive than identifiers — allows `/`.
  * @internal
  */
- export async function downloadAsset(octokit, owner, repo, ref, assetPath, localPath, signal) {
- try {
- const { data } = await octokit.rest.repos.getContent({
- owner,
- repo,
- path: assetPath,
- ref,
- request: { signal },
- });
- if (Array.isArray(data) || data.type !== 'file' || !data.download_url) {
- throw new Error(`Asset ${assetPath} is not a valid file (type: ${data.type}, downloadUrl: ${data.download_url})`);
- }
- const response = await fetch(data.download_url, { signal });
- if (!response.ok) {
- throw new Error(`Failed to download asset: ${response.status} ${response.statusText}`);
- }
- const buffer = await response.arrayBuffer();
- const dir = dirname(localPath);
- if (!existsSync(dir)) {
- await fs.mkdir(dir, { recursive: true });
- }
- await fs.writeFile(localPath, new Uint8Array(buffer));
+ function validateGitHubRef(value) {
+ if (!value || value.length > 256) {
+ throw new Error(`Invalid ref: must be 1-256 characters`);
  }
- catch (error) {
- if (error.status === 404) {
- throw new Error(`Asset not found: ${assetPath}`);
- }
- throw error;
+ if (!GITHUB_REF_RE.test(value)) {
+ throw new Error(`Invalid ref: "${value}" contains disallowed characters`);
  }
  }
  /**
- * Transforms asset references in markdown content to use local paths
- * @param content - The markdown content to transform
- * @param assetMap - Map of original asset paths to new local paths
- * @returns Transformed content with updated asset references
+ * Collects file data by downloading content and applying transforms.
+ * Extracted from the nested closure inside toCollectionEntry for clarity.
  * @internal
  */
- export function transformAssetReferences(content, assetMap) {
- let transformedContent = content;
- for (const [originalPath, newPath] of assetMap) {
- // Transform markdown images
- const imageRegex = new RegExp(`(!)\\[([^\\]]*)\\]\\(\\s*${escapeRegExp(originalPath)}\\s*\\)`, 'g');
- transformedContent = transformedContent.replace(imageRegex, `$1[$2](${newPath})`);
- // Transform HTML img tags
- const htmlRegex = new RegExp(`(<img[^>]+src\\s*=\\s*["'])${escapeRegExp(originalPath)}(["'][^>]*>)`, 'gi');
- transformedContent = transformedContent.replace(htmlRegex, `$1${newPath}$2`);
- }
- return transformedContent;
- }
- /**
- * Escapes special regex characters in a string
- * @internal
- */
- function escapeRegExp(string) {
- return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
- }
- /**
- * Processes assets in markdown content by detecting, downloading, and transforming references
- * @param content - The markdown content to process
- * @param options - Configuration options including asset settings
- * @param octokit - GitHub API client
- * @param signal - Abort signal for cancellation
- * @returns Promise that resolves to transformed content
- * @internal
- */
- async function processAssets(content, filePath, options, octokit, logger, signal) {
- const { owner, repo, ref = 'main', assetsPath, assetsBaseUrl, assetPatterns } = options;
- logger.verbose(`🖼️ Processing assets for ${filePath}`);
- logger.debug(` assetsPath: ${assetsPath}`);
- logger.debug(` assetsBaseUrl: ${assetsBaseUrl}`);
- if (!assetsPath || !assetsBaseUrl) {
- logger.verbose(` ⏭️ Skipping asset processing - missing assetsPath or assetsBaseUrl`);
- return { content, assetsDownloaded: 0, assetsCached: 0 };
- }
- // Detect assets in the content
- const detectedAssets = detectAssets(content, assetPatterns);
- logger.verbose(` 📸 Detected ${detectedAssets.length} assets`);
- if (detectedAssets.length > 0) {
- logger.debug(` Assets: ${detectedAssets.join(', ')}`);
- }
- if (detectedAssets.length === 0) {
- return { content, assetsDownloaded: 0, assetsCached: 0 };
- }
- const assetMap = new Map();
- let assetsDownloaded = 0;
- let assetsCached = 0;
- // Process each detected asset
- await Promise.all(detectedAssets.map(async (assetPath) => {
- logger.logAssetProcessing("Processing", assetPath);
+ async function collectFileData({ url, editUrl: _editUrl }, filePath, options, context, octokit, signal) {
+ const logger = context.logger;
+ if (url === null || typeof url !== "string") {
+ return null;
+ }
+ const urlObj = new URL(url);
+ // Determine if file needs renaming and generate appropriate ID
+ const includeCheck = shouldIncludeFile(filePath, options);
+ const matchedPattern = includeCheck.included
+ ? includeCheck.matchedPattern
+ : null;
+ // Check if this file has a path mapping
+ const hasPathMapping = matchedPattern &&
+ options?.includes &&
+ matchedPattern.index < options.includes.length &&
+ options.includes[matchedPattern.index].pathMappings &&
+ options.includes[matchedPattern.index].pathMappings[filePath];
+ // Generate ID based on appropriate path
+ const id = hasPathMapping
+ ? generateId(generatePath(filePath, matchedPattern, options)) // Use path-mapped path for ID
+ : generateId(filePath); // Use original path for ID
+ const finalPath = generatePath(filePath, matchedPattern, options);
+ let contents;
+ logger.logFileProcessing("Fetching", filePath, `from ${urlObj.toString()}`);
+ // Download file content
+ const init = {
+ signal,
+ headers: getHeaders({ init: {}, meta: context.meta, id }),
+ };
+ let res = null;
+ // Fetch with retries (simplified version of syncEntry logic)
+ for (let attempt = 0; attempt < 3; attempt++) {
  try {
- // Resolve the asset path relative to the current markdown file
- const resolvedAssetPath = resolveAssetPath(filePath, assetPath);
- logger.debug(` 🔗 Resolved path: ${resolvedAssetPath}`);
- // Generate unique filename to avoid conflicts
- const originalFilename = basename(assetPath);
- const ext = extname(originalFilename);
- const nameWithoutExt = basename(originalFilename, ext);
- const uniqueFilename = `${nameWithoutExt}-${Date.now()}${ext}`;
- const localPath = join(assetsPath, uniqueFilename);
- logger.debug(` 💾 Local path: ${localPath}`);
- // Check if asset already exists (simple cache check)
- if (existsSync(localPath)) {
- logger.logAssetProcessing("Cached", assetPath);
- assetsCached++;
- }
- else {
- // Download the asset
- logger.logAssetProcessing("Downloading", assetPath, `from ${owner}/${repo}@${ref}:${resolvedAssetPath}`);
- await downloadAsset(octokit, owner, repo, ref, resolvedAssetPath, localPath, signal);
- logger.logAssetProcessing("Downloaded", assetPath);
- assetsDownloaded++;
- }
- // Generate URL for the transformed reference
- const assetUrl = `${assetsBaseUrl}/${uniqueFilename}`.replace(/\/+/g, '/');
- logger.debug(` 🔄 Transform: ${assetPath} -> ${assetUrl}`);
- // Map the transformation
- assetMap.set(assetPath, assetUrl);
+ res = await fetch(urlObj, init);
+ if (res.ok)
+ break;
  }
  catch (error) {
- logger.warn(` ❌ Failed to process asset ${assetPath}: ${error}`);
+ if (attempt === 2)
+ throw error;
+ await new Promise((resolve) => setTimeout(resolve, 1000 * (attempt + 1)));
  }
- }));
- logger.verbose(` 🗺️ Processed ${assetMap.size} assets: ${assetsDownloaded} downloaded, ${assetsCached} cached`);
- // Transform the content with new asset references
- const transformedContent = transformAssetReferences(content, assetMap);
- return { content: transformedContent, assetsDownloaded, assetsCached };
- }
- /**
- * Resolves an asset path relative to a base path
- * @internal
- */
- function resolveAssetPath(basePath, assetPath) {
- if (assetPath.startsWith('./')) {
- return join(dirname(basePath), assetPath.slice(2));
  }
- else if (assetPath.startsWith('../')) {
- return join(dirname(basePath), assetPath);
+ if (!res) {
+ throw new Error(`No response received for ${urlObj.toString()}`);
  }
- return assetPath;
- }
- /**
- * Synchronizes an entry by fetching its contents, validating its metadata, and storing or rendering it as needed.
- *
- * @param {LoaderContext} context - The loader context containing the required utilities, metadata, and configuration.
- * @param {Object} urls - Object containing URL data.
- * @param {string | URL | null} urls.url - The URL of the entry to fetch. Throws an error if null or invalid.
- * @param {string} urls.editUrl - The URL for editing the entry.
- * @param {RootOptions} options - Configuration settings for processing the entry such as file paths and custom options.
- * @param {any} octokit - GitHub API client for downloading assets.
- * @param {RequestInit} [init] - Optional parameter for customizing the fetch request.
- * @return {Promise<void>} Resolves when the entry has been successfully processed and stored. Throws errors if invalid URL, missing configuration, or other issues occur.
- * @internal
- */
- export async function syncEntry(context, { url, editUrl }, filePath, options, octokit, init = {}) {
- // Exit on null or if the URL is invalid
- if (url === null || (typeof url !== "string" && !(url instanceof URL))) {
- throw new TypeError(INVALID_URL_ERROR);
- }
- // Validate URL
- if (typeof url === "string")
- url = new URL(url);
- const { meta, store, generateDigest, entryTypes, logger, parseData, config } = context;
- function configForFile(file) {
- const ext = file.split(".").at(-1);
- if (!ext) {
- logger.warn(`No extension found for ${file}`);
- return;
- }
- return entryTypes?.get(`.${ext}`);
- }
- // Custom ID, TODO: Allow custom id generators
- let id = generateId(filePath);
- init.headers = getHeaders({
- init: init.headers,
- meta,
- id,
- });
- let res = await fetch(url, init);
  if (res.status === 304) {
- // Only skip if the local file actually exists
+ // File not modified, read existing content from disk if it exists
  const includeResult = shouldIncludeFile(filePath, options);
  const relativePath = generatePath(filePath, includeResult.included ? includeResult.matchedPattern : null, options);
  const fileUrl = pathToFileURL(relativePath);
  if (existsSync(fileURLToPath(fileUrl))) {
- logger.info(`Skipping ${id} as it has not changed`);
- return;
+ logger.logFileProcessing("Using cached", filePath, "304 not modified");
+ const { promises: fs } = await import("node:fs");
+ contents = await fs.readFile(fileURLToPath(fileUrl), "utf-8");
  }
  else {
- logger.info(`File ${id} missing locally, re-fetching despite 304`);
- // File is missing locally, fetch without ETag headers
+ // File is missing locally, re-fetch without cache headers
+ logger.logFileProcessing("Re-fetching", filePath, "missing locally despite 304");
  const freshInit = { ...init };
  freshInit.headers = new Headers(init.headers);
- freshInit.headers.delete('If-None-Match');
- freshInit.headers.delete('If-Modified-Since');
- res = await fetch(url, freshInit);
- if (!res.ok)
- throw new Error(res.statusText);
+ freshInit.headers.delete("If-None-Match");
+ freshInit.headers.delete("If-Modified-Since");
+ res = await fetch(urlObj, freshInit);
+ if (!res.ok) {
+ throw new Error(`Failed to fetch file content from ${urlObj.toString()}: ${res.status} ${res.statusText || "Unknown error"}`);
+ }
+ contents = await res.text();
  }
  }
- if (!res.ok)
- throw new Error(res.statusText);
- let contents = await res.text();
- const entryType = configForFile(filePath || "tmp.md");
- if (!entryType)
- throw new Error("No entry type found");
- // Process assets FIRST if configuration is provided - before content transforms
- // This ensures asset detection works with original markdown links before they get transformed
- if (options.assetsPath && options.assetsBaseUrl) {
+ else if (!res.ok) {
+ throw new Error(`Failed to fetch file content from ${urlObj.toString()}: ${res.status} ${res.statusText || "Unknown error"}`);
+ }
+ else {
+ contents = await res.text();
+ }
+ // Process assets FIRST if configuration is provided (or co-located defaults apply)
+ const resolvedAssetConfig = resolveAssetConfig(options, filePath);
+ if (resolvedAssetConfig) {
  try {
- // Create a dummy logger for syncEntry since it uses Astro's logger
- const dummyLogger = {
- verbose: (msg) => logger.info(msg),
- debug: (msg) => logger.debug(msg),
- warn: (msg) => logger.warn(msg),
- logAssetProcessing: (action, path, details) => {
- const msg = details ? `Asset ${action}: ${path} - ${details}` : `Asset ${action}: ${path}`;
- logger.info(msg);
- }
- };
- const assetResult = await processAssets(contents, filePath, options, octokit, dummyLogger, init.signal || undefined);
+ const optionsWithAssets = { ...options, ...resolvedAssetConfig };
+ const assetResult = await processAssets(contents, filePath, optionsWithAssets, octokit, logger, signal);
  contents = assetResult.content;
  }
  catch (error) {
- logger.warn(`Asset processing failed for ${id}: ${error.message}`);
+ logger.warn(`Asset processing failed for ${id}: ${error instanceof Error ? error.message : String(error)}`);
  }
  }
- // Apply content transforms if provided - both global and pattern-specific
- // This runs after asset processing so transforms work with processed content
- const includeResultForTransforms = shouldIncludeFile(filePath, options);
+ // Apply content transforms
+ const includeResult = shouldIncludeFile(filePath, options);
  const transformsToApply = [];
  // Add global transforms first
  if (options.transforms && options.transforms.length > 0) {
  transformsToApply.push(...options.transforms);
  }
  // Add pattern-specific transforms
- if (includeResultForTransforms.included && includeResultForTransforms.matchedPattern && options.includes) {
- const matchedInclude = options.includes[includeResultForTransforms.matchedPattern.index];
+ if (includeResult.included &&
+ includeResult.matchedPattern &&
+ options.includes) {
+ const matchedInclude = options.includes[includeResult.matchedPattern.index];
  if (matchedInclude.transforms && matchedInclude.transforms.length > 0) {
  transformsToApply.push(...matchedInclude.transforms);
  }
@@ -464,84 +166,38 @@ export async function syncEntry(context, { url, editUrl }, filePath, options, oc
  id,
  path: filePath,
  options,
- matchedPattern: includeResultForTransforms.included ? includeResultForTransforms.matchedPattern : undefined,
+ matchedPattern: includeResult.included && includeResult.matchedPattern
+ ? includeResult.matchedPattern
+ : undefined,
  };
  for (const transform of transformsToApply) {
  try {
  contents = transform(contents, transformContext);
  }
  catch (error) {
- logger.warn(`Transform failed for ${id}: ${error}`);
+ context.logger?.warn(`Transform failed for ${id}: ${error}`);
  }
  }
  }
- const includeResult = shouldIncludeFile(filePath, options);
- const relativePath = generatePath(filePath, includeResult.included ? includeResult.matchedPattern : null, options);
- const fileUrl = pathToFileURL(relativePath);
- const { body, data } = await entryType.getEntryInfo({
- contents,
- fileUrl: fileUrl,
- });
- const existingEntry = store.get(id);
- const digest = generateDigest(contents);
- if (existingEntry &&
- existingEntry.digest === digest &&
- existingEntry.filePath) {
- return;
- }
- // Write file to path
- if (!existsSync(fileURLToPath(fileUrl))) {
- logger.verbose(`Writing ${id} to ${fileUrl}`);
- await syncFile(fileURLToPath(fileUrl), contents);
- }
- const parsedData = await parseData({
- id,
- data,
- filePath: fileUrl.toString(),
- });
- if (entryType.getRenderFunction) {
- logger.verbose(`Rendering ${id}`);
- const render = await entryType.getRenderFunction(config);
- let rendered = undefined;
- try {
- rendered = await render?.({
- id,
- data,
- body,
- filePath: fileUrl.toString(),
- digest,
- });
- }
- catch (error) {
- logger.error(`Error rendering ${id}: ${error.message}`);
+ // Build link context for this file
+ const linkContext = includeResult.included && includeResult.matchedPattern
+ ? {
+ sourcePath: filePath,
+ targetPath: finalPath,
+ basePath: includeResult.matchedPattern.basePath,
+ pathMappings: options.includes?.[includeResult.matchedPattern.index]
+ ?.pathMappings,
+ matchedPattern: includeResult.matchedPattern,
  }
- store.set({
- id,
- data: parsedData,
- body,
- filePath: relativePath,
- digest,
- rendered,
- });
- }
- else if ("contentModuleTypes" in entryType) {
- store.set({
- id,
- data: parsedData,
- body,
- filePath: relativePath,
- digest,
- deferredRender: true,
- });
- }
- else {
- store.set({ id, data: parsedData, body, filePath: relativePath, digest });
- }
- syncHeaders({
- headers: res.headers,
- meta,
+ : undefined;
+ // Use the finalPath we already computed
+ return {
+ sourcePath: filePath,
+ targetPath: finalPath,
+ content: contents,
  id,
- });
+ linkContext,
+ };
  }
  /**
  * Converts a given GitHub repository path into a collection entry by fetching the content
@@ -549,11 +205,25 @@ export async function syncEntry(context, { url, editUrl }, filePath, options, oc
  * Handles both files and directories, recursively processing directories if needed.
  * @internal
  */
- export async function toCollectionEntry({ context, octokit, options, signal, force = false, }) {
+ export async function toCollectionEntry({ context, octokit, options, signal, force = false, clear = false, }) {
  const { owner, repo, ref = "main" } = options || {};
  if (typeof repo !== "string" || typeof owner !== "string")
  throw new TypeError(INVALID_STRING_ERROR);
- // Get logger from context - it should be our Logger instance (initialize early)
+ // Validate identifiers to prevent injection into API calls / URLs
+ validateGitHubIdentifier(owner, "owner");
+ validateGitHubIdentifier(repo, "repo");
+ if (ref !== "main")
+ validateGitHubRef(ref);
+ // Validate include pattern basePaths don't escape the project
+ const projectRoot = process.cwd();
+ if (options.includes) {
+ for (const inc of options.includes) {
+ validateBasePath(inc.basePath, projectRoot);
+ }
+ }
+ if (options.assetsPath) {
+ validateBasePath(options.assetsPath, projectRoot);
+ }
  const logger = context.logger;
  /**
  * OPTIMIZATION: Use Git Trees API for efficient file discovery
@@ -584,7 +254,7 @@ export async function toCollectionEntry({ context, octokit, options, signal, for
  repo,
  sha: ref,
  per_page: 1,
- request: { signal }
+ request: { signal },
  });
  if (commits.length === 0) {
  throw new Error(`No commits found for ref ${ref}`);
@@ -598,12 +268,12 @@ export async function toCollectionEntry({ context, octokit, options, signal, for
  repo,
  tree_sha: treeSha,
  recursive: "true",
- request: { signal }
+ request: { signal },
  });
  logger.debug(`Retrieved ${treeData.tree.length} items from repository tree`);
  // Filter tree to only include files (not dirs/submodules) that match our patterns
  const fileEntries = treeData.tree.filter((item) => {
- if (item.type !== 'blob')
+ if (item.type !== "blob")
  return false; // Only process files (blobs)
  const includeCheck = shouldIncludeFile(item.path, options);
  return includeCheck.included;
@@ -614,9 +284,10 @@ export async function toCollectionEntry({ context, octokit, options, signal, for
  for (const treeItem of fileEntries) {
  const filePath = treeItem.path;
  // Construct the download URL (raw.githubusercontent.com format)
- const downloadUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${commitSha}/${filePath}`;
- const editUrl = treeItem.url || ''; // Git blob URL (use empty string as fallback)
- const fileData = await collectFileData({ url: downloadUrl, editUrl }, filePath);
+ const encodedPath = filePath.split("/").map(encodeURIComponent).join("/");
+ const downloadUrl = `https://raw.githubusercontent.com/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}/${commitSha}/${encodedPath}`;
+ const editUrl = treeItem.url || ""; // Git blob URL (use empty string as fallback)
+ const fileData = await collectFileData({ url: downloadUrl, editUrl }, filePath, options, context, octokit, signal);
  if (fileData) {
  allFiles.push(fileData);
  }
@@ -640,7 +311,7 @@ export async function toCollectionEntry({ context, octokit, options, signal, for
  // Combine auto-generated mappings with user-defined mappings
  const allLinkMappings = [
  ...autoGeneratedMappings,
- ...(options.linkTransform.linkMappings || [])
+ ...(options.linkTransform.linkMappings || []),
  ];
  logger.debug(`Generated ${autoGeneratedMappings.length} automatic link mappings from pathMappings`);
  processedFiles = globalLinkTransform(allFiles, {
@@ -654,7 +325,7 @@ export async function toCollectionEntry({ context, octokit, options, signal, for
  stats.processed = processedFiles.length;
  for (const file of processedFiles) {
  logger.logFileProcessing("Storing", file.sourcePath);
- const result = await storeProcessedFile(file, context, options);
+ const result = await storeProcessedFile(file, context, clear);
  if (result) {
  stats.updated++;
  }
@@ -663,310 +334,4 @@ export async function toCollectionEntry({ context, octokit, options, signal, for
  }
  }
  return stats;
- // Helper function to collect file data with content transforms applied
- async function collectFileData({ url, editUrl }, filePath) {
- if (url === null || typeof url !== "string") {
- return null;
- }
- const urlObj = new URL(url);
- // Determine if file needs renaming and generate appropriate ID
- const includeCheck = shouldIncludeFile(filePath, options);
- const matchedPattern = includeCheck.included ? includeCheck.matchedPattern : null;
- // Check if this file has a path mapping
- const hasPathMapping = matchedPattern &&
- options?.includes &&
- matchedPattern.index < options.includes.length &&
- options.includes[matchedPattern.index].pathMappings &&
- options.includes[matchedPattern.index].pathMappings[filePath];
- // Generate ID based on appropriate path
- const id = hasPathMapping ?
- generateId(generatePath(filePath, matchedPattern, options)) : // Use path-mapped path for ID
- generateId(filePath); // Use original path for ID
- const finalPath = generatePath(filePath, matchedPattern, options);
- let contents;
- logger.logFileProcessing("Fetching", filePath, `from ${urlObj.toString()}`);
- // Download file content
- const init = { signal, headers: getHeaders({ init: {}, meta: context.meta, id }) };
- let res = null;
- // Fetch with retries (simplified version of syncEntry logic)
- for (let attempt = 0; attempt < 3; attempt++) {
- try {
- res = await fetch(urlObj, init);
- if (res.ok)
- break;
- }
- catch (error) {
- if (attempt === 2)
- throw error;
- await new Promise(resolve => setTimeout(resolve, 1000 * (attempt + 1)));
- }
- }
- if (!res) {
- throw new Error(`No response received for ${urlObj.toString()}`);
- }
- if (res.status === 304) {
- // File not modified, read existing content from disk if it exists
- const includeResult = shouldIncludeFile(filePath, options);
- const relativePath = generatePath(filePath, includeResult.included ? includeResult.matchedPattern : null, options);
- const fileUrl = pathToFileURL(relativePath);
- if (existsSync(fileURLToPath(fileUrl))) {
- logger.logFileProcessing("Using cached", filePath, "304 not modified");
- const { promises: fs } = await import('node:fs');
- contents = await fs.readFile(fileURLToPath(fileUrl), 'utf-8');
- }
- else {
- // File is missing locally, re-fetch without cache headers
- logger.logFileProcessing("Re-fetching", filePath, "missing locally despite 304");
- const freshInit = { ...init };
- freshInit.headers = new Headers(init.headers);
- freshInit.headers.delete('If-None-Match');
- freshInit.headers.delete('If-Modified-Since');
- res = await fetch(urlObj, freshInit);
- if (!res.ok) {
- throw new Error(`Failed to fetch file content from ${urlObj.toString()}: ${res.status} ${res.statusText || 'Unknown error'}`);
- }
- contents = await res.text();
- }
- }
- else if (!res.ok) {
- throw new Error(`Failed to fetch file content from ${urlObj.toString()}: ${res.status} ${res.statusText || 'Unknown error'}`);
- }
- else {
- contents = await res.text();
- }
- // Process assets FIRST if configuration is provided
- let fileAssetsDownloaded = 0;
- let fileAssetsCached = 0;
- if (options.assetsPath && options.assetsBaseUrl) {
- try {
- const assetResult = await processAssets(contents, filePath, options, octokit, logger, signal);
- contents = assetResult.content;
- fileAssetsDownloaded = assetResult.assetsDownloaded;
- fileAssetsCached = assetResult.assetsCached;
- }
- catch (error) {
- logger.warn(`Asset processing failed for ${id}: ${error instanceof Error ? error.message : String(error)}`);
- }
- }
- // Apply content transforms
- const includeResult = shouldIncludeFile(filePath, options);
- const transformsToApply = [];
- // Add global transforms first
- if (options.transforms && options.transforms.length > 0) {
- transformsToApply.push(...options.transforms);
- }
- // Add pattern-specific transforms
- if (includeResult.included && includeResult.matchedPattern && options.includes) {
- const matchedInclude = options.includes[includeResult.matchedPattern.index];
- if (matchedInclude.transforms && matchedInclude.transforms.length > 0) {
- transformsToApply.push(...matchedInclude.transforms);
- }
- }
- if (transformsToApply.length > 0) {
- const transformContext = {
- id,
- path: filePath,
- options,
- matchedPattern: includeResult.included ? includeResult.matchedPattern : undefined,
- };
- for (const transform of transformsToApply) {
- try {
- contents = transform(contents, transformContext);
- }
- catch (error) {
- context.logger?.warn(`Transform failed for ${id}: ${error}`);
- }
- }
- }
- // Build link context for this file
- const linkContext = includeResult.included && includeResult.matchedPattern ? {
- sourcePath: filePath,
- targetPath: finalPath,
- basePath: includeResult.matchedPattern.basePath,
- pathMappings: options.includes?.[includeResult.matchedPattern.index]?.pathMappings,
- matchedPattern: includeResult.matchedPattern,
- } : undefined;
- // Use the finalPath we already computed
- return {
- sourcePath: filePath,
- targetPath: finalPath,
- content: contents,
- id,
- linkContext,
- };
- }
- // Helper function to store a processed file
- async function storeProcessedFile(file, context, options) {
- const { store, generateDigest, entryTypes, logger, parseData, config } = context;
- function configForFile(filePath) {
- const ext = filePath.split(".").at(-1);
- if (!ext) {
- logger.warn(`No extension found for ${filePath}`);
- return;
- }
- return entryTypes?.get(`.${ext}`);
- }
- const entryType = configForFile(file.sourcePath || "tmp.md");
- if (!entryType)
- throw new Error("No entry type found");
- const fileUrl = pathToFileURL(file.targetPath);
- const { body, data } = await entryType.getEntryInfo({
- contents: file.content,
- fileUrl: fileUrl,
- });
- // Generate digest for storage (repository-level caching handles change detection)
- const digest = generateDigest(file.content);
- const existingEntry = store.get(file.id);
- if (existingEntry) {
- logger.debug(`🔄 File ${file.id} - updating`);
- }
- else {
- logger.debug(`📄 File ${file.id} - adding`);
- }
- // Write file to disk
- if (!existsSync(fileURLToPath(fileUrl))) {
- logger.verbose(`Writing ${file.id} to ${fileUrl}`);
- await syncFile(fileURLToPath(fileUrl), file.content);
- }
- const parsedData = await parseData({
- id: file.id,
- data,
- filePath: fileUrl.toString(),
- });
- // Store in content store
- if (entryType.getRenderFunction) {
- logger.verbose(`Rendering ${file.id}`);
- const render = await entryType.getRenderFunction(config);
- let rendered = undefined;
- try {
- rendered = await render?.({
- id: file.id,
- data,
- body,
- filePath: fileUrl.toString(),
- digest,
- });
- }
- catch (error) {
- logger.error(`Error rendering ${file.id}: ${error.message}`);
- }
- logger.debug(`🔍 Storing collection entry: ${file.id} (${file.sourcePath} -> ${file.targetPath})`);
- store.set({
- id: file.id,
- data: parsedData,
- body,
- filePath: file.targetPath,
- digest,
- rendered,
- });
- }
- else if ("contentModuleTypes" in entryType) {
- store.set({
- id: file.id,
- data: parsedData,
- body,
- filePath: file.targetPath,
- digest,
- deferredRender: true,
- });
- }
- else {
- store.set({
- id: file.id,
- data: parsedData,
- body,
- filePath: file.targetPath,
- digest
- });
- }
- return { id: file.id, filePath: file.targetPath };
- }
- async function processDirectoryRecursively(path) {
- // Fetch the content
- const { data, status } = await octokit.rest.repos.getContent({
- owner,
- repo,
- path,
- ref,
- request: { signal },
- });
- if (status !== 200)
- throw new Error(INVALID_SERVICE_RESPONSE);
- // Matches for regular files
- if (!Array.isArray(data)) {
- const filePath = data.path;
- switch (data.type) {
- // Return
- case "file":
- return await syncEntry(context, { url: data.download_url, editUrl: data.url }, filePath, options, octokit, { signal });
- default:
- throw new Error("Invalid type");
- }
- }
- // Directory listing with filtering - process sequentially
- const filteredEntries = data
- .filter(({ type, path }) => {
- // Always include directories for recursion
- if (type === "dir")
- return true;
- // Apply filtering logic to files
- if (type === "file") {
- return shouldIncludeFile(path, options).included;
- }
- return false;
- });
- const results = [];
- for (const { type, path, download_url, url } of filteredEntries) {
- switch (type) {
- // Recurse
- case "dir":
- results.push(await processDirectoryRecursively(path));
- break;
- // Return
- case "file":
- results.push(await syncEntry(context, { url: download_url, editUrl: url }, path, options, octokit, { signal }));
- break;
- default:
- throw new Error("Invalid type");
- }
- }
- return results;
- } // End of processDirectoryRecursively function
- }
- /**
- * Get the headers needed to make a conditional request.
- * Uses the etag and last-modified values from the meta store.
- * @internal
- */
- export function getHeaders({ init, meta, id, }) {
- const tag = `${id}-etag`;
- const lastModifiedTag = `${id}-last-modified`;
- const etag = meta.get(tag);
- const lastModified = meta.get(lastModifiedTag);
- const headers = new Headers(init);
- if (etag) {
- headers.set("If-None-Match", etag);
- }
- else if (lastModified) {
- headers.set("If-Modified-Since", lastModified);
- }
- return headers;
- }
- /**
- * Store the etag or last-modified headers from a response in the meta store.
- * @internal
- */
- export function syncHeaders({ headers, meta, id, }) {
- const etag = headers.get("etag");
- const lastModified = headers.get("last-modified");
- const tag = `${id}-etag`;
- const lastModifiedTag = `${id}-last-modified`;
- meta.delete(tag);
- meta.delete(lastModifiedTag);
- if (etag) {
- meta.set(tag, etag);
- }
- else if (lastModified) {
- meta.set(lastModifiedTag, lastModified);
- }
  }
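
For reference, the input validation that 0.12.0 adds at the top of toCollectionEntry boils down to two allowlist regexes plus a path-containment check. Below is a minimal standalone TypeScript sketch reconstructed from the compiled output in this diff; the example owner/repo/ref/basePath values are hypothetical and only illustrate what passes and what throws.

import path from "node:path";

// Owner/repo identifiers: letters, digits, dot, underscore, hyphen (1-100 chars).
const GITHUB_IDENTIFIER_RE = /^[a-zA-Z0-9._-]+$/;
// Refs additionally allow "/" so branch names like "feature/docs" pass (1-256 chars).
const GITHUB_REF_RE = /^[a-zA-Z0-9._\-/]+$/;

function validateGitHubIdentifier(value: string, name: string): void {
  if (!value || value.length > 100) throw new Error(`Invalid ${name}: must be 1-100 characters`);
  if (!GITHUB_IDENTIFIER_RE.test(value)) throw new Error(`Invalid ${name}: "${value}" contains disallowed characters`);
}

function validateGitHubRef(value: string): void {
  if (!value || value.length > 256) throw new Error(`Invalid ref: must be 1-256 characters`);
  if (!GITHUB_REF_RE.test(value)) throw new Error(`Invalid ref: "${value}" contains disallowed characters`);
}

function validateBasePath(basePath: string, projectRoot: string): void {
  // Reject absolute paths, then resolve and make sure the result stays inside the project root.
  if (path.isAbsolute(basePath)) throw new Error(`basePath must be relative, got absolute path: ${basePath}`);
  const resolved = path.normalize(path.resolve(projectRoot, basePath));
  if (!resolved.startsWith(path.normalize(projectRoot))) throw new Error(`basePath "${basePath}" resolves outside project root`);
}

// Hypothetical inputs: all of these pass.
validateGitHubIdentifier("some-owner", "owner");
validateGitHubIdentifier("some-repo", "repo");
validateGitHubRef("feature/docs-import");
validateBasePath("src/content/docs", process.cwd());

// Whereas an owner containing "/" or a basePath such as "../outside"
// would throw before the loader touches the GitHub API.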