@larkiny/astro-github-loader 0.11.2 → 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/README.md +69 -61
  2. package/dist/github.assets.d.ts +70 -0
  3. package/dist/github.assets.js +253 -0
  4. package/dist/github.auth.js +13 -9
  5. package/dist/github.cleanup.d.ts +3 -2
  6. package/dist/github.cleanup.js +30 -23
  7. package/dist/github.constants.d.ts +0 -16
  8. package/dist/github.constants.js +0 -16
  9. package/dist/github.content.d.ts +6 -132
  10. package/dist/github.content.js +154 -789
  11. package/dist/github.dryrun.d.ts +9 -5
  12. package/dist/github.dryrun.js +46 -25
  13. package/dist/github.link-transform.d.ts +2 -2
  14. package/dist/github.link-transform.js +65 -57
  15. package/dist/github.loader.js +45 -51
  16. package/dist/github.logger.d.ts +2 -2
  17. package/dist/github.logger.js +33 -24
  18. package/dist/github.paths.d.ts +76 -0
  19. package/dist/github.paths.js +190 -0
  20. package/dist/github.storage.d.ts +15 -0
  21. package/dist/github.storage.js +109 -0
  22. package/dist/github.types.d.ts +41 -4
  23. package/dist/index.d.ts +8 -6
  24. package/dist/index.js +3 -6
  25. package/dist/test-helpers.d.ts +130 -0
  26. package/dist/test-helpers.js +194 -0
  27. package/package.json +3 -1
  28. package/src/github.assets.spec.ts +717 -0
  29. package/src/github.assets.ts +365 -0
  30. package/src/github.auth.spec.ts +245 -0
  31. package/src/github.auth.ts +24 -10
  32. package/src/github.cleanup.spec.ts +380 -0
  33. package/src/github.cleanup.ts +91 -47
  34. package/src/github.constants.ts +0 -17
  35. package/src/github.content.spec.ts +305 -454
  36. package/src/github.content.ts +261 -950
  37. package/src/github.dryrun.spec.ts +586 -0
  38. package/src/github.dryrun.ts +105 -54
  39. package/src/github.link-transform.spec.ts +1345 -0
  40. package/src/github.link-transform.ts +174 -95
  41. package/src/github.loader.spec.ts +75 -50
  42. package/src/github.loader.ts +113 -78
  43. package/src/github.logger.spec.ts +795 -0
  44. package/src/github.logger.ts +77 -35
  45. package/src/github.paths.spec.ts +523 -0
  46. package/src/github.paths.ts +259 -0
  47. package/src/github.storage.spec.ts +367 -0
  48. package/src/github.storage.ts +127 -0
  49. package/src/github.types.ts +55 -9
  50. package/src/index.ts +43 -6
  51. package/src/test-helpers.ts +215 -0
package/dist/github.loader.js
@@ -1,33 +1,7 @@
  import { toCollectionEntry } from "./github.content.js";
  import { performSelectiveCleanup } from "./github.cleanup.js";
- import { performDryRun, displayDryRunResults, updateImportState, loadImportState, createConfigId, getLatestCommitInfo } from "./github.dryrun.js";
- import { createLogger } from "./github.logger.js";
- /**
- * Performs selective cleanup for configurations with basePath
- * @param configs - Array of configuration objects
- * @param context - Loader context
- * @param octokit - GitHub API client
- * @internal
- */
- async function performSelectiveCleanups(configs, context, octokit) {
- const results = [];
- // Process each config sequentially to avoid overwhelming Astro's file watcher
- for (const config of configs) {
- if (config.enabled === false) {
- context.logger.debug(`Skipping disabled config: ${config.name || `${config.owner}/${config.repo}`}`);
- continue;
- }
- try {
- const stats = await performSelectiveCleanup(config, context, octokit);
- results.push(stats);
- }
- catch (error) {
- context.logger.error(`Selective cleanup failed for ${config.name || `${config.owner}/${config.repo}`}: ${error}`);
- // Continue with other configs even if one fails
- }
- }
- return results;
- }
+ import { performDryRun, displayDryRunResults, updateImportState, loadImportState, createConfigId, getLatestCommitInfo, } from "./github.dryrun.js";
+ import { createLogger, } from "./github.logger.js";
  /**
  * Loads data from GitHub repositories based on the provided configurations and options.
  *
@@ -44,9 +18,8 @@ export function githubLoader({ octokit, configs, fetchOptions = {}, clear = fals
  return {
  name: "github-loader",
  load: async (context) => {
- const { store } = context;
  // Create global logger with specified level or default
- const globalLogger = createLogger(logLevel || 'default');
+ const globalLogger = createLogger(logLevel || "default");
  if (dryRun) {
  globalLogger.info("🔍 Dry run mode enabled - checking for changes only");
  try {
@@ -57,16 +30,16 @@ export function githubLoader({ octokit, configs, fetchOptions = {}, clear = fals
  return; // Exit without importing
  }
  catch (error) {
- globalLogger.error(`Dry run failed: ${error.message}`);
+ globalLogger.error(`Dry run failed: ${error instanceof Error ? error.message : String(error)}`);
  throw error;
  }
  }
  globalLogger.debug(`Loading data from ${configs.length} sources`);
- // Always use standard processing - no file deletions to avoid Astro issues
- globalLogger.info(clear ? "Processing with content store clear" : "Processing without content store clear");
- if (clear) {
- store.clear();
- }
+ // Log clear mode status - actual clearing happens per-entry in toCollectionEntry
+ // to avoid breaking Astro's content collection by emptying the store all at once
+ globalLogger.info(clear
+ ? "Processing with selective entry replacement"
+ : "Processing without entry replacement");
  // Process each config sequentially to avoid overwhelming GitHub API/CDN
  for (let i = 0; i < configs.length; i++) {
  const config = configs[i];
@@ -76,12 +49,15 @@ export function githubLoader({ octokit, configs, fetchOptions = {}, clear = fals
  }
  // Add small delay between configs to be gentler on GitHub's CDN
  if (i > 0) {
- await new Promise(resolve => setTimeout(resolve, 1000));
+ await new Promise((resolve) => setTimeout(resolve, 1000));
  }
  // Determine the effective log level for this config
- const effectiveLogLevel = logLevel || config.logLevel || 'default';
+ const effectiveLogLevel = logLevel || config.logLevel || "default";
  const configLogger = createLogger(effectiveLogLevel);
- const configName = config.name || `${config.owner}/${config.repo}`;
+ const langSuffix = config.language ? ` (${config.language})` : "";
+ const configName = config.name
+ ? `${config.name}${langSuffix}`
+ : `${config.owner}/${config.repo}${langSuffix}`;
  const repository = `${config.owner}/${config.repo}`;
  let summary = {
  configName,
@@ -91,7 +67,7 @@ export function githubLoader({ octokit, configs, fetchOptions = {}, clear = fals
  filesUpdated: 0,
  filesUnchanged: 0,
  duration: 0,
- status: 'error',
+ status: "error",
  };
  const startTime = Date.now();
  try {
@@ -99,24 +75,25 @@ export function githubLoader({ octokit, configs, fetchOptions = {}, clear = fals
  const configId = createConfigId(config);
  if (!force) {
  try {
- const state = await loadImportState(process.cwd());
+ const state = await loadImportState(process.cwd(), configLogger);
  const currentState = state.imports[configId];
  if (currentState && currentState.lastCommitSha) {
  configLogger.debug(`🔍 Checking repository changes for ${configName}...`);
  const latestCommit = await getLatestCommitInfo(octokit, config);
- if (latestCommit && currentState.lastCommitSha === latestCommit.sha) {
+ if (latestCommit &&
+ currentState.lastCommitSha === latestCommit.sha) {
  configLogger.info(`✅ Repository ${configName} unchanged (${latestCommit.sha.slice(0, 7)}) - skipping import`);
  // Update summary for unchanged repository
  summary.duration = Date.now() - startTime;
  summary.filesProcessed = 0;
  summary.filesUpdated = 0;
  summary.filesUnchanged = 0;
- summary.status = 'success';
+ summary.status = "success";
  configLogger.logImportSummary(summary);
  continue; // Skip to next config
  }
  else if (latestCommit) {
- configLogger.info(`🔄 Repository ${configName} changed (${currentState.lastCommitSha?.slice(0, 7) || 'unknown'} -> ${latestCommit.sha.slice(0, 7)}) - proceeding with import`);
+ configLogger.info(`🔄 Repository ${configName} changed (${currentState.lastCommitSha?.slice(0, 7) || "unknown"} -> ${latestCommit.sha.slice(0, 7)}) - proceeding with import`);
  }
  }
  else {
@@ -131,13 +108,29 @@ export function githubLoader({ octokit, configs, fetchOptions = {}, clear = fals
  else {
  configLogger.info(`🔄 Force mode enabled for ${configName} - proceeding with full import`);
  }
+ // Determine effective clear setting: per-config takes precedence over global
+ const effectiveClear = config.clear ?? clear;
+ // Perform selective cleanup before importing if clear is enabled
+ if (effectiveClear) {
+ configLogger.info(`🧹 Clearing obsolete files for ${configName}...`);
+ try {
+ await performSelectiveCleanup(config, { ...context, logger: configLogger }, octokit);
+ }
+ catch (error) {
+ configLogger.warn(`Cleanup failed for ${configName}, continuing with import: ${error}`);
+ }
+ }
  // Perform the import with spinner
  const stats = await globalLogger.withSpinner(`🔄 Importing ${configName}...`, () => toCollectionEntry({
- context: { ...context, logger: configLogger },
+ context: {
+ ...context,
+ logger: configLogger,
+ },
  octokit,
  options: config,
  fetchOptions,
  force,
+ clear: effectiveClear,
  }), `✅ ${configName} imported successfully`, `❌ ${configName} import failed`);
  summary.duration = Date.now() - startTime;
  summary.filesProcessed = stats?.processed || 0;
@@ -145,7 +138,7 @@ export function githubLoader({ octokit, configs, fetchOptions = {}, clear = fals
  summary.filesUnchanged = stats?.unchanged || 0;
  summary.assetsDownloaded = stats?.assetsDownloaded || 0;
  summary.assetsCached = stats?.assetsCached || 0;
- summary.status = 'success';
+ summary.status = "success";
  // Log structured summary
  configLogger.logImportSummary(summary);
  // Update state tracking for future dry runs
@@ -154,11 +147,11 @@ export function githubLoader({ octokit, configs, fetchOptions = {}, clear = fals
  const { data } = await octokit.rest.repos.listCommits({
  owner: config.owner,
  repo: config.repo,
- sha: config.ref || 'main',
- per_page: 1
+ sha: config.ref || "main",
+ per_page: 1,
  });
  if (data.length > 0) {
- await updateImportState(process.cwd(), config, data[0].sha);
+ await updateImportState(process.cwd(), config, data[0].sha, configLogger);
  }
  }
  catch (error) {
@@ -168,8 +161,9 @@ export function githubLoader({ octokit, configs, fetchOptions = {}, clear = fals
  }
  catch (error) {
  summary.duration = Date.now() - startTime;
- summary.status = 'error';
- summary.error = error.message;
+ summary.status = "error";
+ summary.error =
+ error instanceof Error ? error.message : String(error);
  configLogger.logImportSummary(summary);
  // Continue with other configs even if one fails
  }
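
The loader changes above add a per-config clear option (resolved as config.clear ?? clear) and commit-SHA change detection via the stored import state. A minimal configuration sketch using those option names; the repository, token handling, and the Octokit import are placeholders rather than anything taken from this diff:

    import { Octokit } from "octokit"; // assumed client; the loader only needs an octokit-compatible instance
    import { githubLoader } from "@larkiny/astro-github-loader";

    const loader = githubLoader({
      octokit: new Octokit({ auth: process.env.GITHUB_TOKEN }),
      clear: false, // global default
      logLevel: "default",
      configs: [
        {
          name: "Docs",                 // used (plus optional language) to build configName
          owner: "example-org",         // placeholder
          repo: "example-docs",         // placeholder
          ref: "main",
          clear: true,                  // per-config override: effectiveClear = config.clear ?? clear
          logLevel: "verbose",          // per-config level, falls back to the global logLevel
          includes: [{ pattern: "docs/**/*.md", basePath: "src/content/imported" }],
        },
      ],
    });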
package/dist/github.logger.d.ts
@@ -1,7 +1,7 @@
  /**
  * Multi-level logging system for astro-github-loader
  */
- export type LogLevel = 'silent' | 'default' | 'verbose' | 'debug';
+ export type LogLevel = "silent" | "default" | "verbose" | "debug";
  export interface LoggerOptions {
  level: LogLevel;
  prefix?: string;
@@ -16,7 +16,7 @@ export interface ImportSummary {
  assetsDownloaded?: number;
  assetsCached?: number;
  duration: number;
- status: 'success' | 'error' | 'cancelled';
+ status: "success" | "error" | "cancelled";
  error?: string;
  }
  export interface SyncSummary {
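
For orientation, an ImportSummary literal shaped the way the loader builds it (field names come from this interface and the loader diff above; the values are illustrative):

    import type { ImportSummary } from "./github.logger.js";

    const summary: ImportSummary = {
      configName: "Docs",
      repository: "example-org/example-docs",
      ref: "main",
      filesProcessed: 12,
      filesUpdated: 3,
      filesUnchanged: 9,
      assetsDownloaded: 2, // optional
      assetsCached: 5,     // optional
      duration: 4180,      // milliseconds; logImportSummary prints it as seconds
      status: "success",
    };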
package/dist/github.logger.js
@@ -6,10 +6,10 @@
  */
  export class Logger {
  constructor(options) {
- this.spinnerChars = ['', '', '', '', '', '', '', '', '', ''];
+ this.spinnerChars = ["", "", "", "", "", "", "", "", "", ""];
  this.spinnerIndex = 0;
  this.level = options.level;
- this.prefix = options.prefix || '';
+ this.prefix = options.prefix || "";
  }
  /**
  * Set the logging level
@@ -51,7 +51,7 @@ export class Logger {
  * Default level - summary information only
  */
  info(message) {
- if (this.shouldLog('default')) {
+ if (this.shouldLog("default")) {
  console.log(this.formatMessage(message));
  }
  }
@@ -59,7 +59,7 @@
  * Verbose level - detailed operation information
  */
  verbose(message) {
- if (this.shouldLog('verbose')) {
+ if (this.shouldLog("verbose")) {
  console.log(this.formatMessage(message));
  }
  }
@@ -67,7 +67,7 @@
  * Debug level - all information including diagnostics
  */
  debug(message) {
- if (this.shouldLog('debug')) {
+ if (this.shouldLog("debug")) {
  console.log(this.formatMessage(message));
  }
  }
@@ -75,7 +75,7 @@
  * Error - always shown unless silent
  */
  error(message) {
- if (this.shouldLog('default')) {
+ if (this.shouldLog("default")) {
  console.error(this.formatMessage(message));
  }
  }
@@ -83,7 +83,7 @@
  * Warning - shown at default level and above
  */
  warn(message) {
- if (this.shouldLog('default')) {
+ if (this.shouldLog("default")) {
  console.warn(this.formatMessage(message));
  }
  }
@@ -91,27 +91,32 @@
  * Log structured import summary (default level)
  */
  logImportSummary(summary) {
- if (!this.shouldLog('default'))
+ if (!this.shouldLog("default"))
  return;
- const statusIcon = summary.status === 'success' ? '✅' : summary.status === 'error' ? '❌' : '🚫';
- this.info('');
+ const statusIcon = summary.status === "success"
+ ? "✅"
+ : summary.status === "error"
+ ? "❌"
+ : "🚫";
+ this.info("");
  this.info(`📊 Import Summary: ${summary.configName}`);
- this.info(`├─ Repository: ${summary.repository}${summary.ref ? `@${summary.ref}` : ''}`);
+ this.info(`├─ Repository: ${summary.repository}${summary.ref ? `@${summary.ref}` : ""}`);
  this.info(`├─ Files: ${summary.filesProcessed} processed, ${summary.filesUpdated} updated, ${summary.filesUnchanged} unchanged`);
- if (summary.assetsDownloaded !== undefined || summary.assetsCached !== undefined) {
+ if (summary.assetsDownloaded !== undefined ||
+ summary.assetsCached !== undefined) {
  const downloaded = summary.assetsDownloaded || 0;
  const cached = summary.assetsCached || 0;
  this.info(`├─ Assets: ${downloaded} downloaded, ${cached} cached`);
  }
  this.info(`├─ Duration: ${(summary.duration / 1000).toFixed(1)}s`);
- this.info(`└─ Status: ${statusIcon} ${summary.status === 'success' ? 'Success' : summary.status === 'error' ? `Error: ${summary.error}` : 'Cancelled'}`);
- this.info('');
+ this.info(`└─ Status: ${statusIcon} ${summary.status === "success" ? "Success" : summary.status === "error" ? `Error: ${summary.error}` : "Cancelled"}`);
+ this.info("");
  }
  /**
  * Log sync operation summary (default level)
  */
  logSyncSummary(configName, summary) {
- if (!this.shouldLog('default'))
+ if (!this.shouldLog("default"))
  return;
  if (summary.added > 0 || summary.updated > 0 || summary.deleted > 0) {
  this.info(`Sync completed for ${configName}: ${summary.added} added, ${summary.updated} updated, ${summary.deleted} deleted (${summary.duration}ms)`);
@@ -124,7 +129,7 @@
  * Log cleanup operation summary (default level)
  */
  logCleanupSummary(configName, summary) {
- if (!this.shouldLog('default'))
+ if (!this.shouldLog("default"))
  return;
  if (summary.deleted > 0) {
  this.info(`Cleanup completed for ${configName}: ${summary.deleted} obsolete files deleted (${summary.duration}ms)`);
@@ -137,14 +142,18 @@
  * Log file-level processing (verbose level)
  */
  logFileProcessing(action, filePath, details) {
- const message = details ? `${action}: ${filePath} - ${details}` : `${action}: ${filePath}`;
+ const message = details
+ ? `${action}: ${filePath} - ${details}`
+ : `${action}: ${filePath}`;
  this.verbose(message);
  }
  /**
  * Log asset processing (verbose level)
  */
  logAssetProcessing(action, assetPath, details) {
- const message = details ? `Asset ${action}: ${assetPath} - ${details}` : `Asset ${action}: ${assetPath}`;
+ const message = details
+ ? `Asset ${action}: ${assetPath} - ${details}`
+ : `Asset ${action}: ${assetPath}`;
  this.verbose(message);
  }
  /**
@@ -195,8 +204,8 @@
  /**
  * Start a spinner with duration timer for long-running operations
  */
- startSpinner(message = 'Processing...') {
- if (this.level === 'silent')
+ startSpinner(message = "Processing...") {
+ if (this.level === "silent")
  return;
  this.spinnerStartTime = Date.now();
  this.spinnerIndex = 0;
@@ -232,7 +241,7 @@
  process.stdout.write(`\r${formattedMessage}\n`);
  }
  else {
- process.stdout.write('\r\x1b[K'); // Clear the line
+ process.stdout.write("\r\x1b[K"); // Clear the line
  }
  this.spinnerStartTime = undefined;
  }
@@ -243,11 +252,11 @@
  this.startSpinner(message);
  try {
  const result = await fn();
- this.stopSpinner(successMessage || `✅ ${message.replace(/^[🔄⏳]?\s*/, '')} completed`);
+ this.stopSpinner(successMessage || `✅ ${message.replace(/^[🔄⏳]?\s*/, "")} completed`);
  return result;
  }
  catch (error) {
- this.stopSpinner(errorMessage || `❌ ${message.replace(/^[🔄⏳]?\s*/, '')} failed`);
+ this.stopSpinner(errorMessage || `❌ ${message.replace(/^[🔄⏳]?\s*/, "")} failed`);
  throw error;
  }
  }
@@ -255,6 +264,6 @@
  /**
  * Create a logger instance with the specified level
  */
- export function createLogger(level = 'default', prefix) {
+ export function createLogger(level = "default", prefix) {
  return new Logger({ level, prefix });
  }
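
A small usage sketch of the logger API shown above (createLogger, withSpinner, and the verbose helpers); the messages and prefix are illustrative, not taken from this diff:

    import { createLogger } from "./github.logger.js";

    const logger = createLogger("verbose", "github-loader");

    const stats = await logger.withSpinner(
      "🔄 Importing example-org/example-docs...",
      async () => {
        // ... fetch and store entries here ...
        return { processed: 12, updated: 3, unchanged: 9 };
      },
      "✅ example-org/example-docs imported successfully",
      "❌ example-org/example-docs import failed",
    );

    logger.logFileProcessing("updated", "docs/overview.md", "frontmatter added"); // shown at verbose level only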
package/dist/github.paths.d.ts
@@ -0,0 +1,76 @@
+ import type { ExtendedLoaderContext, ImportOptions, MatchedPattern } from "./github.types.js";
+ export interface ImportStats {
+ processed: number;
+ updated: number;
+ unchanged: number;
+ assetsDownloaded?: number;
+ assetsCached?: number;
+ }
+ /**
+ * Generates a unique identifier from a file path by removing the extension
+ * @param filePath - The file path to generate ID from
+ * @return {string} The generated identifier as a string with extension removed
+ * @internal
+ */
+ export declare function generateId(filePath: string): string;
+ /**
+ * Applies path mapping logic to get the final filename for a file
+ *
+ * Supports two types of path mappings:
+ * - **File mapping**: Exact file path match (e.g., 'docs/README.md' -> 'docs/overview.md')
+ * - **Folder mapping**: Folder path with trailing slash (e.g., 'docs/capabilities/' -> 'docs/')
+ *
+ * @param filePath - Original source file path
+ * @param matchedPattern - The pattern that matched this file
+ * @param options - Import options containing path mappings
+ * @returns Final filename after applying path mapping logic
+ * @internal
+ */
+ export declare function applyRename(filePath: string, matchedPattern?: MatchedPattern | null, options?: ImportOptions): string;
+ /**
+ * Generates a local file path based on the matched pattern and file path
+ * @param filePath - The original file path from the repository
+ * @param matchedPattern - The pattern that matched this file (or null if no includes specified)
+ * @param options - Import options containing includes patterns for path mapping lookups
+ * @return {string} The local file path where this content should be stored
+ * @internal
+ */
+ export declare function generatePath(filePath: string, matchedPattern?: MatchedPattern | null, options?: ImportOptions): string;
+ /**
+ * Checks if a file path should be included and returns the matching pattern
+ * @param filePath - The file path to check (relative to the repository root)
+ * @param options - Import options containing includes patterns
+ * @returns Object with include status and matched pattern, or null if not included
+ * @internal
+ */
+ export declare function shouldIncludeFile(filePath: string, options: ImportOptions): {
+ included: true;
+ matchedPattern: MatchedPattern | null;
+ } | {
+ included: false;
+ matchedPattern: null;
+ };
+ /**
+ * Get the headers needed to make a conditional request.
+ * Uses the etag and last-modified values from the meta store.
+ * @internal
+ */
+ export declare function getHeaders({ init, meta, id, }: {
+ /** Initial headers to include */
+ init?: RequestInit["headers"];
+ /** Meta store to get etag and last-modified values from */
+ meta: ExtendedLoaderContext["meta"];
+ id: string;
+ }): Headers;
+ /**
+ * Store the etag or last-modified headers from a response in the meta store.
+ * @internal
+ */
+ export declare function syncHeaders({ headers, meta, id, }: {
+ /** Headers from the response */
+ headers: Headers;
+ /** Meta store to store etag and last-modified values in */
+ meta: ExtendedLoaderContext["meta"];
+ /** id string */
+ id: string;
+ }): void;
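
getHeaders and syncHeaders are marked @internal, but the declarations show the intended flow: seed If-None-Match/If-Modified-Since from the meta store, then write the response's etag/last-modified back after a successful fetch. A sketch, assuming a loader meta store and an arbitrary URL and id:

    import { getHeaders, syncHeaders } from "./github.paths.js";
    import type { ExtendedLoaderContext } from "./github.types.js";

    async function fetchIfChanged(meta: ExtendedLoaderContext["meta"], id: string, url: string) {
      const headers = getHeaders({ meta, id });              // conditional headers from cached etag/last-modified
      const response = await fetch(url, { headers });
      if (response.status === 304) return null;              // not modified since the last import
      syncHeaders({ headers: response.headers, meta, id });  // remember the new validators
      return response.text();
    }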
package/dist/github.paths.js
@@ -0,0 +1,190 @@
+ import path, { join, basename } from "node:path";
+ import picomatch from "picomatch";
+ /**
+ * Generates a unique identifier from a file path by removing the extension
+ * @param filePath - The file path to generate ID from
+ * @return {string} The generated identifier as a string with extension removed
+ * @internal
+ */
+ export function generateId(filePath) {
+ let id = filePath;
+ // Remove file extension for ID generation
+ const lastDotIndex = id.lastIndexOf(".");
+ if (lastDotIndex > 0) {
+ id = id.substring(0, lastDotIndex);
+ }
+ return id;
+ }
+ /**
+ * Applies path mapping logic to get the final filename for a file
+ *
+ * Supports two types of path mappings:
+ * - **File mapping**: Exact file path match (e.g., 'docs/README.md' -> 'docs/overview.md')
+ * - **Folder mapping**: Folder path with trailing slash (e.g., 'docs/capabilities/' -> 'docs/')
+ *
+ * @param filePath - Original source file path
+ * @param matchedPattern - The pattern that matched this file
+ * @param options - Import options containing path mappings
+ * @returns Final filename after applying path mapping logic
+ * @internal
+ */
+ export function applyRename(filePath, matchedPattern, options) {
+ if (options?.includes &&
+ matchedPattern &&
+ matchedPattern.index < options.includes.length) {
+ const includePattern = options.includes[matchedPattern.index];
+ if (includePattern.pathMappings) {
+ // First check for exact file match (current behavior - backwards compatible)
+ if (includePattern.pathMappings[filePath]) {
+ const mappingValue = includePattern.pathMappings[filePath];
+ return typeof mappingValue === "string"
+ ? mappingValue
+ : mappingValue.target;
+ }
+ // Then check for folder-to-folder mappings
+ for (const [sourceFolder, mappingValue] of Object.entries(includePattern.pathMappings)) {
+ // Check if this is a folder mapping (ends with /) and file is within it
+ if (sourceFolder.endsWith("/") && filePath.startsWith(sourceFolder)) {
+ // Replace the source folder path with target folder path
+ const targetFolder = typeof mappingValue === "string"
+ ? mappingValue
+ : mappingValue.target;
+ const relativePath = filePath.slice(sourceFolder.length);
+ return path.posix.join(targetFolder, relativePath);
+ }
+ }
+ }
+ }
+ // Return original filename if no path mapping found
+ return basename(filePath);
+ }
+ /**
+ * Generates a local file path based on the matched pattern and file path
+ * @param filePath - The original file path from the repository
+ * @param matchedPattern - The pattern that matched this file (or null if no includes specified)
+ * @param options - Import options containing includes patterns for path mapping lookups
+ * @return {string} The local file path where this content should be stored
+ * @internal
+ */
+ export function generatePath(filePath, matchedPattern, options) {
+ if (matchedPattern) {
+ // Extract the directory part from the pattern (before any glob wildcards)
+ const pattern = matchedPattern.pattern;
+ const beforeGlob = pattern.split(/[*?{]/)[0];
+ // Remove the pattern prefix from the file path to get the relative path
+ let relativePath = filePath;
+ if (beforeGlob && filePath.startsWith(beforeGlob)) {
+ relativePath = filePath.substring(beforeGlob.length);
+ // Remove leading slash if present
+ if (relativePath.startsWith("/")) {
+ relativePath = relativePath.substring(1);
+ }
+ }
+ // If no relative path remains, use just the filename
+ if (!relativePath) {
+ relativePath = basename(filePath);
+ }
+ // Apply path mapping logic
+ const finalFilename = applyRename(filePath, matchedPattern, options);
+ // Always apply path mapping if applyRename returned something different from the original basename
+ // OR if there are pathMappings configured (since empty string mappings might return same basename)
+ const hasPathMappings = options?.includes?.[matchedPattern.index]?.pathMappings &&
+ Object.keys(options.includes[matchedPattern.index].pathMappings).length >
+ 0;
+ if (finalFilename !== basename(filePath) || hasPathMappings) {
+ // Check if applyRename returned a full path (contains path separators) or just a filename
+ if (finalFilename.includes("/") || finalFilename.includes("\\")) {
+ // applyRename returned a full relative path - need to extract relative part
+ // Remove the pattern prefix to get the relative path within the pattern context
+ const beforeGlob = pattern.split(/[*?{]/)[0];
+ if (beforeGlob && finalFilename.startsWith(beforeGlob)) {
+ relativePath = finalFilename.substring(beforeGlob.length);
+ // Remove leading slash if present
+ if (relativePath.startsWith("/")) {
+ relativePath = relativePath.substring(1);
+ }
+ }
+ else {
+ relativePath = finalFilename;
+ }
+ }
+ else {
+ // applyRename returned just a filename
+ // If the filename is different due to pathMapping, use it directly
+ // This handles cases where pathMappings flatten directory structures
+ relativePath = finalFilename;
+ }
+ }
+ return join(matchedPattern.basePath, relativePath);
+ }
+ // Should not happen since we always use includes
+ throw new Error("No matched pattern provided - includes are required");
+ }
+ /**
+ * Checks if a file path should be included and returns the matching pattern
+ * @param filePath - The file path to check (relative to the repository root)
+ * @param options - Import options containing includes patterns
+ * @returns Object with include status and matched pattern, or null if not included
+ * @internal
+ */
+ export function shouldIncludeFile(filePath, options) {
+ const { includes } = options;
+ // If no include patterns specified, include all files
+ if (!includes || includes.length === 0) {
+ return { included: true, matchedPattern: null };
+ }
+ // Check each include pattern to find a match
+ for (let i = 0; i < includes.length; i++) {
+ const includePattern = includes[i];
+ const matcher = picomatch(includePattern.pattern);
+ if (matcher(filePath)) {
+ return {
+ included: true,
+ matchedPattern: {
+ pattern: includePattern.pattern,
+ basePath: includePattern.basePath,
+ index: i,
+ },
+ };
+ }
+ }
+ // No patterns matched
+ return { included: false, matchedPattern: null };
+ }
+ /**
+ * Get the headers needed to make a conditional request.
+ * Uses the etag and last-modified values from the meta store.
+ * @internal
+ */
+ export function getHeaders({ init, meta, id, }) {
+ const tag = `${id}-etag`;
+ const lastModifiedTag = `${id}-last-modified`;
+ const etag = meta.get(tag);
+ const lastModified = meta.get(lastModifiedTag);
+ const headers = new Headers(init);
+ if (etag) {
+ headers.set("If-None-Match", etag);
+ }
+ else if (lastModified) {
+ headers.set("If-Modified-Since", lastModified);
+ }
+ return headers;
+ }
+ /**
+ * Store the etag or last-modified headers from a response in the meta store.
+ * @internal
+ */
+ export function syncHeaders({ headers, meta, id, }) {
+ const etag = headers.get("etag");
+ const lastModified = headers.get("last-modified");
+ const tag = `${id}-etag`;
+ const lastModifiedTag = `${id}-last-modified`;
+ meta.delete(tag);
+ meta.delete(lastModifiedTag);
+ if (etag) {
+ meta.set(tag, etag);
+ }
+ else if (lastModified) {
+ meta.set(lastModifiedTag, lastModified);
+ }
+ }
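
To make the pathMappings behaviour above concrete, here is a sketch that resolves one file through shouldIncludeFile and generatePath: 'docs/README.md' uses a file mapping, while 'docs/capabilities/' (trailing slash) is a folder mapping. The repository paths are illustrative, and the cast stands in for ImportOptions fields not shown in this diff:

    import { shouldIncludeFile, generatePath } from "./github.paths.js";
    import type { ImportOptions } from "./github.types.js";

    const options = {
      includes: [
        {
          pattern: "docs/**/*.md",
          basePath: "src/content/imported",
          pathMappings: {
            "docs/README.md": "overview.md",   // file mapping: exact path -> new filename
            "docs/capabilities/": "features/", // folder mapping: trailing slash -> target folder
          },
        },
      ],
    } as unknown as ImportOptions;

    const result = shouldIncludeFile("docs/capabilities/search.md", options);
    if (result.included && result.matchedPattern) {
      // joins basePath with the mapped relative path, e.g. "src/content/imported/features/search.md"
      console.log(generatePath("docs/capabilities/search.md", result.matchedPattern, options));
    }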
package/dist/github.storage.d.ts
@@ -0,0 +1,15 @@
+ import type { ImportedFile } from "./github.link-transform.js";
+ import type { ExtendedLoaderContext } from "./github.types.js";
+ /**
+ * Ensures directory exists and writes file to disk.
+ * @internal
+ */
+ export declare function syncFile(path: string, content: string): Promise<void>;
+ /**
+ * Stores a processed file in Astro's content store
+ * @internal
+ */
+ export declare function storeProcessedFile(file: ImportedFile, context: ExtendedLoaderContext, clear: boolean): Promise<{
+ id: string;
+ filePath: string;
+ }>;