@larkiny/astro-github-loader 0.9.0

@@ -0,0 +1,315 @@
+ import type { Loader as AstroLoader, LoaderContext as AstroLoaderContext } from "astro/loaders";
+ import type { ContentEntryType } from "astro";
+ import type { MarkdownHeading } from "@astrojs/markdown-remark";
+ import { Octokit } from "octokit";
+ import type { LinkHandler } from "./github.link-transform.js";
+ import type { LogLevel } from "./github.logger.js";
+ /**
+  * Context information for link transformations
+  */
+ export interface LinkTransformContext {
+   /** Original source path in the repository */
+   sourcePath: string;
+   /** Target path where the file will be written */
+   targetPath: string;
+   /** Base path for this include pattern */
+   basePath: string;
+   /** Path mappings used for this file */
+   pathMappings?: Record<string, PathMappingValue>;
+   /** The include pattern that matched this file */
+   matchedPattern?: MatchedPattern;
+ }
+ /**
+  * Link mapping for transforming URLs in markdown links
+  */
+ export interface LinkMapping {
+   /** Pattern to match (string or regex) */
+   pattern: string | RegExp;
+   /** Replacement string or function */
+   replacement: string | ((match: string, anchor: string, context: any) => string);
+   /** Apply to all links, not just unresolved internal links (default: false) */
+   global?: boolean;
+   /** Function to determine if this mapping should apply to the current file context */
+   contextFilter?: (context: LinkTransformContext) => boolean;
+   /** Automatically handle relative links by prefixing with target base path (default: false) */
+   relativeLinks?: boolean;
+ }
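
For illustration, a minimal LinkMapping might look like the following sketch (the pattern, replacement, and filter values are hypothetical, not taken from the package):

import type { LinkMapping } from '@larkiny/astro-github-loader';

const readmeToOverview: LinkMapping = {
  // Rewrite any link that points at the repository README
  pattern: /(?:\.\.\/)*README\.md$/,
  replacement: '/overview',
  // Only apply within files imported under this base path
  contextFilter: (context) => context.basePath.startsWith('src/content/docs'),
};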
+ /**
+  * Configuration for import link transformation
+  */
+ export interface ImportLinkTransformOptions {
+   /** Base paths to strip from final URLs (e.g., ["src/content/docs"]) */
+   stripPrefixes: string[];
+   /** Custom handlers for special link types */
+   customHandlers?: LinkHandler[];
+   /** Link mappings to transform URLs in markdown links */
+   linkMappings?: LinkMapping[];
+ }
+ /**
+  * Information about which include pattern matched a file
+  */
+ export interface MatchedPattern {
+   /** The glob pattern that matched */
+   pattern: string;
+   /** The base path for this pattern */
+   basePath: string;
+   /** Index of the pattern in the includes array */
+   index: number;
+ }
+ /**
+  * Context object passed to transform functions
+  */
+ export interface TransformContext {
+   /** Generated ID for the content */
+   id: string;
+   /** File path within the repository */
+   path: string;
+   /** Full configuration options */
+   options: ImportOptions;
+   /** Information about which include pattern matched (if any) */
+   matchedPattern?: MatchedPattern;
+ }
+ /**
+  * Function type for content transformations
+  * @param content - The markdown content to transform
+  * @param context - Context information about the file being processed
+  * @returns The transformed content
+  */
+ export type TransformFunction = (content: string, context: TransformContext) => string;
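
As a sketch of how a transform can be written against this signature (the banner text is illustrative, not part of the package):

import type { TransformFunction } from '@larkiny/astro-github-loader';

// Prepend a provenance comment to every imported markdown file
const addProvenanceComment: TransformFunction = (content, context) =>
  `<!-- Imported from ${context.path}; local edits will be overwritten -->\n\n${content}`;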
+ /**
+  * Enhanced path mapping configuration that supports cross-section linking
+  */
+ export interface EnhancedPathMapping {
+   /** Target path where the file should be imported */
+   target: string;
+   /**
+    * Cross-section path for generating links to this content from other sections.
+    * If not specified, will be inferred from the basePath.
+    * Example: '/reference/algokit-utils-ts/api'
+    */
+   crossSectionPath?: string;
+ }
+ /**
+  * Path mapping value - can be a simple string or an enhanced configuration object
+  */
+ export type PathMappingValue = string | EnhancedPathMapping;
+ /**
+  * Configuration for a single include pattern
+  */
+ export interface IncludePattern {
+   /** Glob pattern to match files (relative to repository root) */
+   pattern: string;
+   /** Local base path where matching files should be imported */
+   basePath: string;
+   /** Transforms to apply only to files matching this pattern */
+   transforms?: TransformFunction[];
+   /**
+    * Map of source paths to target paths for controlling where files are imported.
+    *
+    * Supports multiple mapping formats:
+    *
+    * **Simple string format:**
+    * - **File mapping**: `'docs/README.md': 'docs/overview.md'` - moves a specific file to a new path
+    * - **Folder mapping**: `'docs/capabilities/': 'docs/'` - moves all files from source folder to target folder
+    *
+    * **Enhanced object format with cross-section linking:**
+    * - `'docs/api/': { target: 'api/', crossSectionPath: '/reference/api' }`
+    *
+    * **Important**: Folder mappings require trailing slashes to distinguish from file mappings.
+    * - ✅ `'docs/capabilities/': 'docs/'` (folder mapping - moves all files)
+    * - ❌ `'docs/capabilities': 'docs/'` (treated as exact file match)
+    *
+    * When using enhanced format, link mappings will be automatically generated for cross-section references.
+    * If `crossSectionPath` is not specified, it will be inferred from the basePath.
+    */
+   pathMappings?: Record<string, PathMappingValue>;
+ }
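
A hypothetical IncludePattern combining the mapping formats described above (the paths echo the doc comment's own examples; the glob and base path are invented):

import type { IncludePattern } from '@larkiny/astro-github-loader';

const docsInclude: IncludePattern = {
  pattern: 'docs/**/*.md',
  basePath: 'src/content/docs/imported',
  pathMappings: {
    // File mapping: relocate a single file
    'docs/README.md': 'docs/overview.md',
    // Folder mapping: note the trailing slashes on both sides
    'docs/capabilities/': 'docs/',
    // Enhanced mapping with an explicit cross-section path
    'docs/api/': { target: 'api/', crossSectionPath: '/reference/api' },
  },
};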
+ export type GithubLoaderOptions = {
+   octokit: Octokit;
+   configs: Array<ImportOptions>;
+   clear?: boolean;
+   gitIgnore?: string;
+   basePath?: string;
+   fetchOptions?: FetchOptions;
+   /**
+    * When true, only checks for repository changes without importing.
+    * Returns a report of which repositories have new commits.
+    * @default false
+    */
+   dryRun?: boolean;
+   /**
+    * Global logging level for all import operations
+    * Overrides individual ImportOptions logLevel settings
+    * @default 'default'
+    */
+   logLevel?: LogLevel;
+   /**
+    * When true, forces a full import even if no repository changes are detected.
+    * When false (default), skips processing if repository hasn't changed.
+    * @default false
+    */
+   force?: boolean;
+ };
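
A sketch of assembling the top-level options (the loader entry point itself is not part of this diff, so only the options object is shown; the GITHUB_TOKEN environment variable name is an assumption):

import { Octokit } from 'octokit';
import type { GithubLoaderOptions } from '@larkiny/astro-github-loader';

const loaderOptions: GithubLoaderOptions = {
  // Authenticated client; GITHUB_TOKEN is a hypothetical env var name
  octokit: new Octokit({ auth: process.env.GITHUB_TOKEN }),
  configs: [], // one ImportOptions entry per repository; see ImportOptions below
  dryRun: false, // when true, only report which repositories have new commits
  force: false,  // when true, re-import even if no changes are detected
};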
+ /**
+  * Represents the configuration options for a collection entry operation.
+  * @internal
+  */
+ export type CollectionEntryOptions = {
+   /**
+    * Represents the context object for a loader, providing metadata
+    * and utilities for the current loading process.
+    *
+    * The LoaderContext may contain properties and methods that offer
+    * control or inspection over the loading behavior.
+    */
+   context: LoaderContext;
+   /**
+    * An instance of the Octokit library used to interact with GitHub's
+    * REST API, for example to fetch repository content. The Octokit
+    * instance must be configured (typically with an authentication token)
+    * before it is passed in.
+    */
+   octokit: Octokit;
+   /**
+    * The import configuration options that control how this collection
+    * entry is processed.
+    */
+   options: ImportOptions;
+   /**
+    * An optional AbortSignal instance that enables observing and controlling the
+    * abort state of an operation. It can be used to signal cancellation requests
+    * to an ongoing task, such as a fetch request or custom asynchronous operations.
+    *
+    * If provided, the corresponding task can listen to the `abort` event of the signal
+    * to handle early termination or cleanup logic appropriately.
+    *
+    * If the signal is already aborted at the time it is assigned or checked, the task
+    * may respond to the abort condition immediately.
+    */
+   signal?: AbortSignal;
+   /**
+    * Optional configuration settings for the fetch operation, allowing its
+    * behavior to be customized.
+    */
+   fetchOptions?: FetchOptions;
+   /**
+    * When true, forces a full import even if no repository changes are detected.
+    * When false (default), skips processing if repository hasn't changed.
+    * @default false
+    */
+   force?: boolean;
+ };
+ /**
+  * Interface representing rendered content, including HTML and associated metadata.
+  * @internal
+  */
+ export interface RenderedContent {
+   /** Rendered HTML string. If present then `render(entry)` will return a component that renders this HTML. */
+   html: string;
+   metadata?: {
+     /** Any images that are present in this entry. Relative to the {@link DataEntry} filePath. */
+     imagePaths?: Array<string>;
+     /** Any headings that are present in this file. */
+     headings?: MarkdownHeading[];
+     /** Raw frontmatter, parsed from the file. This may include data from remark plugins. */
+     frontmatter?: Record<string, any>;
+     /** Any other metadata that is present in this file. */
+     [key: string]: unknown;
+   };
+ }
+ /**
+  * Represents configuration options for importing content from GitHub repositories.
+  */
+ export type ImportOptions = {
+   /**
+    * Display name for this configuration (used in logging)
+    */
+   name?: string;
+   /**
+    * Repository owner
+    */
+   owner: string;
+   /**
+    * Repository name
+    */
+   repo: string;
+   /**
+    * A specific Git reference (e.g., a branch, tag, or commit SHA) on GitHub
+    */
+   ref?: string;
+   /**
+    * Local directory path where downloaded assets should be stored
+    */
+   assetsPath?: string;
+   /**
+    * Base URL prefix for asset references in transformed markdown content
+    */
+   assetsBaseUrl?: string;
+   /**
+    * Array of file extensions to treat as assets (e.g., ['.png', '.jpg', '.svg'])
+    * Defaults to common image formats if not specified
+    */
+   assetPatterns?: string[];
+   /**
+    * Whether this configuration is enabled for processing
+    */
+   enabled?: boolean;
+   /**
+    * Whether to clear target directories before importing content
+    */
+   clear?: boolean;
+   /**
+    * Array of transform functions to apply to all imported content
+    */
+   transforms?: TransformFunction[];
+   /**
+    * Array of include patterns defining which files to import and where to put them
+    * If not specified, all files will be imported (backward compatibility mode)
+    */
+   includes?: IncludePattern[];
+   /**
+    * Link transformation options
+    * Applied after all content transforms and across all include patterns
+    */
+   linkTransform?: ImportLinkTransformOptions;
+   /**
+    * Logging level for this import configuration
+    * Can be overridden by global logLevel in GithubLoaderOptions
+    * @default 'default'
+    */
+   logLevel?: LogLevel;
+ };
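
For example, a single ImportOptions entry for the configs array above could look like this sketch (the owner, repo, and paths are invented):

import type { ImportOptions } from '@larkiny/astro-github-loader';

const utilsDocs: ImportOptions = {
  name: 'example-utils docs',
  owner: 'example-org',
  repo: 'example-utils',
  ref: 'main',
  includes: [
    {
      pattern: 'docs/**/*.md',
      basePath: 'src/content/docs/utils',
    },
  ],
  linkTransform: {
    stripPrefixes: ['src/content/docs'],
  },
};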
+ export type FetchOptions = RequestInit & {
+   signal?: AbortSignal;
+   concurrency?: number;
+ };
+ /**
+  * @internal
+  */
+ export interface LoaderContext extends AstroLoaderContext {
+   /** @internal */
+   entryTypes?: Map<string, ContentEntryType>;
+ }
+ /**
+  * @internal
+  */
+ export interface Loader extends AstroLoader {
+   /** Do the actual loading of the data */
+   load: (context: LoaderContext) => Promise<void>;
+ }
+ /**
+  * Statistics for a sync operation
+  */
+ export interface SyncStats {
+   /** Number of files added */
+   added: number;
+   /** Number of files updated */
+   updated: number;
+   /** Number of files deleted */
+   deleted: number;
+   /** Number of files unchanged */
+   unchanged: number;
+   /** Total processing time in ms */
+   duration: number;
+ }
@@ -0,0 +1 @@
+ export {};
@@ -0,0 +1,5 @@
+ export * from './github.constants.js';
+ export * from './github.content.js';
+ export * from './github.loader.js';
+ export * from './github.types.js';
+ export * from './github.link-transform.js';
package/dist/index.js ADDED
@@ -0,0 +1,5 @@
+ export * from './github.constants.js';
+ export * from './github.content.js';
+ export * from './github.loader.js';
+ export * from './github.types.js';
+ export * from './github.link-transform.js';
package/package.json ADDED
@@ -0,0 +1,66 @@
+ {
+   "name": "@larkiny/astro-github-loader",
+   "type": "module",
+   "version": "0.9.0",
+   "description": "Load content from GitHub repositories into Astro content collections with asset management and content transformations",
+   "keywords": [
+     "astro",
+     "github",
+     "content",
+     "starlight",
+     "loader",
+     "markdown"
+   ],
+   "author": "Michael Feher, Larkin Young",
+   "license": "MIT",
+   "main": "./dist/index.js",
+   "types": "./dist/index.d.ts",
+   "module": "./dist/index.js",
+   "repository": {
+     "type": "git",
+     "url": "git+https://github.com/larkiny/starlight-github-loader-fork.git",
+     "directory": "packages/astro-github-loader"
+   },
+   "publishConfig": {
+     "registry": "https://npm.pkg.github.com"
+   },
+   "files": [
+     "dist",
+     "src"
+   ],
+   "exports": {
+     ".": {
+       "types": "./dist/index.d.ts",
+       "default": "./dist/index.js"
+     }
+   },
+   "scripts": {
+     "build": "tsc",
+     "lint": "eslint .",
+     "prettier": "prettier --check .",
+     "preview": "astro preview",
+     "astro": "astro"
+   },
+   "dependencies": {
+     "github-slugger": "^2.0.0",
+     "octokit": "^4.1.2",
+     "picomatch": "^4.0.2"
+   },
+   "peerDependencies": {
+     "astro": "^5.5.6"
+   },
+   "devDependencies": {
+     "@types/js-yaml": "^4.0.9",
+     "@types/node": "^22.14.0",
+     "@types/picomatch": "^4.0.0",
+     "@typescript-eslint/parser": "^8.29.0",
+     "eslint": "^9.24.0",
+     "eslint-plugin-astro": "^1.3.1",
+     "prettier": "3.5.3",
+     "prettier-plugin-astro": "0.14.1",
+     "starlight-typedoc": "^0.21.0",
+     "typedoc": "^0.28.1",
+     "typedoc-plugin-markdown": "^4.6.1",
+     "vitest": "^3.1.1"
+   }
+ }
@@ -0,0 +1,243 @@
+ import { promises as fs } from "node:fs";
+ import { existsSync } from "node:fs";
+ import { join } from "node:path";
+ import { generatePath, shouldIncludeFile } from "./github.content.js";
+ import type { ImportOptions, LoaderContext, SyncStats } from "./github.types.js";
+
+ const SLEEP_BETWEEN_DELETES = 10; // ms between file deletions
+
+ /**
+  * Sleep utility for pacing file operations
+  */
+ function sleep(ms: number): Promise<void> {
+   return new Promise(resolve => setTimeout(resolve, ms));
+ }
+
+ /**
+  * Gets all files that should exist locally based on remote repository state
+  */
+ async function getExpectedFiles(
+   octokit: any,
+   options: ImportOptions,
+   signal?: AbortSignal
+ ): Promise<Set<string>> {
+   const { owner, repo, ref = "main" } = options;
+   const expectedFiles = new Set<string>();
+
+   // Get all unique directory prefixes from include patterns to limit scanning
+   const directoriesToScan = new Set<string>();
+   if (options.includes && options.includes.length > 0) {
+     for (const includePattern of options.includes) {
+       // Extract directory part from pattern (before any glob wildcards)
+       const pattern = includePattern.pattern;
+       const beforeGlob = pattern.split(/[*?{]/)[0];
+       const dirPart = beforeGlob.includes('/') ? beforeGlob.substring(0, beforeGlob.lastIndexOf('/')) : '';
+       directoriesToScan.add(dirPart);
+     }
+   } else {
+     // If no includes specified, scan from root
+     directoriesToScan.add('');
+   }
+
+   async function processDirectory(dirPath: string) {
+     try {
+       const { data } = await octokit.rest.repos.getContent({
+         owner,
+         repo,
+         path: dirPath,
+         ref,
+         request: { signal }
+       });
+
+       if (!Array.isArray(data)) {
+         // Single file
+         if (data.type === 'file') {
+           const includeResult = shouldIncludeFile(data.path, options);
+           if (includeResult.included) {
+             const localPath = generatePath(data.path, includeResult.matchedPattern ?? null, options);
+             // Convert to absolute path for consistent comparison
+             const absolutePath = localPath.startsWith('/') ? localPath : join(process.cwd(), localPath);
+             expectedFiles.add(absolutePath);
+           }
+         }
+         return;
+       }
+
+       // Directory listing
+       const promises = data
+         .filter(({ type, path }) => {
+           if (type === "dir") return true;
+           if (type === "file") return shouldIncludeFile(path, options).included;
+           return false;
+         })
+         .map(async ({ type, path: itemPath }) => {
+           if (type === "dir") {
+             await processDirectory(itemPath);
+           } else if (type === "file") {
+             const includeResult = shouldIncludeFile(itemPath, options);
+             const localPath = generatePath(itemPath, includeResult.matchedPattern ?? null, options);
+             // Convert to absolute path for consistent comparison
+             const absolutePath = localPath.startsWith('/') ? localPath : join(process.cwd(), localPath);
+             expectedFiles.add(absolutePath);
+           }
+         });
+
+       await Promise.all(promises);
+     } catch (error: any) {
+       if (signal?.aborted) throw error;
+       console.warn(`Failed to process directory ${dirPath}:`, error);
+     }
+   }
+
+   // Process only the directories that match our include patterns
+   for (const dirPath of directoriesToScan) {
+     await processDirectory(dirPath);
+   }
+   return expectedFiles;
+ }
+
+ /**
+  * Gets all existing local files in the basePath as absolute paths
+  */
+ async function getExistingFiles(basePath: string): Promise<Set<string>> {
+   const existingFiles = new Set<string>();
+
+   if (!existsSync(basePath)) {
+     return existingFiles;
+   }
+
+   async function walkDirectory(dirPath: string) {
+     try {
+       const entries = await fs.readdir(dirPath, { withFileTypes: true });
+
+       for (const entry of entries) {
+         const fullPath = join(dirPath, entry.name);
+
+         if (entry.isDirectory()) {
+           // Skip dot-prefixed directories (manifest and other system directories)
+           if (!entry.name.startsWith('.')) {
+             await walkDirectory(fullPath);
+           }
+         } else if (entry.isFile()) {
+           // Skip dot-prefixed files (manifest and other system files)
+           if (!entry.name.startsWith('.')) {
+             existingFiles.add(fullPath);
+           }
+         }
+       }
+     } catch (error) {
+       console.warn(`Failed to read directory ${dirPath}:`, error);
+     }
+   }
+
+   await walkDirectory(basePath);
+   return existingFiles;
+ }
+
+ /**
+  * Performs selective cleanup of obsolete files
+  */
+ export async function performSelectiveCleanup(
+   config: ImportOptions,
+   context: LoaderContext,
+   octokit: any,
+   signal?: AbortSignal
+ ): Promise<SyncStats> {
+   const startTime = Date.now();
+   const { logger } = context;
+   const configName = config.name || `${config.owner}/${config.repo}`;
+
+   if (!config.includes || config.includes.length === 0) {
+     // No cleanup needed if no include patterns specified
+     return {
+       added: 0,
+       updated: 0,
+       deleted: 0,
+       unchanged: 0,
+       duration: Date.now() - startTime
+     };
+   }
+
+   logger.debug(`Starting selective cleanup for ${configName}`);
+
+   try {
+     // Get existing local files from all include pattern base paths
+     const allExistingFiles = new Set<string>();
+     for (const includePattern of config.includes) {
+       const existingFiles = await getExistingFiles(includePattern.basePath);
+       existingFiles.forEach(file => allExistingFiles.add(file));
+     }
+
+     // If no existing files, skip cleanup (fresh import)
+     if (allExistingFiles.size === 0) {
+       logger.debug(`No existing files found in any base paths, skipping cleanup`);
+       return {
+         added: 0,
+         updated: 0,
+         deleted: 0,
+         unchanged: 0,
+         duration: Date.now() - startTime
+       };
+     }
+
+     // Get expected files from remote repository
+     const expectedFiles = await getExpectedFiles(octokit, config, signal);
+
+     // Find files to delete (exist locally but not in remote)
+     const filesToDelete: string[] = [];
+     for (const existingFile of allExistingFiles) {
+       if (!expectedFiles.has(existingFile)) {
+         filesToDelete.push(existingFile);
+       }
+     }
+
+     // Delete obsolete files with pacing
+     let deletedCount = 0;
+     for (const filePath of filesToDelete) {
+       try {
+         if (existsSync(filePath)) {
+           await fs.unlink(filePath);
+           logger.debug(`Deleted obsolete file: ${filePath}`);
+           deletedCount++;
+           await sleep(SLEEP_BETWEEN_DELETES);
+         }
+       } catch (error) {
+         logger.warn(`Failed to delete ${filePath}: ${error}`);
+       }
+     }
+
+     const duration = Date.now() - startTime;
+     const stats: SyncStats = {
+       added: 0, // Will be counted by main sync process
+       updated: 0, // Will be counted by main sync process
+       deleted: deletedCount,
+       unchanged: 0, // Will be counted by main sync process
+       duration
+     };
+
+     if (deletedCount > 0) {
+       logger.info(`Cleanup completed for ${configName}: ${deletedCount} obsolete files deleted (${duration}ms)`);
+     } else {
+       logger.debug(`No cleanup needed for ${configName} (${duration}ms)`);
+     }
+
+     return stats;
+
+   } catch (error: any) {
+     if (signal?.aborted) {
+       logger.info(`Cleanup cancelled for ${configName}`);
+       throw error;
+     }
+
+     const duration = Date.now() - startTime;
+     logger.error(`Cleanup failed for ${configName} after ${duration}ms: ${error}`);
+     // Don't throw - let the main sync process continue
+     return {
+       added: 0,
+       updated: 0,
+       deleted: 0,
+       unchanged: 0,
+       duration
+     };
+   }
+ }
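
To make the directory-prefix extraction in getExpectedFiles concrete, here is the same split applied to a sample pattern (the pattern value is illustrative):

// 'docs/guides/**/*.md' is split at the first glob character (*, ?, or {)
const pattern = 'docs/guides/**/*.md';
const beforeGlob = pattern.split(/[*?{]/)[0]; // 'docs/guides/'
const dirPart = beforeGlob.includes('/')
  ? beforeGlob.substring(0, beforeGlob.lastIndexOf('/')) // 'docs/guides'
  : ''; // no directory part: scan from the repository root
// Only 'docs/guides' is then scanned via the GitHub contents API.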
@@ -0,0 +1,25 @@
+ /**
+  * This variable represents an error message indicating that the provided string is invalid.
+  * It is typically used to flag inputs or data that do not meet the required string format or criteria.
+  * The value is a constant string: 'Invalid string'.
+  *
+  * @internal
+  */
+ export const INVALID_STRING_ERROR = "Invalid string";
+ /**
+  * Represents an error message indicating that a provided URL is invalid.
+  * This constant is typically used for validation or error handling when a URL
+  * does not conform to the expected format or requirements.
+  *
+  * @internal
+  */
+ export const INVALID_URL_ERROR = "Invalid url";
+
+ /**
+  * A constant that holds a default error message indicating that a service response is invalid.
+  * This value is typically used to signify that the response from a service or API call
+  * does not meet the expected format, structure, or criteria.
+  *
+  * @internal
+  */
+ export const INVALID_SERVICE_RESPONSE = "Invalid service response";