@apitap/core 1.5.4 → 1.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +372 -20
- package/dist/cli.js.map +1 -1
- package/dist/discovery/openapi.js +23 -50
- package/dist/discovery/openapi.js.map +1 -1
- package/dist/replay/engine.d.ts +13 -0
- package/dist/replay/engine.js +20 -0
- package/dist/replay/engine.js.map +1 -1
- package/dist/skill/apis-guru.d.ts +35 -0
- package/dist/skill/apis-guru.js +128 -0
- package/dist/skill/apis-guru.js.map +1 -0
- package/dist/skill/merge.d.ts +29 -0
- package/dist/skill/merge.js +252 -0
- package/dist/skill/merge.js.map +1 -0
- package/dist/skill/openapi-converter.d.ts +31 -0
- package/dist/skill/openapi-converter.js +383 -0
- package/dist/skill/openapi-converter.js.map +1 -0
- package/dist/types.d.ts +41 -0
- package/package.json +1 -1
- package/src/cli.ts +400 -18
- package/src/discovery/openapi.ts +25 -56
- package/src/replay/engine.ts +19 -0
- package/src/skill/apis-guru.ts +163 -0
- package/src/skill/merge.ts +281 -0
- package/src/skill/openapi-converter.ts +426 -0
- package/src/types.ts +42 -1
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
// src/skill/apis-guru.ts
|
|
2
|
+
import { resolveAndValidateUrl } from '../skill/ssrf.js';
|
|
3
|
+
|
|
4
|
+
// Summary metadata for one API taken from the APIs.guru directory listing
// (one entry per API, using the directory's "preferred" version).
export interface ApisGuruEntry {
  apiId: string; // e.g., "twilio.com:api"
  providerName: string; // e.g., "twilio.com"
  title: string; // human-readable title from the preferred version's info block; may be ''
  specUrl: string; // direct URL to OpenAPI JSON spec
  openapiVer: string; // version string from the directory entry; may be '' when absent
  updated: string; // last-updated timestamp from the directory entry; may be '' when absent
}
|
|
12
|
+
|
|
13
|
+
// Public APIs.guru directory index: one JSON object keyed by apiId.
const APIS_GURU_LIST_URL = 'https://api.apis.guru/v2/list.json';
|
|
14
|
+
|
|
15
|
+
/**
|
|
16
|
+
* Parse raw APIs.guru list.json response into ApisGuruEntry array.
|
|
17
|
+
* For each API, use the preferred version's data.
|
|
18
|
+
*/
|
|
19
|
+
export function parseApisGuruList(raw: Record<string, any>): ApisGuruEntry[] {
|
|
20
|
+
const entries: ApisGuruEntry[] = [];
|
|
21
|
+
|
|
22
|
+
for (const apiId of Object.keys(raw)) {
|
|
23
|
+
const apiData = raw[apiId];
|
|
24
|
+
if (!apiData || typeof apiData !== 'object') continue;
|
|
25
|
+
|
|
26
|
+
const preferred: string = apiData.preferred;
|
|
27
|
+
if (!preferred) continue;
|
|
28
|
+
|
|
29
|
+
const versions = apiData.versions;
|
|
30
|
+
if (!versions || typeof versions !== 'object') continue;
|
|
31
|
+
|
|
32
|
+
const versionData = versions[preferred];
|
|
33
|
+
if (!versionData || typeof versionData !== 'object') continue;
|
|
34
|
+
|
|
35
|
+
const swaggerUrl: string | undefined = versionData.swaggerUrl;
|
|
36
|
+
if (!swaggerUrl) continue;
|
|
37
|
+
|
|
38
|
+
const info = versionData.info ?? {};
|
|
39
|
+
const title: string = info.title ?? '';
|
|
40
|
+
const openapiVer: string = versionData.openapiVer ?? '';
|
|
41
|
+
const updated: string = versionData.updated ?? '';
|
|
42
|
+
|
|
43
|
+
// providerName: prefer info.x-providerName, else split apiId on ':', else use apiId
|
|
44
|
+
let providerName: string;
|
|
45
|
+
if (info['x-providerName']) {
|
|
46
|
+
providerName = info['x-providerName'];
|
|
47
|
+
} else {
|
|
48
|
+
const colonIdx = apiId.indexOf(':');
|
|
49
|
+
providerName = colonIdx >= 0 ? apiId.slice(0, colonIdx) : apiId;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
entries.push({
|
|
53
|
+
apiId,
|
|
54
|
+
providerName,
|
|
55
|
+
title,
|
|
56
|
+
specUrl: swaggerUrl,
|
|
57
|
+
openapiVer,
|
|
58
|
+
updated,
|
|
59
|
+
});
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
return entries;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
// Options accepted by filterEntries.
export interface FilterOptions {
  search?: string; // case-insensitive substring match on providerName or title
  limit?: number; // cap on result count; values <= 0 (or non-numbers) mean no cap
  noAuthOnly?: boolean; // NOTE(review): declared but never read by filterEntries — confirm whether this is intentional
  preferOpenapi3?: boolean; // sort OpenAPI 3.x entries before everything else
}
|
|
71
|
+
|
|
72
|
+
/**
|
|
73
|
+
* Filter and sort ApisGuruEntry array.
|
|
74
|
+
* - search: substring match (case-insensitive) on providerName or title
|
|
75
|
+
* - preferOpenapi3: sort 3.x entries before 2.x, then by recency within groups
|
|
76
|
+
* - default sort: by recency (updated desc)
|
|
77
|
+
* - limit: cap result count
|
|
78
|
+
*/
|
|
79
|
+
export function filterEntries(
|
|
80
|
+
entries: ApisGuruEntry[],
|
|
81
|
+
options: FilterOptions,
|
|
82
|
+
): ApisGuruEntry[] {
|
|
83
|
+
const { search, limit, preferOpenapi3 } = options;
|
|
84
|
+
|
|
85
|
+
let result = entries;
|
|
86
|
+
|
|
87
|
+
// Filter by search term
|
|
88
|
+
if (search) {
|
|
89
|
+
const lower = search.toLowerCase();
|
|
90
|
+
result = result.filter(
|
|
91
|
+
e =>
|
|
92
|
+
e.providerName.toLowerCase().includes(lower) ||
|
|
93
|
+
e.title.toLowerCase().includes(lower),
|
|
94
|
+
);
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
// Sort
|
|
98
|
+
result = [...result].sort((a, b) => {
|
|
99
|
+
if (preferOpenapi3) {
|
|
100
|
+
const aIs3 = a.openapiVer.startsWith('3') ? 0 : 1;
|
|
101
|
+
const bIs3 = b.openapiVer.startsWith('3') ? 0 : 1;
|
|
102
|
+
if (aIs3 !== bIs3) return aIs3 - bIs3;
|
|
103
|
+
}
|
|
104
|
+
// Within same group (or when not preferring 3.x), sort by recency desc
|
|
105
|
+
return b.updated.localeCompare(a.updated);
|
|
106
|
+
});
|
|
107
|
+
|
|
108
|
+
// Apply limit
|
|
109
|
+
if (typeof limit === 'number' && limit > 0) {
|
|
110
|
+
result = result.slice(0, limit);
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
return result;
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
/**
|
|
117
|
+
* Fetch the APIs.guru list.json and parse it into ApisGuruEntry array.
|
|
118
|
+
*/
|
|
119
|
+
export async function fetchApisGuruList(): Promise<ApisGuruEntry[]> {
|
|
120
|
+
const ssrf = await resolveAndValidateUrl(APIS_GURU_LIST_URL);
|
|
121
|
+
if (!ssrf.safe) {
|
|
122
|
+
throw new Error(`SSRF check failed for APIs.guru list URL: ${ssrf.reason}`);
|
|
123
|
+
}
|
|
124
|
+
const response = await fetch(APIS_GURU_LIST_URL, {
|
|
125
|
+
signal: AbortSignal.timeout(30_000),
|
|
126
|
+
});
|
|
127
|
+
if (!response.ok) {
|
|
128
|
+
throw new Error(
|
|
129
|
+
`Failed to fetch APIs.guru list: ${response.status} ${response.statusText}`,
|
|
130
|
+
);
|
|
131
|
+
}
|
|
132
|
+
const text = await response.text();
|
|
133
|
+
try {
|
|
134
|
+
return parseApisGuruList(JSON.parse(text) as Record<string, any>);
|
|
135
|
+
} catch {
|
|
136
|
+
throw new Error(`Invalid JSON from ${APIS_GURU_LIST_URL}: ${text.slice(0, 100)}`);
|
|
137
|
+
}
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
/**
|
|
141
|
+
* Fetch a single OpenAPI spec by URL and return the parsed JSON.
|
|
142
|
+
*/
|
|
143
|
+
export async function fetchSpec(specUrl: string): Promise<Record<string, any>> {
|
|
144
|
+
const ssrf = await resolveAndValidateUrl(specUrl);
|
|
145
|
+
if (!ssrf.safe) {
|
|
146
|
+
throw new Error(`SSRF check failed for spec URL ${specUrl}: ${ssrf.reason}`);
|
|
147
|
+
}
|
|
148
|
+
const response = await fetch(specUrl, {
|
|
149
|
+
signal: AbortSignal.timeout(30_000),
|
|
150
|
+
redirect: 'error',
|
|
151
|
+
});
|
|
152
|
+
if (!response.ok) {
|
|
153
|
+
throw new Error(
|
|
154
|
+
`Failed to fetch spec at ${specUrl}: ${response.status} ${response.statusText}`,
|
|
155
|
+
);
|
|
156
|
+
}
|
|
157
|
+
const text = await response.text();
|
|
158
|
+
try {
|
|
159
|
+
return JSON.parse(text) as Record<string, any>;
|
|
160
|
+
} catch {
|
|
161
|
+
throw new Error(`Invalid JSON from ${specUrl}: ${text.slice(0, 100)}`);
|
|
162
|
+
}
|
|
163
|
+
}
|
|
@@ -0,0 +1,281 @@
|
|
|
1
|
+
// src/skill/merge.ts
|
|
2
|
+
import type { SkillFile, SkillEndpoint, ImportMeta, MergeResult } from '../types.js';
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* Normalize a parameterized path by replacing all named param placeholders
|
|
6
|
+
* with the generic `:_` placeholder, enabling matching across different param
|
|
7
|
+
* naming conventions (e.g. `:id` vs `:userId` vs `:user_id`).
|
|
8
|
+
*
|
|
9
|
+
* @example
|
|
10
|
+
* normalizePath('/repos/:owner/:repo') // → '/repos/:_/:_'
|
|
11
|
+
* normalizePath('/users/list') // → '/users/list'
|
|
12
|
+
*/
|
|
13
|
+
export function normalizePath(path: string): string {
|
|
14
|
+
return path.replace(/:[a-zA-Z_]\w*/g, ':_');
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
/**
|
|
18
|
+
* Build a match key for endpoint deduplication: METHOD + normalized path.
|
|
19
|
+
*/
|
|
20
|
+
function matchKey(method: string, path: string): string {
|
|
21
|
+
return `${method.toUpperCase()} ${normalizePath(path)}`;
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
/**
|
|
25
|
+
* Merge query params from a captured endpoint with params from an imported
|
|
26
|
+
* spec endpoint.
|
|
27
|
+
*
|
|
28
|
+
* Rules:
|
|
29
|
+
* - Captured `example` values are preserved (captured data is sacred).
|
|
30
|
+
* - Spec `type`, `required`, `enum` augment the captured params.
|
|
31
|
+
* - New params that only exist in the spec are added wholesale.
|
|
32
|
+
*/
|
|
33
|
+
function mergeQueryParams(
|
|
34
|
+
captured: SkillEndpoint['queryParams'],
|
|
35
|
+
specParams: SkillEndpoint['queryParams'],
|
|
36
|
+
): SkillEndpoint['queryParams'] {
|
|
37
|
+
const merged = { ...captured };
|
|
38
|
+
|
|
39
|
+
for (const [name, specParam] of Object.entries(specParams)) {
|
|
40
|
+
if (name in merged) {
|
|
41
|
+
// Param exists in captured — keep captured example, augment with spec metadata
|
|
42
|
+
const existing = merged[name];
|
|
43
|
+
merged[name] = {
|
|
44
|
+
...existing,
|
|
45
|
+
// Take spec type only if captured has generic 'string' and spec is more specific
|
|
46
|
+
type: existing.type === 'string' && specParam.type !== 'string' ? specParam.type : existing.type,
|
|
47
|
+
// Always preserve captured example value
|
|
48
|
+
example: existing.example,
|
|
49
|
+
// Add spec enum if present
|
|
50
|
+
...(specParam.enum !== undefined ? { enum: specParam.enum } : {}),
|
|
51
|
+
// Add spec required flag if present
|
|
52
|
+
...(specParam.required !== undefined ? { required: specParam.required } : {}),
|
|
53
|
+
// Mark as also coming from spec
|
|
54
|
+
...(specParam.fromSpec ? { fromSpec: true } : {}),
|
|
55
|
+
};
|
|
56
|
+
} else {
|
|
57
|
+
// New param only in spec — add it
|
|
58
|
+
merged[name] = specParam;
|
|
59
|
+
}
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
return merged;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
/**
|
|
66
|
+
* Determine whether an imported endpoint would enrich an existing captured
|
|
67
|
+
* endpoint (i.e. adds new metadata not already present).
|
|
68
|
+
*/
|
|
69
|
+
function wouldEnrich(existing: SkillEndpoint, imported: SkillEndpoint): boolean {
|
|
70
|
+
if (!existing.description && imported.description) return true;
|
|
71
|
+
if (!existing.specSource && imported.specSource) return true;
|
|
72
|
+
|
|
73
|
+
// Check if any new query params would be added or enriched
|
|
74
|
+
for (const [name, specParam] of Object.entries(imported.queryParams)) {
|
|
75
|
+
if (!(name in existing.queryParams)) return true;
|
|
76
|
+
const ep = existing.queryParams[name];
|
|
77
|
+
if (!ep.enum && specParam.enum) return true;
|
|
78
|
+
if (ep.required === undefined && specParam.required !== undefined) return true;
|
|
79
|
+
if (ep.type === 'string' && specParam.type !== 'string') return true;
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
return false;
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
/**
|
|
86
|
+
* Pure function — no I/O.
|
|
87
|
+
*
|
|
88
|
+
* Merges imported OpenAPI endpoints into an existing skill file.
|
|
89
|
+
*
|
|
90
|
+
* Captured data is sacred: it always wins on confidence, examples, and
|
|
91
|
+
* endpoint provenance. Spec data can only enrich (add description, specSource,
|
|
92
|
+
* query param enum/required/type) or fill gaps (add missing endpoints).
|
|
93
|
+
*
|
|
94
|
+
* Match logic: METHOD + normalizePath(path). This allows `:owner` and `:user`
|
|
95
|
+
* to be considered the same parameter slot.
|
|
96
|
+
*
|
|
97
|
+
* @param existing The existing skill file on disk, or null if none exists.
|
|
98
|
+
* @param imported Endpoints parsed from the OpenAPI spec.
|
|
99
|
+
* @param importMeta Metadata about the import (spec URL, version, etc.).
|
|
100
|
+
* @returns MergeResult with the updated SkillFile and a diff summary.
|
|
101
|
+
*/
|
|
102
|
+
export function mergeSkillFile(
|
|
103
|
+
existing: SkillFile | null,
|
|
104
|
+
imported: SkillEndpoint[],
|
|
105
|
+
importMeta: ImportMeta,
|
|
106
|
+
): MergeResult {
|
|
107
|
+
const now = new Date().toISOString();
|
|
108
|
+
|
|
109
|
+
let preserved = 0;
|
|
110
|
+
let added = 0;
|
|
111
|
+
let enriched = 0;
|
|
112
|
+
let skipped = 0;
|
|
113
|
+
|
|
114
|
+
// --- Case: no existing file — create a new SkillFile from imported endpoints ---
|
|
115
|
+
if (existing === null) {
|
|
116
|
+
const endpoints = imported.map(ep => ({
|
|
117
|
+
...ep,
|
|
118
|
+
normalizedPath: normalizePath(ep.path),
|
|
119
|
+
}));
|
|
120
|
+
|
|
121
|
+
const skillFile: SkillFile = {
|
|
122
|
+
version: '1.2',
|
|
123
|
+
domain: extractDomainFromMeta(importMeta),
|
|
124
|
+
capturedAt: now,
|
|
125
|
+
baseUrl: extractBaseUrlFromMeta(importMeta),
|
|
126
|
+
endpoints,
|
|
127
|
+
metadata: {
|
|
128
|
+
captureCount: 0,
|
|
129
|
+
filteredCount: 0,
|
|
130
|
+
toolVersion: '1.0.0',
|
|
131
|
+
importHistory: [{
|
|
132
|
+
specUrl: importMeta.specUrl,
|
|
133
|
+
specVersion: importMeta.specVersion,
|
|
134
|
+
importedAt: now,
|
|
135
|
+
endpointsAdded: endpoints.length,
|
|
136
|
+
endpointsEnriched: 0,
|
|
137
|
+
}],
|
|
138
|
+
},
|
|
139
|
+
provenance: 'imported',
|
|
140
|
+
};
|
|
141
|
+
|
|
142
|
+
added = endpoints.length;
|
|
143
|
+
|
|
144
|
+
return {
|
|
145
|
+
skillFile,
|
|
146
|
+
diff: { preserved, added, enriched, skipped },
|
|
147
|
+
};
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
// --- Case: existing file present — merge into it ---
|
|
151
|
+
|
|
152
|
+
// Build a map from match-key → existing endpoint (mutable copy)
|
|
153
|
+
const existingMap = new Map<string, SkillEndpoint>();
|
|
154
|
+
for (const ep of existing.endpoints) {
|
|
155
|
+
existingMap.set(matchKey(ep.method, ep.path), ep);
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
// Build a map from match-key → imported endpoint
|
|
159
|
+
const importedMap = new Map<string, SkillEndpoint>();
|
|
160
|
+
for (const ep of imported) {
|
|
161
|
+
const key = matchKey(ep.method, ep.path);
|
|
162
|
+
// If multiple imported endpoints map to the same key, last wins
|
|
163
|
+
if (importedMap.has(key)) {
|
|
164
|
+
process.stderr.write(`[openapi-import] Warning: ${ep.method} ${ep.path} collides with existing import after normalization\n`);
|
|
165
|
+
}
|
|
166
|
+
importedMap.set(key, ep);
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
// Process: update or preserve existing endpoints
|
|
170
|
+
const resultEndpoints: SkillEndpoint[] = [];
|
|
171
|
+
|
|
172
|
+
for (const [key, existingEp] of existingMap) {
|
|
173
|
+
const importedEp = importedMap.get(key);
|
|
174
|
+
|
|
175
|
+
if (!importedEp) {
|
|
176
|
+
// Not in import — preserve as-is (captured endpoint with no match in spec)
|
|
177
|
+
resultEndpoints.push({
|
|
178
|
+
...existingEp,
|
|
179
|
+
normalizedPath: normalizePath(existingEp.path),
|
|
180
|
+
});
|
|
181
|
+
preserved++;
|
|
182
|
+
continue;
|
|
183
|
+
}
|
|
184
|
+
|
|
185
|
+
// Imported endpoint matches an existing one — check if it would add anything new
|
|
186
|
+
if (!wouldEnrich(existingEp, importedEp)) {
|
|
187
|
+
resultEndpoints.push({
|
|
188
|
+
...existingEp,
|
|
189
|
+
normalizedPath: normalizePath(existingEp.path),
|
|
190
|
+
});
|
|
191
|
+
// "skipped" means the import is redundant — existing already has spec metadata.
|
|
192
|
+
// "preserved" means the captured endpoint is untouched (import had nothing for it,
|
|
193
|
+
// or both sides are bare with no spec data to exchange).
|
|
194
|
+
const existingHasSpecData = !!(existingEp.specSource || existingEp.description);
|
|
195
|
+
const importHasSpecData = !!(importedEp.specSource || importedEp.description);
|
|
196
|
+
if (existingHasSpecData || importHasSpecData) {
|
|
197
|
+
// Spec data was already integrated (or import tried to add it but it's already present)
|
|
198
|
+
skipped++;
|
|
199
|
+
} else {
|
|
200
|
+
// Neither side has spec enrichment data — captured endpoint simply preserved
|
|
201
|
+
preserved++;
|
|
202
|
+
}
|
|
203
|
+
continue;
|
|
204
|
+
}
|
|
205
|
+
|
|
206
|
+
// Enrich the captured endpoint with spec metadata
|
|
207
|
+
const mergedQueryParams = mergeQueryParams(existingEp.queryParams, importedEp.queryParams);
|
|
208
|
+
|
|
209
|
+
const enrichedEp: SkillEndpoint = {
|
|
210
|
+
...existingEp,
|
|
211
|
+
normalizedPath: normalizePath(existingEp.path),
|
|
212
|
+
// Augment with spec fields (only if not already present)
|
|
213
|
+
...(importedEp.description && !existingEp.description ? { description: importedEp.description } : {}),
|
|
214
|
+
...(importedEp.specSource && !existingEp.specSource ? { specSource: importedEp.specSource } : {}),
|
|
215
|
+
// Confidence never downgrades
|
|
216
|
+
confidence: Math.max(existingEp.confidence ?? 0, importedEp.confidence ?? 0) || existingEp.confidence,
|
|
217
|
+
// Keep captured provenance
|
|
218
|
+
endpointProvenance: existingEp.endpointProvenance,
|
|
219
|
+
queryParams: mergedQueryParams,
|
|
220
|
+
};
|
|
221
|
+
|
|
222
|
+
resultEndpoints.push(enrichedEp);
|
|
223
|
+
enriched++;
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
// Add endpoints from import that don't exist in the existing file
|
|
227
|
+
for (const [key, importedEp] of importedMap) {
|
|
228
|
+
if (!existingMap.has(key)) {
|
|
229
|
+
resultEndpoints.push({
|
|
230
|
+
...importedEp,
|
|
231
|
+
normalizedPath: normalizePath(importedEp.path),
|
|
232
|
+
});
|
|
233
|
+
added++;
|
|
234
|
+
}
|
|
235
|
+
}
|
|
236
|
+
|
|
237
|
+
// Build updated import history
|
|
238
|
+
const prevHistory = existing.metadata.importHistory ?? [];
|
|
239
|
+
const newHistoryEntry = {
|
|
240
|
+
specUrl: importMeta.specUrl,
|
|
241
|
+
specVersion: importMeta.specVersion,
|
|
242
|
+
importedAt: now,
|
|
243
|
+
endpointsAdded: added,
|
|
244
|
+
endpointsEnriched: enriched,
|
|
245
|
+
};
|
|
246
|
+
|
|
247
|
+
const skillFile: SkillFile = {
|
|
248
|
+
...existing,
|
|
249
|
+
endpoints: resultEndpoints,
|
|
250
|
+
metadata: {
|
|
251
|
+
...existing.metadata,
|
|
252
|
+
importHistory: [...prevHistory, newHistoryEntry],
|
|
253
|
+
},
|
|
254
|
+
};
|
|
255
|
+
|
|
256
|
+
return {
|
|
257
|
+
skillFile,
|
|
258
|
+
diff: { preserved, added, enriched, skipped },
|
|
259
|
+
};
|
|
260
|
+
}
|
|
261
|
+
|
|
262
|
+
// ---------------------------------------------------------------------------
|
|
263
|
+
// Internal helpers
|
|
264
|
+
// ---------------------------------------------------------------------------
|
|
265
|
+
|
|
266
|
+
function extractDomainFromMeta(meta: ImportMeta): string {
|
|
267
|
+
try {
|
|
268
|
+
return new URL(meta.specUrl).hostname;
|
|
269
|
+
} catch {
|
|
270
|
+
throw new Error(`Cannot determine domain from specUrl: ${meta.specUrl}`);
|
|
271
|
+
}
|
|
272
|
+
}
|
|
273
|
+
|
|
274
|
+
function extractBaseUrlFromMeta(meta: ImportMeta): string {
|
|
275
|
+
try {
|
|
276
|
+
const u = new URL(meta.specUrl);
|
|
277
|
+
return `${u.protocol}//${u.hostname}`;
|
|
278
|
+
} catch {
|
|
279
|
+
throw new Error(`Cannot determine base URL from specUrl: ${meta.specUrl}`);
|
|
280
|
+
}
|
|
281
|
+
}
|