@apitap/core 1.5.4 → 1.6.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +410 -21
- package/dist/cli.js.map +1 -1
- package/dist/discovery/openapi.js +23 -50
- package/dist/discovery/openapi.js.map +1 -1
- package/dist/replay/engine.d.ts +19 -0
- package/dist/replay/engine.js +46 -5
- package/dist/replay/engine.js.map +1 -1
- package/dist/skill/apis-guru.d.ts +35 -0
- package/dist/skill/apis-guru.js +136 -0
- package/dist/skill/apis-guru.js.map +1 -0
- package/dist/skill/merge.d.ts +29 -0
- package/dist/skill/merge.js +252 -0
- package/dist/skill/merge.js.map +1 -0
- package/dist/skill/openapi-converter.d.ts +31 -0
- package/dist/skill/openapi-converter.js +390 -0
- package/dist/skill/openapi-converter.js.map +1 -0
- package/dist/skill/signing.d.ts +5 -0
- package/dist/skill/signing.js +9 -0
- package/dist/skill/signing.js.map +1 -1
- package/dist/skill/store.js +9 -4
- package/dist/skill/store.js.map +1 -1
- package/dist/types.d.ts +43 -2
- package/package.json +3 -1
- package/src/cli.ts +442 -19
- package/src/discovery/openapi.ts +25 -56
- package/src/replay/engine.ts +48 -5
- package/src/skill/apis-guru.ts +169 -0
- package/src/skill/merge.ts +281 -0
- package/src/skill/openapi-converter.ts +434 -0
- package/src/skill/signing.ts +14 -0
- package/src/skill/store.ts +8 -4
- package/src/types.ts +44 -3
package/src/replay/engine.ts
CHANGED
|
@@ -45,6 +45,15 @@ const BLOCKED_REPLAY_HEADERS = new Set([
|
|
|
45
45
|
'sec-fetch-user',
|
|
46
46
|
]);
|
|
47
47
|
|
|
48
|
+
export function safeParseJson(text: string): unknown {
|
|
49
|
+
if (text.length === 0) return text;
|
|
50
|
+
try {
|
|
51
|
+
return JSON.parse(text);
|
|
52
|
+
} catch {
|
|
53
|
+
return text;
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
|
|
48
57
|
export interface ReplayOptions {
|
|
49
58
|
/** User-provided parameters for path, query, and body substitution */
|
|
50
59
|
params?: Record<string, string>;
|
|
@@ -70,6 +79,8 @@ export interface ReplayResult {
|
|
|
70
79
|
truncated?: boolean;
|
|
71
80
|
/** Contract warnings from schema drift detection */
|
|
72
81
|
contractWarnings?: ContractWarning[];
|
|
82
|
+
/** Upgrade hint: set when a low-confidence endpoint gets a 2xx response */
|
|
83
|
+
upgrade?: { confidence: 1.0; endpointProvenance: 'captured' };
|
|
73
84
|
}
|
|
74
85
|
|
|
75
86
|
/**
|
|
@@ -204,6 +215,24 @@ function wrapAuthError(
|
|
|
204
215
|
};
|
|
205
216
|
}
|
|
206
217
|
|
|
218
|
+
/**
|
|
219
|
+
* Returns a user-facing hint about confidence level, or null if confidence is high enough.
|
|
220
|
+
*/
|
|
221
|
+
export function getConfidenceHint(confidence: number | undefined): string | null {
|
|
222
|
+
const c = confidence ?? 1.0;
|
|
223
|
+
if (c >= 0.85) return null;
|
|
224
|
+
if (c >= 0.7) return '(imported from spec — params may need adjustment)';
|
|
225
|
+
return '(imported from spec — provide params explicitly, no captured examples available)';
|
|
226
|
+
}
|
|
227
|
+
|
|
228
|
+
/**
|
|
229
|
+
* Returns true if a query param should be omitted from the request.
|
|
230
|
+
* Omits spec-derived params that have no real example value.
|
|
231
|
+
*/
|
|
232
|
+
export function shouldOmitQueryParam(param: { type: string; example: string; fromSpec?: boolean }): boolean {
|
|
233
|
+
return param.fromSpec === true && param.example === '';
|
|
234
|
+
}
|
|
235
|
+
|
|
207
236
|
/**
|
|
208
237
|
* Replay a captured API endpoint.
|
|
209
238
|
*
|
|
@@ -240,6 +269,7 @@ export async function replayEndpoint(
|
|
|
240
269
|
|
|
241
270
|
// Apply query params: start with captured defaults, override with provided params
|
|
242
271
|
for (const [key, val] of Object.entries(endpoint.queryParams)) {
|
|
272
|
+
if (shouldOmitQueryParam(val)) continue;
|
|
243
273
|
url.searchParams.set(key, val.example);
|
|
244
274
|
}
|
|
245
275
|
if (params) {
|
|
@@ -529,8 +559,8 @@ export async function replayEndpoint(
|
|
|
529
559
|
let retryData: unknown;
|
|
530
560
|
const retryCt = retryResponse.headers.get('content-type') ?? '';
|
|
531
561
|
const retryText = await retryResponse.text();
|
|
532
|
-
if (retryCt.includes('json')
|
|
533
|
-
retryData =
|
|
562
|
+
if (retryCt.includes('json')) {
|
|
563
|
+
retryData = safeParseJson(retryText);
|
|
534
564
|
} else {
|
|
535
565
|
retryData = retryText;
|
|
536
566
|
}
|
|
@@ -539,6 +569,11 @@ export async function replayEndpoint(
|
|
|
539
569
|
? wrapAuthError(retryResponse.status, retryData, skill.domain)
|
|
540
570
|
: retryData;
|
|
541
571
|
|
|
572
|
+
const retryUpgrade = (endpoint.confidence !== undefined && endpoint.confidence < 1.0
|
|
573
|
+
&& retryResponse.status >= 200 && retryResponse.status < 300)
|
|
574
|
+
? { confidence: 1.0 as const, endpointProvenance: 'captured' as const }
|
|
575
|
+
: undefined;
|
|
576
|
+
|
|
542
577
|
if (options.maxBytes) {
|
|
543
578
|
const truncated = truncateResponse(retryFinalData, { maxBytes: options.maxBytes });
|
|
544
579
|
return {
|
|
@@ -547,6 +582,7 @@ export async function replayEndpoint(
|
|
|
547
582
|
data: truncated.data,
|
|
548
583
|
refreshed,
|
|
549
584
|
...(truncated.truncated ? { truncated: true } : {}),
|
|
585
|
+
...(retryUpgrade ? { upgrade: retryUpgrade } : {}),
|
|
550
586
|
};
|
|
551
587
|
}
|
|
552
588
|
|
|
@@ -555,6 +591,7 @@ export async function replayEndpoint(
|
|
|
555
591
|
headers: retryHeaders,
|
|
556
592
|
data: retryFinalData,
|
|
557
593
|
refreshed,
|
|
594
|
+
...(retryUpgrade ? { upgrade: retryUpgrade } : {}),
|
|
558
595
|
};
|
|
559
596
|
}
|
|
560
597
|
}
|
|
@@ -567,8 +604,8 @@ export async function replayEndpoint(
|
|
|
567
604
|
let data: unknown;
|
|
568
605
|
const ct = response.headers.get('content-type') ?? '';
|
|
569
606
|
const text = await response.text();
|
|
570
|
-
if (ct.includes('json')
|
|
571
|
-
data =
|
|
607
|
+
if (ct.includes('json')) {
|
|
608
|
+
data = safeParseJson(text);
|
|
572
609
|
} else {
|
|
573
610
|
data = text;
|
|
574
611
|
}
|
|
@@ -587,6 +624,11 @@ export async function replayEndpoint(
|
|
|
587
624
|
}
|
|
588
625
|
}
|
|
589
626
|
|
|
627
|
+
const upgrade = (endpoint.confidence !== undefined && endpoint.confidence < 1.0
|
|
628
|
+
&& response.status >= 200 && response.status < 300)
|
|
629
|
+
? { confidence: 1.0 as const, endpointProvenance: 'captured' as const }
|
|
630
|
+
: undefined;
|
|
631
|
+
|
|
590
632
|
// Apply truncation if maxBytes is set
|
|
591
633
|
if (options.maxBytes) {
|
|
592
634
|
const truncated = truncateResponse(finalData, { maxBytes: options.maxBytes });
|
|
@@ -597,10 +639,11 @@ export async function replayEndpoint(
|
|
|
597
639
|
...(refreshed ? { refreshed } : {}),
|
|
598
640
|
...(truncated.truncated ? { truncated: true } : {}),
|
|
599
641
|
...(contractWarnings ? { contractWarnings } : {}),
|
|
642
|
+
...(upgrade ? { upgrade } : {}),
|
|
600
643
|
};
|
|
601
644
|
}
|
|
602
645
|
|
|
603
|
-
return { status: response.status, headers: responseHeaders, data: finalData, ...(refreshed ? { refreshed } : {}), ...(contractWarnings ? { contractWarnings } : {}) };
|
|
646
|
+
return { status: response.status, headers: responseHeaders, data: finalData, ...(refreshed ? { refreshed } : {}), ...(contractWarnings ? { contractWarnings } : {}), ...(upgrade ? { upgrade } : {}) };
|
|
604
647
|
}
|
|
605
648
|
|
|
606
649
|
// --- Batch replay ---
|
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
// src/skill/apis-guru.ts
|
|
2
|
+
import { resolveAndValidateUrl } from '../skill/ssrf.js';
|
|
3
|
+
|
|
4
|
+
// Hard caps on downloaded payload size to bound memory use.
const MAX_SPEC_SIZE = 10 * 1024 * 1024; // 10 MB per spec
const MAX_LIST_SIZE = 100 * 1024 * 1024; // 100 MB for APIs.guru list

/**
 * Fetch `url` and return the response body as text, rejecting responses
 * larger than `maxBytes`.
 *
 * Size enforcement is two-stage: the Content-Length header (when present)
 * is checked before reading the body, and the text length is checked after.
 * NOTE(review): the body is fully buffered before the second check, so a
 * chunked response without Content-Length can still occupy its full size
 * in memory; `text.length` counts UTF-16 code units, not bytes — confirm
 * this approximation is acceptable for the caps above.
 *
 * A 30s timeout signal is supplied by default; because `options` is spread
 * after it, a caller-provided `signal` replaces the default. The fixed
 * User-Agent is applied unless `options.headers` overrides it.
 *
 * @throws Error on a non-2xx status or when either size check fails.
 */
async function fetchWithSizeLimit(url: string, maxBytes: number, options?: RequestInit): Promise<string> {
  const response = await fetch(url, {
    signal: AbortSignal.timeout(30_000),
    ...options,
    headers: { 'User-Agent': 'apitap-import/1.0', ...(options?.headers as Record<string, string> || {}) },
  });
  if (!response.ok) {
    throw new Error(`HTTP ${response.status} ${response.statusText} for ${url}`);
  }
  const contentLength = response.headers.get('content-length');
  if (contentLength && parseInt(contentLength, 10) > maxBytes) {
    throw new Error(`Response too large: ${contentLength} bytes (limit: ${maxBytes})`);
  }
  const text = await response.text();
  if (text.length > maxBytes) {
    throw new Error(`Response body too large: ${text.length} bytes (limit: ${maxBytes})`);
  }
  return text;
}
|
|
26
|
+
|
|
27
|
+
export interface ApisGuruEntry {
|
|
28
|
+
apiId: string; // e.g., "twilio.com:api"
|
|
29
|
+
providerName: string; // e.g., "twilio.com"
|
|
30
|
+
title: string;
|
|
31
|
+
specUrl: string; // direct URL to OpenAPI JSON spec
|
|
32
|
+
openapiVer: string;
|
|
33
|
+
updated: string;
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
const APIS_GURU_LIST_URL = 'https://api.apis.guru/v2/list.json';
|
|
37
|
+
|
|
38
|
+
/**
|
|
39
|
+
* Parse raw APIs.guru list.json response into ApisGuruEntry array.
|
|
40
|
+
* For each API, use the preferred version's data.
|
|
41
|
+
*/
|
|
42
|
+
export function parseApisGuruList(raw: Record<string, any>): ApisGuruEntry[] {
|
|
43
|
+
const entries: ApisGuruEntry[] = [];
|
|
44
|
+
|
|
45
|
+
for (const apiId of Object.keys(raw)) {
|
|
46
|
+
const apiData = raw[apiId];
|
|
47
|
+
if (!apiData || typeof apiData !== 'object') continue;
|
|
48
|
+
|
|
49
|
+
const preferred: string = apiData.preferred;
|
|
50
|
+
if (!preferred) continue;
|
|
51
|
+
|
|
52
|
+
const versions = apiData.versions;
|
|
53
|
+
if (!versions || typeof versions !== 'object') continue;
|
|
54
|
+
|
|
55
|
+
const versionData = versions[preferred];
|
|
56
|
+
if (!versionData || typeof versionData !== 'object') continue;
|
|
57
|
+
|
|
58
|
+
const swaggerUrl: string | undefined = versionData.swaggerUrl;
|
|
59
|
+
if (!swaggerUrl) continue;
|
|
60
|
+
|
|
61
|
+
const info = versionData.info ?? {};
|
|
62
|
+
const title: string = info.title ?? '';
|
|
63
|
+
const openapiVer: string = versionData.openapiVer ?? '';
|
|
64
|
+
const updated: string = versionData.updated ?? '';
|
|
65
|
+
|
|
66
|
+
// providerName: prefer info.x-providerName, else split apiId on ':', else use apiId
|
|
67
|
+
let providerName: string;
|
|
68
|
+
if (info['x-providerName']) {
|
|
69
|
+
providerName = info['x-providerName'];
|
|
70
|
+
} else {
|
|
71
|
+
const colonIdx = apiId.indexOf(':');
|
|
72
|
+
providerName = colonIdx >= 0 ? apiId.slice(0, colonIdx) : apiId;
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
entries.push({
|
|
76
|
+
apiId,
|
|
77
|
+
providerName,
|
|
78
|
+
title,
|
|
79
|
+
specUrl: swaggerUrl,
|
|
80
|
+
openapiVer,
|
|
81
|
+
updated,
|
|
82
|
+
});
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
return entries;
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
export interface FilterOptions {
|
|
89
|
+
search?: string;
|
|
90
|
+
limit?: number;
|
|
91
|
+
noAuthOnly?: boolean;
|
|
92
|
+
preferOpenapi3?: boolean;
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
/**
|
|
96
|
+
* Filter and sort ApisGuruEntry array.
|
|
97
|
+
* - search: substring match (case-insensitive) on providerName or title
|
|
98
|
+
* - preferOpenapi3: sort 3.x entries before 2.x, then by recency within groups
|
|
99
|
+
* - default sort: by recency (updated desc)
|
|
100
|
+
* - limit: cap result count
|
|
101
|
+
*/
|
|
102
|
+
export function filterEntries(
|
|
103
|
+
entries: ApisGuruEntry[],
|
|
104
|
+
options: FilterOptions,
|
|
105
|
+
): ApisGuruEntry[] {
|
|
106
|
+
const { search, limit, preferOpenapi3 } = options;
|
|
107
|
+
|
|
108
|
+
let result = entries;
|
|
109
|
+
|
|
110
|
+
// Filter by search term
|
|
111
|
+
if (search) {
|
|
112
|
+
const lower = search.toLowerCase();
|
|
113
|
+
result = result.filter(
|
|
114
|
+
e =>
|
|
115
|
+
e.providerName.toLowerCase().includes(lower) ||
|
|
116
|
+
e.title.toLowerCase().includes(lower),
|
|
117
|
+
);
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
// Sort
|
|
121
|
+
result = [...result].sort((a, b) => {
|
|
122
|
+
if (preferOpenapi3) {
|
|
123
|
+
const aIs3 = a.openapiVer.startsWith('3') ? 0 : 1;
|
|
124
|
+
const bIs3 = b.openapiVer.startsWith('3') ? 0 : 1;
|
|
125
|
+
if (aIs3 !== bIs3) return aIs3 - bIs3;
|
|
126
|
+
}
|
|
127
|
+
// Within same group (or when not preferring 3.x), sort by recency desc
|
|
128
|
+
return b.updated.localeCompare(a.updated);
|
|
129
|
+
});
|
|
130
|
+
|
|
131
|
+
// Apply limit
|
|
132
|
+
if (typeof limit === 'number' && limit > 0) {
|
|
133
|
+
result = result.slice(0, limit);
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
return result;
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
/**
|
|
140
|
+
* Fetch the APIs.guru list.json and parse it into ApisGuruEntry array.
|
|
141
|
+
*/
|
|
142
|
+
export async function fetchApisGuruList(): Promise<ApisGuruEntry[]> {
|
|
143
|
+
const ssrf = await resolveAndValidateUrl(APIS_GURU_LIST_URL);
|
|
144
|
+
if (!ssrf.safe) {
|
|
145
|
+
throw new Error(`SSRF check failed for APIs.guru list URL: ${ssrf.reason}`);
|
|
146
|
+
}
|
|
147
|
+
const text = await fetchWithSizeLimit(APIS_GURU_LIST_URL, MAX_LIST_SIZE);
|
|
148
|
+
try {
|
|
149
|
+
return parseApisGuruList(JSON.parse(text) as Record<string, any>);
|
|
150
|
+
} catch {
|
|
151
|
+
throw new Error(`Invalid JSON from ${APIS_GURU_LIST_URL}: ${text.slice(0, 100)}`);
|
|
152
|
+
}
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
/**
|
|
156
|
+
* Fetch a single OpenAPI spec by URL and return the parsed JSON.
|
|
157
|
+
*/
|
|
158
|
+
export async function fetchSpec(specUrl: string): Promise<Record<string, any>> {
|
|
159
|
+
const ssrf = await resolveAndValidateUrl(specUrl);
|
|
160
|
+
if (!ssrf.safe) {
|
|
161
|
+
throw new Error(`SSRF check failed for spec URL ${specUrl}: ${ssrf.reason}`);
|
|
162
|
+
}
|
|
163
|
+
const text = await fetchWithSizeLimit(specUrl, MAX_SPEC_SIZE, { redirect: 'error' });
|
|
164
|
+
try {
|
|
165
|
+
return JSON.parse(text) as Record<string, any>;
|
|
166
|
+
} catch {
|
|
167
|
+
throw new Error(`Invalid JSON from ${specUrl}: ${text.slice(0, 100)}`);
|
|
168
|
+
}
|
|
169
|
+
}
|
|
@@ -0,0 +1,281 @@
|
|
|
1
|
+
// src/skill/merge.ts
|
|
2
|
+
import type { SkillFile, SkillEndpoint, ImportMeta, MergeResult } from '../types.js';
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* Normalize a parameterized path by replacing all named param placeholders
|
|
6
|
+
* with the generic `:_` placeholder, enabling matching across different param
|
|
7
|
+
* naming conventions (e.g. `:id` vs `:userId` vs `:user_id`).
|
|
8
|
+
*
|
|
9
|
+
* @example
|
|
10
|
+
* normalizePath('/repos/:owner/:repo') // → '/repos/:_/:_'
|
|
11
|
+
* normalizePath('/users/list') // → '/users/list'
|
|
12
|
+
*/
|
|
13
|
+
export function normalizePath(path: string): string {
|
|
14
|
+
return path.replace(/:[a-zA-Z_]\w*/g, ':_');
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
/**
|
|
18
|
+
* Build a match key for endpoint deduplication: METHOD + normalized path.
|
|
19
|
+
*/
|
|
20
|
+
function matchKey(method: string, path: string): string {
|
|
21
|
+
return `${method.toUpperCase()} ${normalizePath(path)}`;
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
/**
|
|
25
|
+
* Merge query params from a captured endpoint with params from an imported
|
|
26
|
+
* spec endpoint.
|
|
27
|
+
*
|
|
28
|
+
* Rules:
|
|
29
|
+
* - Captured `example` values are preserved (captured data is sacred).
|
|
30
|
+
* - Spec `type`, `required`, `enum` augment the captured params.
|
|
31
|
+
* - New params that only exist in the spec are added wholesale.
|
|
32
|
+
*/
|
|
33
|
+
function mergeQueryParams(
|
|
34
|
+
captured: SkillEndpoint['queryParams'],
|
|
35
|
+
specParams: SkillEndpoint['queryParams'],
|
|
36
|
+
): SkillEndpoint['queryParams'] {
|
|
37
|
+
const merged = { ...captured };
|
|
38
|
+
|
|
39
|
+
for (const [name, specParam] of Object.entries(specParams)) {
|
|
40
|
+
if (name in merged) {
|
|
41
|
+
// Param exists in captured — keep captured example, augment with spec metadata
|
|
42
|
+
const existing = merged[name];
|
|
43
|
+
merged[name] = {
|
|
44
|
+
...existing,
|
|
45
|
+
// Take spec type only if captured has generic 'string' and spec is more specific
|
|
46
|
+
type: existing.type === 'string' && specParam.type !== 'string' ? specParam.type : existing.type,
|
|
47
|
+
// Always preserve captured example value
|
|
48
|
+
example: existing.example,
|
|
49
|
+
// Add spec enum if present
|
|
50
|
+
...(specParam.enum !== undefined ? { enum: specParam.enum } : {}),
|
|
51
|
+
// Add spec required flag if present
|
|
52
|
+
...(specParam.required !== undefined ? { required: specParam.required } : {}),
|
|
53
|
+
// Mark as also coming from spec
|
|
54
|
+
...(specParam.fromSpec ? { fromSpec: true } : {}),
|
|
55
|
+
};
|
|
56
|
+
} else {
|
|
57
|
+
// New param only in spec — add it
|
|
58
|
+
merged[name] = specParam;
|
|
59
|
+
}
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
return merged;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
/**
|
|
66
|
+
* Determine whether an imported endpoint would enrich an existing captured
|
|
67
|
+
* endpoint (i.e. adds new metadata not already present).
|
|
68
|
+
*/
|
|
69
|
+
function wouldEnrich(existing: SkillEndpoint, imported: SkillEndpoint): boolean {
|
|
70
|
+
if (!existing.description && imported.description) return true;
|
|
71
|
+
if (!existing.specSource && imported.specSource) return true;
|
|
72
|
+
|
|
73
|
+
// Check if any new query params would be added or enriched
|
|
74
|
+
for (const [name, specParam] of Object.entries(imported.queryParams)) {
|
|
75
|
+
if (!(name in existing.queryParams)) return true;
|
|
76
|
+
const ep = existing.queryParams[name];
|
|
77
|
+
if (!ep.enum && specParam.enum) return true;
|
|
78
|
+
if (ep.required === undefined && specParam.required !== undefined) return true;
|
|
79
|
+
if (ep.type === 'string' && specParam.type !== 'string') return true;
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
return false;
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
/**
 * Pure function — no I/O.
 *
 * Merges imported OpenAPI endpoints into an existing skill file.
 *
 * Captured data is sacred: it always wins on confidence, examples, and
 * endpoint provenance. Spec data can only enrich (add description, specSource,
 * query param enum/required/type) or fill gaps (add missing endpoints).
 *
 * Match logic: METHOD + normalizePath(path). This allows `:owner` and `:user`
 * to be considered the same parameter slot.
 *
 * @param existing The existing skill file on disk, or null if none exists.
 * @param imported Endpoints parsed from the OpenAPI spec.
 * @param importMeta Metadata about the import (spec URL, version, etc.).
 * @returns MergeResult with the updated SkillFile and a diff summary.
 */
export function mergeSkillFile(
  existing: SkillFile | null,
  imported: SkillEndpoint[],
  importMeta: ImportMeta,
): MergeResult {
  // Single timestamp reused for capturedAt and the history entry.
  const now = new Date().toISOString();

  // Diff counters reported back to the caller.
  let preserved = 0;
  let added = 0;
  let enriched = 0;
  let skipped = 0;

  // --- Case: no existing file — create a new SkillFile from imported endpoints ---
  if (existing === null) {
    const endpoints = imported.map(ep => ({
      ...ep,
      normalizedPath: normalizePath(ep.path),
    }));

    // Domain and baseUrl are derived from the spec URL; both helpers throw
    // when specUrl is unparseable, so the whole merge fails fast.
    const skillFile: SkillFile = {
      version: '1.2',
      domain: extractDomainFromMeta(importMeta),
      capturedAt: now,
      baseUrl: extractBaseUrlFromMeta(importMeta),
      endpoints,
      metadata: {
        captureCount: 0,
        filteredCount: 0,
        toolVersion: '1.0.0',
        importHistory: [{
          specUrl: importMeta.specUrl,
          specVersion: importMeta.specVersion,
          importedAt: now,
          endpointsAdded: endpoints.length,
          endpointsEnriched: 0,
        }],
      },
      provenance: 'imported',
    };

    added = endpoints.length;

    return {
      skillFile,
      diff: { preserved, added, enriched, skipped },
    };
  }

  // --- Case: existing file present — merge into it ---

  // Build a map from match-key → existing endpoint (mutable copy)
  const existingMap = new Map<string, SkillEndpoint>();
  for (const ep of existing.endpoints) {
    existingMap.set(matchKey(ep.method, ep.path), ep);
  }

  // Build a map from match-key → imported endpoint
  const importedMap = new Map<string, SkillEndpoint>();
  for (const ep of imported) {
    const key = matchKey(ep.method, ep.path);
    // If multiple imported endpoints map to the same key, last wins
    // (collision is reported on stderr but not treated as an error).
    if (importedMap.has(key)) {
      process.stderr.write(`[openapi-import] Warning: ${ep.method} ${ep.path} collides with existing import after normalization\n`);
    }
    importedMap.set(key, ep);
  }

  // Process: update or preserve existing endpoints
  const resultEndpoints: SkillEndpoint[] = [];

  for (const [key, existingEp] of existingMap) {
    const importedEp = importedMap.get(key);

    if (!importedEp) {
      // Not in import — preserve as-is (captured endpoint with no match in spec)
      resultEndpoints.push({
        ...existingEp,
        normalizedPath: normalizePath(existingEp.path),
      });
      preserved++;
      continue;
    }

    // Imported endpoint matches an existing one — check if it would add anything new
    if (!wouldEnrich(existingEp, importedEp)) {
      resultEndpoints.push({
        ...existingEp,
        normalizedPath: normalizePath(existingEp.path),
      });
      // "skipped" means the import is redundant — existing already has spec metadata.
      // "preserved" means the captured endpoint is untouched (import had nothing for it,
      // or both sides are bare with no spec data to exchange).
      const existingHasSpecData = !!(existingEp.specSource || existingEp.description);
      const importHasSpecData = !!(importedEp.specSource || importedEp.description);
      if (existingHasSpecData || importHasSpecData) {
        // Spec data was already integrated (or import tried to add it but it's already present)
        skipped++;
      } else {
        // Neither side has spec enrichment data — captured endpoint simply preserved
        preserved++;
      }
      continue;
    }

    // Enrich the captured endpoint with spec metadata
    const mergedQueryParams = mergeQueryParams(existingEp.queryParams, importedEp.queryParams);

    const enrichedEp: SkillEndpoint = {
      ...existingEp,
      normalizedPath: normalizePath(existingEp.path),
      // Augment with spec fields (only if not already present)
      ...(importedEp.description && !existingEp.description ? { description: importedEp.description } : {}),
      ...(importedEp.specSource && !existingEp.specSource ? { specSource: importedEp.specSource } : {}),
      // Confidence never downgrades.
      // NOTE(review): when both confidences are undefined (or both 0),
      // Math.max yields 0 and the `||` falls back to existingEp.confidence,
      // which may itself be undefined — confirm this fallback is intended.
      confidence: Math.max(existingEp.confidence ?? 0, importedEp.confidence ?? 0) || existingEp.confidence,
      // Keep captured provenance
      endpointProvenance: existingEp.endpointProvenance,
      queryParams: mergedQueryParams,
    };

    resultEndpoints.push(enrichedEp);
    enriched++;
  }

  // Add endpoints from import that don't exist in the existing file
  for (const [key, importedEp] of importedMap) {
    if (!existingMap.has(key)) {
      resultEndpoints.push({
        ...importedEp,
        normalizedPath: normalizePath(importedEp.path),
      });
      added++;
    }
  }

  // Build updated import history (append-only; prior entries kept verbatim)
  const prevHistory = existing.metadata.importHistory ?? [];
  const newHistoryEntry = {
    specUrl: importMeta.specUrl,
    specVersion: importMeta.specVersion,
    importedAt: now,
    endpointsAdded: added,
    endpointsEnriched: enriched,
  };

  const skillFile: SkillFile = {
    ...existing,
    endpoints: resultEndpoints,
    metadata: {
      ...existing.metadata,
      importHistory: [...prevHistory, newHistoryEntry],
    },
  };

  return {
    skillFile,
    diff: { preserved, added, enriched, skipped },
  };
}
|
|
261
|
+
|
|
262
|
+
// ---------------------------------------------------------------------------
|
|
263
|
+
// Internal helpers
|
|
264
|
+
// ---------------------------------------------------------------------------
|
|
265
|
+
|
|
266
|
+
function extractDomainFromMeta(meta: ImportMeta): string {
|
|
267
|
+
try {
|
|
268
|
+
return new URL(meta.specUrl).hostname;
|
|
269
|
+
} catch {
|
|
270
|
+
throw new Error(`Cannot determine domain from specUrl: ${meta.specUrl}`);
|
|
271
|
+
}
|
|
272
|
+
}
|
|
273
|
+
|
|
274
|
+
function extractBaseUrlFromMeta(meta: ImportMeta): string {
|
|
275
|
+
try {
|
|
276
|
+
const u = new URL(meta.specUrl);
|
|
277
|
+
return `${u.protocol}//${u.hostname}`;
|
|
278
|
+
} catch {
|
|
279
|
+
throw new Error(`Cannot determine base URL from specUrl: ${meta.specUrl}`);
|
|
280
|
+
}
|
|
281
|
+
}
|