@scalar/workspace-store 0.25.3 → 0.26.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +20 -0
- package/dist/client.d.ts.map +1 -1
- package/dist/client.js +25 -19
- package/dist/client.js.map +2 -2
- package/dist/events/definitions/hooks.d.ts +13 -2
- package/dist/events/definitions/hooks.d.ts.map +1 -1
- package/dist/events/definitions/operation.d.ts +11 -0
- package/dist/events/definitions/operation.d.ts.map +1 -1
- package/dist/events/definitions/ui.d.ts +12 -0
- package/dist/events/definitions/ui.d.ts.map +1 -1
- package/dist/helpers/apply-selective-updates.d.ts +1 -1
- package/dist/helpers/apply-selective-updates.d.ts.map +1 -1
- package/dist/helpers/apply-selective-updates.js +13 -3
- package/dist/helpers/apply-selective-updates.js.map +3 -3
- package/dist/mutators/fetch-request-to-har.d.ts +62 -0
- package/dist/mutators/fetch-request-to-har.d.ts.map +1 -0
- package/dist/mutators/fetch-request-to-har.js +117 -0
- package/dist/mutators/fetch-request-to-har.js.map +7 -0
- package/dist/mutators/fetch-response-to-har.d.ts +67 -0
- package/dist/mutators/fetch-response-to-har.d.ts.map +1 -0
- package/dist/mutators/fetch-response-to-har.js +104 -0
- package/dist/mutators/fetch-response-to-har.js.map +7 -0
- package/dist/mutators/har-to-operation.d.ts +37 -0
- package/dist/mutators/har-to-operation.d.ts.map +1 -0
- package/dist/mutators/har-to-operation.js +146 -0
- package/dist/mutators/har-to-operation.js.map +7 -0
- package/dist/mutators/index.d.ts +4 -0
- package/dist/mutators/index.d.ts.map +1 -1
- package/dist/mutators/operation.d.ts +5 -0
- package/dist/mutators/operation.d.ts.map +1 -1
- package/dist/mutators/operation.js +74 -1
- package/dist/mutators/operation.js.map +2 -2
- package/dist/schemas/extensions/document/x-scalar-is-dirty.d.ts +43 -0
- package/dist/schemas/extensions/document/x-scalar-is-dirty.d.ts.map +1 -0
- package/dist/schemas/extensions/document/x-scalar-is-dirty.js +9 -0
- package/dist/schemas/extensions/document/x-scalar-is-dirty.js.map +7 -0
- package/dist/schemas/extensions/operation/x-scalar-history.d.ts +217 -0
- package/dist/schemas/extensions/operation/x-scalar-history.d.ts.map +1 -0
- package/dist/schemas/extensions/operation/x-scalar-history.js +100 -0
- package/dist/schemas/extensions/operation/x-scalar-history.js.map +7 -0
- package/dist/schemas/inmemory-workspace.d.ts +64 -0
- package/dist/schemas/inmemory-workspace.d.ts.map +1 -1
- package/dist/schemas/reference-config/index.d.ts +64 -0
- package/dist/schemas/reference-config/index.d.ts.map +1 -1
- package/dist/schemas/reference-config/settings.d.ts +64 -0
- package/dist/schemas/reference-config/settings.d.ts.map +1 -1
- package/dist/schemas/v3.1/strict/openapi-document.d.ts +2306 -1
- package/dist/schemas/v3.1/strict/openapi-document.d.ts.map +1 -1
- package/dist/schemas/v3.1/strict/openapi-document.js +3 -1
- package/dist/schemas/v3.1/strict/openapi-document.js.map +2 -2
- package/dist/schemas/v3.1/strict/operation.d.ts +64 -1
- package/dist/schemas/v3.1/strict/operation.d.ts.map +1 -1
- package/dist/schemas/v3.1/strict/operation.js +3 -1
- package/dist/schemas/v3.1/strict/operation.js.map +2 -2
- package/dist/schemas/workspace.d.ts +448 -0
- package/dist/schemas/workspace.d.ts.map +1 -1
- package/package.json +5 -5
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import type { HarRequest } from '@scalar/snippetz';
type FetchRequestToHarProps = {
    /** The Fetch API Request object to convert */
    request: Request;
    /**
     * Whether to include the request body in the HAR postData.
     * Note: Reading the body consumes it, so the request will be cloned automatically.
     * @default true
     */
    includeBody?: boolean;
    /**
     * HTTP version string to use (since Fetch API does not expose this)
     * @default 'HTTP/1.1'
     */
    httpVersion?: string;
    /**
     * The maximum size of the request body to include in the HAR postData.
     * @default 1MB
     */
    bodySizeLimit?: number;
};
/**
 * Converts a Fetch API Request object to HAR (HTTP Archive) Request format.
 *
 * This function transforms a standard JavaScript Fetch API Request into the
 * HAR format, which is useful for:
 * - Recording HTTP requests for replay or analysis
 * - Creating request fixtures from real API calls
 * - Debugging and monitoring HTTP traffic
 * - Storing request history in a standard format
 * - Generating API documentation from real requests
 *
 * The conversion handles:
 * - Request method and URL
 * - Headers extraction (excluding sensitive headers if needed)
 * - Query parameters extraction from URL
 * - Cookie extraction from headers
 * - Request body reading (with automatic cloning to preserve the original)
 * - Content-Type detection and MIME type extraction
 * - Size calculations for headers and body
 * - Form data bodies are converted to params array
 * - Other body types are read as text
 *
 * Note: The Fetch API does not expose the HTTP version, so it defaults to HTTP/1.1
 * unless specified otherwise.
 *
 * @see https://w3c.github.io/web-performance/specs/HAR/Overview.html
 * @see https://developer.mozilla.org/en-US/docs/Web/API/Request
 *
 * @example
 * const request = new Request('https://api.example.com/users', {
 *   method: 'POST',
 *   headers: { 'Content-Type': 'application/json' },
 *   body: JSON.stringify({ name: 'John' })
 * })
 * const harRequest = await fetchRequestToHar({ request })
 * console.log(harRequest.method) // 'POST'
 * console.log(harRequest.postData?.text) // '{"name":"John"}'
 */
export declare const fetchRequestToHar: ({ request, includeBody, httpVersion, bodySizeLimit, }: FetchRequestToHarProps) => Promise<HarRequest>;
export {};
//# sourceMappingURL=fetch-request-to-har.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"fetch-request-to-har.d.ts","sourceRoot":"","sources":["../../src/mutators/fetch-request-to-har.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,kBAAkB,CAAA;AAElD,KAAK,sBAAsB,GAAG;IAC5B,8CAA8C;IAC9C,OAAO,EAAE,OAAO,CAAA;IAChB;;;;OAIG;IACH,WAAW,CAAC,EAAE,OAAO,CAAA;IACrB;;;OAGG;IACH,WAAW,CAAC,EAAE,MAAM,CAAA;IACpB;;;OAGG;IACH,aAAa,CAAC,EAAE,MAAM,CAAA;CACvB,CAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAqCG;AACH,eAAO,MAAM,iBAAiB,GAAU,uDAMrC,sBAAsB,KAAG,OAAO,CAAC,UAAU,CA+C7C,CAAA"}
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
/**
 * Converts a Fetch API Request object into a HAR request record.
 * The request is never consumed: the body is always read from a clone.
 */
const fetchRequestToHar = async ({
  request,
  includeBody = true,
  httpVersion = "HTTP/1.1",
  // Default to 1MB
  bodySizeLimit = 1048576
}) => {
  // Query parameters come straight from the parsed URL.
  const parsedUrl = new URL(request.url);
  const queryString = [...parsedUrl.searchParams.entries()].map(([name, value]) => ({ name, value }));

  // Headers, with the Cookie header exploded into individual cookies.
  const { headers, headersSize, cookies } = processRequestHeaders(request);

  // MIME type is the content-type header stripped of its parameters.
  const contentTypeHeader = request.headers.get("content-type");
  const mimeType = contentTypeHeader?.split(";")[0]?.trim() ?? "text/plain";

  // Read the body (from a clone) only when requested and within the size limit.
  let bodyDetails = { text: "", size: -1 };
  if (includeBody && request.body) {
    const details = await processRequestBody(request.clone());
    if (details.size <= bodySizeLimit) {
      bodyDetails = details;
    }
  }

  // Form-data bodies carry a params array; everything else carries text.
  const postData = "params" in bodyDetails
    ? { mimeType, params: bodyDetails.params }
    : { mimeType, text: bodyDetails.text };

  return {
    method: request.method,
    url: request.url,
    httpVersion,
    headers,
    cookies,
    queryString,
    headersSize,
    bodySize: bodyDetails.size,
    postData
  };
};
|
|
40
|
+
/**
 * Reads a request body for HAR capture.
 * Form bodies become `{ params, size }`; other bodies become `{ text, size }`
 * (size in bytes). Binary octet-stream bodies are skipped with size -1.
 */
const processRequestBody = async (request) => {
  const formData = await tryGetRequestFormData(request.clone());
  if (formData) {
    const params = [];
    let size = 0;
    for (const [name, value] of formData.entries()) {
      // Files are represented by their name only, prefixed with '@'.
      const serialized = value instanceof File ? `@${value.name}` : value;
      params.push({ name, value: serialized });
      size += serialized.length;
    }
    return { params, size };
  }
  // Skip binary payloads entirely.
  if (request.headers.get("content-type")?.includes("application/octet-stream")) {
    return { text: "", size: -1 };
  }
  // Decode everything else as text, with the size measured in bytes.
  const buffer = await request.arrayBuffer();
  return { size: buffer.byteLength, text: new TextDecoder().decode(buffer) };
};
|
|
65
|
+
async function tryGetRequestFormData(request) {
|
|
66
|
+
if (typeof request.formData !== "function") {
|
|
67
|
+
return null;
|
|
68
|
+
}
|
|
69
|
+
if (request.bodyUsed) {
|
|
70
|
+
return null;
|
|
71
|
+
}
|
|
72
|
+
const contentType = request.headers.get("content-type") ?? "";
|
|
73
|
+
if (!contentType.includes("multipart/form-data") && !contentType.includes("application/x-www-form-urlencoded")) {
|
|
74
|
+
return null;
|
|
75
|
+
}
|
|
76
|
+
try {
|
|
77
|
+
return await request.formData();
|
|
78
|
+
} catch {
|
|
79
|
+
return null;
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
/**
 * Collects request headers into HAR form.
 * The Cookie header is not listed as a header; it is exploded into the
 * cookies array instead. headersSize approximates the wire size.
 */
const processRequestHeaders = (request) => {
  const headers = [];
  const cookies = [];
  let headersSize = 0;
  for (const [name, value] of request.headers.entries()) {
    if (name.toLowerCase() === "cookie") {
      cookies.push(...parseCookieHeader(value).cookies);
    } else {
      headers.push({ name, value });
      // Approximate "name: value\r\n" on the wire.
      headersSize += name.length + 2 + value.length + 2;
    }
  }
  return { headers, headersSize, cookies };
};
|
|
97
|
+
/**
 * Parses a Cookie header value into an array of cookie objects.
 * Cookie format: name1=value1; name2=value2
 * Segments without an '=' are skipped; `size` approximates the wire size.
 */
const parseCookieHeader = (cookieValue) => {
  const cookies = [];
  let size = 0;
  for (const segment of cookieValue.split(";")) {
    const pair = segment.trim();
    const separator = pair.indexOf("=");
    // Malformed segments (no '=') are ignored.
    if (separator === -1) {
      continue;
    }
    const name = pair.substring(0, separator).trim();
    const value = pair.substring(separator + 1).trim();
    cookies.push({ name, value });
    size += name.length + 2 + value.length + 2;
  }
  return { cookies, size };
};
|
|
114
|
+
export {
|
|
115
|
+
fetchRequestToHar
|
|
116
|
+
};
|
|
117
|
+
//# sourceMappingURL=fetch-request-to-har.js.map
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 3,
|
|
3
|
+
"sources": ["../../src/mutators/fetch-request-to-har.ts"],
|
|
4
|
+
"sourcesContent": ["import type { HarRequest } from '@scalar/snippetz'\n\ntype FetchRequestToHarProps = {\n /** The Fetch API Request object to convert */\n request: Request\n /**\n * Whether to include the request body in the HAR postData.\n * Note: Reading the body consumes it, so the request will be cloned automatically.\n * @default true\n */\n includeBody?: boolean\n /**\n * HTTP version string to use (since Fetch API does not expose this)\n * @default 'HTTP/1.1'\n */\n httpVersion?: string\n /**\n * The maximum size of the request body to include in the HAR postData.\n * @default 1MB\n */\n bodySizeLimit?: number\n}\n\n/**\n * Converts a Fetch API Request object to HAR (HTTP Archive) Request format.\n *\n * This function transforms a standard JavaScript Fetch API Request into the\n * HAR format, which is useful for:\n * - Recording HTTP requests for replay or analysis\n * - Creating request fixtures from real API calls\n * - Debugging and monitoring HTTP traffic\n * - Storing request history in a standard format\n * - Generating API documentation from real requests\n *\n * The conversion handles:\n * - Request method and URL\n * - Headers extraction (excluding sensitive headers if needed)\n * - Query parameters extraction from URL\n * - Cookie extraction from headers\n * - Request body reading (with automatic cloning to preserve the original)\n * - Content-Type detection and MIME type extraction\n * - Size calculations for headers and body\n * - Form data bodies are converted to params array\n * - Other body types are read as text\n *\n * Note: The Fetch API does not expose the HTTP version, so it defaults to HTTP/1.1\n * unless specified otherwise.\n *\n * @see https://w3c.github.io/web-performance/specs/HAR/Overview.html\n * @see https://developer.mozilla.org/en-US/docs/Web/API/Request\n *\n * @example\n * const request = new Request('https://api.example.com/users', {\n * method: 'POST',\n * headers: { 'Content-Type': 'application/json' },\n * body: 
JSON.stringify({ name: 'John' })\n * })\n * const harRequest = await fetchRequestToHar({ request })\n * console.log(harRequest.method) // 'POST'\n * console.log(harRequest.postData?.text) // '{\"name\":\"John\"}'\n */\nexport const fetchRequestToHar = async ({\n request,\n includeBody = true,\n httpVersion = 'HTTP/1.1',\n // Default to 1MB\n bodySizeLimit = 1048576,\n}: FetchRequestToHarProps): Promise<HarRequest> => {\n // Extract query string from URL\n const url = new URL(request.url)\n\n // Extract the query strings from the URL\n const queryString = Array.from(url.searchParams.entries()).map(([name, value]) => ({ name, value }))\n\n // Extract the headers from the request\n const { headers, headersSize, cookies } = processRequestHeaders(request)\n\n // Extract the MIME type from the request headers\n const mimeType = request.headers.get('content-type')?.split(';')[0]?.trim() ?? 'text/plain'\n\n // Read the request body if requested\n const bodyDetails = await (async () => {\n if (includeBody && request.body) {\n const details = await processRequestBody(request.clone())\n if (details.size <= bodySizeLimit) {\n return details\n }\n }\n return { text: '', size: -1 }\n })()\n\n // Create the HAR request object\n const harRequest: HarRequest = {\n method: request.method,\n url: request.url,\n httpVersion,\n headers,\n cookies,\n queryString,\n headersSize,\n bodySize: bodyDetails.size,\n postData:\n 'params' in bodyDetails\n ? 
{\n mimeType,\n params: bodyDetails.params,\n }\n : {\n mimeType,\n text: bodyDetails.text,\n },\n }\n\n return harRequest\n}\n\nconst processRequestBody = async (request: Request) => {\n const formData = await tryGetRequestFormData(request.clone())\n if (formData) {\n return Array.from(formData.entries()).reduce<{ params: { name: string; value: string }[]; size: number }>(\n (acc, [name, value]) => {\n if (value instanceof File) {\n const fileName = `@${value.name}`\n acc.params.push({ name, value: fileName })\n acc.size += fileName.length\n return acc\n }\n\n acc.params.push({ name, value })\n acc.size += value.length\n return acc\n },\n { params: [], size: 0 },\n )\n }\n // Skip binary bodies\n if (request.headers.get('content-type')?.includes('application/octet-stream')) {\n return { text: '', size: -1 }\n }\n\n // Read the request body as text\n const arrayBuffer = await request.arrayBuffer()\n const size = arrayBuffer.byteLength\n return { size, text: new TextDecoder().decode(arrayBuffer) }\n}\n\nasync function tryGetRequestFormData(request: Request): Promise<FormData | null> {\n if (typeof request.formData !== 'function') {\n return null\n }\n\n if (request.bodyUsed) {\n return null\n }\n\n const contentType = request.headers.get('content-type') ?? 
''\n if (!contentType.includes('multipart/form-data') && !contentType.includes('application/x-www-form-urlencoded')) {\n return null\n }\n\n try {\n return await request.formData()\n } catch {\n return null\n }\n}\n\nconst processRequestHeaders = (request: Request) => {\n return Array.from(request.headers.entries()).reduce<{\n headers: { name: string; value: string }[]\n headersSize: number\n cookies: { name: string; value: string }[]\n }>(\n (acc, [name, value]) => {\n if (name.toLowerCase() === 'cookie') {\n const parsedCookies = parseCookieHeader(value)\n acc.cookies.push(...parsedCookies.cookies)\n } else {\n acc.headers.push({ name, value })\n acc.headersSize += name.length + 2 + value.length + 2\n }\n return acc\n },\n { headers: [], headersSize: 0, cookies: [] },\n )\n}\n\n/**\n * Parses a Cookie header value into an array of cookie objects.\n * Cookie format: name1=value1; name2=value2\n */\nconst parseCookieHeader = (cookieValue: string) => {\n return cookieValue.split(';').reduce<{ cookies: { name: string; value: string }[]; size: number }>(\n (acc, part) => {\n const trimmedPart = part.trim()\n const equalIndex = trimmedPart.indexOf('=')\n\n if (equalIndex === -1) {\n return acc\n }\n\n const name = trimmedPart.substring(0, equalIndex).trim()\n const value = trimmedPart.substring(equalIndex + 1).trim()\n\n acc.cookies.push({ name, value })\n acc.size += name.length + 2 + value.length + 2\n return acc\n },\n { cookies: [], size: 0 },\n )\n}\n"],
|
|
5
|
+
"mappings": "AA6DO,MAAM,oBAAoB,OAAO;AAAA,EACtC;AAAA,EACA,cAAc;AAAA,EACd,cAAc;AAAA;AAAA,EAEd,gBAAgB;AAClB,MAAmD;AAEjD,QAAM,MAAM,IAAI,IAAI,QAAQ,GAAG;AAG/B,QAAM,cAAc,MAAM,KAAK,IAAI,aAAa,QAAQ,CAAC,EAAE,IAAI,CAAC,CAAC,MAAM,KAAK,OAAO,EAAE,MAAM,MAAM,EAAE;AAGnG,QAAM,EAAE,SAAS,aAAa,QAAQ,IAAI,sBAAsB,OAAO;AAGvE,QAAM,WAAW,QAAQ,QAAQ,IAAI,cAAc,GAAG,MAAM,GAAG,EAAE,CAAC,GAAG,KAAK,KAAK;AAG/E,QAAM,cAAc,OAAO,YAAY;AACrC,QAAI,eAAe,QAAQ,MAAM;AAC/B,YAAM,UAAU,MAAM,mBAAmB,QAAQ,MAAM,CAAC;AACxD,UAAI,QAAQ,QAAQ,eAAe;AACjC,eAAO;AAAA,MACT;AAAA,IACF;AACA,WAAO,EAAE,MAAM,IAAI,MAAM,GAAG;AAAA,EAC9B,GAAG;AAGH,QAAM,aAAyB;AAAA,IAC7B,QAAQ,QAAQ;AAAA,IAChB,KAAK,QAAQ;AAAA,IACb;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,YAAY;AAAA,IACtB,UACE,YAAY,cACR;AAAA,MACE;AAAA,MACA,QAAQ,YAAY;AAAA,IACtB,IACA;AAAA,MACE;AAAA,MACA,MAAM,YAAY;AAAA,IACpB;AAAA,EACR;AAEA,SAAO;AACT;AAEA,MAAM,qBAAqB,OAAO,YAAqB;AACrD,QAAM,WAAW,MAAM,sBAAsB,QAAQ,MAAM,CAAC;AAC5D,MAAI,UAAU;AACZ,WAAO,MAAM,KAAK,SAAS,QAAQ,CAAC,EAAE;AAAA,MACpC,CAAC,KAAK,CAAC,MAAM,KAAK,MAAM;AACtB,YAAI,iBAAiB,MAAM;AACzB,gBAAM,WAAW,IAAI,MAAM,IAAI;AAC/B,cAAI,OAAO,KAAK,EAAE,MAAM,OAAO,SAAS,CAAC;AACzC,cAAI,QAAQ,SAAS;AACrB,iBAAO;AAAA,QACT;AAEA,YAAI,OAAO,KAAK,EAAE,MAAM,MAAM,CAAC;AAC/B,YAAI,QAAQ,MAAM;AAClB,eAAO;AAAA,MACT;AAAA,MACA,EAAE,QAAQ,CAAC,GAAG,MAAM,EAAE;AAAA,IACxB;AAAA,EACF;AAEA,MAAI,QAAQ,QAAQ,IAAI,cAAc,GAAG,SAAS,0BAA0B,GAAG;AAC7E,WAAO,EAAE,MAAM,IAAI,MAAM,GAAG;AAAA,EAC9B;AAGA,QAAM,cAAc,MAAM,QAAQ,YAAY;AAC9C,QAAM,OAAO,YAAY;AACzB,SAAO,EAAE,MAAM,MAAM,IAAI,YAAY,EAAE,OAAO,WAAW,EAAE;AAC7D;AAEA,eAAe,sBAAsB,SAA4C;AAC/E,MAAI,OAAO,QAAQ,aAAa,YAAY;AAC1C,WAAO;AAAA,EACT;AAEA,MAAI,QAAQ,UAAU;AACpB,WAAO;AAAA,EACT;AAEA,QAAM,cAAc,QAAQ,QAAQ,IAAI,cAAc,KAAK;AAC3D,MAAI,CAAC,YAAY,SAAS,qBAAqB,KAAK,CAAC,YAAY,SAAS,mCAAmC,GAAG;AAC9G,WAAO;AAAA,EACT;AAEA,MAAI;AACF,WAAO,MAAM,QAAQ,SAAS;AAAA,EAChC,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA,MAAM,wBAAwB,CAAC,YAAqB;AAClD,SAAO,MAAM,KAAK,QAAQ,QAAQ,QAAQ,CAAC,EAAE;AAAA,IAK3C,CAAC,KAAK,CAAC,MAAM,KAAK,MAAM;AACtB,UAAI,KAAK,YAAY,MAAM,UAAU;AACnC,cAAM,gBAAgB,kBAAkB,KAA
K;AAC7C,YAAI,QAAQ,KAAK,GAAG,cAAc,OAAO;AAAA,MAC3C,OAAO;AACL,YAAI,QAAQ,KAAK,EAAE,MAAM,MAAM,CAAC;AAChC,YAAI,eAAe,KAAK,SAAS,IAAI,MAAM,SAAS;AAAA,MACtD;AACA,aAAO;AAAA,IACT;AAAA,IACA,EAAE,SAAS,CAAC,GAAG,aAAa,GAAG,SAAS,CAAC,EAAE;AAAA,EAC7C;AACF;AAMA,MAAM,oBAAoB,CAAC,gBAAwB;AACjD,SAAO,YAAY,MAAM,GAAG,EAAE;AAAA,IAC5B,CAAC,KAAK,SAAS;AACb,YAAM,cAAc,KAAK,KAAK;AAC9B,YAAM,aAAa,YAAY,QAAQ,GAAG;AAE1C,UAAI,eAAe,IAAI;AACrB,eAAO;AAAA,MACT;AAEA,YAAM,OAAO,YAAY,UAAU,GAAG,UAAU,EAAE,KAAK;AACvD,YAAM,QAAQ,YAAY,UAAU,aAAa,CAAC,EAAE,KAAK;AAEzD,UAAI,QAAQ,KAAK,EAAE,MAAM,MAAM,CAAC;AAChC,UAAI,QAAQ,KAAK,SAAS,IAAI,MAAM,SAAS;AAC7C,aAAO;AAAA,IACT;AAAA,IACA,EAAE,SAAS,CAAC,GAAG,MAAM,EAAE;AAAA,EACzB;AACF;",
|
|
6
|
+
"names": []
|
|
7
|
+
}
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import type { HarResponse } from '@scalar/snippetz';
type FetchResponseToHarProps = {
    /** The Fetch API Response object to convert */
    response: Response;
    /**
     * Whether to include the response body in the HAR content.
     * Note: Reading the body consumes it, so the response will be cloned automatically.
     * Bodies will only be included if they meet the following criteria:
     * - Not a streaming response (text/event-stream)
     * - Text-based content (not binary)
     * - Under 1MB in size
     * @default true
     */
    includeBody?: boolean;
    /**
     * HTTP version string to use (since Fetch API does not expose this)
     * @default 'HTTP/1.1'
     */
    httpVersion?: string;
    /**
     * The maximum size of the response body to include in the HAR content.
     * @default 1MB
     */
    bodySizeLimit?: number;
};
/**
 * Converts a Fetch API Response object to HAR (HTTP Archive) Response format.
 *
 * This function transforms a standard JavaScript Fetch API Response into the
 * HAR format, which is useful for:
 * - Recording HTTP responses for replay or analysis
 * - Creating test fixtures from real API responses
 * - Debugging and monitoring HTTP traffic
 * - Generating API documentation from real responses
 *
 * The conversion handles:
 * - Response status and status text
 * - Headers extraction (including Set-Cookie headers converted to cookies)
 * - Response body reading (with automatic cloning to preserve the original)
 * - Content-Type detection and MIME type extraction
 * - Size calculations for headers and body
 * - Redirect URL extraction from Location header
 *
 * Note: The Fetch API does not expose the HTTP version, so it defaults to HTTP/1.1
 * unless specified otherwise.
 *
 * @see https://w3c.github.io/web-performance/specs/HAR/Overview.html
 * @see https://developer.mozilla.org/en-US/docs/Web/API/Response
 *
 * @example
 * const response = await fetch('https://api.example.com/users')
 * const harResponse = await fetchResponseToHar({ response })
 * console.log(harResponse.status) // 200
 */
export declare const fetchResponseToHar: ({ response, includeBody, httpVersion, bodySizeLimit, }: FetchResponseToHarProps) => Promise<HarResponse>;
/**
 * Checks if the content type is text-based and should be included in HAR.
 * Text-based content types include:
 * - text/* (text/plain, text/html, text/css, etc.)
 * - application/json
 * - application/xml and text/xml
 * - application/javascript
 * - application/*+json and application/*+xml variants
 */
export declare const isTextBasedContent: (contentType: string) => boolean;
export {};
//# sourceMappingURL=fetch-response-to-har.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"fetch-response-to-har.d.ts","sourceRoot":"","sources":["../../src/mutators/fetch-response-to-har.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAA;AAEnD,KAAK,uBAAuB,GAAG;IAC7B,+CAA+C;IAC/C,QAAQ,EAAE,QAAQ,CAAA;IAClB;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,OAAO,CAAA;IACrB;;;OAGG;IACH,WAAW,CAAC,EAAE,MAAM,CAAA;IACpB;;;OAGG;IACH,aAAa,CAAC,EAAE,MAAM,CAAA;CACvB,CAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4BG;AACH,eAAO,MAAM,kBAAkB,GAAU,wDAKtC,uBAAuB,KAAG,OAAO,CAAC,WAAW,CAwC/C,CAAA;AA4CD;;;;;;;;GAQG;AACH,eAAO,MAAM,kBAAkB,GAAI,aAAa,MAAM,KAAG,OAoCxD,CAAA"}
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
/**
 * Converts a Fetch API Response object into a HAR response record.
 * The response is never consumed: the body is always read from a clone.
 */
const fetchResponseToHar = async ({
  response,
  includeBody = true,
  httpVersion = "HTTP/1.1",
  bodySizeLimit = 1048576
}) => {
  // Headers, derived cookies, and an approximate header size.
  const { headers, headersSize, cookies } = processResponseHeaders(response);

  // Redirect target, when the server sent one.
  const redirectURL = response.headers.get("location") || "";

  // The full content-type header (including parameters) is the MIME type.
  const contentType = response.headers.get("content-type") ?? "text/plain";

  // Read the body (from a clone) only when requested and within the size limit.
  let bodyDetails = { text: "", size: -1, encoding: undefined };
  if (includeBody && response.body) {
    const details = await processResponseBody(response.clone());
    if (details.size <= bodySizeLimit) {
      bodyDetails = details;
    }
  }

  return {
    status: response.status,
    statusText: response.statusText,
    httpVersion,
    headers,
    cookies,
    content: {
      size: bodyDetails.size,
      mimeType: contentType,
      text: bodyDetails.text,
      encoding: bodyDetails.encoding
    },
    redirectURL,
    headersSize,
    bodySize: bodyDetails.size
  };
};
|
|
37
|
+
/**
 * Collects response headers into HAR form.
 * Every header is listed; Set-Cookie headers are additionally surfaced
 * as entries in the cookies array. headersSize approximates the wire size.
 */
const processResponseHeaders = (response) => {
  const headers = [];
  const cookies = [];
  let headersSize = 0;
  for (const [name, value] of response.headers.entries()) {
    headers.push({ name, value });
    // Approximate "name: value\r\n" on the wire.
    headersSize += name.length + 2 + value.length + 2;
    if (name.toLowerCase() === "set-cookie") {
      const cookie = parseSetCookieHeader(value);
      if (cookie) {
        cookies.push(cookie);
      }
    }
  }
  return { headers, headersSize, cookies };
};
|
|
53
|
+
/**
 * Reads a response body for HAR capture.
 * Only text-based content is inlined; binary or unreadable bodies are
 * recorded as empty with size -1. Size is measured in bytes.
 */
const processResponseBody = async (response) => {
  const contentType = response.headers.get("content-type");
  if (!contentType || !isTextBasedContent(contentType)) {
    return { text: "", size: -1, encoding: undefined };
  }
  try {
    // Read the raw bytes first so the reported size is in bytes.
    const buffer = await response.arrayBuffer();
    return {
      text: new TextDecoder("utf-8").decode(buffer),
      size: buffer.byteLength,
      encoding: undefined
    };
  } catch {
    // If the body cannot be read, leave it empty.
    return { text: "", size: -1, encoding: undefined };
  }
};
|
|
67
|
+
/**
 * Checks if the content type is text-based and safe to inline into a HAR.
 * Matches text/*, JSON, XML, JavaScript, form-urlencoded, GraphQL, and
 * the +json / +xml structured-syntax suffixes.
 */
const isTextBasedContent = (contentType) => {
  const normalized = contentType.toLowerCase();
  // Any text/* type is trivially text-based.
  if (normalized.startsWith("text/")) {
    return true;
  }
  // Well-known textual application types and suffixes.
  const textualMarkers = [
    "application/json",
    "+json",
    "application/xml",
    "text/xml",
    "+xml",
    "application/javascript",
    "application/x-javascript",
    "application/x-www-form-urlencoded",
    "application/graphql"
  ];
  return textualMarkers.some((marker) => normalized.includes(marker));
};
|
|
86
|
+
/**
 * Parses a Set-Cookie header value into a cookie object.
 * Set-Cookie format: name=value; attribute1=value1; attribute2=value2
 * This simplified parser extracts only the name and value; attributes
 * after the first segment are intentionally ignored.
 * Returns null for malformed input (empty first segment or no '=').
 */
const parseSetCookieHeader = (setCookieValue) => {
  // String#split always yields at least one element, so only an empty
  // first segment needs to be rejected (the old `parts.length === 0`
  // check was unreachable).
  const [firstSegment] = setCookieValue.split(";");
  if (!firstSegment) {
    return null;
  }
  const cookiePart = firstSegment.trim();
  const equalIndex = cookiePart.indexOf("=");
  if (equalIndex === -1) {
    return null;
  }
  const name = cookiePart.substring(0, equalIndex).trim();
  const value = cookiePart.substring(equalIndex + 1).trim();
  return { name, value };
};
|
|
100
|
+
export {
|
|
101
|
+
fetchResponseToHar,
|
|
102
|
+
isTextBasedContent
|
|
103
|
+
};
|
|
104
|
+
//# sourceMappingURL=fetch-response-to-har.js.map
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 3,
|
|
3
|
+
"sources": ["../../src/mutators/fetch-response-to-har.ts"],
|
|
4
|
+
"sourcesContent": ["import type { HarResponse } from '@scalar/snippetz'\n\ntype FetchResponseToHarProps = {\n /** The Fetch API Response object to convert */\n response: Response\n /**\n * Whether to include the response body in the HAR content.\n * Note: Reading the body consumes it, so the response will be cloned automatically.\n * Bodies will only be included if they meet the following criteria:\n * - Not a streaming response (text/event-stream)\n * - Text-based content (not binary)\n * - Under 1MB in size\n * @default true\n */\n includeBody?: boolean\n /**\n * HTTP version string to use (since Fetch API does not expose this)\n * @default 'HTTP/1.1'\n */\n httpVersion?: string\n /**\n * The maximum size of the response body to include in the HAR content.\n * @default 1MB\n */\n bodySizeLimit?: number\n}\n\n/**\n * Converts a Fetch API Response object to HAR (HTTP Archive) Response format.\n *\n * This function transforms a standard JavaScript Fetch API Response into the\n * HAR format, which is useful for:\n * - Recording HTTP responses for replay or analysis\n * - Creating test fixtures from real API responses\n * - Debugging and monitoring HTTP traffic\n * - Generating API documentation from real responses\n *\n * The conversion handles:\n * - Response status and status text\n * - Headers extraction (including Set-Cookie headers converted to cookies)\n * - Response body reading (with automatic cloning to preserve the original)\n * - Content-Type detection and MIME type extraction\n * - Size calculations for headers and body\n * - Redirect URL extraction from Location header\n *\n * Note: The Fetch API does not expose the HTTP version, so it defaults to HTTP/1.1\n * unless specified otherwise.\n *\n * @see https://w3c.github.io/web-performance/specs/HAR/Overview.html\n * @see https://developer.mozilla.org/en-US/docs/Web/API/Response\n *\n * @example\n * const response = await fetch('https://api.example.com/users')\n * const harResponse = await 
fetchResponseToHar({ response })\n * console.log(harResponse.status) // 200\n */\nexport const fetchResponseToHar = async ({\n response,\n includeBody = true,\n httpVersion = 'HTTP/1.1',\n bodySizeLimit = 1048576,\n}: FetchResponseToHarProps): Promise<HarResponse> => {\n // Extract the headers from the response\n const { headers, headersSize, cookies } = processResponseHeaders(response)\n\n // Extract redirect URL from Location header\n const redirectURL = response.headers.get('location') || ''\n\n // Get content type\n const contentType = response.headers.get('content-type') ?? 'text/plain'\n\n // Read the response body if requested\n const bodyDetails = await (async () => {\n if (includeBody && response.body) {\n const details = await processResponseBody(response.clone())\n if (details.size <= bodySizeLimit) {\n return details\n }\n }\n return { text: '', size: -1, encoding: undefined }\n })()\n\n // Create the HAR response object\n const harResponse: HarResponse = {\n status: response.status,\n statusText: response.statusText,\n httpVersion,\n headers,\n cookies,\n content: {\n size: bodyDetails.size,\n mimeType: contentType,\n text: bodyDetails.text,\n encoding: bodyDetails.encoding,\n },\n redirectURL,\n headersSize,\n bodySize: bodyDetails.size,\n }\n\n return harResponse\n}\n\nconst processResponseHeaders = (response: Response) => {\n return Array.from(response.headers.entries()).reduce<{\n headers: { name: string; value: string }[]\n headersSize: number\n cookies: { name: string; value: string }[]\n }>(\n (acc, [name, value]) => {\n acc.headers.push({ name, value })\n acc.headersSize += name.length + 2 + value.length + 2\n\n // Parse Set-Cookie headers into cookies array\n if (name.toLowerCase() === 'set-cookie') {\n const cookie = parseSetCookieHeader(value)\n if (cookie) {\n acc.cookies.push(cookie)\n }\n }\n\n return acc\n },\n { headers: [], headersSize: 0, cookies: [] },\n )\n}\n\nconst processResponseBody = async (response: Response) => {\n const 
contentType = response.headers.get('content-type')\n if (!contentType || !isTextBasedContent(contentType)) {\n return { text: '', size: -1, encoding: undefined }\n }\n\n try {\n // Read as ArrayBuffer to get the size\n const arrayBuffer = await response.arrayBuffer()\n const bodySize = arrayBuffer.byteLength\n const text = new TextDecoder('utf-8').decode(arrayBuffer)\n return { text, size: bodySize, encoding: undefined }\n } catch {\n // If body cannot be read, leave it empty\n return { text: '', size: -1, encoding: undefined }\n }\n}\n\n/**\n * Checks if the content type is text-based and should be included in HAR.\n * Text-based content types include:\n * - text/* (text/plain, text/html, text/css, etc.)\n * - application/json\n * - application/xml and text/xml\n * - application/javascript\n * - application/*+json and application/*+xml variants\n */\nexport const isTextBasedContent = (contentType: string): boolean => {\n const lowerContentType = contentType.toLowerCase()\n\n // Check for text/* types\n if (lowerContentType.startsWith('text/')) {\n return true\n }\n\n // Check for JSON types\n if (lowerContentType.includes('application/json') || lowerContentType.includes('+json')) {\n return true\n }\n\n // Check for XML types\n if (\n lowerContentType.includes('application/xml') ||\n lowerContentType.includes('text/xml') ||\n lowerContentType.includes('+xml')\n ) {\n return true\n }\n\n // Check for JavaScript\n if (lowerContentType.includes('application/javascript') || lowerContentType.includes('application/x-javascript')) {\n return true\n }\n\n // Check for common text-based formats\n if (\n lowerContentType.includes('application/x-www-form-urlencoded') ||\n lowerContentType.includes('application/graphql')\n ) {\n return true\n }\n\n return false\n}\n\n/**\n * Parses a Set-Cookie header value into a cookie object.\n * This is a simplified parser that extracts the name and value.\n * For full cookie parsing with attributes, a more robust parser would be 
needed.\n */\nconst parseSetCookieHeader = (setCookieValue: string): { name: string; value: string } | null => {\n // Set-Cookie format: name=value; attribute1=value1; attribute2=value2\n const parts = setCookieValue.split(';')\n if (parts.length === 0 || !parts[0]) {\n return null\n }\n\n const cookiePart = parts[0].trim()\n const equalIndex = cookiePart.indexOf('=')\n\n if (equalIndex === -1) {\n return null\n }\n\n const name = cookiePart.substring(0, equalIndex).trim()\n const value = cookiePart.substring(equalIndex + 1).trim()\n\n return { name, value }\n}\n"],
|
|
5
|
+
"mappings": "AAwDO,MAAM,qBAAqB,OAAO;AAAA,EACvC;AAAA,EACA,cAAc;AAAA,EACd,cAAc;AAAA,EACd,gBAAgB;AAClB,MAAqD;AAEnD,QAAM,EAAE,SAAS,aAAa,QAAQ,IAAI,uBAAuB,QAAQ;AAGzE,QAAM,cAAc,SAAS,QAAQ,IAAI,UAAU,KAAK;AAGxD,QAAM,cAAc,SAAS,QAAQ,IAAI,cAAc,KAAK;AAG5D,QAAM,cAAc,OAAO,YAAY;AACrC,QAAI,eAAe,SAAS,MAAM;AAChC,YAAM,UAAU,MAAM,oBAAoB,SAAS,MAAM,CAAC;AAC1D,UAAI,QAAQ,QAAQ,eAAe;AACjC,eAAO;AAAA,MACT;AAAA,IACF;AACA,WAAO,EAAE,MAAM,IAAI,MAAM,IAAI,UAAU,OAAU;AAAA,EACnD,GAAG;AAGH,QAAM,cAA2B;AAAA,IAC/B,QAAQ,SAAS;AAAA,IACjB,YAAY,SAAS;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,IACA,SAAS;AAAA,MACP,MAAM,YAAY;AAAA,MAClB,UAAU;AAAA,MACV,MAAM,YAAY;AAAA,MAClB,UAAU,YAAY;AAAA,IACxB;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,YAAY;AAAA,EACxB;AAEA,SAAO;AACT;AAEA,MAAM,yBAAyB,CAAC,aAAuB;AACrD,SAAO,MAAM,KAAK,SAAS,QAAQ,QAAQ,CAAC,EAAE;AAAA,IAK5C,CAAC,KAAK,CAAC,MAAM,KAAK,MAAM;AACtB,UAAI,QAAQ,KAAK,EAAE,MAAM,MAAM,CAAC;AAChC,UAAI,eAAe,KAAK,SAAS,IAAI,MAAM,SAAS;AAGpD,UAAI,KAAK,YAAY,MAAM,cAAc;AACvC,cAAM,SAAS,qBAAqB,KAAK;AACzC,YAAI,QAAQ;AACV,cAAI,QAAQ,KAAK,MAAM;AAAA,QACzB;AAAA,MACF;AAEA,aAAO;AAAA,IACT;AAAA,IACA,EAAE,SAAS,CAAC,GAAG,aAAa,GAAG,SAAS,CAAC,EAAE;AAAA,EAC7C;AACF;AAEA,MAAM,sBAAsB,OAAO,aAAuB;AACxD,QAAM,cAAc,SAAS,QAAQ,IAAI,cAAc;AACvD,MAAI,CAAC,eAAe,CAAC,mBAAmB,WAAW,GAAG;AACpD,WAAO,EAAE,MAAM,IAAI,MAAM,IAAI,UAAU,OAAU;AAAA,EACnD;AAEA,MAAI;AAEF,UAAM,cAAc,MAAM,SAAS,YAAY;AAC/C,UAAM,WAAW,YAAY;AAC7B,UAAM,OAAO,IAAI,YAAY,OAAO,EAAE,OAAO,WAAW;AACxD,WAAO,EAAE,MAAM,MAAM,UAAU,UAAU,OAAU;AAAA,EACrD,QAAQ;AAEN,WAAO,EAAE,MAAM,IAAI,MAAM,IAAI,UAAU,OAAU;AAAA,EACnD;AACF;AAWO,MAAM,qBAAqB,CAAC,gBAAiC;AAClE,QAAM,mBAAmB,YAAY,YAAY;AAGjD,MAAI,iBAAiB,WAAW,OAAO,GAAG;AACxC,WAAO;AAAA,EACT;AAGA,MAAI,iBAAiB,SAAS,kBAAkB,KAAK,iBAAiB,SAAS,OAAO,GAAG;AACvF,WAAO;AAAA,EACT;AAGA,MACE,iBAAiB,SAAS,iBAAiB,KAC3C,iBAAiB,SAAS,UAAU,KACpC,iBAAiB,SAAS,MAAM,GAChC;AACA,WAAO;AAAA,EACT;AAGA,MAAI,iBAAiB,SAAS,wBAAwB,KAAK,iBAAiB,SAAS,0BAA0B,GAAG;AAChH,WAAO;AAAA,EACT;AAGA,MACE,iBAAiB,SAAS,mCAAmC,KAC7D,iBAAiB,SAAS,qBAAqB,GAC/C;AACA,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAOA,MAAM,uBAAuB,CAAC,mBAAmE;AAE
/F,QAAM,QAAQ,eAAe,MAAM,GAAG;AACtC,MAAI,MAAM,WAAW,KAAK,CAAC,MAAM,CAAC,GAAG;AACnC,WAAO;AAAA,EACT;AAEA,QAAM,aAAa,MAAM,CAAC,EAAE,KAAK;AACjC,QAAM,aAAa,WAAW,QAAQ,GAAG;AAEzC,MAAI,eAAe,IAAI;AACrB,WAAO;AAAA,EACT;AAEA,QAAM,OAAO,WAAW,UAAU,GAAG,UAAU,EAAE,KAAK;AACtD,QAAM,QAAQ,WAAW,UAAU,aAAa,CAAC,EAAE,KAAK;AAExD,SAAO,EAAE,MAAM,MAAM;AACvB;",
|
|
6
|
+
"names": []
|
|
7
|
+
}
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import type { HarRequest } from '@scalar/snippetz';
|
|
2
|
+
import type { OperationObject } from '@scalar/workspace-store/schemas/v3.1/strict/openapi-document';
|
|
3
|
+
type HarToOperationProps = {
|
|
4
|
+
/** HAR request to convert */
|
|
5
|
+
harRequest: HarRequest;
|
|
6
|
+
/** Name of the example to populate (e.g., 'default', 'example1') */
|
|
7
|
+
exampleKey: string;
|
|
8
|
+
/** Optional base operation to merge with */
|
|
9
|
+
baseOperation?: OperationObject;
|
|
10
|
+
/** Optional path variables to merge with */
|
|
11
|
+
pathVariables?: Record<string, string>;
|
|
12
|
+
};
|
|
13
|
+
/**
|
|
14
|
+
* Converts a HAR request back to an OpenAPI Operation object with populated examples.
|
|
15
|
+
*
|
|
16
|
+
* This function is the reverse of operationToHar - it takes a HAR request and
|
|
17
|
+
* converts it back into an OpenAPI operation structure, populating the example
|
|
18
|
+
* values based on the HAR request data.
|
|
19
|
+
*
|
|
20
|
+
* The conversion handles:
|
|
21
|
+
* - URL parsing to extract path and query parameters
|
|
22
|
+
* - Header extraction and mapping to operation parameters
|
|
23
|
+
* - Query string parsing and mapping to parameters
|
|
24
|
+
* - Cookie extraction and mapping to cookie parameters
|
|
25
|
+
* - Request body extraction and mapping to requestBody with examples
|
|
26
|
+
* - Content-Type detection and media type assignment
|
|
27
|
+
*
|
|
28
|
+
* Note: This function focuses on populating examples and does not reconstruct
|
|
29
|
+
* schema definitions. If you need full schema generation, consider combining
|
|
30
|
+
* this with a schema inference tool.
|
|
31
|
+
*
|
|
32
|
+
* @see https://w3c.github.io/web-performance/specs/HAR/Overview.html
|
|
33
|
+
* @see https://spec.openapis.org/oas/v3.1.0#operation-object
|
|
34
|
+
*/
|
|
35
|
+
export declare const harToOperation: ({ harRequest, exampleKey, baseOperation, pathVariables, }: HarToOperationProps) => OperationObject;
|
|
36
|
+
export {};
|
|
37
|
+
//# sourceMappingURL=har-to-operation.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"har-to-operation.d.ts","sourceRoot":"","sources":["../../src/mutators/har-to-operation.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,kBAAkB,CAAA;AAElD,OAAO,KAAK,EAAE,eAAe,EAAmB,MAAM,8DAA8D,CAAA;AAKpH,KAAK,mBAAmB,GAAG;IACzB,6BAA6B;IAC7B,UAAU,EAAE,UAAU,CAAA;IACtB,oEAAoE;IACpE,UAAU,EAAE,MAAM,CAAA;IAClB,4CAA4C;IAC5C,aAAa,CAAC,EAAE,eAAe,CAAA;IAC/B,4CAA4C;IAC5C,aAAa,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;CACvC,CAAA;AAyBD;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,eAAO,MAAM,cAAc,GAAI,2DAK5B,mBAAmB,KAAG,eAyHxB,CAAA"}
|
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
import { getResolvedRef } from "@scalar/workspace-store/helpers/get-resolved-ref";
|
|
2
|
+
import { isContentTypeParameterObject } from "../schemas/v3.1/strict/type-guards.js";
|
|
3
|
+
const preprocessParameters = (parameters, pathVariables, exampleKey) => {
|
|
4
|
+
parameters.forEach((param) => {
|
|
5
|
+
const resolvedParam = getResolvedRef(param);
|
|
6
|
+
if (isContentTypeParameterObject(resolvedParam)) {
|
|
7
|
+
return;
|
|
8
|
+
}
|
|
9
|
+
setParameterDisabled(getResolvedRef(param), exampleKey, true);
|
|
10
|
+
if (resolvedParam.in === "path") {
|
|
11
|
+
resolvedParam.examples ||= {};
|
|
12
|
+
resolvedParam.examples[exampleKey] = {
|
|
13
|
+
value: pathVariables[resolvedParam.name] ?? "",
|
|
14
|
+
"x-disabled": false
|
|
15
|
+
};
|
|
16
|
+
}
|
|
17
|
+
});
|
|
18
|
+
};
|
|
19
|
+
const harToOperation = ({
|
|
20
|
+
harRequest,
|
|
21
|
+
exampleKey,
|
|
22
|
+
baseOperation = {},
|
|
23
|
+
pathVariables = {}
|
|
24
|
+
}) => {
|
|
25
|
+
if (!baseOperation.parameters) {
|
|
26
|
+
baseOperation.parameters = [];
|
|
27
|
+
}
|
|
28
|
+
preprocessParameters(baseOperation.parameters, pathVariables, exampleKey);
|
|
29
|
+
if (harRequest.queryString && harRequest.queryString.length > 0) {
|
|
30
|
+
for (const queryParam of harRequest.queryString) {
|
|
31
|
+
const param = findOrCreateParameter(baseOperation.parameters, queryParam.name, "query");
|
|
32
|
+
if (!param || isContentTypeParameterObject(param)) {
|
|
33
|
+
continue;
|
|
34
|
+
}
|
|
35
|
+
param.examples ||= {};
|
|
36
|
+
param.examples[exampleKey] = {
|
|
37
|
+
value: queryParam.value,
|
|
38
|
+
"x-disabled": false
|
|
39
|
+
};
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
if (harRequest.headers && harRequest.headers.length > 0) {
|
|
43
|
+
for (const header of harRequest.headers) {
|
|
44
|
+
const param = findOrCreateParameter(baseOperation.parameters, header.name, "header");
|
|
45
|
+
if (!param || isContentTypeParameterObject(param)) {
|
|
46
|
+
continue;
|
|
47
|
+
}
|
|
48
|
+
param.examples ||= {};
|
|
49
|
+
param.examples[exampleKey] = {
|
|
50
|
+
value: header.value,
|
|
51
|
+
"x-disabled": false
|
|
52
|
+
};
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
if (harRequest.cookies && harRequest.cookies.length > 0) {
|
|
56
|
+
for (const cookie of harRequest.cookies) {
|
|
57
|
+
const param = findOrCreateParameter(baseOperation.parameters, cookie.name, "cookie");
|
|
58
|
+
if (!param || isContentTypeParameterObject(param)) {
|
|
59
|
+
continue;
|
|
60
|
+
}
|
|
61
|
+
param.examples ||= {};
|
|
62
|
+
param.examples[exampleKey] = {
|
|
63
|
+
value: cookie.value,
|
|
64
|
+
"x-disabled": false
|
|
65
|
+
};
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
if (harRequest.postData) {
|
|
69
|
+
const { mimeType, text, params } = harRequest.postData;
|
|
70
|
+
if (!baseOperation.requestBody) {
|
|
71
|
+
baseOperation.requestBody = {
|
|
72
|
+
content: {}
|
|
73
|
+
};
|
|
74
|
+
}
|
|
75
|
+
const requestBody = getResolvedRef(baseOperation.requestBody);
|
|
76
|
+
if (!requestBody.content[mimeType]) {
|
|
77
|
+
requestBody.content[mimeType] = {
|
|
78
|
+
schema: {
|
|
79
|
+
type: "object"
|
|
80
|
+
}
|
|
81
|
+
};
|
|
82
|
+
}
|
|
83
|
+
const mediaType = requestBody.content[mimeType];
|
|
84
|
+
if (!mediaType) {
|
|
85
|
+
return baseOperation;
|
|
86
|
+
}
|
|
87
|
+
mediaType.examples ||= {};
|
|
88
|
+
let exampleValue;
|
|
89
|
+
if (params && params.length > 0) {
|
|
90
|
+
exampleValue = [];
|
|
91
|
+
for (const param of params) {
|
|
92
|
+
exampleValue.push({
|
|
93
|
+
name: param.name,
|
|
94
|
+
value: param.value,
|
|
95
|
+
"x-disabled": false
|
|
96
|
+
});
|
|
97
|
+
}
|
|
98
|
+
} else {
|
|
99
|
+
exampleValue = text;
|
|
100
|
+
}
|
|
101
|
+
mediaType.examples[exampleKey] = {
|
|
102
|
+
value: exampleValue,
|
|
103
|
+
"x-disabled": false
|
|
104
|
+
};
|
|
105
|
+
requestBody["x-scalar-selected-content-type"] ||= {};
|
|
106
|
+
requestBody["x-scalar-selected-content-type"][exampleKey] = mimeType;
|
|
107
|
+
}
|
|
108
|
+
return baseOperation;
|
|
109
|
+
};
|
|
110
|
+
const setParameterDisabled = (param, exampleKey, disabled) => {
|
|
111
|
+
if (isContentTypeParameterObject(param)) {
|
|
112
|
+
return;
|
|
113
|
+
}
|
|
114
|
+
if (!param.examples?.[exampleKey]) {
|
|
115
|
+
return;
|
|
116
|
+
}
|
|
117
|
+
getResolvedRef(param.examples[exampleKey])["x-disabled"] = disabled;
|
|
118
|
+
};
|
|
119
|
+
const findOrCreateParameter = (parameters, name, inValue) => {
|
|
120
|
+
for (const param of parameters) {
|
|
121
|
+
const resolved = getResolvedRef(param);
|
|
122
|
+
if (isContentTypeParameterObject(resolved)) {
|
|
123
|
+
continue;
|
|
124
|
+
}
|
|
125
|
+
if (resolved.in !== inValue) {
|
|
126
|
+
continue;
|
|
127
|
+
}
|
|
128
|
+
const namesMatch = inValue === "header" ? resolved.name.toLowerCase() === name.toLowerCase() : resolved.name === name;
|
|
129
|
+
if (namesMatch) {
|
|
130
|
+
return resolved;
|
|
131
|
+
}
|
|
132
|
+
}
|
|
133
|
+
const newParam = {
|
|
134
|
+
name,
|
|
135
|
+
in: inValue,
|
|
136
|
+
schema: {
|
|
137
|
+
type: "string"
|
|
138
|
+
}
|
|
139
|
+
};
|
|
140
|
+
parameters.push(newParam);
|
|
141
|
+
return newParam;
|
|
142
|
+
};
|
|
143
|
+
export {
|
|
144
|
+
harToOperation
|
|
145
|
+
};
|
|
146
|
+
//# sourceMappingURL=har-to-operation.js.map
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 3,
|
|
3
|
+
"sources": ["../../src/mutators/har-to-operation.ts"],
|
|
4
|
+
"sourcesContent": ["import type { HarRequest } from '@scalar/snippetz'\nimport { getResolvedRef } from '@scalar/workspace-store/helpers/get-resolved-ref'\nimport type { OperationObject, ParameterObject } from '@scalar/workspace-store/schemas/v3.1/strict/openapi-document'\nimport type { ReferenceType } from '@scalar/workspace-store/schemas/v3.1/strict/reference'\n\nimport { isContentTypeParameterObject } from '@/schemas/v3.1/strict/type-guards'\n\ntype HarToOperationProps = {\n /** HAR request to convert */\n harRequest: HarRequest\n /** Name of the example to populate (e.g., 'default', 'example1') */\n exampleKey: string\n /** Optional base operation to merge with */\n baseOperation?: OperationObject\n /** Optional path variables to merge with */\n pathVariables?: Record<string, string>\n}\n\nconst preprocessParameters = (\n parameters: ReferenceType<ParameterObject>[],\n pathVariables: Record<string, string>,\n exampleKey: string,\n) => {\n parameters.forEach((param) => {\n const resolvedParam = getResolvedRef(param)\n if (isContentTypeParameterObject(resolvedParam)) {\n return\n }\n\n setParameterDisabled(getResolvedRef(param), exampleKey, true)\n\n if (resolvedParam.in === 'path') {\n resolvedParam.examples ||= {}\n resolvedParam.examples[exampleKey] = {\n value: pathVariables[resolvedParam.name] ?? 
'',\n 'x-disabled': false,\n }\n }\n })\n}\n\n/**\n * Converts a HAR request back to an OpenAPI Operation object with populated examples.\n *\n * This function is the reverse of operationToHar - it takes a HAR request and\n * converts it back into an OpenAPI operation structure, populating the example\n * values based on the HAR request data.\n *\n * The conversion handles:\n * - URL parsing to extract path and query parameters\n * - Header extraction and mapping to operation parameters\n * - Query string parsing and mapping to parameters\n * - Cookie extraction and mapping to cookie parameters\n * - Request body extraction and mapping to requestBody with examples\n * - Content-Type detection and media type assignment\n *\n * Note: This function focuses on populating examples and does not reconstruct\n * schema definitions. If you need full schema generation, consider combining\n * this with a schema inference tool.\n *\n * @see https://w3c.github.io/web-performance/specs/HAR/Overview.html\n * @see https://spec.openapis.org/oas/v3.1.0#operation-object\n */\nexport const harToOperation = ({\n harRequest,\n exampleKey,\n baseOperation = {},\n pathVariables = {},\n}: HarToOperationProps): OperationObject => {\n // Ensure parameters array exists on the base operation\n if (!baseOperation.parameters) {\n baseOperation.parameters = []\n }\n\n // Set any other parameters as disabled and set the path variables\n preprocessParameters(baseOperation.parameters, pathVariables, exampleKey)\n\n // Process query string parameters from the HAR request\n if (harRequest.queryString && harRequest.queryString.length > 0) {\n for (const queryParam of harRequest.queryString) {\n const param = findOrCreateParameter(baseOperation.parameters, queryParam.name, 'query')\n\n if (!param || isContentTypeParameterObject(param)) {\n continue\n }\n\n param.examples ||= {}\n param.examples[exampleKey] = {\n value: queryParam.value,\n 'x-disabled': false,\n }\n }\n }\n\n // Process headers from the 
HAR request\n if (harRequest.headers && harRequest.headers.length > 0) {\n for (const header of harRequest.headers) {\n const param = findOrCreateParameter(baseOperation.parameters, header.name, 'header')\n\n if (!param || isContentTypeParameterObject(param)) {\n continue\n }\n\n param.examples ||= {}\n param.examples[exampleKey] = {\n value: header.value,\n 'x-disabled': false,\n }\n }\n }\n\n // Process cookies from the HAR request\n if (harRequest.cookies && harRequest.cookies.length > 0) {\n for (const cookie of harRequest.cookies) {\n const param = findOrCreateParameter(baseOperation.parameters, cookie.name, 'cookie')\n\n if (!param || isContentTypeParameterObject(param)) {\n continue\n }\n\n param.examples ||= {}\n param.examples[exampleKey] = {\n value: cookie.value,\n 'x-disabled': false,\n }\n }\n }\n\n // Process request body from the HAR request\n if (harRequest.postData) {\n const { mimeType, text, params } = harRequest.postData\n\n // Ensure requestBody exists on the base operation\n if (!baseOperation.requestBody) {\n baseOperation.requestBody = {\n content: {},\n }\n }\n\n // Resolve the request body in case it is a reference\n const requestBody = getResolvedRef(baseOperation.requestBody)\n\n // Ensure the content type exists in the requestBody\n if (!requestBody.content[mimeType]) {\n requestBody.content[mimeType] = {\n schema: {\n type: 'object',\n },\n }\n }\n\n // Get the media type object\n const mediaType = requestBody.content[mimeType]\n if (!mediaType) {\n return baseOperation\n }\n\n // Ensure examples object exists\n mediaType.examples ||= {}\n\n // Convert the HAR postData to an example value\n let exampleValue: any\n\n // If params exist (form data), convert to array\n if (params && params.length > 0) {\n exampleValue = []\n for (const param of params) {\n exampleValue.push({\n name: param.name,\n value: param.value,\n 'x-disabled': false,\n })\n }\n } else {\n exampleValue = text\n }\n\n // Add the example to the media type\n 
mediaType.examples[exampleKey] = {\n value: exampleValue,\n 'x-disabled': false,\n }\n\n // Update the selected media type\n requestBody['x-scalar-selected-content-type'] ||= {}\n requestBody['x-scalar-selected-content-type'][exampleKey] = mimeType\n }\n\n return baseOperation\n}\n\nconst setParameterDisabled = (param: ParameterObject, exampleKey: string, disabled: boolean): void => {\n if (isContentTypeParameterObject(param)) {\n return\n }\n\n if (!param.examples?.[exampleKey]) {\n return\n }\n\n getResolvedRef(param.examples[exampleKey])['x-disabled'] = disabled\n}\n\n/**\n * Finds an existing parameter in the parameters array or creates a new one.\n * This ensures we do not create duplicate parameters.\n */\nconst findOrCreateParameter = (\n parameters: ReferenceType<ParameterObject>[],\n name: string,\n inValue: 'query' | 'header' | 'path' | 'cookie',\n): ParameterObject => {\n // Try to find existing parameter using getResolvedRef to handle references\n for (const param of parameters) {\n const resolved = getResolvedRef(param)\n if (isContentTypeParameterObject(resolved)) {\n continue\n }\n\n // Check if parameter location matches\n if (resolved.in !== inValue) {\n continue\n }\n\n // For headers, use case-insensitive comparison; otherwise use exact match\n const namesMatch =\n inValue === 'header' ? resolved.name.toLowerCase() === name.toLowerCase() : resolved.name === name\n\n if (namesMatch) {\n return resolved\n }\n }\n\n // Create new parameter with schema\n const newParam: ParameterObject = {\n name,\n in: inValue,\n schema: {\n type: 'string',\n },\n }\n\n parameters.push(newParam)\n return newParam\n}\n"],
|
|
5
|
+
"mappings": "AACA,SAAS,sBAAsB;AAI/B,SAAS,oCAAoC;AAa7C,MAAM,uBAAuB,CAC3B,YACA,eACA,eACG;AACH,aAAW,QAAQ,CAAC,UAAU;AAC5B,UAAM,gBAAgB,eAAe,KAAK;AAC1C,QAAI,6BAA6B,aAAa,GAAG;AAC/C;AAAA,IACF;AAEA,yBAAqB,eAAe,KAAK,GAAG,YAAY,IAAI;AAE5D,QAAI,cAAc,OAAO,QAAQ;AAC/B,oBAAc,aAAa,CAAC;AAC5B,oBAAc,SAAS,UAAU,IAAI;AAAA,QACnC,OAAO,cAAc,cAAc,IAAI,KAAK;AAAA,QAC5C,cAAc;AAAA,MAChB;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAwBO,MAAM,iBAAiB,CAAC;AAAA,EAC7B;AAAA,EACA;AAAA,EACA,gBAAgB,CAAC;AAAA,EACjB,gBAAgB,CAAC;AACnB,MAA4C;AAE1C,MAAI,CAAC,cAAc,YAAY;AAC7B,kBAAc,aAAa,CAAC;AAAA,EAC9B;AAGA,uBAAqB,cAAc,YAAY,eAAe,UAAU;AAGxE,MAAI,WAAW,eAAe,WAAW,YAAY,SAAS,GAAG;AAC/D,eAAW,cAAc,WAAW,aAAa;AAC/C,YAAM,QAAQ,sBAAsB,cAAc,YAAY,WAAW,MAAM,OAAO;AAEtF,UAAI,CAAC,SAAS,6BAA6B,KAAK,GAAG;AACjD;AAAA,MACF;AAEA,YAAM,aAAa,CAAC;AACpB,YAAM,SAAS,UAAU,IAAI;AAAA,QAC3B,OAAO,WAAW;AAAA,QAClB,cAAc;AAAA,MAChB;AAAA,IACF;AAAA,EACF;AAGA,MAAI,WAAW,WAAW,WAAW,QAAQ,SAAS,GAAG;AACvD,eAAW,UAAU,WAAW,SAAS;AACvC,YAAM,QAAQ,sBAAsB,cAAc,YAAY,OAAO,MAAM,QAAQ;AAEnF,UAAI,CAAC,SAAS,6BAA6B,KAAK,GAAG;AACjD;AAAA,MACF;AAEA,YAAM,aAAa,CAAC;AACpB,YAAM,SAAS,UAAU,IAAI;AAAA,QAC3B,OAAO,OAAO;AAAA,QACd,cAAc;AAAA,MAChB;AAAA,IACF;AAAA,EACF;AAGA,MAAI,WAAW,WAAW,WAAW,QAAQ,SAAS,GAAG;AACvD,eAAW,UAAU,WAAW,SAAS;AACvC,YAAM,QAAQ,sBAAsB,cAAc,YAAY,OAAO,MAAM,QAAQ;AAEnF,UAAI,CAAC,SAAS,6BAA6B,KAAK,GAAG;AACjD;AAAA,MACF;AAEA,YAAM,aAAa,CAAC;AACpB,YAAM,SAAS,UAAU,IAAI;AAAA,QAC3B,OAAO,OAAO;AAAA,QACd,cAAc;AAAA,MAChB;AAAA,IACF;AAAA,EACF;AAGA,MAAI,WAAW,UAAU;AACvB,UAAM,EAAE,UAAU,MAAM,OAAO,IAAI,WAAW;AAG9C,QAAI,CAAC,cAAc,aAAa;AAC9B,oBAAc,cAAc;AAAA,QAC1B,SAAS,CAAC;AAAA,MACZ;AAAA,IACF;AAGA,UAAM,cAAc,eAAe,cAAc,WAAW;AAG5D,QAAI,CAAC,YAAY,QAAQ,QAAQ,GAAG;AAClC,kBAAY,QAAQ,QAAQ,IAAI;AAAA,QAC9B,QAAQ;AAAA,UACN,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAGA,UAAM,YAAY,YAAY,QAAQ,QAAQ;AAC9C,QAAI,CAAC,WAAW;AACd,aAAO;AAAA,IACT;AAGA,cAAU,aAAa,CAAC;AAGxB,QAAI;AAGJ,QAAI,UAAU,OAAO,SAAS,GAAG;AAC/B,qBAAe,CAAC;AAChB,iBAAW,SAAS,QAAQ;AAC1B,qBAAa,KAAK;AAAA,UAChB,MAAM,MAAM;AAAA,UACZ,OAAO,MAAM;AAAA,UACb,cAAc;AAAA,QAChB,CAAC;AAAA,MACH;AAAA,
IACF,OAAO;AACL,qBAAe;AAAA,IACjB;AAGA,cAAU,SAAS,UAAU,IAAI;AAAA,MAC/B,OAAO;AAAA,MACP,cAAc;AAAA,IAChB;AAGA,gBAAY,gCAAgC,MAAM,CAAC;AACnD,gBAAY,gCAAgC,EAAE,UAAU,IAAI;AAAA,EAC9D;AAEA,SAAO;AACT;AAEA,MAAM,uBAAuB,CAAC,OAAwB,YAAoB,aAA4B;AACpG,MAAI,6BAA6B,KAAK,GAAG;AACvC;AAAA,EACF;AAEA,MAAI,CAAC,MAAM,WAAW,UAAU,GAAG;AACjC;AAAA,EACF;AAEA,iBAAe,MAAM,SAAS,UAAU,CAAC,EAAE,YAAY,IAAI;AAC7D;AAMA,MAAM,wBAAwB,CAC5B,YACA,MACA,YACoB;AAEpB,aAAW,SAAS,YAAY;AAC9B,UAAM,WAAW,eAAe,KAAK;AACrC,QAAI,6BAA6B,QAAQ,GAAG;AAC1C;AAAA,IACF;AAGA,QAAI,SAAS,OAAO,SAAS;AAC3B;AAAA,IACF;AAGA,UAAM,aACJ,YAAY,WAAW,SAAS,KAAK,YAAY,MAAM,KAAK,YAAY,IAAI,SAAS,SAAS;AAEhG,QAAI,YAAY;AACd,aAAO;AAAA,IACT;AAAA,EACF;AAGA,QAAM,WAA4B;AAAA,IAChC;AAAA,IACA,IAAI;AAAA,IACJ,QAAQ;AAAA,MACN,MAAM;AAAA,IACR;AAAA,EACF;AAEA,aAAW,KAAK,QAAQ;AACxB,SAAO;AACT;",
|
|
6
|
+
"names": []
|
|
7
|
+
}
|
package/dist/mutators/index.d.ts
CHANGED
|
@@ -104,6 +104,8 @@ export declare function generateClientMutators(store: WorkspaceStore | null): {
|
|
|
104
104
|
updateOperationRequestBodyContentType: (payload: import("../events/definitions/operation.js").OperationEvents["operation:update:requestBody:contentType"]) => void;
|
|
105
105
|
updateOperationRequestBodyExample: (payload: import("../events/definitions/operation.js").OperationEvents["operation:update:requestBody:value"]) => void;
|
|
106
106
|
updateOperationRequestBodyFormValue: (payload: import("../events/definitions/operation.js").OperationEvents["operation:update:requestBody:formValue"]) => void;
|
|
107
|
+
addResponseToHistory: (payload: import("../events/definitions/hooks.js").HooksEvents["hooks:on:request:complete"]) => Promise<void>;
|
|
108
|
+
reloadOperationHistory: (payload: import("../events/definitions/operation.js").OperationEvents["operation:reload:history"]) => void;
|
|
107
109
|
};
|
|
108
110
|
server: {
|
|
109
111
|
addServer: () => import("../schemas/v3.1/strict/server.js").ServerObject | undefined;
|
|
@@ -185,6 +187,8 @@ export declare function generateClientMutators(store: WorkspaceStore | null): {
|
|
|
185
187
|
updateOperationRequestBodyContentType: (payload: import("../events/definitions/operation.js").OperationEvents["operation:update:requestBody:contentType"]) => void;
|
|
186
188
|
updateOperationRequestBodyExample: (payload: import("../events/definitions/operation.js").OperationEvents["operation:update:requestBody:value"]) => void;
|
|
187
189
|
updateOperationRequestBodyFormValue: (payload: import("../events/definitions/operation.js").OperationEvents["operation:update:requestBody:formValue"]) => void;
|
|
190
|
+
addResponseToHistory: (payload: import("../events/definitions/hooks.js").HooksEvents["hooks:on:request:complete"]) => Promise<void>;
|
|
191
|
+
reloadOperationHistory: (payload: import("../events/definitions/operation.js").OperationEvents["operation:reload:history"]) => void;
|
|
188
192
|
};
|
|
189
193
|
server: {
|
|
190
194
|
addServer: () => import("../schemas/v3.1/strict/server.js").ServerObject | undefined;
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/mutators/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,UAAU,CAAA;AAa9C;;;;;GAKG;AACH,wBAAgB,sBAAsB,CAAC,KAAK,EAAE,cAAc,GAAG,IAAI;IAqC/D;;OAEG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;IAEH;;;OAGG
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/mutators/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,UAAU,CAAA;AAa9C;;;;;GAKG;AACH,wBAAgB,sBAAsB,CAAC,KAAK,EAAE,cAAc,GAAG,IAAI;IAqC/D;;OAEG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;IAEH;;;OAGG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;IAEH;;;;OAIG;gBACS,MAAM;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAErB"}
|