@proofkit/fmodata 0.1.0-alpha.6 → 0.1.0-alpha.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +376 -34
- package/dist/esm/client/base-table.d.ts +24 -29
- package/dist/esm/client/base-table.js +4 -7
- package/dist/esm/client/base-table.js.map +1 -1
- package/dist/esm/client/batch-builder.d.ts +54 -0
- package/dist/esm/client/batch-builder.js +179 -0
- package/dist/esm/client/batch-builder.js.map +1 -0
- package/dist/esm/client/batch-request.d.ts +61 -0
- package/dist/esm/client/batch-request.js +252 -0
- package/dist/esm/client/batch-request.js.map +1 -0
- package/dist/esm/client/database.d.ts +44 -12
- package/dist/esm/client/database.js +64 -10
- package/dist/esm/client/database.js.map +1 -1
- package/dist/esm/client/delete-builder.d.ts +21 -2
- package/dist/esm/client/delete-builder.js +76 -9
- package/dist/esm/client/delete-builder.js.map +1 -1
- package/dist/esm/client/entity-set.d.ts +17 -6
- package/dist/esm/client/entity-set.js +26 -10
- package/dist/esm/client/entity-set.js.map +1 -1
- package/dist/esm/client/filemaker-odata.d.ts +11 -5
- package/dist/esm/client/filemaker-odata.js +46 -14
- package/dist/esm/client/filemaker-odata.js.map +1 -1
- package/dist/esm/client/insert-builder.d.ts +38 -3
- package/dist/esm/client/insert-builder.js +195 -9
- package/dist/esm/client/insert-builder.js.map +1 -1
- package/dist/esm/client/query-builder.d.ts +20 -4
- package/dist/esm/client/query-builder.js +195 -19
- package/dist/esm/client/query-builder.js.map +1 -1
- package/dist/esm/client/record-builder.d.ts +18 -3
- package/dist/esm/client/record-builder.js +87 -5
- package/dist/esm/client/record-builder.js.map +1 -1
- package/dist/esm/client/response-processor.d.ts +38 -0
- package/dist/esm/client/schema-manager.d.ts +57 -0
- package/dist/esm/client/schema-manager.js +132 -0
- package/dist/esm/client/schema-manager.js.map +1 -0
- package/dist/esm/client/table-occurrence.d.ts +25 -42
- package/dist/esm/client/table-occurrence.js +9 -17
- package/dist/esm/client/table-occurrence.js.map +1 -1
- package/dist/esm/client/update-builder.d.ts +34 -11
- package/dist/esm/client/update-builder.js +119 -19
- package/dist/esm/client/update-builder.js.map +1 -1
- package/dist/esm/errors.d.ts +14 -1
- package/dist/esm/errors.js +26 -0
- package/dist/esm/errors.js.map +1 -1
- package/dist/esm/index.d.ts +5 -4
- package/dist/esm/index.js +7 -6
- package/dist/esm/transform.d.ts +9 -0
- package/dist/esm/transform.js +7 -0
- package/dist/esm/transform.js.map +1 -1
- package/dist/esm/types.d.ts +69 -1
- package/package.json +1 -1
- package/src/client/base-table.ts +30 -36
- package/src/client/batch-builder.ts +265 -0
- package/src/client/batch-request.ts +485 -0
- package/src/client/database.ts +110 -56
- package/src/client/delete-builder.ts +116 -14
- package/src/client/entity-set.ts +89 -12
- package/src/client/filemaker-odata.ts +65 -19
- package/src/client/insert-builder.ts +296 -18
- package/src/client/query-builder.ts +285 -18
- package/src/client/query-builder.ts.bak +1457 -0
- package/src/client/record-builder.ts +120 -12
- package/src/client/response-processor.ts +103 -0
- package/src/client/schema-manager.ts +246 -0
- package/src/client/table-occurrence.ts +41 -80
- package/src/client/update-builder.ts +195 -37
- package/src/errors.ts +33 -1
- package/src/index.ts +15 -3
- package/src/transform.ts +19 -6
- package/src/types.ts +89 -1
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"base-table.js","sources":["../../../src/client/base-table.ts"],"sourcesContent":["import { StandardSchemaV1 } from \"@standard-schema/spec\";\n\n/**\n * BaseTable defines the schema and configuration for a table.\n *\n * @template Schema - Record of field names to StandardSchemaV1 validators\n * @template IdField - The name of the primary key field (optional, automatically read-only)\n * @template Required - Additional field names to require on insert (beyond auto-inferred required fields)\n * @template ReadOnly - Field names that cannot be modified via insert/update (idField is automatically read-only)\n *\n * @example Basic table with auto-inferred required fields\n * ```ts\n * import { z } from \"zod\";\n *\n * const usersTable = new BaseTable({\n * schema: {\n * id: z.string(), // Auto-required (not nullable), auto-readOnly (idField)\n * name: z.string(), // Auto-required (not nullable)\n * email: z.string().nullable(), // Optional (nullable)\n * },\n * idField: \"id\",\n * });\n * // On insert: name is required, email is optional (id is excluded - readOnly)\n * // On update: name and email available (id is excluded - readOnly)\n * ```\n *\n * @example Table with additional required and readOnly fields\n * ```ts\n * import { z } from \"zod\";\n *\n * const usersTable = new BaseTable({\n * schema: {\n * id: z.string(), // Auto-required, auto-readOnly (idField)\n * createdAt: z.string(), // Read-only system field\n * name: z.string(), // Auto-required\n * email: z.string().nullable(), // Optional by default...\n * legacyField: z.string().nullable(), // Optional by default...\n * },\n * idField: \"id\",\n * required: [\"legacyField\"], // Make legacyField required for new inserts\n * readOnly: [\"createdAt\"], // Exclude from insert/update\n * });\n * // On insert: name and legacyField required; email optional (id and createdAt excluded)\n * // On update: all fields optional (id and createdAt excluded)\n * ```\n *\n * @example Table with 
multiple read-only fields\n * ```ts\n * import { z } from \"zod\";\n *\n * const usersTable = new BaseTable({\n * schema: {\n * id: z.string(),\n * createdAt: z.string(),\n * modifiedAt: z.string(),\n * createdBy: z.string(),\n * notes: z.string().nullable(),\n * },\n * idField: \"id\",\n * readOnly: [\"createdAt\", \"modifiedAt\", \"createdBy\"],\n * });\n * // On insert/update: only notes is available (id and system fields excluded)\n * ```\n */\nexport class BaseTable<\n Schema extends Record<string, StandardSchemaV1> = any,\n IdField extends keyof Schema | undefined = undefined,\n Required extends readonly (keyof Schema)[] = readonly [],\n ReadOnly extends readonly (keyof Schema)[] = readonly [],\n> {\n public readonly schema: Schema;\n public readonly idField?: IdField;\n public readonly required?: Required;\n public readonly readOnly?: ReadOnly;\n public readonly fmfIds?: Record<keyof Schema, `FMFID:${string}`>;\n\n constructor(config: {\n schema: Schema;\n idField?: IdField;\n required?: Required;\n readOnly?: ReadOnly;\n }) {\n this.schema = config.schema;\n this.idField = config.idField;\n this.required = config.required;\n this.readOnly = config.readOnly;\n }\n\n /**\n * Returns the FileMaker field ID (FMFID) for a given field name, or the field name itself if not using IDs.\n * @param fieldName - The field name to get the ID for\n * @returns The FMFID string or the original field name\n */\n getFieldId(fieldName: keyof Schema): string {\n if (this.fmfIds && fieldName in this.fmfIds) {\n return this.fmfIds[fieldName];\n }\n return String(fieldName);\n }\n\n /**\n * Returns the field name for a given FileMaker field ID (FMFID), or the ID itself if not found.\n * @param fieldId - The FMFID to get the field name for\n * @returns The field name or the original ID\n */\n getFieldName(fieldId: string): string {\n if (this.fmfIds) {\n // Search for the field name that corresponds to this FMFID\n for (const [fieldName, fmfId] of Object.entries(this.fmfIds)) {\n 
if (fmfId === fieldId) {\n return fieldName;\n }\n }\n }\n return fieldId;\n }\n\n /**\n * Returns true if this BaseTable is using FileMaker field IDs.\n */\n isUsingFieldIds(): boolean {\n return this.fmfIds !== undefined;\n }\n}\n\n/**\n *
|
|
1
|
+
{"version":3,"file":"base-table.js","sources":["../../../src/client/base-table.ts"],"sourcesContent":["import { StandardSchemaV1 } from \"@standard-schema/spec\";\n\n/**\n * BaseTable defines the schema and configuration for a table.\n *\n * @template Schema - Record of field names to StandardSchemaV1 validators\n * @template IdField - The name of the primary key field (optional, automatically read-only)\n * @template Required - Additional field names to require on insert (beyond auto-inferred required fields)\n * @template ReadOnly - Field names that cannot be modified via insert/update (idField is automatically read-only)\n *\n * @example Basic table with auto-inferred required fields\n * ```ts\n * import { z } from \"zod\";\n *\n * const usersTable = new BaseTable({\n * schema: {\n * id: z.string(), // Auto-required (not nullable), auto-readOnly (idField)\n * name: z.string(), // Auto-required (not nullable)\n * email: z.string().nullable(), // Optional (nullable)\n * },\n * idField: \"id\",\n * });\n * // On insert: name is required, email is optional (id is excluded - readOnly)\n * // On update: name and email available (id is excluded - readOnly)\n * ```\n *\n * @example Table with additional required and readOnly fields\n * ```ts\n * import { z } from \"zod\";\n *\n * const usersTable = new BaseTable({\n * schema: {\n * id: z.string(), // Auto-required, auto-readOnly (idField)\n * createdAt: z.string(), // Read-only system field\n * name: z.string(), // Auto-required\n * email: z.string().nullable(), // Optional by default...\n * legacyField: z.string().nullable(), // Optional by default...\n * },\n * idField: \"id\",\n * required: [\"legacyField\"], // Make legacyField required for new inserts\n * readOnly: [\"createdAt\"], // Exclude from insert/update\n * });\n * // On insert: name and legacyField required; email optional (id and createdAt excluded)\n * // On update: all fields optional (id and createdAt excluded)\n * ```\n *\n * @example Table with 
multiple read-only fields\n * ```ts\n * import { z } from \"zod\";\n *\n * const usersTable = new BaseTable({\n * schema: {\n * id: z.string(),\n * createdAt: z.string(),\n * modifiedAt: z.string(),\n * createdBy: z.string(),\n * notes: z.string().nullable(),\n * },\n * idField: \"id\",\n * readOnly: [\"createdAt\", \"modifiedAt\", \"createdBy\"],\n * });\n * // On insert/update: only notes is available (id and system fields excluded)\n * ```\n */\nexport class BaseTable<\n Schema extends Record<string, StandardSchemaV1> = any,\n IdField extends keyof Schema | undefined = undefined,\n Required extends readonly (keyof Schema)[] = readonly [],\n ReadOnly extends readonly (keyof Schema)[] = readonly [],\n> {\n public readonly schema: Schema;\n public readonly idField?: IdField;\n public readonly required?: Required;\n public readonly readOnly?: ReadOnly;\n public readonly fmfIds?: Record<keyof Schema, `FMFID:${string}`>;\n\n constructor(config: {\n schema: Schema;\n idField?: IdField;\n required?: Required;\n readOnly?: ReadOnly;\n fmfIds?: Record<string, `FMFID:${string}`>;\n }) {\n this.schema = config.schema;\n this.idField = config.idField;\n this.required = config.required;\n this.readOnly = config.readOnly;\n this.fmfIds = config.fmfIds as\n | Record<keyof Schema, `FMFID:${string}`>\n | undefined;\n }\n\n /**\n * Returns the FileMaker field ID (FMFID) for a given field name, or the field name itself if not using IDs.\n * @param fieldName - The field name to get the ID for\n * @returns The FMFID string or the original field name\n */\n getFieldId(fieldName: keyof Schema): string {\n if (this.fmfIds && fieldName in this.fmfIds) {\n return this.fmfIds[fieldName];\n }\n return String(fieldName);\n }\n\n /**\n * Returns the field name for a given FileMaker field ID (FMFID), or the ID itself if not found.\n * @param fieldId - The FMFID to get the field name for\n * @returns The field name or the original ID\n */\n getFieldName(fieldId: string): string {\n if 
(this.fmfIds) {\n // Search for the field name that corresponds to this FMFID\n for (const [fieldName, fmfId] of Object.entries(this.fmfIds)) {\n if (fmfId === fieldId) {\n return fieldName;\n }\n }\n }\n return fieldId;\n }\n\n /**\n * Returns true if this BaseTable is using FileMaker field IDs.\n */\n isUsingFieldIds(): boolean {\n return this.fmfIds !== undefined;\n }\n}\n\n/**\n * Creates a BaseTable with proper TypeScript type inference.\n *\n * This function should be used instead of `new BaseTable()` to ensure\n * field names are properly typed throughout the library.\n *\n * @example Without entity IDs\n * ```ts\n * const users = defineBaseTable({\n * schema: { id: z.string(), name: z.string() },\n * idField: \"id\",\n * });\n * ```\n *\n * @example With entity IDs (FileMaker field IDs)\n * ```ts\n * const products = defineBaseTable({\n * schema: { id: z.string(), name: z.string() },\n * idField: \"id\",\n * fmfIds: { id: \"FMFID:1\", name: \"FMFID:2\" },\n * });\n * ```\n */\nexport function defineBaseTable<\n const Schema extends Record<string, StandardSchemaV1>,\n IdField extends keyof Schema | undefined = undefined,\n const Required extends readonly (keyof Schema)[] = readonly [],\n const ReadOnly extends readonly (keyof Schema)[] = readonly [],\n>(config: {\n schema: Schema;\n idField?: IdField;\n required?: Required;\n readOnly?: ReadOnly;\n fmfIds?: { [K in keyof Schema]: `FMFID:${string}` };\n}): BaseTable<Schema, IdField, Required, ReadOnly> {\n return new 
BaseTable(config);\n}\n"],"names":[],"mappings":";;;AAgEO,MAAM,UAKX;AAAA,EAOA,YAAY,QAMT;AAZa;AACA;AACA;AACA;AACA;AASd,SAAK,SAAS,OAAO;AACrB,SAAK,UAAU,OAAO;AACtB,SAAK,WAAW,OAAO;AACvB,SAAK,WAAW,OAAO;AACvB,SAAK,SAAS,OAAO;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUvB,WAAW,WAAiC;AAC1C,QAAI,KAAK,UAAU,aAAa,KAAK,QAAQ;AACpC,aAAA,KAAK,OAAO,SAAS;AAAA,IAAA;AAE9B,WAAO,OAAO,SAAS;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQzB,aAAa,SAAyB;AACpC,QAAI,KAAK,QAAQ;AAEJ,iBAAA,CAAC,WAAW,KAAK,KAAK,OAAO,QAAQ,KAAK,MAAM,GAAG;AAC5D,YAAI,UAAU,SAAS;AACd,iBAAA;AAAA,QAAA;AAAA,MACT;AAAA,IACF;AAEK,WAAA;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA,EAMT,kBAA2B;AACzB,WAAO,KAAK,WAAW;AAAA,EAAA;AAE3B;AAyBO,SAAS,gBAKd,QAMiD;AAC1C,SAAA,IAAI,UAAU,MAAM;AAC7B;"}
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import { ExecutableBuilder, ExecutionContext, Result, ExecuteOptions } from '../types.js';
|
|
2
|
+
import { FFetchOptions } from '@fetchkit/ffetch';
|
|
3
|
+
/**
|
|
4
|
+
* Helper type to extract result types from a tuple of ExecutableBuilders.
|
|
5
|
+
* Uses a mapped type which TypeScript 4.1+ can handle for tuples.
|
|
6
|
+
*/
|
|
7
|
+
type ExtractTupleTypes<T extends readonly ExecutableBuilder<any>[]> = {
|
|
8
|
+
[K in keyof T]: T[K] extends ExecutableBuilder<infer U> ? U : never;
|
|
9
|
+
};
|
|
10
|
+
/**
|
|
11
|
+
* Builder for batch operations that allows multiple queries to be executed together
|
|
12
|
+
* in a single transactional request.
|
|
13
|
+
*/
|
|
14
|
+
export declare class BatchBuilder<Builders extends readonly ExecutableBuilder<any>[]> implements ExecutableBuilder<ExtractTupleTypes<Builders>> {
|
|
15
|
+
private readonly databaseName;
|
|
16
|
+
private readonly context;
|
|
17
|
+
private builders;
|
|
18
|
+
private readonly originalBuilders;
|
|
19
|
+
constructor(builders: Builders, databaseName: string, context: ExecutionContext);
|
|
20
|
+
/**
|
|
21
|
+
* Add a request to the batch dynamically.
|
|
22
|
+
* This allows building up batch operations programmatically.
|
|
23
|
+
*
|
|
24
|
+
* @param builder - An executable builder to add to the batch
|
|
25
|
+
* @returns This BatchBuilder for method chaining
|
|
26
|
+
* @example
|
|
27
|
+
* ```ts
|
|
28
|
+
* const batch = db.batch([]);
|
|
29
|
+
* batch.addRequest(db.from('contacts').list());
|
|
30
|
+
* batch.addRequest(db.from('users').list());
|
|
31
|
+
* const result = await batch.execute();
|
|
32
|
+
* ```
|
|
33
|
+
*/
|
|
34
|
+
addRequest<T>(builder: ExecutableBuilder<T>): this;
|
|
35
|
+
/**
|
|
36
|
+
* Get the request configuration for this batch operation.
|
|
37
|
+
* This is used internally by the execution system.
|
|
38
|
+
*/
|
|
39
|
+
getRequestConfig(): {
|
|
40
|
+
method: string;
|
|
41
|
+
url: string;
|
|
42
|
+
body?: any;
|
|
43
|
+
};
|
|
44
|
+
toRequest(baseUrl: string): Request;
|
|
45
|
+
processResponse(response: Response, options?: ExecuteOptions): Promise<Result<any>>;
|
|
46
|
+
/**
|
|
47
|
+
* Execute the batch operation.
|
|
48
|
+
*
|
|
49
|
+
* @param options - Optional fetch options and batch-specific options (includes beforeRequest hook)
|
|
50
|
+
* @returns A tuple of results matching the input builders
|
|
51
|
+
*/
|
|
52
|
+
execute<EO extends ExecuteOptions>(options?: RequestInit & FFetchOptions & EO): Promise<Result<ExtractTupleTypes<Builders>>>;
|
|
53
|
+
}
|
|
54
|
+
export {};
|
|
@@ -0,0 +1,179 @@
|
|
|
1
|
+
var __defProp = Object.defineProperty;
|
|
2
|
+
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
|
3
|
+
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
|
|
4
|
+
import { formatBatchRequestFromNative, parseBatchResponse } from "./batch-request.js";
|
|
5
|
+
function parsedToResponse(parsed) {
|
|
6
|
+
const headers = new Headers(parsed.headers);
|
|
7
|
+
if (parsed.body === null || parsed.body === void 0) {
|
|
8
|
+
return new Response(null, {
|
|
9
|
+
status: parsed.status,
|
|
10
|
+
statusText: parsed.statusText,
|
|
11
|
+
headers
|
|
12
|
+
});
|
|
13
|
+
}
|
|
14
|
+
const bodyString = typeof parsed.body === "string" ? parsed.body : JSON.stringify(parsed.body);
|
|
15
|
+
let status = parsed.status;
|
|
16
|
+
if (status === 204 && bodyString && bodyString.trim() !== "") {
|
|
17
|
+
status = 200;
|
|
18
|
+
}
|
|
19
|
+
return new Response(status === 204 ? null : bodyString, {
|
|
20
|
+
status,
|
|
21
|
+
statusText: parsed.statusText,
|
|
22
|
+
headers
|
|
23
|
+
});
|
|
24
|
+
}
|
|
25
|
+
class BatchBuilder {
|
|
26
|
+
constructor(builders, databaseName, context) {
|
|
27
|
+
__publicField(this, "builders");
|
|
28
|
+
__publicField(this, "originalBuilders");
|
|
29
|
+
this.databaseName = databaseName;
|
|
30
|
+
this.context = context;
|
|
31
|
+
this.builders = [...builders];
|
|
32
|
+
this.originalBuilders = builders;
|
|
33
|
+
}
|
|
34
|
+
/**
|
|
35
|
+
* Add a request to the batch dynamically.
|
|
36
|
+
* This allows building up batch operations programmatically.
|
|
37
|
+
*
|
|
38
|
+
* @param builder - An executable builder to add to the batch
|
|
39
|
+
* @returns This BatchBuilder for method chaining
|
|
40
|
+
* @example
|
|
41
|
+
* ```ts
|
|
42
|
+
* const batch = db.batch([]);
|
|
43
|
+
* batch.addRequest(db.from('contacts').list());
|
|
44
|
+
* batch.addRequest(db.from('users').list());
|
|
45
|
+
* const result = await batch.execute();
|
|
46
|
+
* ```
|
|
47
|
+
*/
|
|
48
|
+
addRequest(builder) {
|
|
49
|
+
this.builders.push(builder);
|
|
50
|
+
return this;
|
|
51
|
+
}
|
|
52
|
+
/**
|
|
53
|
+
* Get the request configuration for this batch operation.
|
|
54
|
+
* This is used internally by the execution system.
|
|
55
|
+
*/
|
|
56
|
+
getRequestConfig() {
|
|
57
|
+
return {
|
|
58
|
+
method: "POST",
|
|
59
|
+
url: `/${this.databaseName}/$batch`,
|
|
60
|
+
body: void 0
|
|
61
|
+
// Body is constructed in execute()
|
|
62
|
+
};
|
|
63
|
+
}
|
|
64
|
+
toRequest(baseUrl) {
|
|
65
|
+
const fullUrl = `${baseUrl}/${this.databaseName}/$batch`;
|
|
66
|
+
return new Request(fullUrl, {
|
|
67
|
+
method: "POST",
|
|
68
|
+
headers: {
|
|
69
|
+
"Content-Type": "multipart/mixed",
|
|
70
|
+
"OData-Version": "4.0"
|
|
71
|
+
}
|
|
72
|
+
});
|
|
73
|
+
}
|
|
74
|
+
async processResponse(response, options) {
|
|
75
|
+
return {
|
|
76
|
+
data: void 0,
|
|
77
|
+
error: {
|
|
78
|
+
name: "NotImplementedError",
|
|
79
|
+
message: "Batch operations handle response processing internally",
|
|
80
|
+
timestamp: /* @__PURE__ */ new Date()
|
|
81
|
+
}
|
|
82
|
+
};
|
|
83
|
+
}
|
|
84
|
+
/**
|
|
85
|
+
* Execute the batch operation.
|
|
86
|
+
*
|
|
87
|
+
* @param options - Optional fetch options and batch-specific options (includes beforeRequest hook)
|
|
88
|
+
* @returns A tuple of results matching the input builders
|
|
89
|
+
*/
|
|
90
|
+
async execute(options) {
|
|
91
|
+
var _a, _b;
|
|
92
|
+
const baseUrl = (_b = (_a = this.context)._getBaseUrl) == null ? void 0 : _b.call(_a);
|
|
93
|
+
if (!baseUrl) {
|
|
94
|
+
return {
|
|
95
|
+
data: void 0,
|
|
96
|
+
error: {
|
|
97
|
+
name: "ConfigurationError",
|
|
98
|
+
message: "Base URL not available - execution context must implement _getBaseUrl()",
|
|
99
|
+
timestamp: /* @__PURE__ */ new Date()
|
|
100
|
+
}
|
|
101
|
+
};
|
|
102
|
+
}
|
|
103
|
+
try {
|
|
104
|
+
const requests = this.builders.map(
|
|
105
|
+
(builder) => builder.toRequest(baseUrl)
|
|
106
|
+
);
|
|
107
|
+
const { body, boundary } = await formatBatchRequestFromNative(
|
|
108
|
+
requests,
|
|
109
|
+
baseUrl
|
|
110
|
+
);
|
|
111
|
+
const response = await this.context._makeRequest(
|
|
112
|
+
`/${this.databaseName}/$batch`,
|
|
113
|
+
{
|
|
114
|
+
...options,
|
|
115
|
+
method: "POST",
|
|
116
|
+
headers: {
|
|
117
|
+
...options == null ? void 0 : options.headers,
|
|
118
|
+
"Content-Type": `multipart/mixed; boundary=${boundary}`,
|
|
119
|
+
"OData-Version": "4.0"
|
|
120
|
+
},
|
|
121
|
+
body
|
|
122
|
+
}
|
|
123
|
+
);
|
|
124
|
+
if (response.error) {
|
|
125
|
+
return { data: void 0, error: response.error };
|
|
126
|
+
}
|
|
127
|
+
const firstLine = response.data.split("\r\n")[0] || response.data.split("\n")[0] || "";
|
|
128
|
+
const actualBoundary = firstLine.startsWith("--") ? firstLine.substring(2) : boundary;
|
|
129
|
+
const contentTypeHeader = `multipart/mixed; boundary=${actualBoundary}`;
|
|
130
|
+
const parsedResponses = parseBatchResponse(
|
|
131
|
+
response.data,
|
|
132
|
+
contentTypeHeader
|
|
133
|
+
);
|
|
134
|
+
if (parsedResponses.length !== this.builders.length) {
|
|
135
|
+
return {
|
|
136
|
+
data: void 0,
|
|
137
|
+
error: {
|
|
138
|
+
name: "BatchError",
|
|
139
|
+
message: `Expected ${this.builders.length} responses but got ${parsedResponses.length}`,
|
|
140
|
+
timestamp: /* @__PURE__ */ new Date()
|
|
141
|
+
}
|
|
142
|
+
};
|
|
143
|
+
}
|
|
144
|
+
const processedResults = [];
|
|
145
|
+
for (let i = 0; i < this.originalBuilders.length; i++) {
|
|
146
|
+
const builder = this.originalBuilders[i];
|
|
147
|
+
const parsed = parsedResponses[i];
|
|
148
|
+
if (!builder || !parsed) {
|
|
149
|
+
processedResults.push(void 0);
|
|
150
|
+
continue;
|
|
151
|
+
}
|
|
152
|
+
const nativeResponse = parsedToResponse(parsed);
|
|
153
|
+
const result = await builder.processResponse(nativeResponse, options);
|
|
154
|
+
if (result.error) {
|
|
155
|
+
processedResults.push(void 0);
|
|
156
|
+
} else {
|
|
157
|
+
processedResults.push(result.data);
|
|
158
|
+
}
|
|
159
|
+
}
|
|
160
|
+
return {
|
|
161
|
+
data: processedResults,
|
|
162
|
+
error: void 0
|
|
163
|
+
};
|
|
164
|
+
} catch (err) {
|
|
165
|
+
return {
|
|
166
|
+
data: void 0,
|
|
167
|
+
error: {
|
|
168
|
+
name: "BatchError",
|
|
169
|
+
message: err instanceof Error ? err.message : "Unknown error",
|
|
170
|
+
timestamp: /* @__PURE__ */ new Date()
|
|
171
|
+
}
|
|
172
|
+
};
|
|
173
|
+
}
|
|
174
|
+
}
|
|
175
|
+
}
|
|
176
|
+
export {
|
|
177
|
+
BatchBuilder
|
|
178
|
+
};
|
|
179
|
+
//# sourceMappingURL=batch-builder.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"batch-builder.js","sources":["../../../src/client/batch-builder.ts"],"sourcesContent":["import type {\n ExecutableBuilder,\n ExecutionContext,\n Result,\n ExecuteOptions,\n} from \"../types\";\nimport { type FFetchOptions } from \"@fetchkit/ffetch\";\nimport {\n formatBatchRequestFromNative,\n parseBatchResponse,\n type ParsedBatchResponse,\n} from \"./batch-request\";\n\n/**\n * Helper type to extract result types from a tuple of ExecutableBuilders.\n * Uses a mapped type which TypeScript 4.1+ can handle for tuples.\n */\ntype ExtractTupleTypes<T extends readonly ExecutableBuilder<any>[]> = {\n [K in keyof T]: T[K] extends ExecutableBuilder<infer U> ? U : never;\n};\n\n/**\n * Converts a ParsedBatchResponse to a native Response object\n * @param parsed - The parsed batch response\n * @returns A native Response object\n */\nfunction parsedToResponse(parsed: ParsedBatchResponse): Response {\n const headers = new Headers(parsed.headers);\n\n // Handle null body\n if (parsed.body === null || parsed.body === undefined) {\n return new Response(null, {\n status: parsed.status,\n statusText: parsed.statusText,\n headers,\n });\n }\n\n // Convert body to string if it's not already\n const bodyString =\n typeof parsed.body === \"string\" ? parsed.body : JSON.stringify(parsed.body);\n\n // Handle 204 No Content status - it cannot have a body per HTTP spec\n // If FileMaker returns 204 with a body, treat it as 200\n let status = parsed.status;\n if (status === 204 && bodyString && bodyString.trim() !== \"\") {\n status = 200;\n }\n\n return new Response(status === 204 ? 
null : bodyString, {\n status: status,\n statusText: parsed.statusText,\n headers,\n });\n}\n\n/**\n * Builder for batch operations that allows multiple queries to be executed together\n * in a single transactional request.\n */\nexport class BatchBuilder<Builders extends readonly ExecutableBuilder<any>[]>\n implements ExecutableBuilder<ExtractTupleTypes<Builders>>\n{\n private builders: ExecutableBuilder<any>[];\n private readonly originalBuilders: Builders;\n\n constructor(\n builders: Builders,\n private readonly databaseName: string,\n private readonly context: ExecutionContext,\n ) {\n // Convert readonly tuple to mutable array for dynamic additions\n this.builders = [...builders];\n // Store original tuple for type preservation\n this.originalBuilders = builders;\n }\n\n /**\n * Add a request to the batch dynamically.\n * This allows building up batch operations programmatically.\n *\n * @param builder - An executable builder to add to the batch\n * @returns This BatchBuilder for method chaining\n * @example\n * ```ts\n * const batch = db.batch([]);\n * batch.addRequest(db.from('contacts').list());\n * batch.addRequest(db.from('users').list());\n * const result = await batch.execute();\n * ```\n */\n addRequest<T>(builder: ExecutableBuilder<T>): this {\n this.builders.push(builder);\n return this;\n }\n\n /**\n * Get the request configuration for this batch operation.\n * This is used internally by the execution system.\n */\n getRequestConfig(): { method: string; url: string; body?: any } {\n // Note: This method is kept for compatibility but batch operations\n // should use execute() directly which handles the full Request/Response flow\n return {\n method: \"POST\",\n url: `/${this.databaseName}/$batch`,\n body: undefined, // Body is constructed in execute()\n };\n }\n\n toRequest(baseUrl: string): Request {\n // Batch operations are not designed to be nested, but we provide\n // a basic implementation for interface compliance\n const fullUrl = 
`${baseUrl}/${this.databaseName}/$batch`;\n return new Request(fullUrl, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"multipart/mixed\",\n \"OData-Version\": \"4.0\",\n },\n });\n }\n\n async processResponse(\n response: Response,\n options?: ExecuteOptions,\n ): Promise<Result<any>> {\n // This should not typically be called for batch operations\n // as they handle their own response processing\n return {\n data: undefined,\n error: {\n name: \"NotImplementedError\",\n message: \"Batch operations handle response processing internally\",\n timestamp: new Date(),\n } as any,\n };\n }\n\n /**\n * Execute the batch operation.\n *\n * @param options - Optional fetch options and batch-specific options (includes beforeRequest hook)\n * @returns A tuple of results matching the input builders\n */\n async execute<EO extends ExecuteOptions>(\n options?: RequestInit & FFetchOptions & EO,\n ): Promise<Result<ExtractTupleTypes<Builders>>> {\n const baseUrl = this.context._getBaseUrl?.();\n if (!baseUrl) {\n return {\n data: undefined,\n error: {\n name: \"ConfigurationError\",\n message:\n \"Base URL not available - execution context must implement _getBaseUrl()\",\n timestamp: new Date(),\n } as any,\n };\n }\n\n try {\n // Convert builders to native Request objects\n const requests: Request[] = this.builders.map((builder) =>\n builder.toRequest(baseUrl),\n );\n\n // Format batch request (automatically groups mutations into changesets)\n const { body, boundary } = await formatBatchRequestFromNative(\n requests,\n baseUrl,\n );\n\n // Execute the batch request\n const response = await this.context._makeRequest<string>(\n `/${this.databaseName}/$batch`,\n {\n ...options,\n method: \"POST\",\n headers: {\n ...options?.headers,\n \"Content-Type\": `multipart/mixed; boundary=${boundary}`,\n \"OData-Version\": \"4.0\",\n },\n body,\n },\n );\n\n if (response.error) {\n return { data: undefined, error: response.error };\n }\n\n // Extract the actual boundary from the 
response\n // FileMaker uses its own boundary, not the one we sent\n const firstLine =\n response.data.split(\"\\r\\n\")[0] || response.data.split(\"\\n\")[0] || \"\";\n const actualBoundary = firstLine.startsWith(\"--\")\n ? firstLine.substring(2)\n : boundary;\n\n // Parse the multipart response\n const contentTypeHeader = `multipart/mixed; boundary=${actualBoundary}`;\n const parsedResponses = parseBatchResponse(\n response.data,\n contentTypeHeader,\n );\n\n // Check if we got the expected number of responses\n if (parsedResponses.length !== this.builders.length) {\n return {\n data: undefined,\n error: {\n name: \"BatchError\",\n message: `Expected ${this.builders.length} responses but got ${parsedResponses.length}`,\n timestamp: new Date(),\n } as any,\n };\n }\n\n // Process each response using the corresponding builder\n // Build tuple by processing each builder in order\n type ResultTuple = ExtractTupleTypes<Builders>;\n\n // Process builders sequentially to preserve tuple order\n const processedResults: any[] = [];\n for (let i = 0; i < this.originalBuilders.length; i++) {\n const builder = this.originalBuilders[i];\n const parsed = parsedResponses[i];\n\n if (!builder || !parsed) {\n processedResults.push(undefined);\n continue;\n }\n\n // Convert parsed response to native Response\n const nativeResponse = parsedToResponse(parsed);\n\n // Let the builder process its own response\n const result = await builder.processResponse(nativeResponse, options);\n\n if (result.error) {\n processedResults.push(undefined);\n } else {\n processedResults.push(result.data);\n }\n }\n\n // Use a type assertion that TypeScript will respect\n // ExtractTupleTypes ensures this is a proper tuple type\n return {\n data: processedResults as unknown as ResultTuple,\n error: undefined,\n };\n } catch (err) {\n return {\n data: undefined,\n error: {\n name: \"BatchError\",\n message: err instanceof Error ? 
err.message : \"Unknown error\",\n timestamp: new Date(),\n } as any,\n };\n }\n }\n}\n"],"names":[],"mappings":";;;;AA0BA,SAAS,iBAAiB,QAAuC;AAC/D,QAAM,UAAU,IAAI,QAAQ,OAAO,OAAO;AAG1C,MAAI,OAAO,SAAS,QAAQ,OAAO,SAAS,QAAW;AAC9C,WAAA,IAAI,SAAS,MAAM;AAAA,MACxB,QAAQ,OAAO;AAAA,MACf,YAAY,OAAO;AAAA,MACnB;AAAA,IAAA,CACD;AAAA,EAAA;AAIG,QAAA,aACJ,OAAO,OAAO,SAAS,WAAW,OAAO,OAAO,KAAK,UAAU,OAAO,IAAI;AAI5E,MAAI,SAAS,OAAO;AACpB,MAAI,WAAW,OAAO,cAAc,WAAW,WAAW,IAAI;AACnD,aAAA;AAAA,EAAA;AAGX,SAAO,IAAI,SAAS,WAAW,MAAM,OAAO,YAAY;AAAA,IACtD;AAAA,IACA,YAAY,OAAO;AAAA,IACnB;AAAA,EAAA,CACD;AACH;AAMO,MAAM,aAEb;AAAA,EAIE,YACE,UACiB,cACA,SACjB;AAPM;AACS;AAIE,SAAA,eAAA;AACA,SAAA,UAAA;AAGZ,SAAA,WAAW,CAAC,GAAG,QAAQ;AAE5B,SAAK,mBAAmB;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiB1B,WAAc,SAAqC;AAC5C,SAAA,SAAS,KAAK,OAAO;AACnB,WAAA;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOT,mBAAgE;AAGvD,WAAA;AAAA,MACL,QAAQ;AAAA,MACR,KAAK,IAAI,KAAK,YAAY;AAAA,MAC1B,MAAM;AAAA;AAAA,IACR;AAAA,EAAA;AAAA,EAGF,UAAU,SAA0B;AAGlC,UAAM,UAAU,GAAG,OAAO,IAAI,KAAK,YAAY;AACxC,WAAA,IAAI,QAAQ,SAAS;AAAA,MAC1B,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,iBAAiB;AAAA,MAAA;AAAA,IACnB,CACD;AAAA,EAAA;AAAA,EAGH,MAAM,gBACJ,UACA,SACsB;AAGf,WAAA;AAAA,MACL,MAAM;AAAA,MACN,OAAO;AAAA,QACL,MAAM;AAAA,QACN,SAAS;AAAA,QACT,+BAAe,KAAK;AAAA,MAAA;AAAA,IAExB;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASF,MAAM,QACJ,SAC8C;;AACxC,UAAA,WAAU,gBAAK,SAAQ,gBAAb;AAChB,QAAI,CAAC,SAAS;AACL,aAAA;AAAA,QACL,MAAM;AAAA,QACN,OAAO;AAAA,UACL,MAAM;AAAA,UACN,SACE;AAAA,UACF,+BAAe,KAAK;AAAA,QAAA;AAAA,MAExB;AAAA,IAAA;AAGE,QAAA;AAEI,YAAA,WAAsB,KAAK,SAAS;AAAA,QAAI,CAAC,YAC7C,QAAQ,UAAU,OAAO;AAAA,MAC3B;AAGA,YAAM,EAAE,MAAM,SAAS,IAAI,MAAM;AAAA,QAC/B;AAAA,QACA;AAAA,MACF;AAGM,YAAA,WAAW,MAAM,KAAK,QAAQ;AAAA,QAClC,IAAI,KAAK,YAAY;AAAA,QACrB;AAAA,UACE,GAAG;AAAA,UACH,QAAQ;AAAA,UACR,SAAS;AAAA,YACP,GAAG,mCAAS;AAAA,YACZ,gBAAgB,6BAA6B,QAAQ;AAAA,YACrD,iBAAiB;AAAA,UACnB;AAAA,UACA;AAAA,QAAA;AAAA,MAEJ;AAEA,UAAI,SAAS,OAAO;AAClB,eAAO,EAAE,MAAM,QAAW,OAAO,SAAS,MAAM;AA
AA,MAAA;AAKlD,YAAM,YACJ,SAAS,KAAK,MAAM,MAAM,EAAE,CAAC,KAAK,SAAS,KAAK,MAAM,IAAI,EAAE,CAAC,KAAK;AAC9D,YAAA,iBAAiB,UAAU,WAAW,IAAI,IAC5C,UAAU,UAAU,CAAC,IACrB;AAGE,YAAA,oBAAoB,6BAA6B,cAAc;AACrE,YAAM,kBAAkB;AAAA,QACtB,SAAS;AAAA,QACT;AAAA,MACF;AAGA,UAAI,gBAAgB,WAAW,KAAK,SAAS,QAAQ;AAC5C,eAAA;AAAA,UACL,MAAM;AAAA,UACN,OAAO;AAAA,YACL,MAAM;AAAA,YACN,SAAS,YAAY,KAAK,SAAS,MAAM,sBAAsB,gBAAgB,MAAM;AAAA,YACrF,+BAAe,KAAK;AAAA,UAAA;AAAA,QAExB;AAAA,MAAA;AAQF,YAAM,mBAA0B,CAAC;AACjC,eAAS,IAAI,GAAG,IAAI,KAAK,iBAAiB,QAAQ,KAAK;AAC/C,cAAA,UAAU,KAAK,iBAAiB,CAAC;AACjC,cAAA,SAAS,gBAAgB,CAAC;AAE5B,YAAA,CAAC,WAAW,CAAC,QAAQ;AACvB,2BAAiB,KAAK,MAAS;AAC/B;AAAA,QAAA;AAII,cAAA,iBAAiB,iBAAiB,MAAM;AAG9C,cAAM,SAAS,MAAM,QAAQ,gBAAgB,gBAAgB,OAAO;AAEpE,YAAI,OAAO,OAAO;AAChB,2BAAiB,KAAK,MAAS;AAAA,QAAA,OAC1B;AACY,2BAAA,KAAK,OAAO,IAAI;AAAA,QAAA;AAAA,MACnC;AAKK,aAAA;AAAA,QACL,MAAM;AAAA,QACN,OAAO;AAAA,MACT;AAAA,aACO,KAAK;AACL,aAAA;AAAA,QACL,MAAM;AAAA,QACN,OAAO;AAAA,UACL,MAAM;AAAA,UACN,SAAS,eAAe,QAAQ,IAAI,UAAU;AAAA,UAC9C,+BAAe,KAAK;AAAA,QAAA;AAAA,MAExB;AAAA,IAAA;AAAA,EACF;AAEJ;"}
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
/**
 * Batch Request Utilities
 *
 * Utilities for formatting and parsing OData batch requests using multipart/mixed format.
 * OData batch requests allow bundling multiple operations into a single HTTP request,
 * with support for transactional changesets.
 */
/** Plain description of a single HTTP sub-request inside a batch. */
export interface RequestConfig {
    /** HTTP method, e.g. "GET", "POST", "PATCH". */
    method: string;
    /** Absolute URL, or a path relative to the service base URL. */
    url: string;
    /** Optional request body (already serialized to a string). */
    body?: string;
    /** Optional request headers. */
    headers?: Record<string, string>;
}
/** One parsed sub-response extracted from a multipart batch response. */
export interface ParsedBatchResponse {
    status: number;
    statusText: string;
    /** Header names are lower-cased by the parser. */
    headers: Record<string, string>;
    /** JSON-parsed body when possible, raw text otherwise, null when absent. */
    body: any;
}
/**
 * Generates a random boundary string for multipart requests
 * @param prefix - Prefix for the boundary (e.g., "batch_" or "changeset_")
 * @returns A boundary string with the prefix and 32 random hex characters
 */
export declare function generateBoundary(prefix?: string): string;
/**
 * Formats multiple requests into a batch request body
 * @param requests - Array of request configurations
 * @param baseUrl - The base URL to prepend to relative URLs
 * @param batchBoundary - Optional boundary string for the batch (generated if not provided)
 * @returns Object containing the formatted body and boundary
 */
export declare function formatBatchRequest(requests: RequestConfig[], baseUrl: string, batchBoundary?: string): {
    body: string;
    boundary: string;
};
/**
 * Formats multiple Request objects into a batch request body
 * Supports explicit changesets via Request arrays
 * @param requests - Array of Request objects or Request arrays (for explicit changesets)
 * @param baseUrl - The base URL to prepend to relative URLs
 * @param batchBoundary - Optional boundary string for the batch (generated if not provided)
 * @returns Promise resolving to object containing the formatted body and boundary
 */
export declare function formatBatchRequestFromNative(requests: Array<Request | Request[]>, baseUrl: string, batchBoundary?: string): Promise<{
    body: string;
    boundary: string;
}>;
/**
 * Extracts the boundary from a Content-Type header
 * @param contentType - The Content-Type header value
 * @returns The boundary string, or null if not found
 */
export declare function extractBoundary(contentType: string): string | null;
/**
 * Parses a batch response into individual responses
 * @param responseText - The raw batch response text
 * @param contentType - The Content-Type header from the response
 * @returns Array of parsed responses in the same order as the request
 */
export declare function parseBatchResponse(responseText: string, contentType: string): ParsedBatchResponse[];
|
|
@@ -0,0 +1,252 @@
|
|
|
1
|
+
/**
 * Generates a random multipart boundary string.
 * @param {string} [prefix="batch_"] - Prefix for the boundary (e.g. "batch_" or "changeset_").
 * @returns {string} The prefix followed by 32 random lowercase hex characters.
 */
function generateBoundary(prefix = "batch_") {
  let randomHex = "";
  for (let i = 0; i < 32; i++) {
    randomHex += Math.floor(Math.random() * 16).toString(16);
  }
  return prefix + randomHex;
}
|
|
8
|
+
/**
 * Converts a native Request into a plain config object
 * ({ method, url, body, headers }).
 * The request is cloned before the body is read, so the caller's
 * original body stream stays consumable.
 * @param {Request} request - Native fetch Request object.
 * @returns {Promise<{method: string, url: string, body: string | undefined, headers: Record<string, string>}>}
 */
async function requestToConfig(request) {
  const headers = Object.fromEntries(request.headers);
  const body = request.body ? await request.clone().text() : undefined;
  return {
    method: request.method,
    url: request.url,
    body,
    headers
  };
}
|
|
25
|
+
/**
 * Formats a single HTTP request for inclusion in a batch part.
 *
 * Formatting rules (per the embedded source comments, for FileMaker OData):
 * - Bodyless requests (GET): request line, then TWO blank lines.
 * - Requests with a body: request line, headers, blank line, body —
 *   with NO blank line after the body (the boundary follows directly).
 *
 * @param {{method: string, url: string, body?: string, headers?: Record<string, string>}} request
 * @param {string} baseUrl - Base URL prepended to relative URLs.
 * @returns {string} The formatted sub-request, CRLF line endings.
 */
function formatSubRequest(request, baseUrl) {
  const lines = [];
  // Multipart part headers required for each embedded HTTP request.
  lines.push("Content-Type: application/http");
  lines.push("Content-Transfer-Encoding: binary");
  lines.push("");
  // Convert relative URLs to absolute.
  const fullUrl = request.url.startsWith("http") ? request.url : `${baseUrl}${request.url}`;
  lines.push(`${request.method} ${fullUrl} HTTP/1.1`);
  if (request.body) {
    if (request.headers) {
      for (const [key, value] of Object.entries(request.headers)) {
        // Authorization travels on the outer batch request, never sub-requests.
        if (key.toLowerCase() !== "authorization") {
          lines.push(`${key}: ${value}`);
        }
      }
    }
    const headerKeys = request.headers ? Object.keys(request.headers) : [];
    const hasContentType = headerKeys.some(
      (k) => k.toLowerCase() === "content-type"
    );
    if (!hasContentType) {
      lines.push("Content-Type: application/json");
    }
    const hasContentLength = headerKeys.some(
      (k) => k.toLowerCase() === "content-length"
    );
    if (!hasContentLength) {
      // FIX: Content-Length must be the UTF-8 *byte* length. The previous
      // `request.body.length` counted UTF-16 code units, so bodies with
      // non-ASCII characters advertised a too-small length and were truncated.
      lines.push(`Content-Length: ${new TextEncoder().encode(request.body).length}`);
    }
    lines.push("");
    lines.push(request.body);
    // No blank line after the body — the boundary comes immediately.
  } else {
    // Bodyless request: two blank lines terminate the part.
    lines.push("");
    lines.push("");
  }
  return lines.join("\r\n");
}
|
|
60
|
+
/**
 * Formats a changeset wrapping one or more sub-requests.
 * @param {Array<{method: string, url: string, body?: string, headers?: Record<string, string>}>} requests
 * @param {string} baseUrl - Base URL prepended to relative URLs.
 * @param {string} changesetBoundary - Boundary used inside the changeset.
 * @returns {string} The formatted changeset, CRLF line endings.
 */
function formatChangeset(requests, baseUrl, changesetBoundary) {
  const parts = [
    `Content-Type: multipart/mixed; boundary=${changesetBoundary}`,
    ""
  ];
  for (const req of requests) {
    parts.push(`--${changesetBoundary}`, formatSubRequest(req, baseUrl));
  }
  parts.push(`--${changesetBoundary}--`);
  return parts.join("\r\n");
}
|
|
71
|
+
/**
 * Formats native Request objects into a multipart batch request body.
 * An array element that is itself an array of Requests becomes one explicit
 * changeset; a lone GET stays an individual part; any other lone request is
 * wrapped in a single-operation changeset.
 * @param {Array<Request | Request[]>} requests
 * @param {string} baseUrl - Base URL prepended to relative URLs.
 * @param {string} [batchBoundary] - Batch boundary; generated when omitted.
 * @returns {Promise<{body: string, boundary: string}>}
 */
async function formatBatchRequestFromNative(requests, baseUrl, batchBoundary) {
  const boundary = batchBoundary || generateBoundary("batch_");
  const parts = [];
  for (const item of requests) {
    parts.push(`--${boundary}`);
    if (Array.isArray(item)) {
      // Explicit changeset: convert every Request, then wrap them together.
      const configs = [];
      for (const req of item) {
        configs.push(await requestToConfig(req));
      }
      parts.push(formatChangeset(configs, baseUrl, generateBoundary("changeset_")));
    } else {
      const config = await requestToConfig(item);
      if (config.method === "GET") {
        // GET requests are always individual parts.
        parts.push(formatSubRequest(config, baseUrl));
      } else {
        // Non-GET operations are wrapped in their own changeset.
        parts.push(formatChangeset([config], baseUrl, generateBoundary("changeset_")));
      }
    }
  }
  parts.push(`--${boundary}--`);
  return {
    body: parts.join("\r\n"),
    boundary
  };
}
|
|
101
|
+
/**
 * Extracts the boundary parameter from a Content-Type header value.
 * FIX: accepts both bare (`boundary=x`) and RFC 2046 quoted
 * (`boundary="x"`) forms; the previous regex kept the quote characters,
 * which broke subsequent response splitting.
 * @param {string} contentType - The Content-Type header value.
 * @returns {string | null} The boundary string, or null if not found.
 */
function extractBoundary(contentType) {
  const match = contentType.match(/boundary="?([^";]+)"?/);
  return match && match[1] ? match[1].trim() : null;
}
|
|
105
|
+
/**
 * Parses an HTTP status line such as "HTTP/1.1 200 OK".
 * @param {string} line - The raw status line.
 * @returns {{status: number, statusText: string}} Zeroed result when the
 *   line does not look like an HTTP status line.
 */
function parseStatusLine(line) {
  const match = line.match(/HTTP\/\d\.\d\s+(\d+)\s*(.*)/);
  if (!match || !match[1]) {
    return { status: 0, statusText: "" };
  }
  const rawText = match[2];
  return {
    status: Number.parseInt(match[1], 10),
    statusText: rawText ? rawText.trim() : ""
  };
}
|
|
116
|
+
/**
 * Parses raw header lines ("Name: value") into an object.
 * Header names are lower-cased; lines without a "Name:" part are skipped.
 * @param {string[]} lines - Raw header lines.
 * @returns {Record<string, string>} Parsed headers.
 */
function parseHeaders(lines) {
  const headers = {};
  for (const line of lines) {
    const sep = line.indexOf(":");
    if (sep <= 0) {
      continue;
    }
    const name = line.substring(0, sep).trim().toLowerCase();
    headers[name] = line.substring(sep + 1).trim();
  }
  return headers;
}
|
|
128
|
+
/**
 * Parses one HTTP response embedded in a multipart batch part.
 * Returns { status: 0, statusText: "Invalid response", headers: {}, body: null }
 * when no HTTP status line is found in the part.
 * @param {string} part - Raw part text (CRLF line endings).
 * @returns {{status: number, statusText: string, headers: Record<string, string>, body: any}}
 */
function parseHttpResponse(part) {
  const lines = part.split(/\r\n/);
  // Find the HTTP status line, skipping the multipart part headers
  // (Content-Type: application/http, etc.) that precede it.
  let statusLineIndex = -1;
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    if (line && line.startsWith("HTTP/")) {
      statusLineIndex = i;
      break;
    }
  }
  if (statusLineIndex === -1) {
    return {
      status: 0,
      statusText: "Invalid response",
      headers: {},
      body: null
    };
  }
  const statusLine = lines[statusLineIndex];
  if (!statusLine) {
    return {
      status: 0,
      statusText: "Invalid response",
      headers: {},
      body: null
    };
  }
  const { status, statusText } = parseStatusLine(statusLine);
  // Collect header lines between the status line and the first empty line.
  const headerLines = [];
  let bodyStartIndex = lines.length; // default: no body
  let foundEmptyLine = false;
  for (let i = statusLineIndex + 1; i < lines.length; i++) {
    const line = lines[i];
    if (line === "") {
      bodyStartIndex = i + 1;
      foundEmptyLine = true;
      break;
    }
    // Stop at boundary markers (bodyless responses such as 204).
    if (line && line.startsWith("--")) {
      break;
    }
    if (line) {
      headerLines.push(line);
    }
  }
  const headers = parseHeaders(headerLines);
  // Body is everything after the blank line, up to the next boundary marker.
  let bodyText = "";
  if (foundEmptyLine && bodyStartIndex < lines.length) {
    const bodyLines = lines.slice(bodyStartIndex);
    const bodyLinesFiltered = [];
    for (const line of bodyLines) {
      if (line.startsWith("--")) {
        break;
      }
      bodyLinesFiltered.push(line);
    }
    bodyText = bodyLinesFiltered.join("\r\n").trim();
  }
  // Prefer JSON; fall back to raw text when the body is not valid JSON.
  let body = null;
  if (bodyText) {
    try {
      body = JSON.parse(bodyText);
    } catch {
      body = bodyText;
    }
  }
  return {
    status,
    statusText,
    headers,
    body
  };
}
|
|
201
|
+
/**
 * Parses a raw multipart batch response into individual responses.
 * Changeset parts (nested multipart/mixed) are split one level deeper on
 * their own boundary; responses are returned in the order encountered.
 * @param {string} responseText - The raw batch response text.
 * @param {string} contentType - The batch response's Content-Type header.
 * @returns {Array<{status: number, statusText: string, headers: Record<string, string>, body: any}>}
 * @throws {Error} When no boundary can be extracted from contentType.
 */
function parseBatchResponse(responseText, contentType) {
  var _a;
  const boundary = extractBoundary(contentType);
  if (!boundary) {
    throw new Error("Could not extract boundary from Content-Type header");
  }
  const results = [];
  const boundaryPattern = `--${boundary}`;
  const parts = responseText.split(boundaryPattern);
  for (const part of parts) {
    const trimmedPart = part.trim();
    // Skip empty fragments and the "--" remnant of the closing boundary.
    if (!trimmedPart || trimmedPart === "--") {
      continue;
    }
    // A nested multipart part is a changeset: split it again on the
    // changeset's own boundary.
    if (trimmedPart.includes("Content-Type: multipart/mixed")) {
      const changesetContentTypeMatch = trimmedPart.match(
        /Content-Type: multipart\/mixed;\s*boundary=([^\r\n]+)/
      );
      if (changesetContentTypeMatch) {
        const changesetBoundary = (_a = changesetContentTypeMatch == null ? void 0 : changesetContentTypeMatch[1]) == null ? void 0 : _a.trim();
        const changesetPattern = `--${changesetBoundary}`;
        const changesetParts = trimmedPart.split(changesetPattern);
        for (const changesetPart of changesetParts) {
          const trimmedChangesetPart = changesetPart.trim();
          if (!trimmedChangesetPart || trimmedChangesetPart === "--") {
            continue;
          }
          // Skip the changeset's own multipart header fragment.
          if (trimmedChangesetPart.startsWith("Content-Type: multipart/mixed")) {
            continue;
          }
          const response = parseHttpResponse(trimmedChangesetPart);
          // status 0 means the fragment had no HTTP status line — drop it.
          if (response.status > 0) {
            results.push(response);
          }
        }
      }
    } else {
      // Regular (non-changeset) response part.
      const response = parseHttpResponse(trimmedPart);
      if (response.status > 0) {
        results.push(response);
      }
    }
  }
  return results;
}
|
|
246
|
+
export {
|
|
247
|
+
extractBoundary,
|
|
248
|
+
formatBatchRequestFromNative,
|
|
249
|
+
generateBoundary,
|
|
250
|
+
parseBatchResponse
|
|
251
|
+
};
|
|
252
|
+
//# sourceMappingURL=batch-request.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"batch-request.js","sources":["../../../src/client/batch-request.ts"],"sourcesContent":["/**\n * Batch Request Utilities\n *\n * Utilities for formatting and parsing OData batch requests using multipart/mixed format.\n * OData batch requests allow bundling multiple operations into a single HTTP request,\n * with support for transactional changesets.\n */\n\nexport interface RequestConfig {\n method: string;\n url: string;\n body?: string;\n headers?: Record<string, string>;\n}\n\nexport interface ParsedBatchResponse {\n status: number;\n statusText: string;\n headers: Record<string, string>;\n body: any;\n}\n\n/**\n * Generates a random boundary string for multipart requests\n * @param prefix - Prefix for the boundary (e.g., \"batch_\" or \"changeset_\")\n * @returns A boundary string with the prefix and 32 random hex characters\n */\nexport function generateBoundary(prefix: string = \"batch_\"): string {\n const randomHex = Array.from({ length: 32 }, () =>\n Math.floor(Math.random() * 16).toString(16),\n ).join(\"\");\n return `${prefix}${randomHex}`;\n}\n\n/**\n * Converts a native Request object to RequestConfig\n * @param request - Native Request object\n * @returns RequestConfig object\n */\nasync function requestToConfig(request: Request): Promise<RequestConfig> {\n const headers: Record<string, string> = {};\n request.headers.forEach((value, key) => {\n headers[key] = value;\n });\n\n let body: string | undefined;\n if (request.body) {\n // Clone the request to read the body without consuming it\n const clonedRequest = request.clone();\n body = await clonedRequest.text();\n }\n\n return {\n method: request.method,\n url: request.url,\n body,\n headers,\n };\n}\n\n/**\n * Formats a single HTTP request for inclusion in a batch\n * @param request - The request configuration\n * @param baseUrl - The base URL to prepend to relative URLs\n * @returns Formatted request string with CRLF line endings\n *\n * Formatting rules for FileMaker OData:\n 
* - GET (no body): request line → blank → blank\n * - POST/PATCH (with body): request line → headers → blank → body (NO blank after!)\n */\nfunction formatSubRequest(request: RequestConfig, baseUrl: string): string {\n const lines: string[] = [];\n\n // Add required headers for sub-request\n lines.push(\"Content-Type: application/http\");\n lines.push(\"Content-Transfer-Encoding: binary\");\n lines.push(\"\"); // Empty line after multipart headers\n\n // Construct full URL (convert relative to absolute)\n const fullUrl = request.url.startsWith(\"http\")\n ? request.url\n : `${baseUrl}${request.url}`;\n\n // Add HTTP request line\n lines.push(`${request.method} ${fullUrl} HTTP/1.1`);\n\n // For requests with body, add headers\n if (request.body) {\n // Add request headers (excluding Authorization - it's in the outer request)\n if (request.headers) {\n for (const [key, value] of Object.entries(request.headers)) {\n if (key.toLowerCase() !== \"authorization\") {\n lines.push(`${key}: ${value}`);\n }\n }\n }\n\n // Check if Content-Type is already set\n const hasContentType =\n request.headers &&\n Object.keys(request.headers).some(\n (k) => k.toLowerCase() === \"content-type\",\n );\n\n if (!hasContentType) {\n lines.push(\"Content-Type: application/json\");\n }\n\n // Add Content-Length (required for FileMaker to read the body)\n const hasContentLength =\n request.headers &&\n Object.keys(request.headers).some(\n (k) => k.toLowerCase() === \"content-length\",\n );\n\n if (!hasContentLength) {\n lines.push(`Content-Length: ${request.body.length}`);\n }\n\n lines.push(\"\"); // Empty line between headers and body\n lines.push(request.body);\n // NO blank line after body - the boundary comes immediately\n } else {\n // For GET requests (no body), add TWO blank lines\n lines.push(\"\"); // First blank\n lines.push(\"\"); // Second blank\n }\n\n return lines.join(\"\\r\\n\");\n}\n\n/**\n * Formats a changeset containing multiple non-GET operations\n * @param requests - 
Array of request configurations (should be non-GET)\n * @param baseUrl - The base URL to prepend to relative URLs\n * @param changesetBoundary - Boundary string for the changeset\n * @returns Formatted changeset string with CRLF line endings\n */\nfunction formatChangeset(\n requests: RequestConfig[],\n baseUrl: string,\n changesetBoundary: string,\n): string {\n const lines: string[] = [];\n\n lines.push(`Content-Type: multipart/mixed; boundary=${changesetBoundary}`);\n lines.push(\"\"); // Empty line after headers\n\n // Add each request in the changeset\n for (const request of requests) {\n lines.push(`--${changesetBoundary}`);\n lines.push(formatSubRequest(request, baseUrl));\n }\n\n // Close the changeset\n lines.push(`--${changesetBoundary}--`);\n\n return lines.join(\"\\r\\n\");\n}\n\n/**\n * Formats multiple requests into a batch request body\n * @param requests - Array of request configurations\n * @param baseUrl - The base URL to prepend to relative URLs\n * @param batchBoundary - Optional boundary string for the batch (generated if not provided)\n * @returns Object containing the formatted body and boundary\n */\nexport function formatBatchRequest(\n requests: RequestConfig[],\n baseUrl: string,\n batchBoundary?: string,\n): { body: string; boundary: string } {\n const boundary = batchBoundary || generateBoundary(\"batch_\");\n const lines: string[] = [];\n\n // Group requests: consecutive non-GET operations go into changesets\n let currentChangeset: RequestConfig[] | null = null;\n\n for (const request of requests) {\n if (request.method === \"GET\") {\n // GET operations break changesets and are added individually\n if (currentChangeset) {\n // Close and add the current changeset\n const changesetBoundary = generateBoundary(\"changeset_\");\n lines.push(`--${boundary}`);\n lines.push(\n formatChangeset(currentChangeset, baseUrl, changesetBoundary),\n );\n currentChangeset = null;\n }\n\n // Add GET request\n lines.push(`--${boundary}`);\n 
lines.push(formatSubRequest(request, baseUrl));\n } else {\n // Non-GET operations: add to current changeset or create new one\n if (!currentChangeset) {\n currentChangeset = [];\n }\n currentChangeset.push(request);\n }\n }\n\n // Add any remaining changeset\n if (currentChangeset) {\n const changesetBoundary = generateBoundary(\"changeset_\");\n lines.push(`--${boundary}`);\n lines.push(formatChangeset(currentChangeset, baseUrl, changesetBoundary));\n }\n\n // Close the batch\n lines.push(`--${boundary}--`);\n\n return {\n body: lines.join(\"\\r\\n\"),\n boundary,\n };\n}\n\n/**\n * Formats multiple Request objects into a batch request body\n * Supports explicit changesets via Request arrays\n * @param requests - Array of Request objects or Request arrays (for explicit changesets)\n * @param baseUrl - The base URL to prepend to relative URLs\n * @param batchBoundary - Optional boundary string for the batch (generated if not provided)\n * @returns Promise resolving to object containing the formatted body and boundary\n */\nexport async function formatBatchRequestFromNative(\n requests: Array<Request | Request[]>,\n baseUrl: string,\n batchBoundary?: string,\n): Promise<{ body: string; boundary: string }> {\n const boundary = batchBoundary || generateBoundary(\"batch_\");\n const lines: string[] = [];\n\n for (const item of requests) {\n if (Array.isArray(item)) {\n // Explicit changeset - array of Requests\n const changesetBoundary = generateBoundary(\"changeset_\");\n const changesetConfigs: RequestConfig[] = [];\n\n for (const request of item) {\n changesetConfigs.push(await requestToConfig(request));\n }\n\n lines.push(`--${boundary}`);\n lines.push(formatChangeset(changesetConfigs, baseUrl, changesetBoundary));\n } else {\n // Single request\n const config = await requestToConfig(item);\n\n if (config.method === \"GET\") {\n // GET requests are always individual\n lines.push(`--${boundary}`);\n lines.push(formatSubRequest(config, baseUrl));\n } else {\n // 
Non-GET operations wrapped in a changeset\n const changesetBoundary = generateBoundary(\"changeset_\");\n lines.push(`--${boundary}`);\n lines.push(formatChangeset([config], baseUrl, changesetBoundary));\n }\n }\n }\n\n // Close the batch\n lines.push(`--${boundary}--`);\n\n return {\n body: lines.join(\"\\r\\n\"),\n boundary,\n };\n}\n\n/**\n * Extracts the boundary from a Content-Type header\n * @param contentType - The Content-Type header value\n * @returns The boundary string, or null if not found\n */\nexport function extractBoundary(contentType: string): string | null {\n const match = contentType.match(/boundary=([^;]+)/);\n return match && match[1] ? match[1].trim() : null;\n}\n\n/**\n * Parses an HTTP response line (status line)\n * @param line - The HTTP status line (e.g., \"HTTP/1.1 200 OK\")\n * @returns Object containing status code and status text\n */\nfunction parseStatusLine(line: string): {\n status: number;\n statusText: string;\n} {\n const match = line.match(/HTTP\\/\\d\\.\\d\\s+(\\d+)\\s*(.*)/);\n if (!match || !match[1]) {\n return { status: 0, statusText: \"\" };\n }\n return {\n status: parseInt(match[1], 10),\n statusText: match[2]?.trim() || \"\",\n };\n}\n\n/**\n * Parses headers from an array of header lines\n * @param lines - Array of header lines\n * @returns Object containing parsed headers\n */\nfunction parseHeaders(lines: string[]): Record<string, string> {\n const headers: Record<string, string> = {};\n for (const line of lines) {\n const colonIndex = line.indexOf(\":\");\n if (colonIndex > 0) {\n const key = line.substring(0, colonIndex).trim();\n const value = line.substring(colonIndex + 1).trim();\n headers[key.toLowerCase()] = value;\n }\n }\n return headers;\n}\n\n/**\n * Parses a single HTTP response from a batch part\n * @param part - The raw HTTP response string\n * @returns Parsed response object\n */\nfunction parseHttpResponse(part: string): ParsedBatchResponse {\n const lines = part.split(/\\r\\n/);\n\n // Find the 
HTTP status line (skip multipart headers)\n let statusLineIndex = -1;\n for (let i = 0; i < lines.length; i++) {\n const line = lines[i];\n if (line && line.startsWith(\"HTTP/\")) {\n statusLineIndex = i;\n break;\n }\n }\n\n if (statusLineIndex === -1) {\n return {\n status: 0,\n statusText: \"Invalid response\",\n headers: {},\n body: null,\n };\n }\n\n const statusLine = lines[statusLineIndex];\n if (!statusLine) {\n return {\n status: 0,\n statusText: \"Invalid response\",\n headers: {},\n body: null,\n };\n }\n\n const { status, statusText } = parseStatusLine(statusLine);\n\n // Parse headers (between status line and empty line)\n const headerLines: string[] = [];\n let bodyStartIndex = lines.length; // Default to end of lines (no body)\n let foundEmptyLine = false;\n\n for (let i = statusLineIndex + 1; i < lines.length; i++) {\n const line = lines[i];\n if (line === \"\") {\n bodyStartIndex = i + 1;\n foundEmptyLine = true;\n break;\n }\n // Stop at boundary markers (for responses without bodies like 204)\n if (line && line.startsWith(\"--\")) {\n break;\n }\n if (line) {\n headerLines.push(line);\n }\n }\n\n const headers = parseHeaders(headerLines);\n\n // Parse body (everything after the empty line, if there was one)\n let bodyText = \"\";\n if (foundEmptyLine && bodyStartIndex < lines.length) {\n const bodyLines = lines.slice(bodyStartIndex);\n // Stop at boundary markers\n const bodyLinesFiltered: string[] = [];\n for (const line of bodyLines) {\n if (line.startsWith(\"--\")) {\n break;\n }\n bodyLinesFiltered.push(line);\n }\n bodyText = bodyLinesFiltered.join(\"\\r\\n\").trim();\n }\n\n let body: any = null;\n if (bodyText) {\n try {\n body = JSON.parse(bodyText);\n } catch {\n // If not JSON, return as text\n body = bodyText;\n }\n }\n\n return {\n status,\n statusText,\n headers,\n body,\n };\n}\n\n/**\n * Parses a batch response into individual responses\n * @param responseText - The raw batch response text\n * @param contentType - The Content-Type 
header from the response\n * @returns Array of parsed responses in the same order as the request\n */\nexport function parseBatchResponse(\n responseText: string,\n contentType: string,\n): ParsedBatchResponse[] {\n const boundary = extractBoundary(contentType);\n if (!boundary) {\n throw new Error(\"Could not extract boundary from Content-Type header\");\n }\n\n const results: ParsedBatchResponse[] = [];\n\n // Split by boundary (handle both --boundary and --boundary--)\n const boundaryPattern = `--${boundary}`;\n const parts = responseText.split(boundaryPattern);\n\n for (const part of parts) {\n const trimmedPart = part.trim();\n\n // Skip empty parts and the closing boundary marker\n if (!trimmedPart || trimmedPart === \"--\") {\n continue;\n }\n\n // Check if this part is a changeset (nested multipart)\n if (trimmedPart.includes(\"Content-Type: multipart/mixed\")) {\n // Extract the changeset boundary\n const changesetContentTypeMatch = trimmedPart.match(\n /Content-Type: multipart\\/mixed;\\s*boundary=([^\\r\\n]+)/,\n );\n if (changesetContentTypeMatch) {\n const changesetBoundary = changesetContentTypeMatch?.[1]?.trim();\n const changesetPattern = `--${changesetBoundary}`;\n const changesetParts = trimmedPart.split(changesetPattern);\n\n for (const changesetPart of changesetParts) {\n const trimmedChangesetPart = changesetPart.trim();\n if (!trimmedChangesetPart || trimmedChangesetPart === \"--\") {\n continue;\n }\n\n // Skip the changeset header\n if (\n trimmedChangesetPart.startsWith(\"Content-Type: multipart/mixed\")\n ) {\n continue;\n }\n\n const response = parseHttpResponse(trimmedChangesetPart);\n if (response.status > 0) {\n results.push(response);\n }\n }\n }\n } else {\n // Regular response (not a changeset)\n const response = parseHttpResponse(trimmedPart);\n if (response.status > 0) {\n results.push(response);\n }\n }\n }\n\n return 
results;\n}\n"],"names":[],"mappings":"AA2BgB,SAAA,iBAAiB,SAAiB,UAAkB;AAClE,QAAM,YAAY,MAAM;AAAA,IAAK,EAAE,QAAQ,GAAG;AAAA,IAAG,MAC3C,KAAK,MAAM,KAAK,WAAW,EAAE,EAAE,SAAS,EAAE;AAAA,EAAA,EAC1C,KAAK,EAAE;AACF,SAAA,GAAG,MAAM,GAAG,SAAS;AAC9B;AAOA,eAAe,gBAAgB,SAA0C;AACvE,QAAM,UAAkC,CAAC;AACzC,UAAQ,QAAQ,QAAQ,CAAC,OAAO,QAAQ;AACtC,YAAQ,GAAG,IAAI;AAAA,EAAA,CAChB;AAEG,MAAA;AACJ,MAAI,QAAQ,MAAM;AAEV,UAAA,gBAAgB,QAAQ,MAAM;AAC7B,WAAA,MAAM,cAAc,KAAK;AAAA,EAAA;AAG3B,SAAA;AAAA,IACL,QAAQ,QAAQ;AAAA,IAChB,KAAK,QAAQ;AAAA,IACb;AAAA,IACA;AAAA,EACF;AACF;AAYA,SAAS,iBAAiB,SAAwB,SAAyB;AACzE,QAAM,QAAkB,CAAC;AAGzB,QAAM,KAAK,gCAAgC;AAC3C,QAAM,KAAK,mCAAmC;AAC9C,QAAM,KAAK,EAAE;AAGb,QAAM,UAAU,QAAQ,IAAI,WAAW,MAAM,IACzC,QAAQ,MACR,GAAG,OAAO,GAAG,QAAQ,GAAG;AAG5B,QAAM,KAAK,GAAG,QAAQ,MAAM,IAAI,OAAO,WAAW;AAGlD,MAAI,QAAQ,MAAM;AAEhB,QAAI,QAAQ,SAAS;AACR,iBAAA,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,QAAQ,OAAO,GAAG;AACtD,YAAA,IAAI,YAAY,MAAM,iBAAiB;AACzC,gBAAM,KAAK,GAAG,GAAG,KAAK,KAAK,EAAE;AAAA,QAAA;AAAA,MAC/B;AAAA,IACF;AAIF,UAAM,iBACJ,QAAQ,WACR,OAAO,KAAK,QAAQ,OAAO,EAAE;AAAA,MAC3B,CAAC,MAAM,EAAE,kBAAkB;AAAA,IAC7B;AAEF,QAAI,CAAC,gBAAgB;AACnB,YAAM,KAAK,gCAAgC;AAAA,IAAA;AAI7C,UAAM,mBACJ,QAAQ,WACR,OAAO,KAAK,QAAQ,OAAO,EAAE;AAAA,MAC3B,CAAC,MAAM,EAAE,kBAAkB;AAAA,IAC7B;AAEF,QAAI,CAAC,kBAAkB;AACrB,YAAM,KAAK,mBAAmB,QAAQ,KAAK,MAAM,EAAE;AAAA,IAAA;AAGrD,UAAM,KAAK,EAAE;AACP,UAAA,KAAK,QAAQ,IAAI;AAAA,EAAA,OAElB;AAEL,UAAM,KAAK,EAAE;AACb,UAAM,KAAK,EAAE;AAAA,EAAA;AAGR,SAAA,MAAM,KAAK,MAAM;AAC1B;AASA,SAAS,gBACP,UACA,SACA,mBACQ;AACR,QAAM,QAAkB,CAAC;AAEnB,QAAA,KAAK,2CAA2C,iBAAiB,EAAE;AACzE,QAAM,KAAK,EAAE;AAGb,aAAW,WAAW,UAAU;AACxB,UAAA,KAAK,KAAK,iBAAiB,EAAE;AACnC,UAAM,KAAK,iBAAiB,SAAS,OAAO,CAAC;AAAA,EAAA;AAIzC,QAAA,KAAK,KAAK,iBAAiB,IAAI;AAE9B,SAAA,MAAM,KAAK,MAAM;AAC1B;AAqEsB,eAAA,6BACpB,UACA,SACA,eAC6C;AACvC,QAAA,WAAW,iBAAiB,iBAAiB,QAAQ;AAC3D,QAAM,QAAkB,CAAC;AAEzB,aAAW,QAAQ,UAAU;AACvB,QAAA,MAAM,QAAQ,IAAI,GAAG;AAEjB,YAAA,oBAAoB,iBAAiB,YAAY;AACvD,YAAM,mBAAoC,CAAC;AAE3C,iBAAW,WAAW,MAAM;AAC1B,yBAAiB,KAAK,MAAM,gBAAgB,OAAO,CAAC;AAAA,MAAA;AAGhD,YAAA,KAAK,
KAAK,QAAQ,EAAE;AAC1B,YAAM,KAAK,gBAAgB,kBAAkB,SAAS,iBAAiB,CAAC;AAAA,IAAA,OACnE;AAEC,YAAA,SAAS,MAAM,gBAAgB,IAAI;AAErC,UAAA,OAAO,WAAW,OAAO;AAErB,cAAA,KAAK,KAAK,QAAQ,EAAE;AAC1B,cAAM,KAAK,iBAAiB,QAAQ,OAAO,CAAC;AAAA,MAAA,OACvC;AAEC,cAAA,oBAAoB,iBAAiB,YAAY;AACjD,cAAA,KAAK,KAAK,QAAQ,EAAE;AAC1B,cAAM,KAAK,gBAAgB,CAAC,MAAM,GAAG,SAAS,iBAAiB,CAAC;AAAA,MAAA;AAAA,IAClE;AAAA,EACF;AAII,QAAA,KAAK,KAAK,QAAQ,IAAI;AAErB,SAAA;AAAA,IACL,MAAM,MAAM,KAAK,MAAM;AAAA,IACvB;AAAA,EACF;AACF;AAOO,SAAS,gBAAgB,aAAoC;AAC5D,QAAA,QAAQ,YAAY,MAAM,kBAAkB;AAC3C,SAAA,SAAS,MAAM,CAAC,IAAI,MAAM,CAAC,EAAE,SAAS;AAC/C;AAOA,SAAS,gBAAgB,MAGvB;AAxQc;AAyQR,QAAA,QAAQ,KAAK,MAAM,6BAA6B;AACtD,MAAI,CAAC,SAAS,CAAC,MAAM,CAAC,GAAG;AACvB,WAAO,EAAE,QAAQ,GAAG,YAAY,GAAG;AAAA,EAAA;AAE9B,SAAA;AAAA,IACL,QAAQ,SAAS,MAAM,CAAC,GAAG,EAAE;AAAA,IAC7B,cAAY,WAAM,CAAC,MAAP,mBAAU,WAAU;AAAA,EAClC;AACF;AAOA,SAAS,aAAa,OAAyC;AAC7D,QAAM,UAAkC,CAAC;AACzC,aAAW,QAAQ,OAAO;AAClB,UAAA,aAAa,KAAK,QAAQ,GAAG;AACnC,QAAI,aAAa,GAAG;AAClB,YAAM,MAAM,KAAK,UAAU,GAAG,UAAU,EAAE,KAAK;AAC/C,YAAM,QAAQ,KAAK,UAAU,aAAa,CAAC,EAAE,KAAK;AAC1C,cAAA,IAAI,YAAa,CAAA,IAAI;AAAA,IAAA;AAAA,EAC/B;AAEK,SAAA;AACT;AAOA,SAAS,kBAAkB,MAAmC;AACtD,QAAA,QAAQ,KAAK,MAAM,MAAM;AAG/B,MAAI,kBAAkB;AACtB,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AAC/B,UAAA,OAAO,MAAM,CAAC;AACpB,QAAI,QAAQ,KAAK,WAAW,OAAO,GAAG;AAClB,wBAAA;AAClB;AAAA,IAAA;AAAA,EACF;AAGF,MAAI,oBAAoB,IAAI;AACnB,WAAA;AAAA,MACL,QAAQ;AAAA,MACR,YAAY;AAAA,MACZ,SAAS,CAAC;AAAA,MACV,MAAM;AAAA,IACR;AAAA,EAAA;AAGI,QAAA,aAAa,MAAM,eAAe;AACxC,MAAI,CAAC,YAAY;AACR,WAAA;AAAA,MACL,QAAQ;AAAA,MACR,YAAY;AAAA,MACZ,SAAS,CAAC;AAAA,MACV,MAAM;AAAA,IACR;AAAA,EAAA;AAGF,QAAM,EAAE,QAAQ,eAAe,gBAAgB,UAAU;AAGzD,QAAM,cAAwB,CAAC;AAC/B,MAAI,iBAAiB,MAAM;AAC3B,MAAI,iBAAiB;AAErB,WAAS,IAAI,kBAAkB,GAAG,IAAI,MAAM,QAAQ,KAAK;AACjD,UAAA,OAAO,MAAM,CAAC;AACpB,QAAI,SAAS,IAAI;AACf,uBAAiB,IAAI;AACJ,uBAAA;AACjB;AAAA,IAAA;AAGF,QAAI,QAAQ,KAAK,WAAW,IAAI,GAAG;AACjC;AAAA,IAAA;AAEF,QAAI,MAAM;AACR,kBAAY,KAAK,IAAI;AAAA,IAAA;AAAA,EACvB;AAGI,QAAA,UAAU,aAAa,WAAW;AAGxC,MAAI,WAAW;AACX,MAAA,kBAAkB,iBAAiB,MAAM,Q
AAQ;AAC7C,UAAA,YAAY,MAAM,MAAM,cAAc;AAE5C,UAAM,oBAA8B,CAAC;AACrC,eAAW,QAAQ,WAAW;AACxB,UAAA,KAAK,WAAW,IAAI,GAAG;AACzB;AAAA,MAAA;AAEF,wBAAkB,KAAK,IAAI;AAAA,IAAA;AAE7B,eAAW,kBAAkB,KAAK,MAAM,EAAE,KAAK;AAAA,EAAA;AAGjD,MAAI,OAAY;AAChB,MAAI,UAAU;AACR,QAAA;AACK,aAAA,KAAK,MAAM,QAAQ;AAAA,IAAA,QACpB;AAEC,aAAA;AAAA,IAAA;AAAA,EACT;AAGK,SAAA;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAQgB,SAAA,mBACd,cACA,aACuB;AA7YT;AA8YR,QAAA,WAAW,gBAAgB,WAAW;AAC5C,MAAI,CAAC,UAAU;AACP,UAAA,IAAI,MAAM,qDAAqD;AAAA,EAAA;AAGvE,QAAM,UAAiC,CAAC;AAGlC,QAAA,kBAAkB,KAAK,QAAQ;AAC/B,QAAA,QAAQ,aAAa,MAAM,eAAe;AAEhD,aAAW,QAAQ,OAAO;AAClB,UAAA,cAAc,KAAK,KAAK;AAG1B,QAAA,CAAC,eAAe,gBAAgB,MAAM;AACxC;AAAA,IAAA;AAIE,QAAA,YAAY,SAAS,+BAA+B,GAAG;AAEzD,YAAM,4BAA4B,YAAY;AAAA,QAC5C;AAAA,MACF;AACA,UAAI,2BAA2B;AAC7B,cAAM,qBAAoB,4EAA4B,OAA5B,mBAAgC;AACpD,cAAA,mBAAmB,KAAK,iBAAiB;AACzC,cAAA,iBAAiB,YAAY,MAAM,gBAAgB;AAEzD,mBAAW,iBAAiB,gBAAgB;AACpC,gBAAA,uBAAuB,cAAc,KAAK;AAC5C,cAAA,CAAC,wBAAwB,yBAAyB,MAAM;AAC1D;AAAA,UAAA;AAKA,cAAA,qBAAqB,WAAW,+BAA+B,GAC/D;AACA;AAAA,UAAA;AAGI,gBAAA,WAAW,kBAAkB,oBAAoB;AACnD,cAAA,SAAS,SAAS,GAAG;AACvB,oBAAQ,KAAK,QAAQ;AAAA,UAAA;AAAA,QACvB;AAAA,MACF;AAAA,IACF,OACK;AAEC,YAAA,WAAW,kBAAkB,WAAW;AAC1C,UAAA,SAAS,SAAS,GAAG;AACvB,gBAAQ,KAAK,QAAQ;AAAA,MAAA;AAAA,IACvB;AAAA,EACF;AAGK,SAAA;AACT;"}
|