@proofkit/fmodata 0.1.0-alpha.6 → 0.1.0-alpha.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +333 -3
- package/dist/esm/client/batch-builder.d.ts +54 -0
- package/dist/esm/client/batch-builder.js +179 -0
- package/dist/esm/client/batch-builder.js.map +1 -0
- package/dist/esm/client/batch-request.d.ts +61 -0
- package/dist/esm/client/batch-request.js +252 -0
- package/dist/esm/client/batch-request.js.map +1 -0
- package/dist/esm/client/database.d.ts +43 -11
- package/dist/esm/client/database.js +64 -10
- package/dist/esm/client/database.js.map +1 -1
- package/dist/esm/client/delete-builder.d.ts +21 -2
- package/dist/esm/client/delete-builder.js +76 -9
- package/dist/esm/client/delete-builder.js.map +1 -1
- package/dist/esm/client/entity-set.d.ts +15 -4
- package/dist/esm/client/entity-set.js +23 -7
- package/dist/esm/client/entity-set.js.map +1 -1
- package/dist/esm/client/filemaker-odata.d.ts +11 -5
- package/dist/esm/client/filemaker-odata.js +46 -14
- package/dist/esm/client/filemaker-odata.js.map +1 -1
- package/dist/esm/client/insert-builder.d.ts +38 -3
- package/dist/esm/client/insert-builder.js +195 -9
- package/dist/esm/client/insert-builder.js.map +1 -1
- package/dist/esm/client/query-builder.d.ts +19 -3
- package/dist/esm/client/query-builder.js +193 -17
- package/dist/esm/client/query-builder.js.map +1 -1
- package/dist/esm/client/record-builder.d.ts +17 -2
- package/dist/esm/client/record-builder.js +87 -5
- package/dist/esm/client/record-builder.js.map +1 -1
- package/dist/esm/client/response-processor.d.ts +38 -0
- package/dist/esm/client/schema-manager.d.ts +57 -0
- package/dist/esm/client/schema-manager.js +132 -0
- package/dist/esm/client/schema-manager.js.map +1 -0
- package/dist/esm/client/update-builder.d.ts +34 -11
- package/dist/esm/client/update-builder.js +119 -19
- package/dist/esm/client/update-builder.js.map +1 -1
- package/dist/esm/errors.d.ts +14 -1
- package/dist/esm/errors.js +26 -0
- package/dist/esm/errors.js.map +1 -1
- package/dist/esm/index.d.ts +3 -2
- package/dist/esm/index.js +3 -1
- package/dist/esm/transform.d.ts +9 -0
- package/dist/esm/transform.js +7 -0
- package/dist/esm/transform.js.map +1 -1
- package/dist/esm/types.d.ts +69 -1
- package/package.json +1 -1
- package/src/client/batch-builder.ts +265 -0
- package/src/client/batch-request.ts +485 -0
- package/src/client/database.ts +106 -52
- package/src/client/delete-builder.ts +116 -14
- package/src/client/entity-set.ts +80 -6
- package/src/client/filemaker-odata.ts +65 -19
- package/src/client/insert-builder.ts +296 -18
- package/src/client/query-builder.ts +278 -17
- package/src/client/record-builder.ts +119 -11
- package/src/client/response-processor.ts +103 -0
- package/src/client/schema-manager.ts +246 -0
- package/src/client/update-builder.ts +195 -37
- package/src/errors.ts +33 -1
- package/src/index.ts +13 -0
- package/src/transform.ts +19 -6
- package/src/types.ts +89 -1
|
@@ -0,0 +1,252 @@
|
|
|
1
|
+
/**
 * Builds a random multipart boundary string.
 * @param prefix - Text placed before the random portion (e.g. "batch_" or "changeset_").
 * @returns The prefix followed by 32 random lowercase hex characters.
 */
function generateBoundary(prefix = "batch_") {
  let randomHex = "";
  for (let i = 0; i < 32; i++) {
    randomHex += Math.floor(Math.random() * 16).toString(16);
  }
  return prefix + randomHex;
}
|
|
8
|
+
/**
 * Converts a native fetch Request into a plain configuration object
 * (method, url, optional body text, and a header map).
 *
 * The request is cloned before its body is read so the original body
 * stream is left unconsumed for later use.
 * @param request - Native Request object.
 * @returns Promise resolving to the plain request configuration.
 */
async function requestToConfig(request) {
  const headers = {};
  for (const [key, value] of request.headers.entries()) {
    headers[key] = value;
  }
  let body;
  if (request.body) {
    body = await request.clone().text();
  }
  return { method: request.method, url: request.url, body, headers };
}
|
|
25
|
+
/**
 * Returns the number of octets in the UTF-8 encoding of `text`.
 * Needed because HTTP Content-Length counts bytes, while
 * String.prototype.length counts UTF-16 code units.
 */
function utf8ByteLength(text) {
  let bytes = 0;
  for (const ch of text) {
    const cp = ch.codePointAt(0) ?? 0;
    if (cp <= 0x7f) bytes += 1;
    else if (cp <= 0x7ff) bytes += 2;
    else if (cp <= 0xffff) bytes += 3;
    else bytes += 4;
  }
  return bytes;
}
/**
 * Formats a single HTTP request for inclusion in a multipart batch body.
 *
 * Layout rules (matching FileMaker OData expectations, per the original
 * comments in this module):
 * - GET (no body): request line, then TWO blank lines.
 * - Requests with a body: request line, headers, one blank line, body,
 *   and NO blank line after the body (the boundary follows immediately).
 *
 * The Authorization header is dropped from sub-requests: authentication
 * travels on the outer batch request.
 * @param request - Request configuration (method, url, optional body/headers).
 * @param baseUrl - Base URL prepended to relative request URLs.
 * @returns The formatted sub-request, joined with CRLF line endings.
 */
function formatSubRequest(request, baseUrl) {
  const lines = [];
  // Multipart part headers, then the blank separator line.
  lines.push("Content-Type: application/http");
  lines.push("Content-Transfer-Encoding: binary");
  lines.push("");
  // Convert relative URLs to absolute ones.
  const fullUrl = request.url.startsWith("http") ? request.url : `${baseUrl}${request.url}`;
  lines.push(`${request.method} ${fullUrl} HTTP/1.1`);
  if (request.body) {
    // Copy caller headers, excluding Authorization (lives on the outer request).
    if (request.headers) {
      for (const [key, value] of Object.entries(request.headers)) {
        if (key.toLowerCase() !== "authorization") {
          lines.push(`${key}: ${value}`);
        }
      }
    }
    const headerKeys = request.headers
      ? Object.keys(request.headers).map((k) => k.toLowerCase())
      : [];
    if (!headerKeys.includes("content-type")) {
      lines.push("Content-Type: application/json");
    }
    // FIX: Content-Length must be the UTF-8 byte count, not the UTF-16
    // code-unit count (`request.body.length`); bodies containing
    // non-ASCII characters would otherwise declare too few octets.
    if (!headerKeys.includes("content-length")) {
      lines.push(`Content-Length: ${utf8ByteLength(request.body)}`);
    }
    lines.push("");
    lines.push(request.body);
    // NO blank line after the body — the boundary comes immediately.
  } else {
    // GET requests (no body) get TWO blank lines.
    lines.push("");
    lines.push("");
  }
  return lines.join("\r\n");
}
|
|
60
|
+
/**
 * Wraps a set of (non-GET) request configurations into a single
 * multipart changeset.
 * @param requests - Request configurations to include, in order.
 * @param baseUrl - Base URL prepended to relative request URLs.
 * @param changesetBoundary - Boundary string delimiting the changeset parts.
 * @returns The formatted changeset, joined with CRLF line endings.
 */
function formatChangeset(requests, baseUrl, changesetBoundary) {
  const parts = [
    `Content-Type: multipart/mixed; boundary=${changesetBoundary}`,
    "",
  ];
  requests.forEach((request) => {
    parts.push(`--${changesetBoundary}`, formatSubRequest(request, baseUrl));
  });
  // Closing delimiter for the changeset.
  parts.push(`--${changesetBoundary}--`);
  return parts.join("\r\n");
}
|
|
71
|
+
/**
 * Builds a multipart batch body from native Request objects.
 *
 * A nested array of Requests becomes one explicit changeset. A lone GET
 * is emitted as an individual part; any other lone method is wrapped in
 * its own single-request changeset.
 * @param requests - Requests and/or Request arrays (explicit changesets).
 * @param baseUrl - Base URL prepended to relative request URLs.
 * @param batchBoundary - Optional batch boundary (generated when omitted).
 * @returns Promise resolving to the formatted body and the boundary used.
 */
async function formatBatchRequestFromNative(requests, baseUrl, batchBoundary) {
  const boundary = batchBoundary || generateBoundary("batch_");
  const lines = [];
  for (const item of requests) {
    // Every item — changeset or single request — opens a new batch part.
    lines.push(`--${boundary}`);
    if (Array.isArray(item)) {
      // Explicit changeset: convert every request, then wrap them together.
      const configs = [];
      for (const request of item) {
        configs.push(await requestToConfig(request));
      }
      lines.push(formatChangeset(configs, baseUrl, generateBoundary("changeset_")));
    } else {
      const config = await requestToConfig(item);
      if (config.method === "GET") {
        // GET requests are always individual parts.
        lines.push(formatSubRequest(config, baseUrl));
      } else {
        // Non-GET operations are wrapped in a one-request changeset.
        lines.push(formatChangeset([config], baseUrl, generateBoundary("changeset_")));
      }
    }
  }
  // Closing delimiter for the batch.
  lines.push(`--${boundary}--`);
  return { body: lines.join("\r\n"), boundary };
}
|
|
101
|
+
/**
 * Extracts the multipart boundary parameter from a Content-Type header.
 *
 * Per RFC 2046 the boundary value may be a quoted-string; surrounding
 * double quotes are stripped so the returned value can be used directly
 * when building "--boundary" delimiters.
 * @param contentType - The Content-Type header value.
 * @returns The boundary string, or null when no boundary parameter exists.
 */
function extractBoundary(contentType) {
  const match = contentType.match(/boundary=([^;]+)/);
  if (!match || !match[1]) {
    return null;
  }
  let boundary = match[1].trim();
  // FIX: accept quoted boundary values (boundary="...") as allowed by
  // RFC 2046; previously the quotes were kept, breaking part splitting.
  if (boundary.length >= 2 && boundary.startsWith('"') && boundary.endsWith('"')) {
    boundary = boundary.slice(1, -1);
  }
  return boundary;
}
|
|
105
|
+
/**
 * Parses an HTTP status line such as "HTTP/1.1 200 OK".
 * @param line - The raw status line.
 * @returns Status code and reason phrase; { status: 0, statusText: "" }
 *          when the line does not look like a status line.
 */
function parseStatusLine(line) {
  const match = line.match(/HTTP\/\d\.\d\s+(\d+)\s*(.*)/);
  if (!match || !match[1]) {
    return { status: 0, statusText: "" };
  }
  const reason = match[2] ? match[2].trim() : "";
  return { status: parseInt(match[1], 10), statusText: reason };
}
|
|
116
|
+
/**
 * Parses raw header lines into a lookup keyed by lower-cased header name.
 * Lines without a "Name: value" shape are ignored; later duplicates win.
 * @param lines - Raw header lines.
 * @returns Map of lower-cased header names to trimmed values.
 */
function parseHeaders(lines) {
  const headers = {};
  lines.forEach((line) => {
    const sep = line.indexOf(":");
    if (sep <= 0) {
      return; // No "name:" prefix — skip this line.
    }
    const name = line.substring(0, sep).trim().toLowerCase();
    headers[name] = line.substring(sep + 1).trim();
  });
  return headers;
}
|
|
128
|
+
/**
 * Parses one raw HTTP response extracted from a multipart batch part.
 *
 * Skips any multipart part headers before the "HTTP/..." status line,
 * reads headers up to the first blank line, and treats the remaining
 * lines (up to a "--" boundary marker) as the body. A JSON body is
 * decoded; anything else is returned as text.
 * @param part - Raw HTTP response text (CRLF-delimited).
 * @returns Parsed status, statusText, headers, and body (null when absent
 *          or empty; status 0 when no status line is found).
 */
function parseHttpResponse(part) {
  const invalid = { status: 0, statusText: "Invalid response", headers: {}, body: null };
  const lines = part.split(/\r\n/);

  // Locate the HTTP status line, skipping multipart part headers.
  const statusLineIndex = lines.findIndex((l) => l && l.startsWith("HTTP/"));
  if (statusLineIndex === -1) {
    return invalid;
  }
  const statusLine = lines[statusLineIndex];
  if (!statusLine) {
    return invalid;
  }
  const { status, statusText } = parseStatusLine(statusLine);

  // Collect header lines until a blank line (body follows) or a
  // boundary marker (no body, e.g. a 204 response).
  const headerLines = [];
  let bodyStartIndex = lines.length;
  let sawBlankLine = false;
  for (let i = statusLineIndex + 1; i < lines.length; i++) {
    const line = lines[i];
    if (line === "") {
      bodyStartIndex = i + 1;
      sawBlankLine = true;
      break;
    }
    if (line && line.startsWith("--")) {
      break;
    }
    if (line) {
      headerLines.push(line);
    }
  }
  const headers = parseHeaders(headerLines);

  // Gather body lines after the blank line, stopping at a boundary marker.
  let bodyText = "";
  if (sawBlankLine && bodyStartIndex < lines.length) {
    const collected = [];
    for (const line of lines.slice(bodyStartIndex)) {
      if (line.startsWith("--")) {
        break;
      }
      collected.push(line);
    }
    bodyText = collected.join("\r\n").trim();
  }

  let body = null;
  if (bodyText) {
    try {
      body = JSON.parse(bodyText);
    } catch {
      body = bodyText; // Not JSON — hand back the raw text.
    }
  }

  return { status, statusText, headers, body };
}
|
|
201
|
+
/**
 * Splits a multipart batch response into the individual sub-responses.
 *
 * Changeset parts (nested multipart/mixed) are unwrapped so their inner
 * responses appear in the flat result list, preserving part order.
 * @param responseText - Raw batch response body.
 * @param contentType - Content-Type header of the batch response (carries the boundary).
 * @returns Parsed responses in the order they appear in the batch.
 * @throws Error when no boundary can be extracted from the Content-Type.
 */
function parseBatchResponse(responseText, contentType) {
  const boundary = extractBoundary(contentType);
  if (!boundary) {
    throw new Error("Could not extract boundary from Content-Type header");
  }
  const results = [];
  for (const rawPart of responseText.split(`--${boundary}`)) {
    const part = rawPart.trim();
    // Ignore preamble/epilogue and the closing "--" marker.
    if (!part || part === "--") {
      continue;
    }
    if (part.includes("Content-Type: multipart/mixed")) {
      // Nested changeset: pull out its own boundary and unwrap each inner part.
      const boundaryMatch = part.match(
        /Content-Type: multipart\/mixed;\s*boundary=([^\r\n]+)/
      );
      if (!boundaryMatch) {
        continue;
      }
      const innerBoundary = boundaryMatch[1] ? boundaryMatch[1].trim() : undefined;
      for (const rawInner of part.split(`--${innerBoundary}`)) {
        const inner = rawInner.trim();
        if (!inner || inner === "--") {
          continue;
        }
        // Skip the changeset's own multipart header fragment.
        if (inner.startsWith("Content-Type: multipart/mixed")) {
          continue;
        }
        const response = parseHttpResponse(inner);
        if (response.status > 0) {
          results.push(response);
        }
      }
    } else {
      // Plain (non-changeset) part.
      const response = parseHttpResponse(part);
      if (response.status > 0) {
        results.push(response);
      }
    }
  }
  return results;
}
|
|
246
|
+
export {
|
|
247
|
+
extractBoundary,
|
|
248
|
+
formatBatchRequestFromNative,
|
|
249
|
+
generateBoundary,
|
|
250
|
+
parseBatchResponse
|
|
251
|
+
};
|
|
252
|
+
//# sourceMappingURL=batch-request.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"batch-request.js","sources":["../../../src/client/batch-request.ts"],"sourcesContent":["/**\n * Batch Request Utilities\n *\n * Utilities for formatting and parsing OData batch requests using multipart/mixed format.\n * OData batch requests allow bundling multiple operations into a single HTTP request,\n * with support for transactional changesets.\n */\n\nexport interface RequestConfig {\n method: string;\n url: string;\n body?: string;\n headers?: Record<string, string>;\n}\n\nexport interface ParsedBatchResponse {\n status: number;\n statusText: string;\n headers: Record<string, string>;\n body: any;\n}\n\n/**\n * Generates a random boundary string for multipart requests\n * @param prefix - Prefix for the boundary (e.g., \"batch_\" or \"changeset_\")\n * @returns A boundary string with the prefix and 32 random hex characters\n */\nexport function generateBoundary(prefix: string = \"batch_\"): string {\n const randomHex = Array.from({ length: 32 }, () =>\n Math.floor(Math.random() * 16).toString(16),\n ).join(\"\");\n return `${prefix}${randomHex}`;\n}\n\n/**\n * Converts a native Request object to RequestConfig\n * @param request - Native Request object\n * @returns RequestConfig object\n */\nasync function requestToConfig(request: Request): Promise<RequestConfig> {\n const headers: Record<string, string> = {};\n request.headers.forEach((value, key) => {\n headers[key] = value;\n });\n\n let body: string | undefined;\n if (request.body) {\n // Clone the request to read the body without consuming it\n const clonedRequest = request.clone();\n body = await clonedRequest.text();\n }\n\n return {\n method: request.method,\n url: request.url,\n body,\n headers,\n };\n}\n\n/**\n * Formats a single HTTP request for inclusion in a batch\n * @param request - The request configuration\n * @param baseUrl - The base URL to prepend to relative URLs\n * @returns Formatted request string with CRLF line endings\n *\n * Formatting rules for FileMaker OData:\n 
* - GET (no body): request line → blank → blank\n * - POST/PATCH (with body): request line → headers → blank → body (NO blank after!)\n */\nfunction formatSubRequest(request: RequestConfig, baseUrl: string): string {\n const lines: string[] = [];\n\n // Add required headers for sub-request\n lines.push(\"Content-Type: application/http\");\n lines.push(\"Content-Transfer-Encoding: binary\");\n lines.push(\"\"); // Empty line after multipart headers\n\n // Construct full URL (convert relative to absolute)\n const fullUrl = request.url.startsWith(\"http\")\n ? request.url\n : `${baseUrl}${request.url}`;\n\n // Add HTTP request line\n lines.push(`${request.method} ${fullUrl} HTTP/1.1`);\n\n // For requests with body, add headers\n if (request.body) {\n // Add request headers (excluding Authorization - it's in the outer request)\n if (request.headers) {\n for (const [key, value] of Object.entries(request.headers)) {\n if (key.toLowerCase() !== \"authorization\") {\n lines.push(`${key}: ${value}`);\n }\n }\n }\n\n // Check if Content-Type is already set\n const hasContentType =\n request.headers &&\n Object.keys(request.headers).some(\n (k) => k.toLowerCase() === \"content-type\",\n );\n\n if (!hasContentType) {\n lines.push(\"Content-Type: application/json\");\n }\n\n // Add Content-Length (required for FileMaker to read the body)\n const hasContentLength =\n request.headers &&\n Object.keys(request.headers).some(\n (k) => k.toLowerCase() === \"content-length\",\n );\n\n if (!hasContentLength) {\n lines.push(`Content-Length: ${request.body.length}`);\n }\n\n lines.push(\"\"); // Empty line between headers and body\n lines.push(request.body);\n // NO blank line after body - the boundary comes immediately\n } else {\n // For GET requests (no body), add TWO blank lines\n lines.push(\"\"); // First blank\n lines.push(\"\"); // Second blank\n }\n\n return lines.join(\"\\r\\n\");\n}\n\n/**\n * Formats a changeset containing multiple non-GET operations\n * @param requests - 
Array of request configurations (should be non-GET)\n * @param baseUrl - The base URL to prepend to relative URLs\n * @param changesetBoundary - Boundary string for the changeset\n * @returns Formatted changeset string with CRLF line endings\n */\nfunction formatChangeset(\n requests: RequestConfig[],\n baseUrl: string,\n changesetBoundary: string,\n): string {\n const lines: string[] = [];\n\n lines.push(`Content-Type: multipart/mixed; boundary=${changesetBoundary}`);\n lines.push(\"\"); // Empty line after headers\n\n // Add each request in the changeset\n for (const request of requests) {\n lines.push(`--${changesetBoundary}`);\n lines.push(formatSubRequest(request, baseUrl));\n }\n\n // Close the changeset\n lines.push(`--${changesetBoundary}--`);\n\n return lines.join(\"\\r\\n\");\n}\n\n/**\n * Formats multiple requests into a batch request body\n * @param requests - Array of request configurations\n * @param baseUrl - The base URL to prepend to relative URLs\n * @param batchBoundary - Optional boundary string for the batch (generated if not provided)\n * @returns Object containing the formatted body and boundary\n */\nexport function formatBatchRequest(\n requests: RequestConfig[],\n baseUrl: string,\n batchBoundary?: string,\n): { body: string; boundary: string } {\n const boundary = batchBoundary || generateBoundary(\"batch_\");\n const lines: string[] = [];\n\n // Group requests: consecutive non-GET operations go into changesets\n let currentChangeset: RequestConfig[] | null = null;\n\n for (const request of requests) {\n if (request.method === \"GET\") {\n // GET operations break changesets and are added individually\n if (currentChangeset) {\n // Close and add the current changeset\n const changesetBoundary = generateBoundary(\"changeset_\");\n lines.push(`--${boundary}`);\n lines.push(\n formatChangeset(currentChangeset, baseUrl, changesetBoundary),\n );\n currentChangeset = null;\n }\n\n // Add GET request\n lines.push(`--${boundary}`);\n 
lines.push(formatSubRequest(request, baseUrl));\n } else {\n // Non-GET operations: add to current changeset or create new one\n if (!currentChangeset) {\n currentChangeset = [];\n }\n currentChangeset.push(request);\n }\n }\n\n // Add any remaining changeset\n if (currentChangeset) {\n const changesetBoundary = generateBoundary(\"changeset_\");\n lines.push(`--${boundary}`);\n lines.push(formatChangeset(currentChangeset, baseUrl, changesetBoundary));\n }\n\n // Close the batch\n lines.push(`--${boundary}--`);\n\n return {\n body: lines.join(\"\\r\\n\"),\n boundary,\n };\n}\n\n/**\n * Formats multiple Request objects into a batch request body\n * Supports explicit changesets via Request arrays\n * @param requests - Array of Request objects or Request arrays (for explicit changesets)\n * @param baseUrl - The base URL to prepend to relative URLs\n * @param batchBoundary - Optional boundary string for the batch (generated if not provided)\n * @returns Promise resolving to object containing the formatted body and boundary\n */\nexport async function formatBatchRequestFromNative(\n requests: Array<Request | Request[]>,\n baseUrl: string,\n batchBoundary?: string,\n): Promise<{ body: string; boundary: string }> {\n const boundary = batchBoundary || generateBoundary(\"batch_\");\n const lines: string[] = [];\n\n for (const item of requests) {\n if (Array.isArray(item)) {\n // Explicit changeset - array of Requests\n const changesetBoundary = generateBoundary(\"changeset_\");\n const changesetConfigs: RequestConfig[] = [];\n\n for (const request of item) {\n changesetConfigs.push(await requestToConfig(request));\n }\n\n lines.push(`--${boundary}`);\n lines.push(formatChangeset(changesetConfigs, baseUrl, changesetBoundary));\n } else {\n // Single request\n const config = await requestToConfig(item);\n\n if (config.method === \"GET\") {\n // GET requests are always individual\n lines.push(`--${boundary}`);\n lines.push(formatSubRequest(config, baseUrl));\n } else {\n // 
Non-GET operations wrapped in a changeset\n const changesetBoundary = generateBoundary(\"changeset_\");\n lines.push(`--${boundary}`);\n lines.push(formatChangeset([config], baseUrl, changesetBoundary));\n }\n }\n }\n\n // Close the batch\n lines.push(`--${boundary}--`);\n\n return {\n body: lines.join(\"\\r\\n\"),\n boundary,\n };\n}\n\n/**\n * Extracts the boundary from a Content-Type header\n * @param contentType - The Content-Type header value\n * @returns The boundary string, or null if not found\n */\nexport function extractBoundary(contentType: string): string | null {\n const match = contentType.match(/boundary=([^;]+)/);\n return match && match[1] ? match[1].trim() : null;\n}\n\n/**\n * Parses an HTTP response line (status line)\n * @param line - The HTTP status line (e.g., \"HTTP/1.1 200 OK\")\n * @returns Object containing status code and status text\n */\nfunction parseStatusLine(line: string): {\n status: number;\n statusText: string;\n} {\n const match = line.match(/HTTP\\/\\d\\.\\d\\s+(\\d+)\\s*(.*)/);\n if (!match || !match[1]) {\n return { status: 0, statusText: \"\" };\n }\n return {\n status: parseInt(match[1], 10),\n statusText: match[2]?.trim() || \"\",\n };\n}\n\n/**\n * Parses headers from an array of header lines\n * @param lines - Array of header lines\n * @returns Object containing parsed headers\n */\nfunction parseHeaders(lines: string[]): Record<string, string> {\n const headers: Record<string, string> = {};\n for (const line of lines) {\n const colonIndex = line.indexOf(\":\");\n if (colonIndex > 0) {\n const key = line.substring(0, colonIndex).trim();\n const value = line.substring(colonIndex + 1).trim();\n headers[key.toLowerCase()] = value;\n }\n }\n return headers;\n}\n\n/**\n * Parses a single HTTP response from a batch part\n * @param part - The raw HTTP response string\n * @returns Parsed response object\n */\nfunction parseHttpResponse(part: string): ParsedBatchResponse {\n const lines = part.split(/\\r\\n/);\n\n // Find the 
HTTP status line (skip multipart headers)\n let statusLineIndex = -1;\n for (let i = 0; i < lines.length; i++) {\n const line = lines[i];\n if (line && line.startsWith(\"HTTP/\")) {\n statusLineIndex = i;\n break;\n }\n }\n\n if (statusLineIndex === -1) {\n return {\n status: 0,\n statusText: \"Invalid response\",\n headers: {},\n body: null,\n };\n }\n\n const statusLine = lines[statusLineIndex];\n if (!statusLine) {\n return {\n status: 0,\n statusText: \"Invalid response\",\n headers: {},\n body: null,\n };\n }\n\n const { status, statusText } = parseStatusLine(statusLine);\n\n // Parse headers (between status line and empty line)\n const headerLines: string[] = [];\n let bodyStartIndex = lines.length; // Default to end of lines (no body)\n let foundEmptyLine = false;\n\n for (let i = statusLineIndex + 1; i < lines.length; i++) {\n const line = lines[i];\n if (line === \"\") {\n bodyStartIndex = i + 1;\n foundEmptyLine = true;\n break;\n }\n // Stop at boundary markers (for responses without bodies like 204)\n if (line && line.startsWith(\"--\")) {\n break;\n }\n if (line) {\n headerLines.push(line);\n }\n }\n\n const headers = parseHeaders(headerLines);\n\n // Parse body (everything after the empty line, if there was one)\n let bodyText = \"\";\n if (foundEmptyLine && bodyStartIndex < lines.length) {\n const bodyLines = lines.slice(bodyStartIndex);\n // Stop at boundary markers\n const bodyLinesFiltered: string[] = [];\n for (const line of bodyLines) {\n if (line.startsWith(\"--\")) {\n break;\n }\n bodyLinesFiltered.push(line);\n }\n bodyText = bodyLinesFiltered.join(\"\\r\\n\").trim();\n }\n\n let body: any = null;\n if (bodyText) {\n try {\n body = JSON.parse(bodyText);\n } catch {\n // If not JSON, return as text\n body = bodyText;\n }\n }\n\n return {\n status,\n statusText,\n headers,\n body,\n };\n}\n\n/**\n * Parses a batch response into individual responses\n * @param responseText - The raw batch response text\n * @param contentType - The Content-Type 
header from the response\n * @returns Array of parsed responses in the same order as the request\n */\nexport function parseBatchResponse(\n responseText: string,\n contentType: string,\n): ParsedBatchResponse[] {\n const boundary = extractBoundary(contentType);\n if (!boundary) {\n throw new Error(\"Could not extract boundary from Content-Type header\");\n }\n\n const results: ParsedBatchResponse[] = [];\n\n // Split by boundary (handle both --boundary and --boundary--)\n const boundaryPattern = `--${boundary}`;\n const parts = responseText.split(boundaryPattern);\n\n for (const part of parts) {\n const trimmedPart = part.trim();\n\n // Skip empty parts and the closing boundary marker\n if (!trimmedPart || trimmedPart === \"--\") {\n continue;\n }\n\n // Check if this part is a changeset (nested multipart)\n if (trimmedPart.includes(\"Content-Type: multipart/mixed\")) {\n // Extract the changeset boundary\n const changesetContentTypeMatch = trimmedPart.match(\n /Content-Type: multipart\\/mixed;\\s*boundary=([^\\r\\n]+)/,\n );\n if (changesetContentTypeMatch) {\n const changesetBoundary = changesetContentTypeMatch?.[1]?.trim();\n const changesetPattern = `--${changesetBoundary}`;\n const changesetParts = trimmedPart.split(changesetPattern);\n\n for (const changesetPart of changesetParts) {\n const trimmedChangesetPart = changesetPart.trim();\n if (!trimmedChangesetPart || trimmedChangesetPart === \"--\") {\n continue;\n }\n\n // Skip the changeset header\n if (\n trimmedChangesetPart.startsWith(\"Content-Type: multipart/mixed\")\n ) {\n continue;\n }\n\n const response = parseHttpResponse(trimmedChangesetPart);\n if (response.status > 0) {\n results.push(response);\n }\n }\n }\n } else {\n // Regular response (not a changeset)\n const response = parseHttpResponse(trimmedPart);\n if (response.status > 0) {\n results.push(response);\n }\n }\n }\n\n return 
results;\n}\n"],"names":[],"mappings":"AA2BgB,SAAA,iBAAiB,SAAiB,UAAkB;AAClE,QAAM,YAAY,MAAM;AAAA,IAAK,EAAE,QAAQ,GAAG;AAAA,IAAG,MAC3C,KAAK,MAAM,KAAK,WAAW,EAAE,EAAE,SAAS,EAAE;AAAA,EAAA,EAC1C,KAAK,EAAE;AACF,SAAA,GAAG,MAAM,GAAG,SAAS;AAC9B;AAOA,eAAe,gBAAgB,SAA0C;AACvE,QAAM,UAAkC,CAAC;AACzC,UAAQ,QAAQ,QAAQ,CAAC,OAAO,QAAQ;AACtC,YAAQ,GAAG,IAAI;AAAA,EAAA,CAChB;AAEG,MAAA;AACJ,MAAI,QAAQ,MAAM;AAEV,UAAA,gBAAgB,QAAQ,MAAM;AAC7B,WAAA,MAAM,cAAc,KAAK;AAAA,EAAA;AAG3B,SAAA;AAAA,IACL,QAAQ,QAAQ;AAAA,IAChB,KAAK,QAAQ;AAAA,IACb;AAAA,IACA;AAAA,EACF;AACF;AAYA,SAAS,iBAAiB,SAAwB,SAAyB;AACzE,QAAM,QAAkB,CAAC;AAGzB,QAAM,KAAK,gCAAgC;AAC3C,QAAM,KAAK,mCAAmC;AAC9C,QAAM,KAAK,EAAE;AAGb,QAAM,UAAU,QAAQ,IAAI,WAAW,MAAM,IACzC,QAAQ,MACR,GAAG,OAAO,GAAG,QAAQ,GAAG;AAG5B,QAAM,KAAK,GAAG,QAAQ,MAAM,IAAI,OAAO,WAAW;AAGlD,MAAI,QAAQ,MAAM;AAEhB,QAAI,QAAQ,SAAS;AACR,iBAAA,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,QAAQ,OAAO,GAAG;AACtD,YAAA,IAAI,YAAY,MAAM,iBAAiB;AACzC,gBAAM,KAAK,GAAG,GAAG,KAAK,KAAK,EAAE;AAAA,QAAA;AAAA,MAC/B;AAAA,IACF;AAIF,UAAM,iBACJ,QAAQ,WACR,OAAO,KAAK,QAAQ,OAAO,EAAE;AAAA,MAC3B,CAAC,MAAM,EAAE,kBAAkB;AAAA,IAC7B;AAEF,QAAI,CAAC,gBAAgB;AACnB,YAAM,KAAK,gCAAgC;AAAA,IAAA;AAI7C,UAAM,mBACJ,QAAQ,WACR,OAAO,KAAK,QAAQ,OAAO,EAAE;AAAA,MAC3B,CAAC,MAAM,EAAE,kBAAkB;AAAA,IAC7B;AAEF,QAAI,CAAC,kBAAkB;AACrB,YAAM,KAAK,mBAAmB,QAAQ,KAAK,MAAM,EAAE;AAAA,IAAA;AAGrD,UAAM,KAAK,EAAE;AACP,UAAA,KAAK,QAAQ,IAAI;AAAA,EAAA,OAElB;AAEL,UAAM,KAAK,EAAE;AACb,UAAM,KAAK,EAAE;AAAA,EAAA;AAGR,SAAA,MAAM,KAAK,MAAM;AAC1B;AASA,SAAS,gBACP,UACA,SACA,mBACQ;AACR,QAAM,QAAkB,CAAC;AAEnB,QAAA,KAAK,2CAA2C,iBAAiB,EAAE;AACzE,QAAM,KAAK,EAAE;AAGb,aAAW,WAAW,UAAU;AACxB,UAAA,KAAK,KAAK,iBAAiB,EAAE;AACnC,UAAM,KAAK,iBAAiB,SAAS,OAAO,CAAC;AAAA,EAAA;AAIzC,QAAA,KAAK,KAAK,iBAAiB,IAAI;AAE9B,SAAA,MAAM,KAAK,MAAM;AAC1B;AAqEsB,eAAA,6BACpB,UACA,SACA,eAC6C;AACvC,QAAA,WAAW,iBAAiB,iBAAiB,QAAQ;AAC3D,QAAM,QAAkB,CAAC;AAEzB,aAAW,QAAQ,UAAU;AACvB,QAAA,MAAM,QAAQ,IAAI,GAAG;AAEjB,YAAA,oBAAoB,iBAAiB,YAAY;AACvD,YAAM,mBAAoC,CAAC;AAE3C,iBAAW,WAAW,MAAM;AAC1B,yBAAiB,KAAK,MAAM,gBAAgB,OAAO,CAAC;AAAA,MAAA;AAGhD,YAAA,KAAK,
KAAK,QAAQ,EAAE;AAC1B,YAAM,KAAK,gBAAgB,kBAAkB,SAAS,iBAAiB,CAAC;AAAA,IAAA,OACnE;AAEC,YAAA,SAAS,MAAM,gBAAgB,IAAI;AAErC,UAAA,OAAO,WAAW,OAAO;AAErB,cAAA,KAAK,KAAK,QAAQ,EAAE;AAC1B,cAAM,KAAK,iBAAiB,QAAQ,OAAO,CAAC;AAAA,MAAA,OACvC;AAEC,cAAA,oBAAoB,iBAAiB,YAAY;AACjD,cAAA,KAAK,KAAK,QAAQ,EAAE;AAC1B,cAAM,KAAK,gBAAgB,CAAC,MAAM,GAAG,SAAS,iBAAiB,CAAC;AAAA,MAAA;AAAA,IAClE;AAAA,EACF;AAII,QAAA,KAAK,KAAK,QAAQ,IAAI;AAErB,SAAA;AAAA,IACL,MAAM,MAAM,KAAK,MAAM;AAAA,IACvB;AAAA,EACF;AACF;AAOO,SAAS,gBAAgB,aAAoC;AAC5D,QAAA,QAAQ,YAAY,MAAM,kBAAkB;AAC3C,SAAA,SAAS,MAAM,CAAC,IAAI,MAAM,CAAC,EAAE,SAAS;AAC/C;AAOA,SAAS,gBAAgB,MAGvB;AAxQc;AAyQR,QAAA,QAAQ,KAAK,MAAM,6BAA6B;AACtD,MAAI,CAAC,SAAS,CAAC,MAAM,CAAC,GAAG;AACvB,WAAO,EAAE,QAAQ,GAAG,YAAY,GAAG;AAAA,EAAA;AAE9B,SAAA;AAAA,IACL,QAAQ,SAAS,MAAM,CAAC,GAAG,EAAE;AAAA,IAC7B,cAAY,WAAM,CAAC,MAAP,mBAAU,WAAU;AAAA,EAClC;AACF;AAOA,SAAS,aAAa,OAAyC;AAC7D,QAAM,UAAkC,CAAC;AACzC,aAAW,QAAQ,OAAO;AAClB,UAAA,aAAa,KAAK,QAAQ,GAAG;AACnC,QAAI,aAAa,GAAG;AAClB,YAAM,MAAM,KAAK,UAAU,GAAG,UAAU,EAAE,KAAK;AAC/C,YAAM,QAAQ,KAAK,UAAU,aAAa,CAAC,EAAE,KAAK;AAC1C,cAAA,IAAI,YAAa,CAAA,IAAI;AAAA,IAAA;AAAA,EAC/B;AAEK,SAAA;AACT;AAOA,SAAS,kBAAkB,MAAmC;AACtD,QAAA,QAAQ,KAAK,MAAM,MAAM;AAG/B,MAAI,kBAAkB;AACtB,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AAC/B,UAAA,OAAO,MAAM,CAAC;AACpB,QAAI,QAAQ,KAAK,WAAW,OAAO,GAAG;AAClB,wBAAA;AAClB;AAAA,IAAA;AAAA,EACF;AAGF,MAAI,oBAAoB,IAAI;AACnB,WAAA;AAAA,MACL,QAAQ;AAAA,MACR,YAAY;AAAA,MACZ,SAAS,CAAC;AAAA,MACV,MAAM;AAAA,IACR;AAAA,EAAA;AAGI,QAAA,aAAa,MAAM,eAAe;AACxC,MAAI,CAAC,YAAY;AACR,WAAA;AAAA,MACL,QAAQ;AAAA,MACR,YAAY;AAAA,MACZ,SAAS,CAAC;AAAA,MACV,MAAM;AAAA,IACR;AAAA,EAAA;AAGF,QAAM,EAAE,QAAQ,eAAe,gBAAgB,UAAU;AAGzD,QAAM,cAAwB,CAAC;AAC/B,MAAI,iBAAiB,MAAM;AAC3B,MAAI,iBAAiB;AAErB,WAAS,IAAI,kBAAkB,GAAG,IAAI,MAAM,QAAQ,KAAK;AACjD,UAAA,OAAO,MAAM,CAAC;AACpB,QAAI,SAAS,IAAI;AACf,uBAAiB,IAAI;AACJ,uBAAA;AACjB;AAAA,IAAA;AAGF,QAAI,QAAQ,KAAK,WAAW,IAAI,GAAG;AACjC;AAAA,IAAA;AAEF,QAAI,MAAM;AACR,kBAAY,KAAK,IAAI;AAAA,IAAA;AAAA,EACvB;AAGI,QAAA,UAAU,aAAa,WAAW;AAGxC,MAAI,WAAW;AACX,MAAA,kBAAkB,iBAAiB,MAAM,Q
AAQ;AAC7C,UAAA,YAAY,MAAM,MAAM,cAAc;AAE5C,UAAM,oBAA8B,CAAC;AACrC,eAAW,QAAQ,WAAW;AACxB,UAAA,KAAK,WAAW,IAAI,GAAG;AACzB;AAAA,MAAA;AAEF,wBAAkB,KAAK,IAAI;AAAA,IAAA;AAE7B,eAAW,kBAAkB,KAAK,MAAM,EAAE,KAAK;AAAA,EAAA;AAGjD,MAAI,OAAY;AAChB,MAAI,UAAU;AACR,QAAA;AACK,aAAA,KAAK,MAAM,QAAQ;AAAA,IAAA,QACpB;AAEC,aAAA;AAAA,IAAA;AAAA,EACT;AAGK,SAAA;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAQgB,SAAA,mBACd,cACA,aACuB;AA7YT;AA8YR,QAAA,WAAW,gBAAgB,WAAW;AAC5C,MAAI,CAAC,UAAU;AACP,UAAA,IAAI,MAAM,qDAAqD;AAAA,EAAA;AAGvE,QAAM,UAAiC,CAAC;AAGlC,QAAA,kBAAkB,KAAK,QAAQ;AAC/B,QAAA,QAAQ,aAAa,MAAM,eAAe;AAEhD,aAAW,QAAQ,OAAO;AAClB,UAAA,cAAc,KAAK,KAAK;AAG1B,QAAA,CAAC,eAAe,gBAAgB,MAAM;AACxC;AAAA,IAAA;AAIE,QAAA,YAAY,SAAS,+BAA+B,GAAG;AAEzD,YAAM,4BAA4B,YAAY;AAAA,QAC5C;AAAA,MACF;AACA,UAAI,2BAA2B;AAC7B,cAAM,qBAAoB,4EAA4B,OAA5B,mBAAgC;AACpD,cAAA,mBAAmB,KAAK,iBAAiB;AACzC,cAAA,iBAAiB,YAAY,MAAM,gBAAgB;AAEzD,mBAAW,iBAAiB,gBAAgB;AACpC,gBAAA,uBAAuB,cAAc,KAAK;AAC5C,cAAA,CAAC,wBAAwB,yBAAyB,MAAM;AAC1D;AAAA,UAAA;AAKA,cAAA,qBAAqB,WAAW,+BAA+B,GAC/D;AACA;AAAA,UAAA;AAGI,gBAAA,WAAW,kBAAkB,oBAAoB;AACnD,cAAA,SAAS,SAAS,GAAG;AACvB,oBAAQ,KAAK,QAAQ;AAAA,UAAA;AAAA,QACvB;AAAA,MACF;AAAA,IACF,OACK;AAEC,YAAA,WAAW,kBAAkB,WAAW;AAC1C,UAAA,SAAS,SAAS,GAAG;AACvB,gBAAQ,KAAK,QAAQ;AAAA,MAAA;AAAA,IACvB;AAAA,EACF;AAGK,SAAA;AACT;"}
|
|
@@ -1,14 +1,10 @@
|
|
|
1
1
|
import { StandardSchemaV1 } from '@standard-schema/spec';
|
|
2
|
-
import { ExecutionContext } from '../types.js';
|
|
2
|
+
import { ExecutionContext, ExecutableBuilder, Metadata } from '../types.js';
|
|
3
3
|
import { BaseTable } from './base-table.js';
|
|
4
4
|
import { TableOccurrence } from './table-occurrence.js';
|
|
5
5
|
import { EntitySet } from './entity-set.js';
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
} ? true : false;
|
|
9
|
-
type AllHaveFmtId<Occurrences extends readonly any[]> = Occurrences extends readonly [infer First, ...infer Rest] ? HasFmtId<First> extends true ? Rest extends readonly [] ? true : AllHaveFmtId<Rest> : false : true;
|
|
10
|
-
type NoneHaveFmtId<Occurrences extends readonly any[]> = Occurrences extends readonly [infer First, ...infer Rest] ? HasFmtId<First> extends false ? Rest extends readonly [] ? true : NoneHaveFmtId<Rest> : false : true;
|
|
11
|
-
export type ValidOccurrenceMix<Occurrences extends readonly any[]> = AllHaveFmtId<Occurrences> extends true ? true : NoneHaveFmtId<Occurrences> extends true ? true : false;
|
|
6
|
+
import { BatchBuilder } from './batch-builder.js';
|
|
7
|
+
import { SchemaManager } from './schema-manager.js';
|
|
12
8
|
type ExtractSchemaFromOccurrence<O> = O extends TableOccurrence<infer BT, any, any, any> ? BT extends BaseTable<infer S, any> ? S : never : never;
|
|
13
9
|
type FindOccurrenceByName<Occurrences extends readonly TableOccurrence<any, any, any, any>[], Name extends string> = Occurrences extends readonly [
|
|
14
10
|
infer First,
|
|
@@ -20,10 +16,15 @@ export declare class Database<Occurrences extends readonly TableOccurrence<any,
|
|
|
20
16
|
private readonly context;
|
|
21
17
|
private occurrenceMap;
|
|
22
18
|
private _useEntityIds;
|
|
19
|
+
readonly schema: SchemaManager;
|
|
23
20
|
constructor(databaseName: string, context: ExecutionContext, config?: {
|
|
24
|
-
occurrences?:
|
|
25
|
-
|
|
26
|
-
|
|
21
|
+
occurrences?: Occurrences | undefined;
|
|
22
|
+
/**
|
|
23
|
+
* Whether to use entity IDs instead of field names in the actual requests to the server
|
|
24
|
+
* Defaults to true if all occurrences use entity IDs, false otherwise
|
|
25
|
+
* If set to false but some occurrences do not use entity IDs, an error will be thrown
|
|
26
|
+
*/
|
|
27
|
+
useEntityIds?: boolean;
|
|
27
28
|
});
|
|
28
29
|
/**
|
|
29
30
|
* Returns true if any table occurrence in this database is using entity IDs.
|
|
@@ -35,7 +36,18 @@ export declare class Database<Occurrences extends readonly TableOccurrence<any,
|
|
|
35
36
|
*/
|
|
36
37
|
getOccurrence(name: string): TableOccurrence<any, any, any, any> | undefined;
|
|
37
38
|
from<Name extends ExtractOccurrenceNames<Occurrences> | (string & {})>(name: Name): Occurrences extends readonly [] ? EntitySet<Record<string, StandardSchemaV1>, undefined> : Name extends ExtractOccurrenceNames<Occurrences> ? EntitySet<ExtractSchemaFromOccurrence<FindOccurrenceByName<Occurrences, Name>>, FindOccurrenceByName<Occurrences, Name>> : EntitySet<Record<string, StandardSchemaV1>, undefined>;
|
|
38
|
-
|
|
39
|
+
/**
|
|
40
|
+
* Retrieves the OData metadata for this database.
|
|
41
|
+
* @param args Optional configuration object
|
|
42
|
+
* @param args.format The format to retrieve metadata in. Defaults to "json".
|
|
43
|
+
* @returns The metadata in the specified format
|
|
44
|
+
*/
|
|
45
|
+
getMetadata(args: {
|
|
46
|
+
format: "xml";
|
|
47
|
+
}): Promise<string>;
|
|
48
|
+
getMetadata(args?: {
|
|
49
|
+
format?: "json";
|
|
50
|
+
}): Promise<Metadata>;
|
|
39
51
|
/**
|
|
40
52
|
* Lists all available tables (entity sets) in this database.
|
|
41
53
|
* @returns Promise resolving to an array of table names
|
|
@@ -62,5 +74,25 @@ export declare class Database<Occurrences extends readonly TableOccurrence<any,
|
|
|
62
74
|
resultCode: number;
|
|
63
75
|
result?: string;
|
|
64
76
|
}>;
|
|
77
|
+
/**
|
|
78
|
+
* Create a batch operation builder that allows multiple queries to be executed together
|
|
79
|
+
* in a single atomic request. All operations succeed or fail together (transactional).
|
|
80
|
+
*
|
|
81
|
+
* @param builders - Array of executable query builders to batch
|
|
82
|
+
* @returns A BatchBuilder that can be executed
|
|
83
|
+
* @example
|
|
84
|
+
* ```ts
|
|
85
|
+
* const result = await db.batch([
|
|
86
|
+
* db.from('contacts').list().top(5),
|
|
87
|
+
* db.from('users').list().top(5),
|
|
88
|
+
* db.from('contacts').insert({ name: 'John' })
|
|
89
|
+
* ]).execute();
|
|
90
|
+
*
|
|
91
|
+
* if (result.data) {
|
|
92
|
+
* const [contacts, users, insertResult] = result.data;
|
|
93
|
+
* }
|
|
94
|
+
* ```
|
|
95
|
+
*/
|
|
96
|
+
batch<const Builders extends readonly ExecutableBuilder<any>[]>(builders: Builders): BatchBuilder<Builders>;
|
|
65
97
|
}
|
|
66
98
|
export {};
|
|
@@ -2,10 +2,13 @@ var __defProp = Object.defineProperty;
|
|
|
2
2
|
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
|
3
3
|
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
|
|
4
4
|
import { EntitySet } from "./entity-set.js";
|
|
5
|
+
import { BatchBuilder } from "./batch-builder.js";
|
|
6
|
+
import { SchemaManager } from "./schema-manager.js";
|
|
5
7
|
class Database {
|
|
6
8
|
constructor(databaseName, context, config) {
|
|
7
9
|
__publicField(this, "occurrenceMap");
|
|
8
10
|
__publicField(this, "_useEntityIds", false);
|
|
11
|
+
__publicField(this, "schema");
|
|
9
12
|
this.databaseName = databaseName;
|
|
10
13
|
this.context = context;
|
|
11
14
|
this.occurrenceMap = /* @__PURE__ */ new Map();
|
|
@@ -18,7 +21,6 @@ class Database {
|
|
|
18
21
|
const hasFieldIds = occ.baseTable.isUsingFieldIds();
|
|
19
22
|
if (hasTableId && hasFieldIds) {
|
|
20
23
|
occurrencesWithIds.push(occ.name);
|
|
21
|
-
this._useEntityIds = true;
|
|
22
24
|
} else if (!hasTableId && !hasFieldIds) {
|
|
23
25
|
occurrencesWithoutIds.push(occ.name);
|
|
24
26
|
} else {
|
|
@@ -27,15 +29,34 @@ class Database {
|
|
|
27
29
|
);
|
|
28
30
|
}
|
|
29
31
|
}
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
)
|
|
32
|
+
const allOccurrencesUseEntityIds = occurrencesWithIds.length > 0 && occurrencesWithoutIds.length === 0;
|
|
33
|
+
const hasMixedUsage = occurrencesWithIds.length > 0 && occurrencesWithoutIds.length > 0;
|
|
34
|
+
if (config.useEntityIds !== void 0) {
|
|
35
|
+
if (config.useEntityIds === false) {
|
|
36
|
+
this._useEntityIds = false;
|
|
37
|
+
} else if (config.useEntityIds === true) {
|
|
38
|
+
if (hasMixedUsage || occurrencesWithoutIds.length > 0) {
|
|
39
|
+
throw new Error(
|
|
40
|
+
`useEntityIds is set to true but some occurrences do not use entity IDs. Occurrences without entity IDs: [${occurrencesWithoutIds.join(", ")}]. Either set useEntityIds to false or configure all occurrences with entity IDs.`
|
|
41
|
+
);
|
|
42
|
+
}
|
|
43
|
+
this._useEntityIds = true;
|
|
44
|
+
}
|
|
45
|
+
} else {
|
|
46
|
+
if (hasMixedUsage) {
|
|
47
|
+
throw new Error(
|
|
48
|
+
`Cannot mix TableOccurrence instances with and without entity IDs in the same database. Occurrences with entity IDs: [${occurrencesWithIds.join(", ")}]. Occurrences without entity IDs: [${occurrencesWithoutIds.join(", ")}]. Either all table occurrences must use entity IDs (fmtId + fmfIds), none should, or explicitly set useEntityIds to false.`
|
|
49
|
+
);
|
|
50
|
+
}
|
|
51
|
+
this._useEntityIds = allOccurrencesUseEntityIds;
|
|
34
52
|
}
|
|
53
|
+
} else {
|
|
54
|
+
this._useEntityIds = (config == null ? void 0 : config.useEntityIds) ?? false;
|
|
35
55
|
}
|
|
36
56
|
if (this.context._setUseEntityIds) {
|
|
37
57
|
this.context._setUseEntityIds(this._useEntityIds);
|
|
38
58
|
}
|
|
59
|
+
this.schema = new SchemaManager(this.databaseName, this.context);
|
|
39
60
|
}
|
|
40
61
|
/**
|
|
41
62
|
* Returns true if any table occurrence in this database is using entity IDs.
|
|
@@ -69,14 +90,25 @@ class Database {
|
|
|
69
90
|
});
|
|
70
91
|
}
|
|
71
92
|
}
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
93
|
+
async getMetadata(args) {
|
|
94
|
+
const result = await this.context._makeRequest(`/${this.databaseName}/$metadata`, {
|
|
95
|
+
headers: {
|
|
96
|
+
Accept: (args == null ? void 0 : args.format) === "xml" ? "application/xml" : "application/json"
|
|
97
|
+
}
|
|
98
|
+
});
|
|
77
99
|
if (result.error) {
|
|
78
100
|
throw result.error;
|
|
79
101
|
}
|
|
102
|
+
if ((args == null ? void 0 : args.format) === "json") {
|
|
103
|
+
const data = result.data;
|
|
104
|
+
const metadata = data[this.databaseName];
|
|
105
|
+
if (!metadata) {
|
|
106
|
+
throw new Error(
|
|
107
|
+
`Metadata for database "${this.databaseName}" not found in response`
|
|
108
|
+
);
|
|
109
|
+
}
|
|
110
|
+
return metadata;
|
|
111
|
+
}
|
|
80
112
|
return result.data;
|
|
81
113
|
}
|
|
82
114
|
/**
|
|
@@ -132,6 +164,28 @@ class Database {
|
|
|
132
164
|
result: response.scriptResult.resultParameter
|
|
133
165
|
};
|
|
134
166
|
}
|
|
167
|
+
/**
|
|
168
|
+
* Create a batch operation builder that allows multiple queries to be executed together
|
|
169
|
+
* in a single atomic request. All operations succeed or fail together (transactional).
|
|
170
|
+
*
|
|
171
|
+
* @param builders - Array of executable query builders to batch
|
|
172
|
+
* @returns A BatchBuilder that can be executed
|
|
173
|
+
* @example
|
|
174
|
+
* ```ts
|
|
175
|
+
* const result = await db.batch([
|
|
176
|
+
* db.from('contacts').list().top(5),
|
|
177
|
+
* db.from('users').list().top(5),
|
|
178
|
+
* db.from('contacts').insert({ name: 'John' })
|
|
179
|
+
* ]).execute();
|
|
180
|
+
*
|
|
181
|
+
* if (result.data) {
|
|
182
|
+
* const [contacts, users, insertResult] = result.data;
|
|
183
|
+
* }
|
|
184
|
+
* ```
|
|
185
|
+
*/
|
|
186
|
+
batch(builders) {
|
|
187
|
+
return new BatchBuilder(builders, this.databaseName, this.context);
|
|
188
|
+
}
|
|
135
189
|
}
|
|
136
190
|
export {
|
|
137
191
|
Database
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"database.js","sources":["../../../src/client/database.ts"],"sourcesContent":["import type { StandardSchemaV1 } from \"@standard-schema/spec\";\nimport type { ExecutionContext } from \"../types\";\nimport type { BaseTable } from \"./base-table\";\nimport type { TableOccurrence } from \"./table-occurrence\";\nimport { EntitySet } from \"./entity-set\";\n\n// Type-level validation: Check if a TableOccurrence has fmtId (is TableOccurrenceWithIds)\ntype HasFmtId<T> = T extends { fmtId: string } ? true : false;\n\n// Check if all occurrences in a tuple have fmtId\ntype AllHaveFmtId<Occurrences extends readonly any[]> =\n Occurrences extends readonly [infer First, ...infer Rest]\n ? HasFmtId<First> extends true\n ? Rest extends readonly []\n ? true\n : AllHaveFmtId<Rest>\n : false\n : true; // empty array is valid\n\n// Check if none have fmtId\ntype NoneHaveFmtId<Occurrences extends readonly any[]> =\n Occurrences extends readonly [infer First, ...infer Rest]\n ? HasFmtId<First> extends false\n ? Rest extends readonly []\n ? true\n : NoneHaveFmtId<Rest>\n : false\n : true; // empty array is valid\n\n// Valid if all have fmtId or none have fmtId (no mixing allowed)\nexport type ValidOccurrenceMix<Occurrences extends readonly any[]> =\n AllHaveFmtId<Occurrences> extends true\n ? true\n : NoneHaveFmtId<Occurrences> extends true\n ? true\n : false;\n\n// Helper type to extract schema from a TableOccurrence\ntype ExtractSchemaFromOccurrence<O> =\n O extends TableOccurrence<infer BT, any, any, any>\n ? BT extends BaseTable<infer S, any>\n ? S\n : never\n : never;\n\n// Helper type to find an occurrence by name in the occurrences tuple\ntype FindOccurrenceByName<\n Occurrences extends readonly TableOccurrence<any, any, any, any>[],\n Name extends string,\n> = Occurrences extends readonly [\n infer First,\n ...infer Rest extends readonly TableOccurrence<any, any, any, any>[],\n]\n ? First extends TableOccurrence<any, any, any, any>\n ? 
First[\"name\"] extends Name\n ? First\n : FindOccurrenceByName<Rest, Name>\n : never\n : never;\n\n// Helper type to extract all occurrence names from the tuple\ntype ExtractOccurrenceNames<\n Occurrences extends readonly TableOccurrence<any, any, any, any>[],\n> = Occurrences extends readonly []\n ? string // If no occurrences, allow any string\n : Occurrences[number][\"name\"]; // Otherwise, extract union of names\n\nexport class Database<\n Occurrences extends readonly TableOccurrence<\n any,\n any,\n any,\n any\n >[] = readonly [],\n> {\n private occurrenceMap: Map<string, TableOccurrence<any, any, any, any>>;\n private _useEntityIds: boolean = false;\n\n constructor(\n private readonly databaseName: string,\n private readonly context: ExecutionContext,\n config?: {\n occurrences?: ValidOccurrenceMix<Occurrences> extends true\n ? Occurrences\n : Occurrences & {\n __type_error__: \"❌ Cannot mix TableOccurrence with and without entity IDs. Either all occurrences must use TableOccurrenceWithIds (with fmtId and fmfIds) or all must be regular TableOccurrence.\";\n };\n },\n ) {\n this.occurrenceMap = new Map();\n if (config?.occurrences) {\n // Validate consistency: either all occurrences use entity IDs or none do\n const occurrencesWithIds: string[] = [];\n const occurrencesWithoutIds: string[] = [];\n\n for (const occ of config.occurrences) {\n this.occurrenceMap.set(occ.name, occ);\n\n const hasTableId = occ.isUsingTableId();\n const hasFieldIds = occ.baseTable.isUsingFieldIds();\n\n // An occurrence uses entity IDs if it has both fmtId and fmfIds\n if (hasTableId && hasFieldIds) {\n occurrencesWithIds.push(occ.name);\n this._useEntityIds = true;\n } else if (!hasTableId && !hasFieldIds) {\n occurrencesWithoutIds.push(occ.name);\n } else {\n // Partial entity ID usage (only one of fmtId or fmfIds) - this is an error\n throw new Error(\n `TableOccurrence \"${occ.name}\" has inconsistent entity ID configuration. ` +\n `Both fmtId (${hasTableId ? 
\"present\" : \"missing\"}) and fmfIds (${hasFieldIds ? \"present\" : \"missing\"}) must be defined together.`,\n );\n }\n }\n\n // Check for mixed usage\n if (occurrencesWithIds.length > 0 && occurrencesWithoutIds.length > 0) {\n throw new Error(\n `Cannot mix TableOccurrence instances with and without entity IDs in the same database. ` +\n `Occurrences with entity IDs: [${occurrencesWithIds.join(\", \")}]. ` +\n `Occurrences without entity IDs: [${occurrencesWithoutIds.join(\", \")}]. ` +\n `Either all table occurrences must use entity IDs (fmtId + fmfIds) or none should.`,\n );\n }\n }\n\n // Inform the execution context whether to use entity IDs\n if (this.context._setUseEntityIds) {\n this.context._setUseEntityIds(this._useEntityIds);\n }\n }\n\n /**\n * Returns true if any table occurrence in this database is using entity IDs.\n */\n isUsingEntityIds(): boolean {\n return this._useEntityIds;\n }\n\n /**\n * Gets a table occurrence by name.\n * @internal\n */\n getOccurrence(name: string): TableOccurrence<any, any, any, any> | undefined {\n return this.occurrenceMap.get(name);\n }\n\n from<Name extends ExtractOccurrenceNames<Occurrences> | (string & {})>(\n name: Name,\n ): Occurrences extends readonly []\n ? EntitySet<Record<string, StandardSchemaV1>, undefined>\n : Name extends ExtractOccurrenceNames<Occurrences>\n ? 
EntitySet<\n ExtractSchemaFromOccurrence<FindOccurrenceByName<Occurrences, Name>>,\n FindOccurrenceByName<Occurrences, Name>\n >\n : EntitySet<Record<string, StandardSchemaV1>, undefined> {\n const occurrence = this.occurrenceMap.get(name as string);\n\n if (occurrence) {\n // Use EntitySet.create to preserve types better\n type OccType = FindOccurrenceByName<Occurrences, Name>;\n type SchemaType = ExtractSchemaFromOccurrence<OccType>;\n\n return EntitySet.create<SchemaType, OccType>({\n occurrence: occurrence as any,\n tableName: name as string,\n databaseName: this.databaseName,\n context: this.context,\n database: this as any,\n }) as any;\n } else {\n // Return untyped EntitySet for dynamic table access\n return new EntitySet<Record<string, StandardSchemaV1>, undefined>({\n tableName: name as string,\n databaseName: this.databaseName,\n context: this.context,\n database: this as any,\n }) as any;\n }\n }\n\n // Example method showing how to use the request method\n async getMetadata() {\n const result = await this.context._makeRequest(\n `/${this.databaseName}/$metadata`,\n );\n if (result.error) {\n throw result.error;\n }\n return result.data;\n }\n\n /**\n * Lists all available tables (entity sets) in this database.\n * @returns Promise resolving to an array of table names\n */\n async listTableNames(): Promise<string[]> {\n const result = await this.context._makeRequest<{\n value?: Array<{ name: string }>;\n }>(`/${this.databaseName}`);\n if (result.error) {\n throw result.error;\n }\n if (result.data.value && Array.isArray(result.data.value)) {\n return result.data.value.map((item) => item.name);\n }\n return [];\n }\n\n /**\n * Executes a FileMaker script.\n * @param scriptName - The name of the script to execute (must be valid according to OData rules)\n * @param options - Optional script parameter and result schema\n * @returns Promise resolving to script execution result\n */\n async runScript<ResultSchema extends StandardSchemaV1<string, any> = 
never>(\n scriptName: string,\n options?: {\n scriptParam?: string | number | Record<string, any>;\n resultSchema?: ResultSchema;\n },\n ): Promise<\n [ResultSchema] extends [never]\n ? { resultCode: number; result?: string }\n : ResultSchema extends StandardSchemaV1<string, infer Output>\n ? { resultCode: number; result: Output }\n : { resultCode: number; result?: string }\n > {\n const body: { scriptParameterValue?: unknown } = {};\n if (options?.scriptParam !== undefined) {\n body.scriptParameterValue = options.scriptParam;\n }\n\n const result = await this.context._makeRequest<{\n scriptResult: {\n code: number;\n resultParameter?: string;\n };\n }>(`/${this.databaseName}/Script.${scriptName}`, {\n method: \"POST\",\n body: Object.keys(body).length > 0 ? JSON.stringify(body) : undefined,\n });\n\n if (result.error) {\n throw result.error;\n }\n\n const response = result.data;\n\n // If resultSchema is provided, validate the result through it\n if (options?.resultSchema && response.scriptResult !== undefined) {\n const validationResult = options.resultSchema[\"~standard\"].validate(\n response.scriptResult.resultParameter,\n );\n // Handle both sync and async validation\n const result =\n validationResult instanceof Promise\n ? 
await validationResult\n : validationResult;\n\n if (result.issues) {\n throw new Error(\n `Script result validation failed: ${JSON.stringify(result.issues)}`,\n );\n }\n\n return {\n resultCode: response.scriptResult.code,\n result: result.value,\n } as any;\n }\n\n return {\n resultCode: response.scriptResult.code,\n result: response.scriptResult.resultParameter,\n } as any;\n }\n}\n"],"names":["result"],"mappings":";;;;AAmEO,MAAM,SAOX;AAAA,EAIA,YACmB,cACA,SACjB,QAOA;AAbM;AACA,yCAAyB;AAGd,SAAA,eAAA;AACA,SAAA,UAAA;AASZ,SAAA,oCAAoB,IAAI;AAC7B,QAAI,iCAAQ,aAAa;AAEvB,YAAM,qBAA+B,CAAC;AACtC,YAAM,wBAAkC,CAAC;AAE9B,iBAAA,OAAO,OAAO,aAAa;AACpC,aAAK,cAAc,IAAI,IAAI,MAAM,GAAG;AAE9B,cAAA,aAAa,IAAI,eAAe;AAChC,cAAA,cAAc,IAAI,UAAU,gBAAgB;AAGlD,YAAI,cAAc,aAAa;AACV,6BAAA,KAAK,IAAI,IAAI;AAChC,eAAK,gBAAgB;AAAA,QAAA,WACZ,CAAC,cAAc,CAAC,aAAa;AAChB,gCAAA,KAAK,IAAI,IAAI;AAAA,QAAA,OAC9B;AAEL,gBAAM,IAAI;AAAA,YACR,oBAAoB,IAAI,IAAI,2DACX,aAAa,YAAY,SAAS,iBAAiB,cAAc,YAAY,SAAS;AAAA,UACzG;AAAA,QAAA;AAAA,MACF;AAIF,UAAI,mBAAmB,SAAS,KAAK,sBAAsB,SAAS,GAAG;AACrE,cAAM,IAAI;AAAA,UACR,wHACmC,mBAAmB,KAAK,IAAI,CAAC,uCAC1B,sBAAsB,KAAK,IAAI,CAAC;AAAA,QAExE;AAAA,MAAA;AAAA,IACF;AAIE,QAAA,KAAK,QAAQ,kBAAkB;AAC5B,WAAA,QAAQ,iBAAiB,KAAK,aAAa;AAAA,IAAA;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA,EAMF,mBAA4B;AAC1B,WAAO,KAAK;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOd,cAAc,MAA+D;AACpE,WAAA,KAAK,cAAc,IAAI,IAAI;AAAA,EAAA;AAAA,EAGpC,KACE,MAQ2D;AAC3D,UAAM,aAAa,KAAK,cAAc,IAAI,IAAc;AAExD,QAAI,YAAY;AAKd,aAAO,UAAU,OAA4B;AAAA,QAC3C;AAAA,QACA,WAAW;AAAA,QACX,cAAc,KAAK;AAAA,QACnB,SAAS,KAAK;AAAA,QACd,UAAU;AAAA,MAAA,CACX;AAAA,IAAA,OACI;AAEL,aAAO,IAAI,UAAuD;AAAA,QAChE,WAAW;AAAA,QACX,cAAc,KAAK;AAAA,QACnB,SAAS,KAAK;AAAA,QACd,UAAU;AAAA,MAAA,CACX;AAAA,IAAA;AAAA,EACH;AAAA;AAAA,EAIF,MAAM,cAAc;AACZ,UAAA,SAAS,MAAM,KAAK,QAAQ;AAAA,MAChC,IAAI,KAAK,YAAY;AAAA,IACvB;AACA,QAAI,OAAO,OAAO;AAChB,YAAM,OAAO;AAAA,IAAA;AAEf,WAAO,OAAO;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOhB,MAAM,iBAAoC;AAClC,UAAA,SAAS,MAAM,KAAK,QAAQ,aAE/B,IAAI,KAAK,YAAY,EAAE;AAC1B,QAAI,OAAO,OAAO;AAChB,YA
AM,OAAO;AAAA,IAAA;AAEX,QAAA,OAAO,KAAK,SAAS,MAAM,QAAQ,OAAO,KAAK,KAAK,GAAG;AACzD,aAAO,OAAO,KAAK,MAAM,IAAI,CAAC,SAAS,KAAK,IAAI;AAAA,IAAA;AAElD,WAAO,CAAC;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASV,MAAM,UACJ,YACA,SAUA;AACA,UAAM,OAA2C,CAAC;AAC9C,SAAA,mCAAS,iBAAgB,QAAW;AACtC,WAAK,uBAAuB,QAAQ;AAAA,IAAA;AAGhC,UAAA,SAAS,MAAM,KAAK,QAAQ,aAK/B,IAAI,KAAK,YAAY,WAAW,UAAU,IAAI;AAAA,MAC/C,QAAQ;AAAA,MACR,MAAM,OAAO,KAAK,IAAI,EAAE,SAAS,IAAI,KAAK,UAAU,IAAI,IAAI;AAAA,IAAA,CAC7D;AAED,QAAI,OAAO,OAAO;AAChB,YAAM,OAAO;AAAA,IAAA;AAGf,UAAM,WAAW,OAAO;AAGxB,SAAI,mCAAS,iBAAgB,SAAS,iBAAiB,QAAW;AAChE,YAAM,mBAAmB,QAAQ,aAAa,WAAW,EAAE;AAAA,QACzD,SAAS,aAAa;AAAA,MACxB;AAEA,YAAMA,UACJ,4BAA4B,UACxB,MAAM,mBACN;AAEN,UAAIA,QAAO,QAAQ;AACjB,cAAM,IAAI;AAAA,UACR,oCAAoC,KAAK,UAAUA,QAAO,MAAM,CAAC;AAAA,QACnE;AAAA,MAAA;AAGK,aAAA;AAAA,QACL,YAAY,SAAS,aAAa;AAAA,QAClC,QAAQA,QAAO;AAAA,MACjB;AAAA,IAAA;AAGK,WAAA;AAAA,MACL,YAAY,SAAS,aAAa;AAAA,MAClC,QAAQ,SAAS,aAAa;AAAA,IAChC;AAAA,EAAA;AAEJ;"}
|
|
1
|
+
{"version":3,"file":"database.js","sources":["../../../src/client/database.ts"],"sourcesContent":["import type { StandardSchemaV1 } from \"@standard-schema/spec\";\nimport type { ExecutionContext, ExecutableBuilder, Metadata } from \"../types\";\nimport type { BaseTable } from \"./base-table\";\nimport type { TableOccurrence } from \"./table-occurrence\";\nimport { EntitySet } from \"./entity-set\";\nimport { BatchBuilder } from \"./batch-builder\";\nimport { SchemaManager } from \"./schema-manager\";\n\n// Helper type to extract schema from a TableOccurrence\ntype ExtractSchemaFromOccurrence<O> =\n O extends TableOccurrence<infer BT, any, any, any>\n ? BT extends BaseTable<infer S, any>\n ? S\n : never\n : never;\n\n// Helper type to find an occurrence by name in the occurrences tuple\ntype FindOccurrenceByName<\n Occurrences extends readonly TableOccurrence<any, any, any, any>[],\n Name extends string,\n> = Occurrences extends readonly [\n infer First,\n ...infer Rest extends readonly TableOccurrence<any, any, any, any>[],\n]\n ? First extends TableOccurrence<any, any, any, any>\n ? First[\"name\"] extends Name\n ? First\n : FindOccurrenceByName<Rest, Name>\n : never\n : never;\n\n// Helper type to extract all occurrence names from the tuple\ntype ExtractOccurrenceNames<\n Occurrences extends readonly TableOccurrence<any, any, any, any>[],\n> = Occurrences extends readonly []\n ? 
string // If no occurrences, allow any string\n : Occurrences[number][\"name\"]; // Otherwise, extract union of names\n\nexport class Database<\n Occurrences extends readonly TableOccurrence<\n any,\n any,\n any,\n any\n >[] = readonly [],\n> {\n private occurrenceMap: Map<string, TableOccurrence<any, any, any, any>>;\n private _useEntityIds: boolean = false;\n public readonly schema: SchemaManager;\n\n constructor(\n private readonly databaseName: string,\n private readonly context: ExecutionContext,\n config?: {\n occurrences?: Occurrences | undefined;\n /**\n * Whether to use entity IDs instead of field names in the actual requests to the server\n * Defaults to true if all occurrences use entity IDs, false otherwise\n * If set to false but some occurrences do not use entity IDs, an error will be thrown\n */\n useEntityIds?: boolean;\n },\n ) {\n this.occurrenceMap = new Map();\n if (config?.occurrences) {\n // Validate consistency: either all occurrences use entity IDs or none do\n const occurrencesWithIds: string[] = [];\n const occurrencesWithoutIds: string[] = [];\n\n for (const occ of config.occurrences) {\n this.occurrenceMap.set(occ.name, occ);\n\n const hasTableId = occ.isUsingTableId();\n const hasFieldIds = occ.baseTable.isUsingFieldIds();\n\n // An occurrence uses entity IDs if it has both fmtId and fmfIds\n if (hasTableId && hasFieldIds) {\n occurrencesWithIds.push(occ.name);\n } else if (!hasTableId && !hasFieldIds) {\n occurrencesWithoutIds.push(occ.name);\n } else {\n // Partial entity ID usage (only one of fmtId or fmfIds) - this is an error\n throw new Error(\n `TableOccurrence \"${occ.name}\" has inconsistent entity ID configuration. ` +\n `Both fmtId (${hasTableId ? \"present\" : \"missing\"}) and fmfIds (${hasFieldIds ? 
\"present\" : \"missing\"}) must be defined together.`,\n );\n }\n }\n\n // Determine default value: true if all occurrences use entity IDs, false otherwise\n const allOccurrencesUseEntityIds =\n occurrencesWithIds.length > 0 && occurrencesWithoutIds.length === 0;\n const hasMixedUsage =\n occurrencesWithIds.length > 0 && occurrencesWithoutIds.length > 0;\n\n // Handle explicit useEntityIds config\n if (config.useEntityIds !== undefined) {\n if (config.useEntityIds === false) {\n // If explicitly set to false, allow mixed usage and use false\n this._useEntityIds = false;\n } else if (config.useEntityIds === true) {\n // If explicitly set to true, validate that all occurrences use entity IDs\n if (hasMixedUsage || occurrencesWithoutIds.length > 0) {\n throw new Error(\n `useEntityIds is set to true but some occurrences do not use entity IDs. ` +\n `Occurrences without entity IDs: [${occurrencesWithoutIds.join(\", \")}]. ` +\n `Either set useEntityIds to false or configure all occurrences with entity IDs.`,\n );\n }\n this._useEntityIds = true;\n }\n } else {\n // Default: true if all occurrences use entity IDs, false otherwise\n // But throw error if there's mixed usage when using defaults\n if (hasMixedUsage) {\n throw new Error(\n `Cannot mix TableOccurrence instances with and without entity IDs in the same database. ` +\n `Occurrences with entity IDs: [${occurrencesWithIds.join(\", \")}]. ` +\n `Occurrences without entity IDs: [${occurrencesWithoutIds.join(\", \")}]. ` +\n `Either all table occurrences must use entity IDs (fmtId + fmfIds), none should, or explicitly set useEntityIds to false.`,\n );\n }\n this._useEntityIds = allOccurrencesUseEntityIds;\n }\n } else {\n // No occurrences provided, use explicit config or default to false\n this._useEntityIds = config?.useEntityIds ?? 
false;\n }\n\n // Inform the execution context whether to use entity IDs\n if (this.context._setUseEntityIds) {\n this.context._setUseEntityIds(this._useEntityIds);\n }\n\n // Initialize schema manager\n this.schema = new SchemaManager(this.databaseName, this.context);\n }\n\n /**\n * Returns true if any table occurrence in this database is using entity IDs.\n */\n isUsingEntityIds(): boolean {\n return this._useEntityIds;\n }\n\n /**\n * Gets a table occurrence by name.\n * @internal\n */\n getOccurrence(name: string): TableOccurrence<any, any, any, any> | undefined {\n return this.occurrenceMap.get(name);\n }\n\n from<Name extends ExtractOccurrenceNames<Occurrences> | (string & {})>(\n name: Name,\n ): Occurrences extends readonly []\n ? EntitySet<Record<string, StandardSchemaV1>, undefined>\n : Name extends ExtractOccurrenceNames<Occurrences>\n ? EntitySet<\n ExtractSchemaFromOccurrence<FindOccurrenceByName<Occurrences, Name>>,\n FindOccurrenceByName<Occurrences, Name>\n >\n : EntitySet<Record<string, StandardSchemaV1>, undefined> {\n const occurrence = this.occurrenceMap.get(name as string);\n\n if (occurrence) {\n // Use EntitySet.create to preserve types better\n type OccType = FindOccurrenceByName<Occurrences, Name>;\n type SchemaType = ExtractSchemaFromOccurrence<OccType>;\n\n return EntitySet.create<SchemaType, OccType>({\n occurrence: occurrence as any,\n tableName: name as string,\n databaseName: this.databaseName,\n context: this.context,\n database: this as any,\n }) as any;\n } else {\n // Return untyped EntitySet for dynamic table access\n return new EntitySet<Record<string, StandardSchemaV1>, undefined>({\n tableName: name as string,\n databaseName: this.databaseName,\n context: this.context,\n database: this as any,\n }) as any;\n }\n }\n\n /**\n * Retrieves the OData metadata for this database.\n * @param args Optional configuration object\n * @param args.format The format to retrieve metadata in. 
Defaults to \"json\".\n * @returns The metadata in the specified format\n */\n async getMetadata(args: { format: \"xml\" }): Promise<string>;\n async getMetadata(args?: { format?: \"json\" }): Promise<Metadata>;\n async getMetadata(args?: {\n format?: \"xml\" | \"json\";\n }): Promise<string | Metadata> {\n const result = await this.context._makeRequest<\n Record<string, Metadata> | string\n >(`/${this.databaseName}/$metadata`, {\n headers: {\n Accept: args?.format === \"xml\" ? \"application/xml\" : \"application/json\",\n },\n });\n if (result.error) {\n throw result.error;\n }\n\n if (args?.format === \"json\") {\n const data = result.data as Record<string, Metadata>;\n const metadata = data[this.databaseName];\n if (!metadata) {\n throw new Error(\n `Metadata for database \"${this.databaseName}\" not found in response`,\n );\n }\n return metadata;\n }\n return result.data as string;\n }\n\n /**\n * Lists all available tables (entity sets) in this database.\n * @returns Promise resolving to an array of table names\n */\n async listTableNames(): Promise<string[]> {\n const result = await this.context._makeRequest<{\n value?: Array<{ name: string }>;\n }>(`/${this.databaseName}`);\n if (result.error) {\n throw result.error;\n }\n if (result.data.value && Array.isArray(result.data.value)) {\n return result.data.value.map((item) => item.name);\n }\n return [];\n }\n\n /**\n * Executes a FileMaker script.\n * @param scriptName - The name of the script to execute (must be valid according to OData rules)\n * @param options - Optional script parameter and result schema\n * @returns Promise resolving to script execution result\n */\n async runScript<ResultSchema extends StandardSchemaV1<string, any> = never>(\n scriptName: string,\n options?: {\n scriptParam?: string | number | Record<string, any>;\n resultSchema?: ResultSchema;\n },\n ): Promise<\n [ResultSchema] extends [never]\n ? 
{ resultCode: number; result?: string }\n : ResultSchema extends StandardSchemaV1<string, infer Output>\n ? { resultCode: number; result: Output }\n : { resultCode: number; result?: string }\n > {\n const body: { scriptParameterValue?: unknown } = {};\n if (options?.scriptParam !== undefined) {\n body.scriptParameterValue = options.scriptParam;\n }\n\n const result = await this.context._makeRequest<{\n scriptResult: {\n code: number;\n resultParameter?: string;\n };\n }>(`/${this.databaseName}/Script.${scriptName}`, {\n method: \"POST\",\n body: Object.keys(body).length > 0 ? JSON.stringify(body) : undefined,\n });\n\n if (result.error) {\n throw result.error;\n }\n\n const response = result.data;\n\n // If resultSchema is provided, validate the result through it\n if (options?.resultSchema && response.scriptResult !== undefined) {\n const validationResult = options.resultSchema[\"~standard\"].validate(\n response.scriptResult.resultParameter,\n );\n // Handle both sync and async validation\n const result =\n validationResult instanceof Promise\n ? await validationResult\n : validationResult;\n\n if (result.issues) {\n throw new Error(\n `Script result validation failed: ${JSON.stringify(result.issues)}`,\n );\n }\n\n return {\n resultCode: response.scriptResult.code,\n result: result.value,\n } as any;\n }\n\n return {\n resultCode: response.scriptResult.code,\n result: response.scriptResult.resultParameter,\n } as any;\n }\n\n /**\n * Create a batch operation builder that allows multiple queries to be executed together\n * in a single atomic request. 
All operations succeed or fail together (transactional).\n *\n * @param builders - Array of executable query builders to batch\n * @returns A BatchBuilder that can be executed\n * @example\n * ```ts\n * const result = await db.batch([\n * db.from('contacts').list().top(5),\n * db.from('users').list().top(5),\n * db.from('contacts').insert({ name: 'John' })\n * ]).execute();\n *\n * if (result.data) {\n * const [contacts, users, insertResult] = result.data;\n * }\n * ```\n */\n batch<const Builders extends readonly ExecutableBuilder<any>[]>(\n builders: Builders,\n ): BatchBuilder<Builders> {\n return new BatchBuilder(builders, this.databaseName, this.context);\n }\n}\n"],"names":["result"],"mappings":";;;;;;AAsCO,MAAM,SAOX;AAAA,EAKA,YACmB,cACA,SACjB,QASA;AAhBM;AACA,yCAAyB;AACjB;AAGG,SAAA,eAAA;AACA,SAAA,UAAA;AAWZ,SAAA,oCAAoB,IAAI;AAC7B,QAAI,iCAAQ,aAAa;AAEvB,YAAM,qBAA+B,CAAC;AACtC,YAAM,wBAAkC,CAAC;AAE9B,iBAAA,OAAO,OAAO,aAAa;AACpC,aAAK,cAAc,IAAI,IAAI,MAAM,GAAG;AAE9B,cAAA,aAAa,IAAI,eAAe;AAChC,cAAA,cAAc,IAAI,UAAU,gBAAgB;AAGlD,YAAI,cAAc,aAAa;AACV,6BAAA,KAAK,IAAI,IAAI;AAAA,QAAA,WACvB,CAAC,cAAc,CAAC,aAAa;AAChB,gCAAA,KAAK,IAAI,IAAI;AAAA,QAAA,OAC9B;AAEL,gBAAM,IAAI;AAAA,YACR,oBAAoB,IAAI,IAAI,2DACX,aAAa,YAAY,SAAS,iBAAiB,cAAc,YAAY,SAAS;AAAA,UACzG;AAAA,QAAA;AAAA,MACF;AAIF,YAAM,6BACJ,mBAAmB,SAAS,KAAK,sBAAsB,WAAW;AACpE,YAAM,gBACJ,mBAAmB,SAAS,KAAK,sBAAsB,SAAS;AAG9D,UAAA,OAAO,iBAAiB,QAAW;AACjC,YAAA,OAAO,iBAAiB,OAAO;AAEjC,eAAK,gBAAgB;AAAA,QAAA,WACZ,OAAO,iBAAiB,MAAM;AAEnC,cAAA,iBAAiB,sBAAsB,SAAS,GAAG;AACrD,kBAAM,IAAI;AAAA,cACR,4GACsC,sBAAsB,KAAK,IAAI,CAAC;AAAA,YAExE;AAAA,UAAA;AAEF,eAAK,gBAAgB;AAAA,QAAA;AAAA,MACvB,OACK;AAGL,YAAI,eAAe;AACjB,gBAAM,IAAI;AAAA,YACR,wHACmC,mBAAmB,KAAK,IAAI,CAAC,uCAC1B,sBAAsB,KAAK,IAAI,CAAC;AAAA,UAExE;AAAA,QAAA;AAEF,aAAK,gBAAgB;AAAA,MAAA;AAAA,IACvB,OACK;AAEA,WAAA,iBAAgB,iCAAQ,iBAAgB;AAAA,IAAA;AAI3C,QAAA,KAAK,QAAQ,kBAAkB;AAC5B,WAAA,QAAQ,iBAAiB,KAAK,aAAa;AAAA,IAAA;AAIlD,SAAK,SAAS,IAAI,cAAc,KAAK,cAAc,KAAK,OAAO;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA,EAMjE,mBAA4B;AAC1B,WAAO
,KAAK;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOd,cAAc,MAA+D;AACpE,WAAA,KAAK,cAAc,IAAI,IAAI;AAAA,EAAA;AAAA,EAGpC,KACE,MAQ2D;AAC3D,UAAM,aAAa,KAAK,cAAc,IAAI,IAAc;AAExD,QAAI,YAAY;AAKd,aAAO,UAAU,OAA4B;AAAA,QAC3C;AAAA,QACA,WAAW;AAAA,QACX,cAAc,KAAK;AAAA,QACnB,SAAS,KAAK;AAAA,QACd,UAAU;AAAA,MAAA,CACX;AAAA,IAAA,OACI;AAEL,aAAO,IAAI,UAAuD;AAAA,QAChE,WAAW;AAAA,QACX,cAAc,KAAK;AAAA,QACnB,SAAS,KAAK;AAAA,QACd,UAAU;AAAA,MAAA,CACX;AAAA,IAAA;AAAA,EACH;AAAA,EAWF,MAAM,YAAY,MAEa;AACvB,UAAA,SAAS,MAAM,KAAK,QAAQ,aAEhC,IAAI,KAAK,YAAY,cAAc;AAAA,MACnC,SAAS;AAAA,QACP,SAAQ,6BAAM,YAAW,QAAQ,oBAAoB;AAAA,MAAA;AAAA,IACvD,CACD;AACD,QAAI,OAAO,OAAO;AAChB,YAAM,OAAO;AAAA,IAAA;AAGX,SAAA,6BAAM,YAAW,QAAQ;AAC3B,YAAM,OAAO,OAAO;AACd,YAAA,WAAW,KAAK,KAAK,YAAY;AACvC,UAAI,CAAC,UAAU;AACb,cAAM,IAAI;AAAA,UACR,0BAA0B,KAAK,YAAY;AAAA,QAC7C;AAAA,MAAA;AAEK,aAAA;AAAA,IAAA;AAET,WAAO,OAAO;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOhB,MAAM,iBAAoC;AAClC,UAAA,SAAS,MAAM,KAAK,QAAQ,aAE/B,IAAI,KAAK,YAAY,EAAE;AAC1B,QAAI,OAAO,OAAO;AAChB,YAAM,OAAO;AAAA,IAAA;AAEX,QAAA,OAAO,KAAK,SAAS,MAAM,QAAQ,OAAO,KAAK,KAAK,GAAG;AACzD,aAAO,OAAO,KAAK,MAAM,IAAI,CAAC,SAAS,KAAK,IAAI;AAAA,IAAA;AAElD,WAAO,CAAC;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASV,MAAM,UACJ,YACA,SAUA;AACA,UAAM,OAA2C,CAAC;AAC9C,SAAA,mCAAS,iBAAgB,QAAW;AACtC,WAAK,uBAAuB,QAAQ;AAAA,IAAA;AAGhC,UAAA,SAAS,MAAM,KAAK,QAAQ,aAK/B,IAAI,KAAK,YAAY,WAAW,UAAU,IAAI;AAAA,MAC/C,QAAQ;AAAA,MACR,MAAM,OAAO,KAAK,IAAI,EAAE,SAAS,IAAI,KAAK,UAAU,IAAI,IAAI;AAAA,IAAA,CAC7D;AAED,QAAI,OAAO,OAAO;AAChB,YAAM,OAAO;AAAA,IAAA;AAGf,UAAM,WAAW,OAAO;AAGxB,SAAI,mCAAS,iBAAgB,SAAS,iBAAiB,QAAW;AAChE,YAAM,mBAAmB,QAAQ,aAAa,WAAW,EAAE;AAAA,QACzD,SAAS,aAAa;AAAA,MACxB;AAEA,YAAMA,UACJ,4BAA4B,UACxB,MAAM,mBACN;AAEN,UAAIA,QAAO,QAAQ;AACjB,cAAM,IAAI;AAAA,UACR,oCAAoC,KAAK,UAAUA,QAAO,MAAM,CAAC;AAAA,QACnE;AAAA,MAAA;AAGK,aAAA;AAAA,QACL,YAAY,SAAS,aAAa;AAAA,QAClC,QAAQA,QAAO;AAAA,MACjB;AAAA,IAAA;AAGK,WAAA;AAAA,MACL,YAAY,SAAS,aAAa;AAAA,MAClC,QAAQ,SAAS,aAAa;AAAA,IAChC;AAAA,EAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA
;AAAA;AAAA;AAAA;AAAA,EAsBF,MACE,UACwB;AACxB,WAAO,IAAI,aAAa,UAAU,KAAK,cAAc,KAAK,OAAO;AAAA,EAAA;AAErE;"}
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { ExecutionContext, ExecutableBuilder, Result, WithSystemFields } from '../types.js';
|
|
1
|
+
import { ExecutionContext, ExecutableBuilder, Result, WithSystemFields, ExecuteOptions } from '../types.js';
|
|
2
2
|
import { TableOccurrence } from './table-occurrence.js';
|
|
3
3
|
import { QueryBuilder } from './query-builder.js';
|
|
4
4
|
import { FFetchOptions } from '@fetchkit/ffetch';
|
|
@@ -11,11 +11,13 @@ export declare class DeleteBuilder<T extends Record<string, any>> {
|
|
|
11
11
|
private databaseName;
|
|
12
12
|
private context;
|
|
13
13
|
private occurrence?;
|
|
14
|
+
private databaseUseEntityIds;
|
|
14
15
|
constructor(config: {
|
|
15
16
|
occurrence?: TableOccurrence<any, any, any, any>;
|
|
16
17
|
tableName: string;
|
|
17
18
|
databaseName: string;
|
|
18
19
|
context: ExecutionContext;
|
|
20
|
+
databaseUseEntityIds?: boolean;
|
|
19
21
|
});
|
|
20
22
|
/**
|
|
21
23
|
* Delete a single record by ID
|
|
@@ -41,6 +43,7 @@ export declare class ExecutableDeleteBuilder<T extends Record<string, any>> impl
|
|
|
41
43
|
private mode;
|
|
42
44
|
private recordId?;
|
|
43
45
|
private queryBuilder?;
|
|
46
|
+
private databaseUseEntityIds;
|
|
44
47
|
constructor(config: {
|
|
45
48
|
occurrence?: TableOccurrence<any, any, any, any>;
|
|
46
49
|
tableName: string;
|
|
@@ -49,8 +52,20 @@ export declare class ExecutableDeleteBuilder<T extends Record<string, any>> impl
|
|
|
49
52
|
mode: "byId" | "byFilter";
|
|
50
53
|
recordId?: string | number;
|
|
51
54
|
queryBuilder?: QueryBuilder<any>;
|
|
55
|
+
databaseUseEntityIds?: boolean;
|
|
52
56
|
});
|
|
53
|
-
|
|
57
|
+
/**
|
|
58
|
+
* Helper to merge database-level useEntityIds with per-request options
|
|
59
|
+
*/
|
|
60
|
+
private mergeExecuteOptions;
|
|
61
|
+
/**
|
|
62
|
+
* Gets the table ID (FMTID) if using entity IDs, otherwise returns the table name
|
|
63
|
+
* @param useEntityIds - Optional override for entity ID usage
|
|
64
|
+
*/
|
|
65
|
+
private getTableId;
|
|
66
|
+
execute(options?: RequestInit & FFetchOptions & {
|
|
67
|
+
useEntityIds?: boolean;
|
|
68
|
+
}): Promise<Result<{
|
|
54
69
|
deletedCount: number;
|
|
55
70
|
}>>;
|
|
56
71
|
getRequestConfig(): {
|
|
@@ -58,4 +73,8 @@ export declare class ExecutableDeleteBuilder<T extends Record<string, any>> impl
|
|
|
58
73
|
url: string;
|
|
59
74
|
body?: any;
|
|
60
75
|
};
|
|
76
|
+
toRequest(baseUrl: string): Request;
|
|
77
|
+
processResponse(response: Response, options?: ExecuteOptions): Promise<Result<{
|
|
78
|
+
deletedCount: number;
|
|
79
|
+
}>>;
|
|
61
80
|
}
|