@proofkit/fmdapi 5.0.0-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +21 -0
- package/README.md +110 -0
- package/dist/esm/adapters/core.d.ts +55 -0
- package/dist/esm/adapters/fetch-base-types.d.ts +7 -0
- package/dist/esm/adapters/fetch-base.d.ts +60 -0
- package/dist/esm/adapters/fetch-base.js +256 -0
- package/dist/esm/adapters/fetch-base.js.map +1 -0
- package/dist/esm/adapters/fetch.d.ts +27 -0
- package/dist/esm/adapters/fetch.js +79 -0
- package/dist/esm/adapters/fetch.js.map +1 -0
- package/dist/esm/adapters/otto.d.ts +26 -0
- package/dist/esm/adapters/otto.js +29 -0
- package/dist/esm/adapters/otto.js.map +1 -0
- package/dist/esm/client-types.d.ts +226 -0
- package/dist/esm/client-types.js +41 -0
- package/dist/esm/client-types.js.map +1 -0
- package/dist/esm/client.d.ts +133 -0
- package/dist/esm/client.js +295 -0
- package/dist/esm/client.js.map +1 -0
- package/dist/esm/index.d.ts +8 -0
- package/dist/esm/index.js +16 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/tokenStore/file.d.ts +3 -0
- package/dist/esm/tokenStore/index.d.ts +3 -0
- package/dist/esm/tokenStore/memory.d.ts +3 -0
- package/dist/esm/tokenStore/memory.js +21 -0
- package/dist/esm/tokenStore/memory.js.map +1 -0
- package/dist/esm/tokenStore/types.d.ts +8 -0
- package/dist/esm/tokenStore/upstash.d.ts +6 -0
- package/dist/esm/utils.d.ts +8 -0
- package/dist/esm/utils.js +16 -0
- package/dist/esm/utils.js.map +1 -0
- package/package.json +99 -0
- package/src/adapters/core.ts +62 -0
- package/src/adapters/fetch-base-types.ts +5 -0
- package/src/adapters/fetch-base.ts +339 -0
- package/src/adapters/fetch.ts +95 -0
- package/src/adapters/otto.ts +59 -0
- package/src/client-types.ts +296 -0
- package/src/client.ts +534 -0
- package/src/index.ts +11 -0
- package/src/tokenStore/file.ts +33 -0
- package/src/tokenStore/index.ts +3 -0
- package/src/tokenStore/memory.ts +20 -0
- package/src/tokenStore/types.ts +7 -0
- package/src/tokenStore/upstash.ts +31 -0
- package/src/utils.ts +29 -0
- package/stubs/fmschema.config.stub.js +17 -0
- package/stubs/fmschema.config.stub.mjs +17 -0
package/src/client.ts
ADDED
|
@@ -0,0 +1,534 @@
|
|
|
1
|
+
import type { Adapter } from "./adapters/core.js";
|
|
2
|
+
import type {
|
|
3
|
+
CreateParams,
|
|
4
|
+
CreateResponse,
|
|
5
|
+
DeleteParams,
|
|
6
|
+
DeleteResponse,
|
|
7
|
+
FMRecord,
|
|
8
|
+
FieldData,
|
|
9
|
+
GenericPortalData,
|
|
10
|
+
GetParams,
|
|
11
|
+
GetResponse,
|
|
12
|
+
GetResponseOne,
|
|
13
|
+
ListParams,
|
|
14
|
+
PortalsWithIds,
|
|
15
|
+
Query,
|
|
16
|
+
UpdateParams,
|
|
17
|
+
UpdateResponse,
|
|
18
|
+
} from "./client-types.js";
|
|
19
|
+
import { FileMakerError } from "./index.js";
|
|
20
|
+
import type { StandardSchemaV1 } from "@standard-schema/spec";
|
|
21
|
+
|
|
22
|
+
function asNumber(input: string | number): number {
|
|
23
|
+
return typeof input === "string" ? parseInt(input) : input;
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
export type ClientObjectProps = {
  /**
   * The layout to use by default for all requests. Can be overridden on each request.
   */
  layout: string;
  /**
   * Optional Standard Schema validators. When provided, responses are
   * validated and transformed before being returned to the caller.
   */
  schema?: {
    /**
     * The schema for the field data.
     */
    fieldData: StandardSchemaV1<FieldData>;
    /**
     * The schema for the portal data.
     */
    portalData?: StandardSchemaV1<GenericPortalData>;
  };
};
|
|
42
|
+
|
|
43
|
+
// Per-request overrides forwarded verbatim to the underlying fetch call.
type FetchOptions = {
  fetch?: RequestInit;
};
|
|
46
|
+
|
|
47
|
+
function DataApi<
  Fd extends FieldData = FieldData,
  Pd extends GenericPortalData = GenericPortalData,
  Opts extends ClientObjectProps = ClientObjectProps,
  Adp extends Adapter = Adapter,
>(options: Opts & { adapter: Adp }) {
  // When a schema is provided, field/portal record types are inferred from
  // the StandardSchemaV1 output types; otherwise fall back to the explicit
  // Fd/Pd type parameters.
  type InferredFieldData = Opts["schema"] extends object
    ? StandardSchemaV1.InferOutput<Opts["schema"]["fieldData"]>
    : Fd;
  type InferredPortalData = Opts["schema"] extends object
    ? Opts["schema"]["portalData"] extends object
      ? StandardSchemaV1.InferOutput<Opts["schema"]["portalData"]>
      : Pd
    : Pd;

  // Fail fast for configs generated by an older version of this package.
  if ("zodValidators" in options) {
    throw new Error(
      "zodValidators is no longer supported. Use schema instead, or re-run the typegen command",
    );
  }

  const schema = options.schema;
  const layout = options.layout;
  // Pull the CRUD methods off the adapter; any extra adapter-specific
  // methods are passed through unchanged on the returned client object
  // via `otherMethods`.
  const {
    create,
    delete: _adapterDelete,
    find,
    get,
    list,
    update,
    layoutMetadata,
    containerUpload,
    ...otherMethods
  } = options.adapter;

  // Argument shapes for the public client methods defined below.
  type CreateArgs<
    T extends InferredFieldData = InferredFieldData,
    U extends InferredPortalData = InferredPortalData,
  > = CreateParams<U> & {
    fieldData: Partial<T>;
  };
  type GetArgs<U extends InferredPortalData = InferredPortalData> =
    GetParams<U> & {
      recordId: number | string;
    };
  type UpdateArgs<
    T extends InferredFieldData = InferredFieldData,
    U extends InferredPortalData = InferredPortalData,
  > = UpdateParams<U> & {
    fieldData: Partial<T>;
    recordId: number | string;
  };
  type ContainerUploadArgs<T extends InferredFieldData = InferredFieldData> = {
    containerFieldName: keyof T;
    containerFieldRepetition?: string | number;
    file: Blob;
    recordId: number | string;
    modId?: number;
    timeout?: number;
  };
  type DeleteArgs = DeleteParams & {
    recordId: number | string;
  };
  type IgnoreEmptyResult = {
    /**
     * If true, a find that returns no results will return an empty array instead of throwing an error.
     * @default false
     */
    ignoreEmptyResult?: boolean;
  };
  type FindArgs<
    T extends FieldData = InferredFieldData,
    U extends InferredPortalData = InferredPortalData,
  > = ListParams<T, U> & {
    query: Query<T> | Array<Query<T>>;
    timeout?: number;
  };
|
|
124
|
+
|
|
125
|
+
  /**
   * List all records from a given layout, no find criteria applied.
   */
  async function _list(): Promise<
    GetResponse<InferredFieldData, InferredPortalData>
  >;
  async function _list(
    args: ListParams<InferredFieldData, InferredPortalData> & FetchOptions,
  ): Promise<GetResponse<InferredFieldData, InferredPortalData>>;
  async function _list(
    args?: ListParams<InferredFieldData, InferredPortalData> & FetchOptions,
  ): Promise<GetResponse<InferredFieldData, InferredPortalData>> {
    const { fetch, timeout, ...params } = args ?? {};

    // rename and refactor limit, offset, and sort keys for this request.
    // `delete Object.assign(obj, { _x: obj.x })["x"]` copies the value to
    // the underscore-prefixed key and removes the original key in a single
    // expression.
    if ("limit" in params && params.limit !== undefined)
      delete Object.assign(params, { _limit: params.limit })["limit"];
    if ("offset" in params && params.offset !== undefined) {
      // offset <= 1 is dropped entirely rather than sent to the server
      if (params.offset <= 1) delete params.offset;
      else delete Object.assign(params, { _offset: params.offset })["offset"];
    }
    // sort is normalized to an array before renaming
    if ("sort" in params && params.sort !== undefined)
      delete Object.assign(params, {
        _sort: Array.isArray(params.sort) ? params.sort : [params.sort],
      })["sort"];

    const result = await list({
      layout,
      data: params,
      fetch,
      timeout,
    });

    if (result.dataInfo.foundCount > result.dataInfo.returnedCount) {
      // more records found than returned
      if (args?.limit === undefined && args?.offset === undefined) {
        // and the user didn't specify a limit or offset, so we should warn them
        console.warn(
          `🚨 @proofkit/fmdapi: Loaded only ${result.dataInfo.returnedCount} of the ${result.dataInfo.foundCount} records from your "${layout}" layout. Use the "listAll" method to automatically paginate through all records, or specify a "limit" and "offset" to handle pagination yourself.`,
        );
      }
    }

    // Validation/transform is a no-op when no schema was configured.
    return await runSchemaValidationAndTransform(
      schema,
      result as GetResponse<InferredFieldData, InferredPortalData>,
    );
  }
|
|
173
|
+
|
|
174
|
+
  /**
   * Paginate through all records from a given layout, no find criteria applied.
   * ⚠️ WARNING: Use this method with caution, as it can be slow with large datasets
   */
  async function listAll<
    T extends FieldData = InferredFieldData,
    U extends InferredPortalData = InferredPortalData,
  >(): Promise<FMRecord<T, U>[]>;
  async function listAll<
    T extends FieldData = InferredFieldData,
    U extends InferredPortalData = InferredPortalData,
  >(args: ListParams<T, U> & FetchOptions): Promise<FMRecord<T, U>[]>;
  async function listAll<
    T extends FieldData = InferredFieldData,
    U extends InferredPortalData = InferredPortalData,
  >(args?: ListParams<T, U> & FetchOptions): Promise<FMRecord<T, U>[]> {
    let runningData: GetResponse<T, U>["data"] = [];
    // page size defaults to 100 records per request
    const limit = args?.limit ?? 100;
    let offset = args?.offset ?? 1;

    // eslint-disable-next-line no-constant-condition
    while (true) {
      // re-issue the list call with an advancing offset until all
      // foundCount records have been accumulated
      const data = (await _list({
        ...args,
        offset,
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
      } as any)) as unknown as GetResponse<T, U>;
      runningData = [...runningData, ...data.data];
      if (runningData.length >= data.dataInfo.foundCount) break;
      offset = offset + limit;
    }
    return runningData;
  }
|
|
207
|
+
|
|
208
|
+
/**
|
|
209
|
+
* Create a new record in a given layout
|
|
210
|
+
*/
|
|
211
|
+
async function _create<
|
|
212
|
+
T extends InferredFieldData = InferredFieldData,
|
|
213
|
+
U extends InferredPortalData = InferredPortalData,
|
|
214
|
+
>(args: CreateArgs<T, U> & FetchOptions): Promise<CreateResponse> {
|
|
215
|
+
const { fetch, timeout, ...params } = args ?? {};
|
|
216
|
+
return await create({
|
|
217
|
+
layout,
|
|
218
|
+
data: params,
|
|
219
|
+
fetch,
|
|
220
|
+
timeout,
|
|
221
|
+
});
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
/**
|
|
225
|
+
* Get a single record by Internal RecordId
|
|
226
|
+
*/
|
|
227
|
+
async function _get(
|
|
228
|
+
args: GetArgs<InferredPortalData> & FetchOptions,
|
|
229
|
+
): Promise<GetResponse<InferredFieldData, InferredPortalData>> {
|
|
230
|
+
args.recordId = asNumber(args.recordId);
|
|
231
|
+
const { recordId, fetch, timeout, ...params } = args;
|
|
232
|
+
|
|
233
|
+
const result = await get({
|
|
234
|
+
layout,
|
|
235
|
+
data: { ...params, recordId },
|
|
236
|
+
fetch,
|
|
237
|
+
timeout,
|
|
238
|
+
});
|
|
239
|
+
return await runSchemaValidationAndTransform(
|
|
240
|
+
schema,
|
|
241
|
+
result as GetResponse<InferredFieldData, InferredPortalData>,
|
|
242
|
+
);
|
|
243
|
+
}
|
|
244
|
+
|
|
245
|
+
/**
|
|
246
|
+
* Update a single record by internal RecordId
|
|
247
|
+
*/
|
|
248
|
+
async function _update(
|
|
249
|
+
args: UpdateArgs<InferredFieldData, InferredPortalData> & FetchOptions,
|
|
250
|
+
): Promise<UpdateResponse> {
|
|
251
|
+
args.recordId = asNumber(args.recordId);
|
|
252
|
+
const { recordId, fetch, timeout, ...params } = args;
|
|
253
|
+
return await update({
|
|
254
|
+
layout,
|
|
255
|
+
data: { ...params, recordId },
|
|
256
|
+
fetch,
|
|
257
|
+
timeout,
|
|
258
|
+
});
|
|
259
|
+
}
|
|
260
|
+
|
|
261
|
+
/**
|
|
262
|
+
* Delete a single record by internal RecordId
|
|
263
|
+
*/
|
|
264
|
+
async function deleteRecord(
|
|
265
|
+
args: DeleteArgs & FetchOptions,
|
|
266
|
+
): Promise<DeleteResponse> {
|
|
267
|
+
args.recordId = asNumber(args.recordId);
|
|
268
|
+
const { recordId, fetch, timeout, ...params } = args;
|
|
269
|
+
|
|
270
|
+
return _adapterDelete({
|
|
271
|
+
layout,
|
|
272
|
+
data: { ...params, recordId },
|
|
273
|
+
fetch,
|
|
274
|
+
timeout,
|
|
275
|
+
});
|
|
276
|
+
}
|
|
277
|
+
|
|
278
|
+
  /**
   * Find records in a given layout.
   */
  async function _find(
    args: FindArgs<InferredFieldData, InferredPortalData> &
      IgnoreEmptyResult &
      FetchOptions,
  ): Promise<GetResponse<InferredFieldData, InferredPortalData>> {
    const {
      query: queryInput,
      ignoreEmptyResult = false,
      timeout,
      fetch,
      ...params
    } = args;
    // the Data API always expects an array of query objects
    const query = !Array.isArray(queryInput) ? [queryInput] : queryInput;

    // rename and refactor limit, offset, and sort keys for this request
    if ("offset" in params && params.offset !== undefined) {
      // offset <= 1 is dropped entirely rather than sent to the server
      if (params.offset <= 1) delete params.offset;
    }
    if ("dateformats" in params && params.dateformats !== undefined) {
      // reassign dateformats to match FileMaker's expected values
      // (US = 0, file_locale = 1, ISO8601 = 2; anything else falls back to 0)
      // @ts-expect-error FM wants a string, so this is fine
      params.dateformats = (
        params.dateformats === "US"
          ? 0
          : params.dateformats === "file_locale"
            ? 1
            : params.dateformats === "ISO8601"
              ? 2
              : 0
      ).toString();
    }
    const result = (await find({
      data: { ...params, query },
      layout,
      fetch,
      timeout,
    }).catch((e: unknown) => {
      // FileMaker reports "no records match" as error code 401; when
      // ignoreEmptyResult is set, convert that into an empty result set
      if (ignoreEmptyResult && e instanceof FileMakerError && e.code === "401")
        return { data: [], dataInfo: { foundCount: 0, returnedCount: 0 } };
      throw e;
    })) as GetResponse<InferredFieldData, InferredPortalData>;

    if (result.dataInfo.foundCount > result.dataInfo.returnedCount) {
      // more records found than returned
      if (args?.limit === undefined && args?.offset === undefined) {
        console.warn(
          `🚨 @proofkit/fmdapi: Loaded only ${result.dataInfo.returnedCount} of the ${result.dataInfo.foundCount} records from your "${layout}" layout. Use the "findAll" method to automatically paginate through all records, or specify a "limit" and "offset" to handle pagination yourself.`,
        );
      }
    }

    // Validation/transform is a no-op when no schema was configured.
    return await runSchemaValidationAndTransform(schema, result);
  }
|
|
334
|
+
|
|
335
|
+
/**
|
|
336
|
+
* Helper method for `find`. Will only return the first result or throw error if there is more than 1 result.
|
|
337
|
+
*/
|
|
338
|
+
async function findOne(
|
|
339
|
+
args: FindArgs<InferredFieldData, InferredPortalData> & FetchOptions,
|
|
340
|
+
): Promise<GetResponseOne<InferredFieldData, InferredPortalData>> {
|
|
341
|
+
const result = await _find(args);
|
|
342
|
+
if (result.data.length !== 1)
|
|
343
|
+
throw new Error(
|
|
344
|
+
`${result.data.length} records found; expecting exactly 1`,
|
|
345
|
+
);
|
|
346
|
+
const transformedResult = await runSchemaValidationAndTransform(
|
|
347
|
+
schema,
|
|
348
|
+
result,
|
|
349
|
+
);
|
|
350
|
+
if (!transformedResult.data[0]) throw new Error("No data found");
|
|
351
|
+
return { ...transformedResult, data: transformedResult.data[0] };
|
|
352
|
+
}
|
|
353
|
+
|
|
354
|
+
/**
|
|
355
|
+
* Helper method for `find`. Will only return the first result instead of an array.
|
|
356
|
+
*/
|
|
357
|
+
async function findFirst(
|
|
358
|
+
args: FindArgs<InferredFieldData, InferredPortalData> &
|
|
359
|
+
IgnoreEmptyResult &
|
|
360
|
+
FetchOptions,
|
|
361
|
+
): Promise<GetResponseOne<InferredFieldData, InferredPortalData>> {
|
|
362
|
+
const result = await _find(args);
|
|
363
|
+
const transformedResult = await runSchemaValidationAndTransform(
|
|
364
|
+
schema,
|
|
365
|
+
result,
|
|
366
|
+
);
|
|
367
|
+
|
|
368
|
+
if (!transformedResult.data[0]) throw new Error("No data found");
|
|
369
|
+
return { ...transformedResult, data: transformedResult.data[0] };
|
|
370
|
+
}
|
|
371
|
+
|
|
372
|
+
/**
|
|
373
|
+
* Helper method for `find`. Will return the first result or null if no results are found.
|
|
374
|
+
*/
|
|
375
|
+
async function maybeFindFirst(
|
|
376
|
+
args: FindArgs<InferredFieldData, InferredPortalData> &
|
|
377
|
+
IgnoreEmptyResult &
|
|
378
|
+
FetchOptions,
|
|
379
|
+
): Promise<GetResponseOne<InferredFieldData, InferredPortalData> | null> {
|
|
380
|
+
const result = await _find({ ...args, ignoreEmptyResult: true });
|
|
381
|
+
const transformedResult = await runSchemaValidationAndTransform(
|
|
382
|
+
schema,
|
|
383
|
+
result,
|
|
384
|
+
);
|
|
385
|
+
if (!transformedResult.data[0]) return null;
|
|
386
|
+
return { ...transformedResult, data: transformedResult.data[0] };
|
|
387
|
+
}
|
|
388
|
+
|
|
389
|
+
  /**
   * Helper method for `find` to page through all found results.
   * ⚠️ WARNING: Use with caution as this can be a slow operation with large datasets
   */
  async function findAll(
    args: FindArgs<InferredFieldData, InferredPortalData> & FetchOptions,
  ): Promise<FMRecord<InferredFieldData, InferredPortalData>[]> {
    let runningData: GetResponse<
      InferredFieldData,
      InferredPortalData
    >["data"] = [];
    // page size defaults to 100 records per request
    const limit = args.limit ?? 100;
    let offset = args.offset ?? 1;
    // eslint-disable-next-line no-constant-condition
    while (true) {
      // ignoreEmptyResult keeps an empty find from throwing mid-pagination
      const data = await _find({
        ...args,
        offset,
        ignoreEmptyResult: true,
      });
      runningData = [...runningData, ...data.data];
      if (
        runningData.length === 0 ||
        runningData.length >= data.dataInfo.foundCount
      )
        break;
      offset = offset + limit;
    }
    return runningData;
  }
|
|
419
|
+
|
|
420
|
+
async function _layoutMetadata(args?: { timeout?: number } & FetchOptions) {
|
|
421
|
+
const { ...restArgs } = args ?? {};
|
|
422
|
+
// Explicitly define the type for params based on FetchOptions
|
|
423
|
+
const params: FetchOptions & { timeout?: number } = restArgs;
|
|
424
|
+
|
|
425
|
+
return await layoutMetadata({
|
|
426
|
+
layout,
|
|
427
|
+
fetch: params.fetch, // Now should correctly resolve to undefined if not present
|
|
428
|
+
timeout: params.timeout, // Now should correctly resolve to undefined if not present
|
|
429
|
+
});
|
|
430
|
+
}
|
|
431
|
+
|
|
432
|
+
async function _containerUpload<
|
|
433
|
+
T extends InferredFieldData = InferredFieldData,
|
|
434
|
+
>(args: ContainerUploadArgs<T> & FetchOptions) {
|
|
435
|
+
const { ...params } = args;
|
|
436
|
+
return await containerUpload({
|
|
437
|
+
layout,
|
|
438
|
+
data: {
|
|
439
|
+
...params,
|
|
440
|
+
containerFieldName: params.containerFieldName as string,
|
|
441
|
+
repetition: params.containerFieldRepetition,
|
|
442
|
+
},
|
|
443
|
+
fetch: params.fetch,
|
|
444
|
+
timeout: params.timeout,
|
|
445
|
+
});
|
|
446
|
+
}
|
|
447
|
+
|
|
448
|
+
  /**
   * Validate (and possibly transform) every record in a response against the
   * configured Standard Schema validators. Returns the response unchanged
   * when no schema is configured. Records are mutated in place with the
   * validated/transformed values.
   *
   * @param schema - the client's configured schema (may be undefined)
   * @param result - the raw adapter response to validate
   * @returns the same response object with validated data
   * @throws Error("Schema validation issues") when any record fails
   *   validation; details are logged to console.error first.
   */
  async function runSchemaValidationAndTransform(
    schema: ClientObjectProps["schema"],
    result: GetResponse<InferredFieldData, InferredPortalData>,
  ): Promise<GetResponse<InferredFieldData, InferredPortalData>> {
    // Issues are collected across ALL records so a single error can report
    // everything at once instead of failing on the first bad record.
    const fieldDataIssues: StandardSchemaV1.Issue[] = [];
    const portalDataIssues: StandardSchemaV1.Issue[] = [];

    if (!schema) return result;
    const transformedData: FMRecord<InferredFieldData, InferredPortalData>[] =
      [];
    for (const record of result.data) {
      // Standard Schema validators may be sync or async; await when needed.
      let fieldResult = schema.fieldData["~standard"].validate(
        record.fieldData,
      );
      if (fieldResult instanceof Promise) fieldResult = await fieldResult;
      if ("value" in fieldResult) {
        record.fieldData = fieldResult.value as InferredFieldData;
      } else {
        fieldDataIssues.push(...fieldResult.issues);
      }

      if (schema.portalData) {
        for (const [portalName, portalRecords] of Object.entries(
          record.portalData,
        )) {
          const validatedPortalRecords: PortalsWithIds<GenericPortalData>[] =
            [];
          for (const portalRecord of portalRecords) {
            // The portal schema validates a { [portalName]: record } wrapper,
            // so each portal row is wrapped before validation and unwrapped
            // after.
            let portalResult = schema.portalData["~standard"].validate({
              [portalName]: portalRecord,
            });
            if (portalResult instanceof Promise)
              portalResult = await portalResult;
            if ("value" in portalResult) {
              // re-attach recordId/modId, which are not part of the schema
              validatedPortalRecords.push({
                ...portalResult.value[portalName],
                recordId: portalRecord.recordId,
                modId: portalRecord.modId,
              });
            } else {
              portalDataIssues.push(...portalResult.issues);
            }
          }
          // @ts-expect-error We know portalName is a valid key, but can't figure out the right assertions
          record.portalData[portalName] = validatedPortalRecords;
        }
      }

      transformedData.push(record);
    }
    result.data = transformedData;

    if (fieldDataIssues.length > 0 || portalDataIssues.length > 0) {
      console.error(
        `🚨 @proofkit/fmdapi: Validation issues for layout "${layout}". Run the typegen command again to generate the latest field definitions from your layout.`,
        {
          fieldDataIssues,
          portalDataIssues,
        },
      );
      throw new Error("Schema validation issues");
    }

    return result;
  }
|
|
513
|
+
|
|
514
|
+
  // Public client surface: adapter pass-through methods first, then the
  // typed wrappers defined above (which take precedence on name collisions).
  return {
    ...otherMethods,
    layout: options.layout as Opts["layout"],
    list: _list,
    listAll,
    create: _create,
    get: _get,
    update: _update,
    delete: deleteRecord,
    find: _find,
    findOne,
    findFirst,
    maybeFindFirst,
    findAll,
    layoutMetadata: _layoutMetadata,
    containerUpload: _containerUpload,
  };
}

export default DataApi;
export { DataApi };
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import { FileMakerError } from "./client-types.js";
|
|
2
|
+
import { DataApi } from "./client.js";
|
|
3
|
+
|
|
4
|
+
export { DataApi, FileMakerError };
|
|
5
|
+
export * from "./utils.js";
|
|
6
|
+
export * as clientTypes from "./client-types.js";
|
|
7
|
+
|
|
8
|
+
export { FetchAdapter } from "./adapters/fetch.js";
|
|
9
|
+
export { OttoAdapter, type OttoAPIKey } from "./adapters/otto.js";
|
|
10
|
+
|
|
11
|
+
export default DataApi;
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import type { TokenStoreDefinitions } from "./types.js";
|
|
2
|
+
import fs from "fs-extra";
|
|
3
|
+
|
|
4
|
+
function getDataFromFile(devFileName: string): Record<string, string> {
|
|
5
|
+
const data: Record<string, string> = {};
|
|
6
|
+
fs.ensureFileSync(devFileName);
|
|
7
|
+
const fileString = fs.readFileSync(devFileName, "utf8");
|
|
8
|
+
try {
|
|
9
|
+
return JSON.parse(fileString);
|
|
10
|
+
} catch {
|
|
11
|
+
return data;
|
|
12
|
+
}
|
|
13
|
+
}
|
|
14
|
+
const setSharedData = (key: string, value: string, devFileName: string) => {
|
|
15
|
+
const data = getDataFromFile(devFileName);
|
|
16
|
+
data[key] = value;
|
|
17
|
+
fs.ensureFileSync(devFileName);
|
|
18
|
+
fs.writeFileSync(devFileName, JSON.stringify(data, null, 2));
|
|
19
|
+
};
|
|
20
|
+
const getSharedData = (key: string, devFileName: string): string | null => {
|
|
21
|
+
const data = getDataFromFile(devFileName);
|
|
22
|
+
return data[key] ?? null;
|
|
23
|
+
};
|
|
24
|
+
/**
 * A token store persisted to a JSON file on disk, so tokens survive
 * process restarts.
 *
 * @param fileName - path of the JSON file to store tokens in
 */
export const fileTokenStore = (
  fileName = "shared.json",
): TokenStoreDefinitions => {
  return {
    setToken: (key, value) => setSharedData(key, value, fileName),
    getToken: (key) => getSharedData(key, fileName),
    // NOTE(review): clearToken deletes the entire file, which clears ALL
    // stored keys rather than just the requested one — confirm intended.
    clearToken: () => fs.removeSync(fileName),
  };
};
export default fileTokenStore;
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import type { TokenStoreDefinitions } from "./types.js";
|
|
2
|
+
|
|
3
|
+
export function memoryStore(): TokenStoreDefinitions {
|
|
4
|
+
const data: Record<string, string> = {};
|
|
5
|
+
return {
|
|
6
|
+
getToken: (key: string): string | null => {
|
|
7
|
+
try {
|
|
8
|
+
return data[key] ?? null;
|
|
9
|
+
} catch {
|
|
10
|
+
return null;
|
|
11
|
+
}
|
|
12
|
+
},
|
|
13
|
+
clearToken: (key: string) => delete data[key],
|
|
14
|
+
setToken: (key: string, value: string): void => {
|
|
15
|
+
data[key] = value;
|
|
16
|
+
},
|
|
17
|
+
};
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
export default memoryStore;
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import type { TokenStoreDefinitions } from "./types.js";
|
|
2
|
+
import type { RedisConfigNodejs } from "@upstash/redis";
|
|
3
|
+
|
|
4
|
+
export function upstashTokenStore(
|
|
5
|
+
config: RedisConfigNodejs,
|
|
6
|
+
options: { prefix?: string } = {},
|
|
7
|
+
): TokenStoreDefinitions {
|
|
8
|
+
const { prefix = "" } = options;
|
|
9
|
+
|
|
10
|
+
const getRedis = async () => {
|
|
11
|
+
const redis = await import("@upstash/redis");
|
|
12
|
+
return new redis.Redis(config);
|
|
13
|
+
};
|
|
14
|
+
|
|
15
|
+
return {
|
|
16
|
+
getToken: async (key: string) => {
|
|
17
|
+
const redis = await getRedis();
|
|
18
|
+
return redis.get(prefix + key);
|
|
19
|
+
},
|
|
20
|
+
setToken: async (key: string, value: string) => {
|
|
21
|
+
const redis = await getRedis();
|
|
22
|
+
await redis.set(prefix + key, value);
|
|
23
|
+
},
|
|
24
|
+
clearToken: async (key: string) => {
|
|
25
|
+
const redis = await getRedis();
|
|
26
|
+
await redis.del(prefix + key);
|
|
27
|
+
},
|
|
28
|
+
};
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
export default upstashTokenStore;
|
package/src/utils.ts
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import type { S, L, U } from "ts-toolbelt";
|
|
2
|
+
|
|
3
|
+
// Maps each key of T to its LAST "::"-delimited segment while keeping the
// original value type (FileMaker field names may be prefixed with a table
// occurrence, e.g. "table::field"). Built with ts-toolbelt's Split/Last
// helpers and merged back into a single object type with U.Merge.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
type TransformedFields<T extends Record<string, any>> = U.Merge<
  {
    [Field in keyof T]: {
      [Key in Field extends string
        ? L.Last<S.Split<Field, "::">>
        : Field]: T[Field];
    };
  }[keyof T]
>;
|
|
13
|
+
|
|
14
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
15
|
+
export function removeFMTableNames<T extends Record<string, any>>(
|
|
16
|
+
obj: T,
|
|
17
|
+
): TransformedFields<T> {
|
|
18
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
19
|
+
const newObj: any = {};
|
|
20
|
+
for (const key in obj) {
|
|
21
|
+
if (key.includes("::")) {
|
|
22
|
+
const newKey = key.split("::")[1];
|
|
23
|
+
newObj[newKey as keyof TransformedFields<T>] = obj[key];
|
|
24
|
+
} else {
|
|
25
|
+
newObj[key] = obj[key];
|
|
26
|
+
}
|
|
27
|
+
}
|
|
28
|
+
return newObj;
|
|
29
|
+
}
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
/**
 * @type {import("@proofgeist/fmdapi/dist/utils/typegen/types.d.ts").GenerateSchemaOptions}
 */
const config = {
  // suffix appended to each generated client's name
  clientSuffix: "Layout",
  schemas: [
    // add your layouts and name schemas here
    { layout: "my_layout", schemaName: "MySchema" },

    // repeat as needed for each schema...
    // { layout: "my_other_layout", schemaName: "MyOtherSchema" },
  ],
  // change this value to generate the files in a different directory
  path: "schema",
  // presumably removes previously generated files before regenerating —
  // TODO confirm against the typegen documentation
  clearOldFiles: true,
};
module.exports = config;
|