@statezero/core 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89)
  1. package/dist/adaptors/react/composables.d.ts +1 -0
  2. package/dist/adaptors/react/composables.js +4 -0
  3. package/dist/adaptors/react/index.d.ts +1 -0
  4. package/dist/adaptors/react/index.js +1 -0
  5. package/dist/adaptors/vue/composables.d.ts +2 -0
  6. package/dist/adaptors/vue/composables.js +36 -0
  7. package/dist/adaptors/vue/index.d.ts +2 -0
  8. package/dist/adaptors/vue/index.js +2 -0
  9. package/dist/adaptors/vue/reactivity.d.ts +18 -0
  10. package/dist/adaptors/vue/reactivity.js +125 -0
  11. package/dist/cli/commands/syncModels.d.ts +132 -0
  12. package/dist/cli/commands/syncModels.js +1040 -0
  13. package/dist/cli/configFileLoader.d.ts +10 -0
  14. package/dist/cli/configFileLoader.js +85 -0
  15. package/dist/cli/index.d.ts +2 -0
  16. package/dist/cli/index.js +14 -0
  17. package/dist/config.d.ts +52 -0
  18. package/dist/config.js +242 -0
  19. package/dist/core/eventReceivers.d.ts +179 -0
  20. package/dist/core/eventReceivers.js +210 -0
  21. package/dist/core/utils.d.ts +8 -0
  22. package/dist/core/utils.js +62 -0
  23. package/dist/filtering/localFiltering.d.ts +116 -0
  24. package/dist/filtering/localFiltering.js +834 -0
  25. package/dist/flavours/django/dates.d.ts +33 -0
  26. package/dist/flavours/django/dates.js +99 -0
  27. package/dist/flavours/django/errors.d.ts +138 -0
  28. package/dist/flavours/django/errors.js +187 -0
  29. package/dist/flavours/django/f.d.ts +6 -0
  30. package/dist/flavours/django/f.js +91 -0
  31. package/dist/flavours/django/files.d.ts +76 -0
  32. package/dist/flavours/django/files.js +338 -0
  33. package/dist/flavours/django/makeApiCall.d.ts +20 -0
  34. package/dist/flavours/django/makeApiCall.js +169 -0
  35. package/dist/flavours/django/manager.d.ts +197 -0
  36. package/dist/flavours/django/manager.js +222 -0
  37. package/dist/flavours/django/model.d.ts +112 -0
  38. package/dist/flavours/django/model.js +253 -0
  39. package/dist/flavours/django/operationFactory.d.ts +65 -0
  40. package/dist/flavours/django/operationFactory.js +216 -0
  41. package/dist/flavours/django/q.d.ts +70 -0
  42. package/dist/flavours/django/q.js +43 -0
  43. package/dist/flavours/django/queryExecutor.d.ts +131 -0
  44. package/dist/flavours/django/queryExecutor.js +468 -0
  45. package/dist/flavours/django/querySet.d.ts +412 -0
  46. package/dist/flavours/django/querySet.js +601 -0
  47. package/dist/flavours/django/tempPk.d.ts +19 -0
  48. package/dist/flavours/django/tempPk.js +48 -0
  49. package/dist/flavours/django/utils.d.ts +19 -0
  50. package/dist/flavours/django/utils.js +29 -0
  51. package/dist/index.d.ts +38 -0
  52. package/dist/index.js +38 -0
  53. package/dist/react-entry.d.ts +2 -0
  54. package/dist/react-entry.js +2 -0
  55. package/dist/reactiveAdaptor.d.ts +24 -0
  56. package/dist/reactiveAdaptor.js +38 -0
  57. package/dist/setup.d.ts +15 -0
  58. package/dist/setup.js +22 -0
  59. package/dist/syncEngine/cache/cache.d.ts +75 -0
  60. package/dist/syncEngine/cache/cache.js +341 -0
  61. package/dist/syncEngine/metrics/metricOptCalcs.d.ts +79 -0
  62. package/dist/syncEngine/metrics/metricOptCalcs.js +284 -0
  63. package/dist/syncEngine/registries/metricRegistry.d.ts +53 -0
  64. package/dist/syncEngine/registries/metricRegistry.js +162 -0
  65. package/dist/syncEngine/registries/modelStoreRegistry.d.ts +11 -0
  66. package/dist/syncEngine/registries/modelStoreRegistry.js +56 -0
  67. package/dist/syncEngine/registries/querysetStoreRegistry.d.ts +55 -0
  68. package/dist/syncEngine/registries/querysetStoreRegistry.js +244 -0
  69. package/dist/syncEngine/stores/metricStore.d.ts +55 -0
  70. package/dist/syncEngine/stores/metricStore.js +222 -0
  71. package/dist/syncEngine/stores/modelStore.d.ts +40 -0
  72. package/dist/syncEngine/stores/modelStore.js +405 -0
  73. package/dist/syncEngine/stores/operation.d.ts +99 -0
  74. package/dist/syncEngine/stores/operation.js +224 -0
  75. package/dist/syncEngine/stores/operationEventHandlers.d.ts +8 -0
  76. package/dist/syncEngine/stores/operationEventHandlers.js +239 -0
  77. package/dist/syncEngine/stores/querysetStore.d.ts +32 -0
  78. package/dist/syncEngine/stores/querysetStore.js +200 -0
  79. package/dist/syncEngine/stores/reactivity.d.ts +3 -0
  80. package/dist/syncEngine/stores/reactivity.js +4 -0
  81. package/dist/syncEngine/stores/utils.d.ts +14 -0
  82. package/dist/syncEngine/stores/utils.js +32 -0
  83. package/dist/syncEngine/sync.d.ts +32 -0
  84. package/dist/syncEngine/sync.js +169 -0
  85. package/dist/vue-entry.d.ts +6 -0
  86. package/dist/vue-entry.js +2 -0
  87. package/license.md +116 -0
  88. package/package.json +123 -0
  89. package/readme.md +222 -0
@@ -0,0 +1,338 @@
+ import axios from "axios";
+ import { configInstance } from "../../config.js";
+ import PQueue from "p-queue";
+ /**
+  * FileObject - A file wrapper that handles uploads to StateZero backend
+  */
+ export class FileObject {
+     constructor(file, options = {}) {
+         if (!file || !(file instanceof File)) {
+             throw new Error("FileObject requires a File object");
+         }
+         // Store file metadata directly
+         this.name = file.name;
+         this.size = file.size;
+         this.type = file.type;
+         this.lastModified = file.lastModified;
+         // Initialize state properties
+         this.uploaded = false;
+         this.uploading = false;
+         this.uploadResult = null;
+         this.uploadError = null;
+         this.fileData = null;
+         // Multipart upload properties
+         this.uploadType = null; // 'single' or 'multipart'
+         this.uploadId = null;
+         this.totalChunks = 0;
+         this.completedChunks = 0;
+         this.chunkSize = options.chunkSize || 5 * 1024 * 1024; // 5MB default
+         if (this.chunkSize < this.constructor.MIN_CHUNK_SIZE) {
+             throw new Error(`Chunk size must be at least ${this.constructor.MIN_CHUNK_SIZE / (1024 * 1024)}MB for multipart uploads. ` +
+                 `Provided: ${this.chunkSize / (1024 * 1024)}MB`);
+         }
+         this.maxConcurrency = options.maxConcurrency || 3;
+         this.uploadPromise = this._initializeAndStartUpload(file, options);
+     }
+     get status() {
+         if (this.uploadError)
+             return "failed";
+         if (this.uploading)
+             return "uploading";
+         if (this.uploaded)
+             return "uploaded";
+         return "pending";
+     }
+     get filePath() {
+         return this.uploadResult?.file_path;
+     }
+     get fileUrl() {
+         return this.uploadResult?.file_url;
+     }
+     async _initializeAndStartUpload(file, options) {
+         const config = configInstance.getConfig();
+         const backend = config.backendConfigs?.[this.constructor.configKey];
+         if (!backend) {
+             throw new Error(`No backend configuration found for key: ${this.constructor.configKey}`);
+         }
+         // Check if fast uploads are enabled
+         if (backend.fileUploadMode === "s3") {
+             return this._fastUpload(file, options);
+         }
+         else {
+             // Read file data for direct upload
+             await this._readFileData(file);
+             return this._directUpload(options);
+         }
+     }
+     /**
+      * Fast upload using S3 presigned URLs with multipart support
+      */
+     async _fastUpload(file, options = {}) {
+         if (this.uploading)
+             return this.uploadPromise;
+         if (this.uploaded)
+             return Promise.resolve(this.uploadResult);
+         this.uploading = true;
+         this.uploadError = null;
+         try {
+             const config = configInstance.getConfig();
+             const backend = config.backendConfigs[this.constructor.configKey];
+             const baseUrl = backend.API_URL.replace(/\/+$/, "");
+             const headers = backend.getAuthHeaders ? backend.getAuthHeaders() : {};
+             // Determine if we need multipart upload
+             const needsMultipart = this.size > this.chunkSize;
+             const numChunks = needsMultipart
+                 ? Math.ceil(this.size / this.chunkSize)
+                 : 1;
+             this.totalChunks = numChunks;
+             this.uploadType = needsMultipart ? "multipart" : "single";
+             // Step 1: Initiate fast upload
+             const initiateResponse = await axios.post(`${baseUrl}/files/fast-upload/`, {
+                 action: "initiate",
+                 filename: this.name,
+                 content_type: this.type,
+                 file_size: this.size,
+                 num_chunks: numChunks,
+             }, { headers });
+             const uploadData = initiateResponse.data;
+             if (uploadData.upload_type === "single") {
+                 // Single file upload
+                 return await this._singleUpload(file, uploadData, options);
+             }
+             else {
+                 // Multipart upload
+                 this.uploadId = uploadData.upload_id;
+                 return await this._multipartUpload(file, uploadData, options);
+             }
+         }
+         catch (error) {
+             this.uploading = false;
+             this.uploadError =
+                 error.response?.data?.error || error.message || "Fast upload failed";
+             const uploadFailedError = new Error(`Fast upload failed: ${this.uploadError}`);
+             uploadFailedError.originalError = error;
+             throw uploadFailedError;
+         }
+     }
+     /**
+      * Handle single file upload
+      */
+     async _singleUpload(file, uploadData, options) {
+         const { upload_url, content_type, file_path } = uploadData;
+         // Upload directly to S3 using PUT with raw file
+         await axios.put(upload_url, file, {
+             headers: {
+                 "Content-Type": content_type,
+             },
+             ...(options.onProgress && {
+                 onUploadProgress: (progressEvent) => {
+                     const total = progressEvent.total > 0 ? progressEvent.total : 0;
+                     const percentage = total > 0 ? Math.round((progressEvent.loaded / total) * 100) : 0;
+                     if (options.onProgress) {
+                         options.onProgress(percentage);
+                     }
+                 },
+             }),
+         });
+         // Complete the upload
+         return await this._completeUpload(file_path, this.name);
+     }
+     /**
+      * Handle multipart upload with concurrency using p-queue
+      */
+     async _multipartUpload(file, uploadData, options) {
+         const { upload_urls, file_path } = uploadData;
+         const parts = [];
+         const chunks = this._createFileChunks(file);
+         // Create p-queue instance with concurrency control
+         const queue = new PQueue({
+             concurrency: this.maxConcurrency,
+         });
+         // Create upload tasks for each chunk
+         const uploadTasks = chunks.map((chunk, index) => {
+             const partNumber = index + 1;
+             const uploadUrl = upload_urls[partNumber];
+             return queue.add(async () => {
+                 try {
+                     const response = await axios.put(uploadUrl, chunk, {
+                         headers: {
+                             "Content-Type": "application/octet-stream",
+                         },
+                     });
+                     const etag = response.headers.etag?.replace(/"/g, "");
+                     parts[index] = {
+                         PartNumber: partNumber,
+                         ETag: etag,
+                     };
+                     this.completedChunks++;
+                     // Report progress
+                     if (options.onProgress) {
+                         const progress = Math.round((this.completedChunks / this.totalChunks) * 100);
+                         options.onProgress(progress);
+                     }
+                     return parts[index];
+                 }
+                 catch (error) {
+                     console.error(`Failed to upload chunk ${partNumber}:`, error);
+                     throw error;
+                 }
+             });
+         });
+         // Wait for all uploads to complete
+         await Promise.all(uploadTasks);
+         // Complete multipart upload
+         return await this._completeUpload(file_path, this.name, this.uploadId, parts);
+     }
+     /**
+      * Create file chunks for multipart upload
+      */
+     _createFileChunks(file) {
+         const chunks = [];
+         let offset = 0;
+         while (offset < file.size) {
+             const chunkSize = Math.min(this.chunkSize, file.size - offset);
+             const chunk = file.slice(offset, offset + chunkSize);
+             chunks.push(chunk);
+             offset += chunkSize;
+         }
+         return chunks;
+     }
+     /**
+      * Complete the upload (both single and multipart)
+      */
+     async _completeUpload(filePath, originalName, uploadId = null, parts = null) {
+         const config = configInstance.getConfig();
+         const backend = config.backendConfigs[this.constructor.configKey];
+         const baseUrl = backend.API_URL.replace(/\/+$/, "");
+         const headers = backend.getAuthHeaders ? backend.getAuthHeaders() : {};
+         const completeData = {
+             action: "complete",
+             file_path: filePath,
+             original_name: originalName,
+         };
+         if (uploadId && parts) {
+             completeData.upload_id = uploadId;
+             completeData.parts = parts;
+         }
+         const completeResponse = await axios.post(`${baseUrl}/files/fast-upload/`, completeData, { headers });
+         this.uploadResult = {
+             ...completeResponse.data,
+             uploadedAt: new Date(),
+         };
+         this.uploaded = true;
+         this.uploading = false;
+         return this.uploadResult;
+     }
+     /**
+      * Direct upload to Django backend (original method)
+      */
+     async _directUpload(options = {}) {
+         if (this.uploading)
+             return this.uploadPromise;
+         if (this.uploaded)
+             return Promise.resolve(this.uploadResult);
+         if (this.uploadError && !this.uploading && !this.uploaded) {
+             return Promise.reject(new Error(`Cannot upload: file processing failed earlier - ${this.uploadError}`));
+         }
+         this.uploading = true;
+         this.uploadError = null;
+         try {
+             if (!this.fileData) {
+                 throw new Error("File data is not available. Upload cannot proceed.");
+             }
+             const config = configInstance.getConfig();
+             const backend = config.backendConfigs[this.constructor.configKey];
+             if (!backend.API_URL) {
+                 throw new Error(`API_URL is not defined in backend configuration for key: ${this.constructor.configKey}`);
+             }
+             const formData = new FormData();
+             const fileBlob = this.getBlob();
+             const reconstructedFile = new File([fileBlob], this.name, {
+                 type: this.type,
+                 lastModified: this.lastModified,
+             });
+             formData.append("file", reconstructedFile);
+             if (options.additionalFields) {
+                 Object.entries(options.additionalFields).forEach(([key, value]) => {
+                     formData.append(key, value);
+                 });
+             }
+             const baseUrl = backend.API_URL.replace(/\/+$/, "");
+             const uploadUrl = `${baseUrl}/files/upload/`;
+             const headers = backend.getAuthHeaders ? backend.getAuthHeaders() : {};
+             const response = await axios.post(uploadUrl, formData, {
+                 headers: {
+                     ...headers,
+                     "Content-Type": "multipart/form-data",
+                 },
+                 ...(options.onProgress && {
+                     onUploadProgress: (progressEvent) => {
+                         const total = progressEvent.total > 0 ? progressEvent.total : 0;
+                         const percentage = total > 0 ? Math.round((progressEvent.loaded / total) * 100) : 0;
+                         if (options.onProgress) {
+                             options.onProgress(percentage);
+                         }
+                     },
+                 }),
+             });
+             this.uploadResult = {
+                 ...response.data,
+                 uploadedAt: new Date(),
+             };
+             this.uploaded = true;
+             this.uploading = false;
+             return this.uploadResult;
+         }
+         catch (error) {
+             this.uploading = false;
+             this.uploadError =
+                 error.response?.data?.error || error.message || "Unknown upload error";
+             const uploadFailedError = new Error(`Upload failed: ${this.uploadError}`);
+             uploadFailedError.originalError = error;
+             throw uploadFailedError;
+         }
+     }
+     /**
+      * Reads the file content into an ArrayBuffer (for direct uploads only)
+      */
+     async _readFileData(file) {
+         try {
+             this.fileData = await file.arrayBuffer();
+         }
+         catch (error) {
+             console.error("Failed to read file data:", error);
+             throw new Error(`Failed to read file data: ${error.message}`);
+         }
+     }
+     /**
+      * Gets the file data as a Blob (for direct uploads only)
+      */
+     getBlob() {
+         if (!this.fileData) {
+             throw new Error("File data not yet loaded or failed to load.");
+         }
+         return new Blob([this.fileData], { type: this.type });
+     }
+     async waitForUpload() {
+         return this.uploadPromise;
+     }
+     toJSON() {
+         return {
+             name: this.name,
+             size: this.size,
+             type: this.type,
+             status: this.status,
+             uploaded: this.uploaded,
+             filePath: this.filePath,
+             fileUrl: this.fileUrl,
+             uploadResult: this.uploadResult,
+             uploadError: this.uploadError ? String(this.uploadError) : null,
+             uploadType: this.uploadType,
+             uploadId: this.uploadId,
+             totalChunks: this.totalChunks,
+             completedChunks: this.completedChunks,
+         };
+     }
+ }
+ FileObject.configKey = "default";
+ FileObject.MIN_CHUNK_SIZE = 5 * 1024 * 1024; // 5MB minimum for S3 multipart
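
For orientation, a minimal usage sketch of the FileObject class added above. It is not part of the package: the import path, the fileInput element and the option values are assumptions, and a backend config must already be registered through configInstance (with fileUploadMode: "s3" to exercise the fast-upload path; any other mode falls back to the direct Django upload). With the default 5 MB chunkSize, a 12 MB file would go up as three multipart chunks (5 + 5 + 2 MB).

// Sketch only - assumes an <input type="file"> element named fileInput and a configured backend.
import { FileObject } from "@statezero/core"; // import path is an assumption

async function uploadSelectedFile() {
    const file = fileInput.files[0];
    // The constructor validates the File, copies its metadata and starts the upload immediately.
    const upload = new FileObject(file, {
        chunkSize: 8 * 1024 * 1024,  // optional; must be >= FileObject.MIN_CHUNK_SIZE (5 MB)
        maxConcurrency: 3,           // optional; parallel chunk uploads in multipart mode
        onProgress: (pct) => console.log(`upload ${pct}%`),
    });
    await upload.waitForUpload();    // resolves with uploadResult once the complete step returns
    console.log(upload.status);      // "uploaded" on success
    console.log(upload.filePath);    // file_path returned by the backend
    console.log(upload.fileUrl);     // file_url returned by the backend, if provided
}
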
@@ -0,0 +1,20 @@
+ /**
+  * Process included entities from a response and register them in the model store.
+  * Uses the model registry to find the appropriate model class for each entity type.
+  *
+  * @param {ModelStoreRegistry} modelStoreRegistry - The model store registry to use
+  * @param {Object} included - The included entities object from the response
+  * @param {Function} ModelClass - The base model class to get the configKey from
+  */
+ export function processIncludedEntities(modelStoreRegistry: ModelStoreRegistry, included: Object, ModelClass: Function): void;
+ /**
+  * Makes an API call to the backend with the given QuerySet.
+  * Automatically handles FileObject replacement with file paths for write operations.
+  *
+  * @param {QuerySet} querySet - The QuerySet to execute.
+  * @param {string} operationType - The type of operation to perform.
+  * @param {Object} args - Additional arguments for the operation.
+  * @param {string} operationId - A unique id for the operation
+  * @returns {Promise<Object>} The API response.
+  */
+ export function makeApiCall(querySet: QuerySet, operationType: string, args: Object | undefined, operationId: string, beforeExit?: Function | null): Promise<Object>;
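
To make the declared signatures concrete, here is a small sketch of the data shape processIncludedEntities consumes, inferred from the implementation in makeApiCall.js below. It is not part of the package; the model name, entities, registry and Todo class are hypothetical.

// Included entities arrive keyed by model name, then by primary key.
const included = {
    todo: {
        "1": { id: 1, title: "Write the readme", done: false },
        "2": { id: 2, title: "Publish 0.1.0", done: true },
    },
};

// `registry` is a ModelStoreRegistry and `Todo` a model class registered under the same configKey
// (both assumed). Each model name is resolved via configInstance.getModelClass(modelName, configKey)
// and each entity is stored with modelStoreRegistry.setEntity(EntityClass, pk, entity).
processIncludedEntities(registry, included, Todo);
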
@@ -0,0 +1,169 @@
+ import PQueue from 'p-queue';
+ import axios from 'axios';
+ import { configInstance } from '../../config.js';
+ import { replaceTempPks } from './tempPk.js';
+ import { parseStateZeroError, MultipleObjectsReturned, DoesNotExist } from './errors.js';
+ import { FileObject } from './files.js';
+ const apiCallQueue = new PQueue({ concurrency: 1 });
+ /**
+  * Process included entities from a response and register them in the model store.
+  * Uses the model registry to find the appropriate model class for each entity type.
+  *
+  * @param {ModelStoreRegistry} modelStoreRegistry - The model store registry to use
+  * @param {Object} included - The included entities object from the response
+  * @param {Function} ModelClass - The base model class to get the configKey from
+  */
+ export function processIncludedEntities(modelStoreRegistry, included, ModelClass) {
+     if (!included)
+         return;
+     const configKey = ModelClass.configKey;
+     try {
+         // Process each model type
+         for (const [modelName, entityMap] of Object.entries(included)) {
+             // Get the appropriate model class for this model name
+             const EntityClass = configInstance.getModelClass(modelName, configKey);
+             if (!EntityClass) {
+                 console.error(`Model class not found for ${modelName} in config ${configKey}`);
+                 throw new Error(`Model class not found for ${modelName}`);
+             }
+             // Register each entity in the model store
+             for (const [pk, entity] of Object.entries(entityMap)) {
+                 modelStoreRegistry.setEntity(EntityClass, pk, entity);
+             }
+         }
+     }
+     catch (error) {
+         console.error("Error processing included entities with model registry:", error);
+         throw new Error(`Failed to process included entities: ${error.message}`);
+     }
+ }
+ /**
+  * Recursively processes an object to replace FileObject instances with their file paths.
+  * Throws an error if any FileObject is not yet uploaded.
+  *
+  * @param {any} obj - The object to process
+  * @returns {any} The processed object with FileObjects replaced by paths
+  */
+ function processFileObjects(obj) {
+     if (obj === null || obj === undefined) {
+         return obj;
+     }
+     // Handle FileObject instances
+     if (obj instanceof FileObject) {
+         const status = obj.status;
+         if (status === 'uploaded' && obj.filePath) {
+             return obj.filePath;
+         }
+         else if (status === 'failed') {
+             throw new Error(`Cannot use FileObject in query - upload failed: ${obj.uploadError}`);
+         }
+         else if (status === 'uploading') {
+             throw new Error(`Cannot use FileObject in query - file is still uploading. Wait for upload to complete before executing the query.`);
+         }
+         else if (status === 'pending') {
+             throw new Error(`Cannot use FileObject in query - file upload has not started yet.`);
+         }
+         else {
+             throw new Error(`Cannot use FileObject in query - unexpected status: ${status}`);
+         }
+     }
+     // Handle arrays
+     if (Array.isArray(obj)) {
+         return obj.map(item => processFileObjects(item));
+     }
+     // Handle plain objects
+     if (typeof obj === 'object' && obj.constructor === Object) {
+         const processedObj = {};
+         for (const [key, value] of Object.entries(obj)) {
+             processedObj[key] = processFileObjects(value);
+         }
+         return processedObj;
+     }
+     // Return primitive values as-is
+     return obj;
+ }
+ /**
+  * Makes an API call to the backend with the given QuerySet.
+  * Automatically handles FileObject replacement with file paths for write operations.
+  *
+  * @param {QuerySet} querySet - The QuerySet to execute.
+  * @param {string} operationType - The type of operation to perform.
+  * @param {Object} args - Additional arguments for the operation.
+  * @param {string} operationId - A unique id for the operation
+  * @returns {Promise<Object>} The API response.
+  */
+ export async function makeApiCall(querySet, operationType, args = {}, operationId, beforeExit = null) {
+     const ModelClass = querySet.ModelClass;
+     const config = configInstance.getConfig();
+     const backend = config.backendConfigs[ModelClass.configKey];
+     if (!backend) {
+         throw new Error(`No backend configuration found for key: ${ModelClass.configKey}`);
+     }
+     // Build the base query
+     let query = {
+         ...querySet.build(),
+         type: operationType,
+     };
+     // Add args to the query if provided
+     if (args && Object.keys(args).length > 0) {
+         query = {
+             ...query,
+             ...args,
+         };
+     }
+     const { serializerOptions, ...restOfQuery } = query;
+     let payload = {
+         ast: {
+             query: restOfQuery,
+             serializerOptions,
+         },
+     };
+     let limit = payload?.ast?.serializerOptions?.limit;
+     let overfetch = payload?.ast?.serializerOptions?.overfetch || 10;
+     if (limit && overfetch) {
+         payload.ast.serializerOptions.limit = limit + overfetch;
+     }
+     // Determine if this is a write operation that needs FileObject processing
+     const writeOperations = [
+         "create", "update", "delete", "update_instance", "delete_instance",
+         "get_or_create", "update_or_create"
+     ];
+     const isWriteOperation = writeOperations.includes(operationType);
+     // Process FileObjects for write operations
+     if (isWriteOperation) {
+         try {
+             payload = processFileObjects(payload);
+         }
+         catch (error) {
+             throw new Error(`Failed to process file uploads: ${error.message}`);
+         }
+     }
+     const baseUrl = backend.API_URL.replace(/\/+$/, "");
+     const finalUrl = `${baseUrl}/${ModelClass.modelName}/`;
+     const headers = backend.getAuthHeaders ? backend.getAuthHeaders() : {};
+     if (operationId) {
+         headers["X-Operation-ID"] = operationId;
+     }
+     // Use the queue for write operations, bypass for read operations
+     const apiCall = async () => {
+         try {
+             let response = await axios.post(finalUrl, replaceTempPks(payload), { headers });
+             if (typeof beforeExit === 'function' && response?.data) {
+                 await beforeExit(response.data);
+             }
+             return response.data;
+         }
+         catch (error) {
+             if (error.response && error.response.data) {
+                 const parsedError = parseStateZeroError(error.response.data);
+                 if (Error.captureStackTrace) {
+                     Error.captureStackTrace(parsedError, makeApiCall);
+                 }
+                 throw parsedError;
+             }
+             throw new Error(`API call failed: ${error.message}`);
+         }
+     };
+     // Queue write operations, execute read operations immediately
+     return isWriteOperation ? apiCallQueue.add(apiCall) : apiCall();
+ }
+ }