@uploadista/data-store-s3 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. package/.turbo/turbo-build.log +5 -0
  2. package/.turbo/turbo-check.log +5 -0
  3. package/LICENSE +21 -0
  4. package/README.md +588 -0
  5. package/dist/index.d.ts +2 -0
  6. package/dist/index.d.ts.map +1 -0
  7. package/dist/index.js +1 -0
  8. package/dist/observability.d.ts +45 -0
  9. package/dist/observability.d.ts.map +1 -0
  10. package/dist/observability.js +155 -0
  11. package/dist/s3-store-old.d.ts +51 -0
  12. package/dist/s3-store-old.d.ts.map +1 -0
  13. package/dist/s3-store-old.js +765 -0
  14. package/dist/s3-store.d.ts +9 -0
  15. package/dist/s3-store.d.ts.map +1 -0
  16. package/dist/s3-store.js +666 -0
  17. package/dist/services/__mocks__/s3-client-mock.service.d.ts +44 -0
  18. package/dist/services/__mocks__/s3-client-mock.service.d.ts.map +1 -0
  19. package/dist/services/__mocks__/s3-client-mock.service.js +379 -0
  20. package/dist/services/index.d.ts +2 -0
  21. package/dist/services/index.d.ts.map +1 -0
  22. package/dist/services/index.js +1 -0
  23. package/dist/services/s3-client.service.d.ts +68 -0
  24. package/dist/services/s3-client.service.d.ts.map +1 -0
  25. package/dist/services/s3-client.service.js +209 -0
  26. package/dist/test-observability.d.ts +6 -0
  27. package/dist/test-observability.d.ts.map +1 -0
  28. package/dist/test-observability.js +62 -0
  29. package/dist/types.d.ts +81 -0
  30. package/dist/types.d.ts.map +1 -0
  31. package/dist/types.js +1 -0
  32. package/dist/utils/calculations.d.ts +7 -0
  33. package/dist/utils/calculations.d.ts.map +1 -0
  34. package/dist/utils/calculations.js +41 -0
  35. package/dist/utils/error-handling.d.ts +7 -0
  36. package/dist/utils/error-handling.d.ts.map +1 -0
  37. package/dist/utils/error-handling.js +29 -0
  38. package/dist/utils/index.d.ts +4 -0
  39. package/dist/utils/index.d.ts.map +1 -0
  40. package/dist/utils/index.js +3 -0
  41. package/dist/utils/stream-adapter.d.ts +14 -0
  42. package/dist/utils/stream-adapter.d.ts.map +1 -0
  43. package/dist/utils/stream-adapter.js +41 -0
  44. package/package.json +36 -0
  45. package/src/__tests__/integration/s3-store.integration.test.ts +548 -0
  46. package/src/__tests__/multipart-logic.test.ts +395 -0
  47. package/src/__tests__/s3-store.edge-cases.test.ts +681 -0
  48. package/src/__tests__/s3-store.performance.test.ts +622 -0
  49. package/src/__tests__/s3-store.test.ts +662 -0
  50. package/src/__tests__/utils/performance-helpers.ts +459 -0
  51. package/src/__tests__/utils/test-data-generator.ts +331 -0
  52. package/src/__tests__/utils/test-setup.ts +256 -0
  53. package/src/index.ts +1 -0
  54. package/src/s3-store.ts +1059 -0
  55. package/src/services/__mocks__/s3-client-mock.service.ts +604 -0
  56. package/src/services/index.ts +1 -0
  57. package/src/services/s3-client.service.ts +359 -0
  58. package/src/types.ts +96 -0
  59. package/src/utils/calculations.ts +61 -0
  60. package/src/utils/error-handling.ts +52 -0
  61. package/src/utils/index.ts +3 -0
  62. package/src/utils/stream-adapter.ts +50 -0
  63. package/tsconfig.json +19 -0
  64. package/tsconfig.tsbuildinfo +1 -0
  65. package/vitest.config.ts +15 -0
package/src/__tests__/integration/s3-store.integration.test.ts
@@ -0,0 +1,548 @@
+import { UploadistaError } from "@uploadista/core/errors";
+import { UploadFileKVStore } from "@uploadista/core/types";
+import { Effect, Stream } from "effect";
+import { afterEach, beforeEach, describe, expect, it } from "vitest";
+import { createS3StoreImplementation } from "../../s3-store";
+import type { S3Store } from "../../types";
+import {
+  compareArrays,
+  createTestDataStream,
+  generateData,
+  streamToArray,
+  TEST_FILE_SIZES,
+} from "../utils/test-data-generator";
+import {
+  createTestS3StoreConfig,
+  createTestUploadFile,
+  type MockS3TestMethods,
+  runTestWithTimeout,
+  setupTestEnvironment,
+  TestLayersWithMockS3,
+} from "../utils/test-setup";
+
+describe("S3Store - Integration Tests", () => {
+  let s3Store: S3Store;
+  let mockService: MockS3TestMethods;
+
+  beforeEach(async () => {
+    await runTestWithTimeout(
+      Effect.gen(function* () {
+        mockService = yield* setupTestEnvironment();
+
+        const kvStore = yield* UploadFileKVStore;
+        const config = createTestS3StoreConfig();
+
+        s3Store = yield* createS3StoreImplementation({
+          ...config,
+          kvStore,
+        });
+      }).pipe(Effect.provide(TestLayersWithMockS3())),
+    );
+  });
+
+  afterEach(async () => {
+    await runTestWithTimeout(
+      Effect.gen(function* () {
+        // Clear both S3 mock storage and KV store
+        yield* mockService.clearStorage();
+
+        // Clear all entries from KV store
+        const kvStore = yield* UploadFileKVStore;
+        if (!kvStore.list) {
+          return;
+        }
+        const keys = yield* kvStore.list();
+        if (keys && keys.length > 0) {
+          yield* Effect.all(
+            keys.map((key) => kvStore.delete(key)),
+            { concurrency: "unbounded" },
+          );
+        }
+      }).pipe(Effect.provide(TestLayersWithMockS3())),
+    );
+  });
+
+  describe("End-to-End Upload Workflows", () => {
+    it("should handle complete upload workflow for medium file", async () => {
+      const testFile = createTestUploadFile(
+        "e2e-medium",
+        TEST_FILE_SIZES.MEDIUM.size,
+      );
+      const size = testFile.size ?? 0;
+      const originalData = generateData(size, {
+        type: "pattern",
+        pattern: new Uint8Array([0x12, 0x34, 0x56, 0x78]),
+      });
+      const testData = createTestDataStream(size, {
+        type: "pattern",
+        pattern: new Uint8Array([0x12, 0x34, 0x56, 0x78]),
+      });
+
+      await runTestWithTimeout(
+        Effect.gen(function* () {
+          // Step 1: Create upload
+          const createdFile = yield* s3Store.create(testFile);
+          expect(createdFile.id).toBe(testFile.id);
+          expect(createdFile.size).toBe(testFile.size);
+
+          // Verify upload exists in KV store
+          const initialUploadInfo = yield* s3Store.getUpload(testFile.id);
+          expect(initialUploadInfo.offset).toBe(0);
+          expect(initialUploadInfo.size).toBe(testFile.size);
+
+          // Step 2: Upload data
+          const progressUpdates: number[] = [];
+          const finalOffset = yield* s3Store.write(
+            {
+              file_id: testFile.id,
+              stream: testData,
+              offset: 0,
+            },
+            {
+              onProgress: (offset) => progressUpdates.push(offset),
+            },
+          );
+
+          expect(finalOffset).toBe(testFile.size);
+          expect(progressUpdates.length).toBeGreaterThan(0);
+          expect(progressUpdates[progressUpdates.length - 1]).toBe(
+            testFile.size,
+          );
+
+          // Step 3: Verify upload completion
+          const completedUploadInfo = yield* s3Store.getUpload(testFile.id);
+          expect(completedUploadInfo.offset).toBe(testFile.size);
+
+          // Step 4: Read back data and verify integrity
+          const readStream = yield* s3Store.read(testFile.id);
+          const readData = yield* Effect.promise(
+            async () => await streamToArray(readStream),
+          );
+
+          expect(compareArrays(readData, originalData)).toBe(true);
+
+          // Step 5: Verify metrics were recorded
+          const metrics = yield* mockService.getMetrics();
+          expect(metrics.operationCounts.get("createMultipartUpload")).toBe(1);
+          expect(metrics.operationCounts.get("completeMultipartUpload")).toBe(
+            1,
+          );
+          expect(metrics.operationCounts.get("uploadPart")).toBeGreaterThan(0);
+          expect(metrics.totalBytesUploaded).toBe(testFile.size);
+        }).pipe(Effect.provide(TestLayersWithMockS3())),
+        30000,
+      );
+    });
+
+    it("should handle complete upload workflow for large file", async () => {
+      const testFile = createTestUploadFile(
+        "e2e-large",
+        TEST_FILE_SIZES.LARGE.size,
+      );
+      const size = testFile.size ?? 0;
+      const originalData = generateData(size, {
+        type: "random",
+        seed: 42,
+      });
+      const testData = createTestDataStream(size, {
+        type: "random",
+        seed: 42,
+      });
+
+      await runTestWithTimeout(
+        Effect.gen(function* () {
+          yield* s3Store.create(testFile);
+
+          const progressUpdates: number[] = [];
+          const finalOffset = yield* s3Store.write(
+            {
+              file_id: testFile.id,
+              stream: testData,
+              offset: 0,
+            },
+            {
+              onProgress: (offset) => progressUpdates.push(offset),
+            },
+          );
+
+          expect(finalOffset).toBe(testFile.size);
+
+          // Verify data integrity for large file
+          const readStream = yield* s3Store.read(testFile.id);
+          const readData = yield* Effect.promise(
+            async () => await streamToArray(readStream),
+          );
+
+          expect(readData.length).toBe(originalData.length);
+          expect(compareArrays(readData, originalData)).toBe(true);
+
+          // Verify efficient part usage
+          const metrics = yield* mockService.getMetrics();
+          const partUploads = metrics.operationCounts.get("uploadPart") || 0;
+
+          // Should use reasonable number of parts for 50MB file
+          expect(partUploads).toBeGreaterThanOrEqual(6);
+          expect(partUploads).toBeLessThanOrEqual(10);
+        }).pipe(Effect.provide(TestLayersWithMockS3())),
+        60000,
+      );
+    });
+  });
+
+  describe("Resumable Upload Scenarios", () => {
+    it("should handle resumable upload after interruption", async () => {
+      const testFile = createTestUploadFile(
+        "resumable",
+        TEST_FILE_SIZES.MEDIUM.size,
+      );
+      const fullData = generateData(testFile.size ?? 0, {
+        type: "text",
+      });
+
+      const halfSize = Math.floor((testFile.size ?? 0) / 2);
+      const _firstHalf = fullData.slice(0, halfSize);
+      const secondHalf = fullData.slice(halfSize);
+
+      await runTestWithTimeout(
+        Effect.gen(function* () {
+          // Step 1: Create upload and upload first half
+          yield* s3Store.create(testFile);
+
+          const firstHalfStream = createTestDataStream(halfSize, {
+            type: "text",
+          });
+          const firstOffset = yield* s3Store.write(
+            {
+              file_id: testFile.id,
+              stream: firstHalfStream,
+              offset: 0,
+            },
+            { onProgress: undefined },
+          );
+
+          expect(firstOffset).toBe(halfSize);
+
+          // Step 2: Check upload status
+          const partialUploadInfo = yield* s3Store.getUpload(testFile.id);
+          expect(partialUploadInfo.offset).toBe(halfSize);
+          expect(partialUploadInfo.size).toBe(testFile.size);
+
+          // Step 3: Resume upload with second half
+          const _secondHalfSize = (testFile.size ?? 0) - halfSize;
+          const secondHalfStream = Stream.fromIterable([secondHalf]);
+
+          const finalOffset = yield* s3Store.write(
+            {
+              file_id: testFile.id,
+              stream: secondHalfStream,
+              offset: halfSize,
+            },
+            { onProgress: undefined },
+          );
+
+          expect(finalOffset).toBe(testFile.size);
+
+          // Step 4: Verify complete file integrity
+          const readStream = yield* s3Store.read(testFile.id);
+          const readData = yield* Effect.promise(
+            async () => await streamToArray(readStream),
+          );
+
+          expect(compareArrays(readData, fullData)).toBe(true);
+        }).pipe(Effect.provide(TestLayersWithMockS3())),
+        45000,
+      );
+    });
+
+    it("should handle multiple resume attempts", async () => {
+      const testFile = createTestUploadFile(
+        "multiple-resume",
+        TEST_FILE_SIZES.LARGE.size,
+      );
+      const size = testFile.size ?? 0;
+      const chunkSize = Math.floor(size / 4); // Upload in quarters
+
+      await runTestWithTimeout(
+        Effect.gen(function* () {
+          yield* s3Store.create(testFile);
+
+          // Upload in chunks, checking status after each
+          for (let i = 0; i < 4; i++) {
+            const startOffset = i * chunkSize;
+            const endOffset = i === 3 ? size : (i + 1) * chunkSize;
+            const currentChunkSize = endOffset - startOffset;
+
+            const chunkData = createTestDataStream(currentChunkSize, {
+              type: "pattern",
+              pattern: new Uint8Array([i, i, i, i]),
+            });
+
+            const offset = yield* s3Store.write(
+              {
+                file_id: testFile.id,
+                stream: chunkData,
+                offset: startOffset,
+              },
+              { onProgress: undefined },
+            );
+
+            expect(offset).toBe(endOffset);
+
+            // Check status after each chunk
+            const uploadInfo = yield* s3Store.getUpload(testFile.id);
+            expect(uploadInfo.offset).toBe(endOffset);
+          }
+
+          // Verify final state
+          const finalUploadInfo = yield* s3Store.getUpload(testFile.id);
+          expect(finalUploadInfo.offset).toBe(testFile.size);
+        }).pipe(Effect.provide(TestLayersWithMockS3())),
+        60000,
+      );
+    });
+  });
+
+  describe("Error Recovery Integration", () => {
+    it("should recover from S3 service errors", async () => {
+      const testFile = createTestUploadFile(
+        "error-recovery",
+        TEST_FILE_SIZES.MEDIUM.size,
+      );
+      const testData = createTestDataStream(testFile.size ?? 0);
+
+      await runTestWithTimeout(
+        Effect.gen(function* () {
+          yield* s3Store.create(testFile);
+
+          // Set up intermittent failures
+          yield* mockService.setConfig({
+            simulateLatency: 0,
+            uploadFailureRate: 0.3, // 30% failure rate to test retry
+          });
+
+          // This might fail initially but should recover with retries
+          const result = yield* Effect.either(
+            s3Store.write(
+              {
+                file_id: testFile.id,
+                stream: testData,
+                offset: 0,
+              },
+              { onProgress: undefined },
+            ),
+          );
+
+          // If it succeeds despite errors, verify it worked correctly
+          if (result._tag === "Right") {
+            expect(result.right).toBe(testFile.size ?? 0);
+
+            const uploadInfo = yield* s3Store.getUpload(testFile.id);
+            expect(uploadInfo.offset).toBe(testFile.size ?? 0);
+          } else {
+            // If it fails, it should be with a proper UploadistaError
+            expect(result.left).toBeInstanceOf(UploadistaError);
+          }
+
+          // Reset error rate for cleanup
+          yield* mockService.setConfig({ uploadFailureRate: 0 });
+        }).pipe(Effect.provide(TestLayersWithMockS3())),
+        30000,
+      );
+    });
+
+    it("should handle cleanup after failed uploads", async () => {
+      const testFile = createTestUploadFile(
+        "cleanup-test",
+        TEST_FILE_SIZES.MEDIUM.size,
+      );
+      const testData = createTestDataStream(testFile.size ?? 0);
+
+      await runTestWithTimeout(
+        Effect.gen(function* () {
+          yield* s3Store.create(testFile);
+
+          // Force all upload parts to fail
+          yield* mockService.setConfig({ uploadFailureRate: 1.0 });
+
+          // Upload should fail
+          const result = yield* Effect.either(
+            s3Store.write(
+              {
+                file_id: testFile.id,
+                stream: testData,
+                offset: 0,
+              },
+              { onProgress: undefined },
+            ),
+          );
+
+          expect(result._tag).toBe("Left");
+
+          // Reset error rate
+          yield* mockService.setConfig({ uploadFailureRate: 0 });
+
+          // Remove the failed upload
+          yield* s3Store.remove(testFile.id);
+
+          // Verify cleanup
+          const storage = yield* mockService.getStorage();
+          expect(storage.objects.has(testFile.id)).toBe(false);
+        }).pipe(Effect.provide(TestLayersWithMockS3())),
+        60_000, // Increased timeout for retry logic
+      );
+    }, 60_000);
+  });
+
+  describe("Concurrent Operations Integration", () => {
+    it("should handle multiple concurrent uploads", async () => {
+      const fileCount = 3;
+      const uploads = Array.from({ length: fileCount }, (_, i) => {
+        const testFile = createTestUploadFile(
+          `concurrent-${i}`,
+          TEST_FILE_SIZES.SMALL_BASIC.size,
+        );
+        const testData = createTestDataStream(testFile.size ?? 0, {
+          type: "random",
+          seed: i + 1000,
+        });
+
+        return Effect.gen(function* () {
+          yield* s3Store.create(testFile);
+
+          const finalOffset = yield* s3Store.write(
+            {
+              file_id: testFile.id,
+              stream: testData,
+              offset: 0,
+            },
+            { onProgress: undefined },
+          );
+
+          return { fileId: testFile.id, size: testFile.size, finalOffset };
+        });
+      });
+
+      await runTestWithTimeout(
+        Effect.gen(function* () {
+          const results = yield* Effect.all(uploads, { concurrency: 2 });
+
+          // All uploads should succeed
+          expect(results).toHaveLength(fileCount);
+          results.forEach((result, i) => {
+            expect(result.fileId).toBe(`concurrent-${i}`);
+            expect(result.finalOffset).toBe(result.size);
+          });
+
+          // Verify all files are in storage
+          const storage = yield* mockService.getStorage();
+          for (let i = 0; i < fileCount; i++) {
+            expect(storage.objects.has(`concurrent-${i}`)).toBe(true);
+          }
+        }).pipe(Effect.provide(TestLayersWithMockS3())),
+        30000,
+      );
+    });
+  });
+
+  describe("Storage and Capabilities Integration", () => {
+    it("should report correct capabilities", () => {
+      const capabilities = s3Store.getCapabilities();
+
+      expect(capabilities.supportsParallelUploads).toBe(true);
+      expect(capabilities.supportsConcatenation).toBe(true);
+      expect(capabilities.supportsDeferredLength).toBe(true);
+      expect(capabilities.supportsResumableUploads).toBe(true);
+      expect(capabilities.supportsTransactionalUploads).toBe(true);
+      expect(capabilities.maxConcurrentUploads).toBeGreaterThan(0);
+      expect(capabilities.minChunkSize).toBe(5 * 1024 * 1024); // 5MB
+      expect(capabilities.maxChunkSize).toBe(5 * 1024 * 1024 * 1024); // 5GB
+      expect(capabilities.optimalChunkSize).toBe(8 * 1024 * 1024); // 8MB
+    });
+
+    it("should validate upload strategies correctly", async () => {
+      await runTestWithTimeout(
+        Effect.gen(function* () {
+          const parallelValid =
+            yield* s3Store.validateUploadStrategy("parallel");
+          expect(parallelValid).toBe(true);
+
+          const singleValid = yield* s3Store.validateUploadStrategy("single");
+          expect(singleValid).toBe(true);
+        }).pipe(Effect.provide(TestLayersWithMockS3())),
+      );
+    });
+
+    it("should provide correct chunker constraints", () => {
+      const constraints = s3Store.getChunkerConstraints();
+
+      expect(constraints.minChunkSize).toBe(5 * 1024 * 1024);
+      expect(constraints.maxChunkSize).toBe(5 * 1024 * 1024 * 1024);
+      expect(constraints.optimalChunkSize).toBe(8 * 1024 * 1024);
+      expect(constraints.requiresOrderedChunks).toBe(false);
+    });
+  });
+
+  describe("Metadata and URL Integration", () => {
+    it("should handle file metadata correctly", async () => {
+      const testFile = createTestUploadFile(
+        "metadata-integration",
+        TEST_FILE_SIZES.SMALL_BASIC.size,
+        {
+          metadata: {
+            contentType: "application/pdf",
+            cacheControl: "max-age=3600",
+          },
+        },
+      );
+      const testData = createTestDataStream(testFile.size ?? 0);
+
+      await runTestWithTimeout(
+        Effect.gen(function* () {
+          const createdFile = yield* s3Store.create(testFile);
+
+          expect(createdFile.metadata).toEqual(testFile.metadata);
+          expect(createdFile.url).toContain(testFile.id);
+
+          yield* s3Store.write(
+            {
+              file_id: testFile.id,
+              stream: testData,
+              offset: 0,
+            },
+            { onProgress: undefined },
+          );
+
+          const uploadInfo = yield* s3Store.getUpload(testFile.id);
+          expect(uploadInfo.metadata).toEqual(testFile.metadata);
+        }).pipe(Effect.provide(TestLayersWithMockS3())),
+      );
+    });
+  });
+
+  describe("Cleanup and Expiration Integration", () => {
+    it("should handle expired upload deletion", async () => {
+      await runTestWithTimeout(
+        Effect.gen(function* () {
+          // Create some uploads (they won't actually be expired in the mock)
+          const testFile1 = createTestUploadFile(
+            "expired-1",
+            TEST_FILE_SIZES.SMALL_BASIC.size,
+          );
+          const testFile2 = createTestUploadFile(
+            "expired-2",
+            TEST_FILE_SIZES.SMALL_BASIC.size,
+          );
+
+          yield* s3Store.create(testFile1);
+          yield* s3Store.create(testFile2);
+
+          // The mock implementation will handle this gracefully
+          const deletedCount = yield* s3Store.deleteExpired;
+
+          // Should complete without error
+          expect(deletedCount).toBeGreaterThanOrEqual(0);
+        }).pipe(Effect.provide(TestLayersWithMockS3())),
+      );
+    });
+  });
+});