@pol-studios/storage 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,20 @@
1
+ // src/config/buckets.ts
2
+ var BUCKETS = {
3
+ AVATARS: "avatars",
4
+ FIXTURE_CATALOG_COVER: "fixture-catalog-cover",
5
+ FIXTURE_ATTACHMENTS: "fixture-attachments",
6
+ UNIT_DOCUMENTATION: "unit-documentation-attachments",
7
+ DATASHEETS: "datasheets",
8
+ PATTERNS: "patterns",
9
+ LOGOS: "logos",
10
+ RECEIPTS: "receipts",
11
+ TICKET_ATTACHMENTS: "ticket-attachment",
12
+ PROCESS_RESULTS: "process-results",
13
+ DATA_EXCHANGE_IMPORTS: "data-exchange-imports",
14
+ EMAIL_TEMPLATES: "email-templates",
15
+ EMAIL_TEMPLATES_ASSETS: "email-templates-assets"
16
+ };
17
+ export {
18
+ BUCKETS
19
+ };
20
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/config/buckets.ts"],"sourcesContent":["/**\n * Storage bucket names for Supabase storage.\n * Use these constants when calling storage APIs.\n */\nexport const BUCKETS = {\n AVATARS: \"avatars\",\n FIXTURE_CATALOG_COVER: \"fixture-catalog-cover\",\n FIXTURE_ATTACHMENTS: \"fixture-attachments\",\n UNIT_DOCUMENTATION: \"unit-documentation-attachments\",\n DATASHEETS: \"datasheets\",\n PATTERNS: \"patterns\",\n LOGOS: \"logos\",\n RECEIPTS: \"receipts\",\n TICKET_ATTACHMENTS: \"ticket-attachment\",\n PROCESS_RESULTS: \"process-results\",\n DATA_EXCHANGE_IMPORTS: \"data-exchange-imports\",\n EMAIL_TEMPLATES: \"email-templates\",\n EMAIL_TEMPLATES_ASSETS: \"email-templates-assets\",\n} as const;\n\nexport type BucketName = (typeof BUCKETS)[keyof typeof BUCKETS];\n"],"mappings":";AAIO,IAAM,UAAU;AAAA,EACrB,SAAS;AAAA,EACT,uBAAuB;AAAA,EACvB,qBAAqB;AAAA,EACrB,oBAAoB;AAAA,EACpB,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,OAAO;AAAA,EACP,UAAU;AAAA,EACV,oBAAoB;AAAA,EACpB,iBAAiB;AAAA,EACjB,uBAAuB;AAAA,EACvB,iBAAiB;AAAA,EACjB,wBAAwB;AAC1B;","names":[]}
@@ -0,0 +1,494 @@
1
+ // src/hooks/useUpload.ts
2
+ import { useMutation } from "@tanstack/react-query";
3
+ import { Upload } from "tus-js-client";
4
+ import { newUuid } from "@pol-studios/utils/uuid";
5
+ import { isBlank } from "@pol-studios/utils/string";
6
+ import { useSupabase, getSupabaseUrl } from "@pol-studios/db/client";
7
+ async function uploadFileWithMetadata(supabase, bucketName, filePath, file, metadata, onProgress) {
8
+ const {
9
+ data: { session }
10
+ } = await supabase.auth.getSession();
11
+ if (!session) {
12
+ throw new Error("User must be logged in to upload files");
13
+ }
14
+ const contentType = "type" in file ? file.type : metadata.contentType ?? "application/octet-stream";
15
+ const supabaseUrl = getSupabaseUrl();
16
+ return new Promise((resolve, reject) => {
17
+ const upload = new Upload(file, {
18
+ endpoint: supabaseUrl + "/storage/v1/upload/resumable",
19
+ retryDelays: [0, 3e3, 5e3, 1e4, 2e4],
20
+ headers: {
21
+ authorization: "Bearer " + session.access_token,
22
+ "x-upsert": "true"
23
+ },
24
+ uploadDataDuringCreation: true,
25
+ removeFingerprintOnSuccess: true,
26
+ metadata: {
27
+ bucketName,
28
+ objectName: filePath,
29
+ contentType,
30
+ cacheControl: "3600",
31
+ // Custom metadata gets stored in storage.objects.metadata
32
+ ...Object.fromEntries(
33
+ Object.entries(metadata).map(([k, v]) => [k, String(v)])
34
+ )
35
+ },
36
+ chunkSize: 6 * 1024 * 1024,
37
+ // 6MB - required by Supabase
38
+ onError: (error) => {
39
+ console.error("Upload failed:", error);
40
+ reject(error);
41
+ },
42
+ onProgress,
43
+ onSuccess: () => {
44
+ resolve();
45
+ }
46
+ });
47
+ upload.findPreviousUploads().then((previousUploads) => {
48
+ if (previousUploads.length) {
49
+ upload.resumeFromPreviousUpload(previousUploads[0]);
50
+ }
51
+ upload.start();
52
+ });
53
+ });
54
+ }
55
+ function useUpload(options) {
56
+ const { bucketId } = options;
57
+ const supabase = useSupabase();
58
+ return useMutation({
59
+ mutationFn: async (input) => {
60
+ const { file, fileName, directory, metadata = {}, onUploadProgress } = input;
61
+ const originalFileName = fileName ?? ("name" in file ? file["name"] : "") ?? ("fileName" in file ? file["fileName"] : "file");
62
+ if (isBlank(originalFileName)) {
63
+ throw new Error("File must have a valid name");
64
+ }
65
+ const extension = originalFileName.split(".").pop() ?? "";
66
+ const uniqueFileName = newUuid() + "." + extension;
67
+ const path = directory ? directory + "/14/" + uniqueFileName : "14/" + uniqueFileName;
68
+ const contentType = "type" in file ? file.type.split("/").pop()?.toUpperCase() ?? "Unknown" : extension.toUpperCase();
69
+ const uploadMetadata = {
70
+ processingStatus: "None",
71
+ contentType,
72
+ originalFileName,
73
+ ...metadata
74
+ };
75
+ await uploadFileWithMetadata(
76
+ supabase,
77
+ bucketId,
78
+ path,
79
+ file,
80
+ uploadMetadata,
81
+ onUploadProgress
82
+ );
83
+ return {
84
+ path,
85
+ bucketId,
86
+ originalFileName,
87
+ contentType
88
+ };
89
+ }
90
+ });
91
+ }
92
+ function useUploadWithEntity(options) {
93
+ const { pathField, updateEntity, ...uploadOptions } = options;
94
+ const uploadMutation = useUpload(uploadOptions);
95
+ return useMutation({
96
+ mutationFn: async (input) => {
97
+ const { entity, ...uploadInput } = input;
98
+ const result = await uploadMutation.mutateAsync(uploadInput);
99
+ await updateEntity(entity, result.path);
100
+ return result;
101
+ }
102
+ });
103
+ }
104
+
105
+ // src/hooks/useUrl.ts
106
+ import { isUsable } from "@pol-studios/utils/types";
107
+ import { useIndexedDB } from "@pol-studios/hooks/storage";
108
+ import { useSupabase as useSupabase2 } from "@pol-studios/db/client";
109
+ import moment from "moment";
110
+ import { useRef } from "react";
111
+ var cacheVersions = /* @__PURE__ */ new Map();
112
+ var cacheVersionListeners = /* @__PURE__ */ new Map();
113
+ var retryAttempts = /* @__PURE__ */ new Map();
114
+ function useUrl() {
115
+ const db = useIndexedDB({
116
+ dbName: "polstudios",
117
+ storeName: "cached-urls"
118
+ });
119
+ const storedUrls = useRef(/* @__PURE__ */ new Map());
120
+ const supabase = useSupabase2();
121
+ function getCacheVersion(entity) {
122
+ const key = entity.bucketId + entity.path;
123
+ return cacheVersions.get(key) || 0;
124
+ }
125
+ function subscribeToCacheVersion(entity, callback) {
126
+ const key = entity.bucketId + entity.path;
127
+ if (!cacheVersionListeners.has(key)) {
128
+ cacheVersionListeners.set(key, /* @__PURE__ */ new Set());
129
+ }
130
+ cacheVersionListeners.get(key).add(callback);
131
+ return () => {
132
+ cacheVersionListeners.get(key)?.delete(callback);
133
+ };
134
+ }
135
+ async function expireCache(entity) {
136
+ const key = entity.bucketId + entity.path;
137
+ const dbKeys = await db.getAllKeys();
138
+ const keysToDelete = Array.from(
139
+ new Set(
140
+ [...Array.from(storedUrls.current.keys()), ...dbKeys].filter(
141
+ (value) => value.startsWith(key)
142
+ )
143
+ )
144
+ );
145
+ await Promise.all(
146
+ keysToDelete.map(
147
+ async (x) => {
148
+ storedUrls.current.delete(x);
149
+ await db.removeItem(x);
150
+ }
151
+ )
152
+ );
153
+ const currentVersion = cacheVersions.get(key) || 0;
154
+ cacheVersions.set(key, currentVersion + 1);
155
+ const listeners = cacheVersionListeners.get(key);
156
+ if (listeners) {
157
+ listeners.forEach((callback) => {
158
+ callback();
159
+ });
160
+ }
161
+ }
162
+ async function baseFetchUrl(entity, options, isPublic = false) {
163
+ if (isUsable(entity) === false) return;
164
+ const optionsString = JSON.stringify(options);
165
+ if (isUsable(entity.bucketId) === false) {
166
+ return;
167
+ }
168
+ if (isUsable(entity.path) === false) {
169
+ return;
170
+ }
171
+ const key = entity.bucketId + entity.path + (optionsString ?? "") + "-cached-url";
172
+ const inMemoryItem = storedUrls.current.get(key);
173
+ let item = inMemoryItem;
174
+ if (isUsable(inMemoryItem) === false) {
175
+ item = await db.getItem(key);
176
+ }
177
+ if (isUsable(item) && moment(item.expiresOn).isAfter(moment().add(-1 * 60, "seconds"))) {
178
+ return item.url;
179
+ }
180
+ const base = supabase.storage.from(entity.bucketId);
181
+ let download = options?.download;
182
+ if (typeof options?.download === "string") {
183
+ const ext = entity.path.split(".").pop() ?? "";
184
+ download = options?.download.endsWith(ext) ? options?.download : options?.download + "." + ext;
185
+ }
186
+ const newOptions = options ? { ...options, download } : void 0;
187
+ const retryKey = entity.bucketId + "/" + entity.path;
188
+ const currentRetries = retryAttempts.get(retryKey) || 0;
189
+ let url;
190
+ if (isPublic) {
191
+ url = base.getPublicUrl(entity.path, newOptions).data.publicUrl;
192
+ } else {
193
+ try {
194
+ console.log("Creating signed URL for", entity.path);
195
+ const result = await base.createSignedUrl(entity.path, 60 * 100, newOptions);
196
+ url = result.data?.signedUrl;
197
+ if (isUsable(url)) {
198
+ retryAttempts.delete(retryKey);
199
+ } else if (currentRetries < 3) {
200
+ retryAttempts.set(retryKey, currentRetries + 1);
201
+ throw new Error("Failed to get signed URL");
202
+ }
203
+ } catch (error) {
204
+ if (currentRetries < 3) {
205
+ retryAttempts.set(retryKey, currentRetries + 1);
206
+ const delay = Math.min(1e3 * Math.pow(2, currentRetries), 5e3);
207
+ await new Promise((resolve) => setTimeout(resolve, delay));
208
+ return baseFetchUrl(entity, options, isPublic);
209
+ }
210
+ retryAttempts.delete(retryKey);
211
+ return void 0;
212
+ }
213
+ }
214
+ if (isUsable(url) === false) return url;
215
+ const cachedUrl = {
216
+ key,
217
+ url,
218
+ expiresOn: moment().add(60 * 100, "seconds").toISOString(true)
219
+ };
220
+ storedUrls.current.set(key, cachedUrl);
221
+ db.setItem(key, cachedUrl);
222
+ return url;
223
+ }
224
+ async function fetchUrl(entity, options) {
225
+ return baseFetchUrl(entity, options, false);
226
+ }
227
+ async function fetchPublicUrl(entity, options) {
228
+ return baseFetchUrl(entity, options, true);
229
+ }
230
+ async function prefetchImage(entity, options) {
231
+ const url = await fetchUrl(entity, options);
232
+ if (url) {
233
+ new Image().src = url;
234
+ }
235
+ }
236
+ async function fetchUrls(entities, options) {
237
+ const results = /* @__PURE__ */ new Map();
238
+ if (entities.length === 0) return results;
239
+ const optionsString = JSON.stringify(options);
240
+ const expirySeconds = 60 * 100;
241
+ const byBucket = /* @__PURE__ */ new Map();
242
+ for (const entity of entities) {
243
+ if (!isUsable(entity.bucketId) || !isUsable(entity.path)) continue;
244
+ const list = byBucket.get(entity.bucketId) ?? [];
245
+ list.push(entity);
246
+ byBucket.set(entity.bucketId, list);
247
+ }
248
+ for (const [bucketId, bucketEntities] of byBucket) {
249
+ const uncached = [];
250
+ for (const entity of bucketEntities) {
251
+ const key = entity.bucketId + entity.path + (optionsString ?? "") + "-cached-url";
252
+ const entityKey = entity.bucketId + "/" + entity.path;
253
+ let item = storedUrls.current.get(key);
254
+ if (!isUsable(item)) {
255
+ item = await db.getItem(key);
256
+ }
257
+ if (isUsable(item) && moment(item.expiresOn).isAfter(moment().add(-1 * 60, "seconds"))) {
258
+ results.set(entityKey, item.url);
259
+ } else {
260
+ uncached.push(entity);
261
+ }
262
+ }
263
+ if (uncached.length > 0) {
264
+ const paths = uncached.map((e) => e.path);
265
+ const base = supabase.storage.from(bucketId);
266
+ try {
267
+ console.log("Signed URLs created");
268
+ const { data, error } = await base.createSignedUrls(paths, expirySeconds);
269
+ if (!error && data) {
270
+ const expiresOn = moment().add(expirySeconds, "seconds").toISOString(true);
271
+ for (let i = 0; i < uncached.length; i++) {
272
+ const entity = uncached[i];
273
+ const urlData = data[i];
274
+ const entityKey = entity.bucketId + "/" + entity.path;
275
+ const cacheKey = entity.bucketId + entity.path + (optionsString ?? "") + "-cached-url";
276
+ if (urlData?.signedUrl) {
277
+ results.set(entityKey, urlData.signedUrl);
278
+ const cachedUrl = {
279
+ key: cacheKey,
280
+ url: urlData.signedUrl,
281
+ expiresOn
282
+ };
283
+ storedUrls.current.set(cacheKey, cachedUrl);
284
+ db.setItem(cacheKey, cachedUrl);
285
+ } else {
286
+ results.set(entityKey, void 0);
287
+ }
288
+ }
289
+ } else {
290
+ for (const entity of uncached) {
291
+ const entityKey = entity.bucketId + "/" + entity.path;
292
+ const url = await fetchUrl(entity, options);
293
+ results.set(entityKey, url);
294
+ }
295
+ }
296
+ } catch {
297
+ for (const entity of uncached) {
298
+ const entityKey = entity.bucketId + "/" + entity.path;
299
+ const url = await fetchUrl(entity, options);
300
+ results.set(entityKey, url);
301
+ }
302
+ }
303
+ }
304
+ }
305
+ return results;
306
+ }
307
+ return { fetchUrl, fetchUrls, prefetchImage, fetchPublicUrl, expireCache, getCacheVersion, subscribeToCacheVersion };
308
+ }
309
+
310
+ // src/hooks/usePath.ts
311
+ import { useEffect, useState } from "react";
312
+ import { isUsable as isUsable2 } from "@pol-studios/utils/types";
313
+ import { useQuery } from "@pol-studios/db/query";
314
+ import { useSupabase as useSupabase3 } from "@pol-studios/db/client";
315
+ function getContentType(metadata, path) {
316
+ if (metadata?.contentType) {
317
+ return metadata.contentType;
318
+ }
319
+ const ext = path.split(".").pop()?.toUpperCase();
320
+ return ext || "Unknown";
321
+ }
322
+ function usePath(storagePath, bucketId, options = {}) {
323
+ const { fetchMetadata = true, transform, download } = options;
324
+ const [url, setUrl] = useState(null);
325
+ const [error, setError] = useState(null);
326
+ const { fetchUrl } = useUrl();
327
+ const supabase = useSupabase3();
328
+ useEffect(() => {
329
+ if (!isUsable2(storagePath) || !isUsable2(bucketId)) {
330
+ setUrl(null);
331
+ return;
332
+ }
333
+ let cancelled = false;
334
+ fetchUrl({ bucketId, path: storagePath }, { transform, download }).then((signedUrl) => {
335
+ if (!cancelled) {
336
+ setUrl(signedUrl ?? null);
337
+ setError(null);
338
+ }
339
+ }).catch((err) => {
340
+ if (!cancelled) {
341
+ setError(err instanceof Error ? err : new Error(String(err)));
342
+ setUrl(null);
343
+ }
344
+ });
345
+ return () => {
346
+ cancelled = true;
347
+ };
348
+ }, [storagePath, bucketId, JSON.stringify(transform), download]);
349
+ const metadataRequest = useQuery(
350
+ supabase.schema("storage").from("objects").select("metadata").eq("bucket_id", bucketId).eq("name", storagePath ?? "").maybeSingle(),
351
+ {
352
+ enabled: fetchMetadata && isUsable2(storagePath) && isUsable2(bucketId)
353
+ }
354
+ );
355
+ const metadata = metadataRequest.data?.metadata ?? null;
356
+ const contentType = getContentType(metadata, storagePath ?? "");
357
+ const isLoading = isUsable2(storagePath) && url === null && error === null || fetchMetadata && metadataRequest.isFetching;
358
+ return {
359
+ url,
360
+ metadata,
361
+ contentType,
362
+ isLoading,
363
+ error
364
+ };
365
+ }
366
+
367
+ // src/hooks/useDropzoneUpload.ts
368
+ import { useCallback, useEffect as useEffect2, useMemo, useState as useState2 } from "react";
369
+ import {
370
+ useDropzone
371
+ } from "react-dropzone";
372
+ import { useSupabase as useSupabase4 } from "@pol-studios/db/client";
373
+ var useDropzoneUpload = (options) => {
374
+ const {
375
+ bucketName,
376
+ path,
377
+ allowedMimeTypes = [],
378
+ maxFileSize = Number.POSITIVE_INFINITY,
379
+ maxFiles = 1,
380
+ cacheControl = 3600,
381
+ upsert = false
382
+ } = options;
383
+ const [files, setFiles] = useState2([]);
384
+ const [loading, setLoading] = useState2(false);
385
+ const [errors, setErrors] = useState2([]);
386
+ const [successes, setSuccesses] = useState2([]);
387
+ const isSuccess = useMemo(() => {
388
+ if (errors.length === 0 && successes.length === 0) {
389
+ return false;
390
+ }
391
+ if (errors.length === 0 && successes.length === files.length) {
392
+ return true;
393
+ }
394
+ return false;
395
+ }, [errors.length, successes.length, files.length]);
396
+ const onDrop = useCallback(
397
+ (acceptedFiles, fileRejections) => {
398
+ const validFiles = acceptedFiles.filter((file) => !files.find((x) => x.name === file.name)).map((file) => {
399
+ file.preview = URL.createObjectURL(file);
400
+ file.errors = [];
401
+ return file;
402
+ });
403
+ const invalidFiles = fileRejections.map(({ file, errors: errors2 }) => {
404
+ file.preview = URL.createObjectURL(file);
405
+ file.errors = errors2;
406
+ return file;
407
+ });
408
+ const newFiles = [...files, ...validFiles, ...invalidFiles];
409
+ setFiles(newFiles);
410
+ },
411
+ [files, setFiles]
412
+ );
413
+ const dropzoneProps = useDropzone({
414
+ onDrop,
415
+ noClick: true,
416
+ accept: allowedMimeTypes.reduce(
417
+ (acc, type) => ({ ...acc, [type]: [] }),
418
+ {}
419
+ ),
420
+ maxSize: maxFileSize,
421
+ maxFiles,
422
+ multiple: maxFiles !== 1
423
+ });
424
+ const supabase = useSupabase4();
425
+ const onUpload = useCallback(async () => {
426
+ setLoading(true);
427
+ const filesWithErrors = errors.map((x) => x.name);
428
+ const filesToUpload = filesWithErrors.length > 0 ? [
429
+ ...files.filter((f) => filesWithErrors.includes(f.name)),
430
+ ...files.filter((f) => !successes.includes(f.name))
431
+ ] : files;
432
+ const responses = await Promise.all(
433
+ filesToUpload.map(async (file) => {
434
+ const { error } = await supabase.storage.from(bucketName).upload(!!path ? path + "/" + file.name : file.name, file, {
435
+ cacheControl: cacheControl.toString(),
436
+ upsert
437
+ });
438
+ if (error) {
439
+ return { name: file.name, message: error.message };
440
+ } else {
441
+ return { name: file.name, message: void 0 };
442
+ }
443
+ })
444
+ );
445
+ const responseErrors = responses.filter((x) => x.message !== void 0);
446
+ setErrors(responseErrors);
447
+ const responseSuccesses = responses.filter((x) => x.message === void 0);
448
+ const newSuccesses = Array.from(
449
+ /* @__PURE__ */ new Set([...successes, ...responseSuccesses.map((x) => x.name)])
450
+ );
451
+ setSuccesses(newSuccesses);
452
+ setLoading(false);
453
+ }, [files, path, bucketName, errors, successes]);
454
+ useEffect2(() => {
455
+ if (files.length === 0) {
456
+ setErrors([]);
457
+ }
458
+ if (files.length <= maxFiles) {
459
+ let changed = false;
460
+ const newFiles = files.map((file) => {
461
+ if (file.errors.some((e) => e.code === "too-many-files")) {
462
+ file.errors = file.errors.filter((e) => e.code !== "too-many-files");
463
+ changed = true;
464
+ }
465
+ return file;
466
+ });
467
+ if (changed) {
468
+ setFiles(newFiles);
469
+ }
470
+ }
471
+ }, [files.length, setFiles, maxFiles]);
472
+ return {
473
+ files,
474
+ setFiles,
475
+ successes,
476
+ isSuccess,
477
+ loading,
478
+ errors,
479
+ setErrors,
480
+ onUpload,
481
+ maxFileSize,
482
+ maxFiles,
483
+ allowedMimeTypes,
484
+ ...dropzoneProps
485
+ };
486
+ };
487
+ export {
488
+ useDropzoneUpload,
489
+ usePath,
490
+ useUpload,
491
+ useUploadWithEntity,
492
+ useUrl
493
+ };
494
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/hooks/useUpload.ts","../../src/hooks/useUrl.ts","../../src/hooks/usePath.ts","../../src/hooks/useDropzoneUpload.ts"],"sourcesContent":["import { useMutation } from \"@tanstack/react-query\";\nimport { Upload } from \"tus-js-client\";\nimport { SupabaseClient } from \"@supabase/supabase-js\";\nimport { newUuid } from \"@pol-studios/utils/uuid\";\nimport { isBlank } from \"@pol-studios/utils/string\";\nimport { useSupabase, getSupabaseUrl } from \"@pol-studios/db/client\";\nimport type {\n StorageUploadMetadata,\n UploadInput,\n UploadResult,\n UseUploadOptions,\n} from \"../types\";\n\nasync function uploadFileWithMetadata(\n supabase: SupabaseClient,\n bucketName: string,\n filePath: string,\n file: File | Blob,\n metadata: StorageUploadMetadata,\n onProgress?: (bytesUploaded: number, bytesTotal: number) => void\n): Promise<void> {\n const {\n data: { session },\n } = await supabase.auth.getSession();\n\n if (!session) {\n throw new Error(\"User must be logged in to upload files\");\n }\n\n const contentType =\n \"type\" in file ? file.type : metadata.contentType ?? 
\"application/octet-stream\";\n\n const supabaseUrl = getSupabaseUrl();\n\n return new Promise((resolve, reject) => {\n const upload = new Upload(file, {\n endpoint: supabaseUrl + \"/storage/v1/upload/resumable\",\n retryDelays: [0, 3000, 5000, 10000, 20000],\n headers: {\n authorization: \"Bearer \" + session.access_token,\n \"x-upsert\": \"true\",\n },\n uploadDataDuringCreation: true,\n removeFingerprintOnSuccess: true,\n metadata: {\n bucketName: bucketName,\n objectName: filePath,\n contentType: contentType,\n cacheControl: \"3600\",\n // Custom metadata gets stored in storage.objects.metadata\n ...Object.fromEntries(\n Object.entries(metadata).map(([k, v]) => [k, String(v)])\n ),\n },\n chunkSize: 6 * 1024 * 1024, // 6MB - required by Supabase\n onError: (error) => {\n console.error(\"Upload failed:\", error);\n reject(error);\n },\n onProgress: onProgress,\n onSuccess: () => {\n resolve();\n },\n });\n\n upload.findPreviousUploads().then((previousUploads) => {\n if (previousUploads.length) {\n upload.resumeFromPreviousUpload(previousUploads[0]);\n }\n upload.start();\n });\n });\n}\n\n/**\n * Hook for uploading files to Supabase Storage with path-based references.\n *\n * Unlike useDbAttachmentUpload, this hook:\n * - Does NOT create Attachment records\n * - Returns the storage path for the caller to store\n * - Sets metadata directly on storage.objects\n *\n * @example\n * ```tsx\n * import { useUpload } from \"@pol-studios/storage/hooks\";\n * import { BUCKETS } from \"@pol-studios/db\";\n *\n * const upload = useUpload({ bucketId: BUCKETS.AVATARS });\n *\n * const handleUpload = async (file: File) => {\n * const result = await upload.mutateAsync({ file });\n * // Store result.path on the entity\n * await updateProfile({ profilePath: result.path });\n * };\n * ```\n */\nexport default function useUpload(\n options: UseUploadOptions\n): ReturnType<typeof useMutation<UploadResult, Error, UploadInput>> {\n const { bucketId } = options;\n const supabase = 
useSupabase();\n\n return useMutation({\n mutationFn: async (input: UploadInput): Promise<UploadResult> => {\n const { file, fileName, directory, metadata = {}, onUploadProgress } = input;\n\n // Get original filename\n const originalFileName =\n fileName ??\n (\"name\" in file ? file[\"name\"] : \"\") ??\n (\"fileName\" in file ? (file as any)[\"fileName\"] : \"file\");\n\n if (isBlank(originalFileName)) {\n throw new Error(\"File must have a valid name\");\n }\n\n // Generate unique filename with original extension\n const extension = originalFileName.split(\".\").pop() ?? \"\";\n const uniqueFileName = newUuid() + \".\" + extension;\n\n // Build path with optional directory\n const path = directory\n ? directory + \"/14/\" + uniqueFileName\n : \"14/\" + uniqueFileName;\n\n // Derive content type from file\n const contentType =\n \"type\" in file\n ? file.type.split(\"/\").pop()?.toUpperCase() ?? \"Unknown\"\n : extension.toUpperCase();\n\n // Build metadata\n const uploadMetadata: StorageUploadMetadata = {\n processingStatus: \"None\",\n contentType,\n originalFileName,\n ...metadata,\n };\n\n // Upload with metadata\n await uploadFileWithMetadata(\n supabase,\n bucketId,\n path,\n file,\n uploadMetadata,\n onUploadProgress\n );\n\n return {\n path,\n bucketId,\n originalFileName,\n contentType,\n };\n },\n });\n}\n\n/**\n * Convenience hook that combines upload with entity update.\n * Uploads the file and then updates the specified entity with the path.\n */\nexport function useUploadWithEntity<T extends Record<string, unknown>>(\n options: UseUploadOptions & {\n /** Field name on the entity to store the path */\n pathField: string;\n /** Function to update the entity after upload */\n updateEntity: (entity: T, path: string) => Promise<void>;\n }\n) {\n const { pathField, updateEntity, ...uploadOptions } = options;\n const uploadMutation = useUpload(uploadOptions);\n\n return useMutation({\n mutationFn: async (input: UploadInput & { entity: T }): 
Promise<UploadResult> => {\n const { entity, ...uploadInput } = input;\n const result = await uploadMutation.mutateAsync(uploadInput);\n await updateEntity(entity, result.path);\n return result;\n },\n });\n}\n","import { isUsable } from \"@pol-studios/utils/types\";\nimport { useIndexedDB } from \"@pol-studios/hooks/storage\";\nimport { useSupabase } from \"@pol-studios/db/client\";\nimport moment from \"moment\";\nimport { TransformOptions } from \"@supabase/storage-js\";\nimport { useRef } from \"react\";\nimport type { Attachment, CachedUrl } from \"../types\";\n\n// Global cache version map to track when attachments are expired\nconst cacheVersions = new Map<string, number>();\n// Listeners for cache version changes\nconst cacheVersionListeners = new Map<string, Set<() => void>>();\n// Track retry attempts per entity\nconst retryAttempts = new Map<string, number>();\n\nexport default function useUrl() {\n const db = useIndexedDB({\n dbName: \"polstudios\",\n storeName: \"cached-urls\",\n });\n const storedUrls = useRef(new Map<string, CachedUrl>());\n\n const supabase = useSupabase();\n\n function getCacheVersion(entity: { bucketId: string; path: string }): number {\n const key = entity.bucketId + entity.path;\n return cacheVersions.get(key) || 0;\n }\n\n function subscribeToCacheVersion(\n entity: { bucketId: string; path: string },\n callback: () => void\n ): () => void {\n const key = entity.bucketId + entity.path;\n if (!cacheVersionListeners.has(key)) {\n cacheVersionListeners.set(key, new Set());\n }\n cacheVersionListeners.get(key)!.add(callback);\n \n return () => {\n cacheVersionListeners.get(key)?.delete(callback);\n };\n }\n\n async function expireCache(\n entity: { bucketId: string; path: string },\n ) {\n const key = entity.bucketId + entity.path;\n\n const dbKeys = await db.getAllKeys();\n const keysToDelete = Array.from(\n new Set(\n [...Array.from(storedUrls.current.keys()), ...dbKeys].filter((value) =>\n value.startsWith(key)\n ),\n ),\n );\n 
await Promise.all(\n keysToDelete.map(\n async (x) => {\n storedUrls.current.delete(x);\n await db.removeItem(x);\n },\n ),\n );\n \n // Increment cache version to force image refetch\n const currentVersion = cacheVersions.get(key) || 0;\n cacheVersions.set(key, currentVersion + 1);\n \n // Notify all listeners that cache version changed\n const listeners = cacheVersionListeners.get(key);\n if (listeners) {\n listeners.forEach(callback => {\n callback();\n });\n }\n }\n async function baseFetchUrl(\n entity: Attachment,\n options?: {\n download?: string | boolean;\n transform?: TransformOptions;\n },\n isPublic: boolean = false,\n ) {\n if (isUsable(entity) === false) return;\n const optionsString = JSON.stringify(options);\n if (isUsable(entity.bucketId) === false) {\n return;\n }\n\n if (isUsable(entity.path) === false) {\n return;\n }\n const key = entity.bucketId + entity.path + (optionsString ?? \"\") + \"-cached-url\";\n\n const inMemoryItem = storedUrls.current.get(key);\n\n let item = inMemoryItem;\n if (isUsable(inMemoryItem) === false) {\n item = await db.getItem(key);\n }\n\n if (\n isUsable(item) &&\n moment(item.expiresOn).isAfter(moment().add(-1 * 60, \"seconds\"))\n ) {\n return item.url;\n }\n\n const base = supabase.storage.from(entity.bucketId);\n let download = options?.download;\n\n if (typeof options?.download === \"string\") {\n const ext = entity.path.split(\".\").pop() ?? \"\";\n download = options?.download.endsWith(ext)\n ? options?.download\n : options?.download + \".\" + ext;\n }\n\n const newOptions = options ? 
{ ...options, download: download } : undefined;\n\n // Track retries per entity\n const retryKey = entity.bucketId + \"/\" + entity.path;\n const currentRetries = retryAttempts.get(retryKey) || 0;\n\n let url: string | undefined;\n\n if (isPublic) {\n url = base.getPublicUrl(entity.path, newOptions).data.publicUrl;\n } else {\n // Try to get signed URL with retry logic\n try {\n console.log(\"Creating signed URL for\", entity.path);\n const result = await base.createSignedUrl(entity.path, 60 * 100, newOptions);\n url = result.data?.signedUrl;\n\n if (isUsable(url)) {\n // Success - reset retry count\n retryAttempts.delete(retryKey);\n } else if (currentRetries < 3) {\n // Failed but can retry\n retryAttempts.set(retryKey, currentRetries + 1);\n throw new Error(\"Failed to get signed URL\");\n }\n } catch (error) {\n if (currentRetries < 3) {\n // Retry with exponential backoff\n retryAttempts.set(retryKey, currentRetries + 1);\n const delay = Math.min(1000 * Math.pow(2, currentRetries), 5000);\n await new Promise(resolve => setTimeout(resolve, delay));\n return baseFetchUrl(entity, options, isPublic);\n }\n // Max retries reached\n retryAttempts.delete(retryKey);\n return undefined;\n }\n }\n\n if (isUsable(url) === false) return url;\n\n const cachedUrl = {\n key: key,\n url: url,\n expiresOn: moment()\n .add(60 * 100, \"seconds\")\n .toISOString(true),\n };\n\n storedUrls.current.set(key, cachedUrl);\n\n db.setItem(key, cachedUrl);\n return url;\n }\n async function fetchUrl(\n entity: { bucketId: string; path: string },\n options?: {\n download?: string | boolean;\n transform?: TransformOptions;\n },\n ) {\n return baseFetchUrl(entity, options, false);\n }\n\n async function fetchPublicUrl(\n entity: { bucketId: string; path: string },\n options?: {\n download?: string | boolean;\n transform?: TransformOptions;\n },\n ) {\n return baseFetchUrl(entity, options, true);\n }\n\n async function prefetchImage(\n entity: Attachment,\n options?: {\n download?: string | 
boolean;\n transform?: TransformOptions;\n },\n ) {\n const url = await fetchUrl(entity, options);\n if (url) {\n new Image().src = url;\n }\n }\n\n /**\n * Batch fetch signed URLs for multiple attachments in a single API call.\n * Falls back to individual fetches if attachments span multiple buckets.\n * Results are cached the same way as individual fetchUrl calls.\n */\n async function fetchUrls(\n entities: Attachment[],\n options?: {\n download?: string | boolean;\n transform?: TransformOptions;\n },\n ): Promise<Map<string, string | undefined>> {\n const results = new Map<string, string | undefined>();\n \n if (entities.length === 0) return results;\n\n const optionsString = JSON.stringify(options);\n const expirySeconds = 60 * 100;\n \n // Group entities by bucket\n const byBucket = new Map<string, Attachment[]>();\n for (const entity of entities) {\n if (!isUsable(entity.bucketId) || !isUsable(entity.path)) continue;\n const list = byBucket.get(entity.bucketId) ?? [];\n list.push(entity);\n byBucket.set(entity.bucketId, list);\n }\n\n // For each bucket, check cache first, then batch fetch uncached\n for (const [bucketId, bucketEntities] of byBucket) {\n const uncached: Attachment[] = [];\n \n // Check cache for each entity\n for (const entity of bucketEntities) {\n const key = entity.bucketId + entity.path + (optionsString ?? 
\"\") + \"-cached-url\";\n const entityKey = entity.bucketId + \"/\" + entity.path;\n \n // Check in-memory cache first\n let item = storedUrls.current.get(key);\n \n // Then check IndexDB\n if (!isUsable(item)) {\n item = await db.getItem(key);\n }\n \n // If cached and not expired, use it\n if (\n isUsable(item) &&\n moment(item.expiresOn).isAfter(moment().add(-1 * 60, \"seconds\"))\n ) {\n results.set(entityKey, item.url);\n } else {\n uncached.push(entity);\n }\n }\n\n // Batch fetch uncached URLs\n if (uncached.length > 0) {\n const paths = uncached.map(e => e.path);\n const base = supabase.storage.from(bucketId);\n \n try {\n console.log(\"Signed URLs created\");\n const { data, error } = await base.createSignedUrls(paths, expirySeconds);\n if (!error && data) {\n const expiresOn = moment().add(expirySeconds, \"seconds\").toISOString(true);\n \n for (let i = 0; i < uncached.length; i++) {\n const entity = uncached[i];\n const urlData = data[i];\n const entityKey = entity.bucketId + \"/\" + entity.path;\n const cacheKey = entity.bucketId + entity.path + (optionsString ?? 
\"\") + \"-cached-url\";\n \n if (urlData?.signedUrl) {\n results.set(entityKey, urlData.signedUrl);\n \n // Cache the URL\n const cachedUrl = {\n key: cacheKey,\n url: urlData.signedUrl,\n expiresOn,\n };\n storedUrls.current.set(cacheKey, cachedUrl);\n db.setItem(cacheKey, cachedUrl);\n } else {\n results.set(entityKey, undefined);\n }\n }\n } else {\n // Fall back to individual fetches on error\n for (const entity of uncached) {\n const entityKey = entity.bucketId + \"/\" + entity.path;\n const url = await fetchUrl(entity, options);\n results.set(entityKey, url);\n }\n }\n } catch {\n // Fall back to individual fetches on error\n for (const entity of uncached) {\n const entityKey = entity.bucketId + \"/\" + entity.path;\n const url = await fetchUrl(entity, options);\n results.set(entityKey, url);\n }\n }\n }\n }\n\n return results;\n }\n\n return { fetchUrl, fetchUrls, prefetchImage, fetchPublicUrl, expireCache, getCacheVersion, subscribeToCacheVersion };\n}\n","import { useEffect, useState } from \"react\";\nimport { isUsable } from \"@pol-studios/utils/types\";\nimport { useQuery } from \"@pol-studios/db/query\";\nimport { useSupabase } from \"@pol-studios/db/client\";\nimport useUrl from \"./useUrl\";\nimport type { StorageObjectMetadata, UsePathOptions, UsePathResult } from \"../types\";\n\n/**\n * Get content type from metadata or infer from file path extension\n */\nfunction getContentType(\n metadata: StorageObjectMetadata | null | undefined,\n path: string\n): string {\n if (metadata?.contentType) {\n return metadata.contentType;\n }\n const ext = path.split(\".\").pop()?.toUpperCase();\n return ext || \"Unknown\";\n}\n\n/**\n * Hook to get a signed URL and metadata for a storage path.\n * Replaces useAttachment for the path-based storage approach.\n *\n * @param storagePath - The path within the bucket (e.g., \"14/abc-123.jpg\")\n * @param bucketId - The storage bucket ID\n * @param options - Optional configuration\n *\n * @example\n * ```tsx\n * const 
{ url, contentType, isLoading } = usePath(\n * profile.profilePath,\n * 'attachments'\n * );\n *\n * if (isLoading) return <Spinner />;\n * return <img src={url} />;\n * ```\n */\nexport default function usePath(\n storagePath: string | undefined | null,\n bucketId: string,\n options: UsePathOptions = {}\n): UsePathResult {\n const { fetchMetadata = true, transform, download } = options;\n\n const [url, setUrl] = useState<string | null>(null);\n const [error, setError] = useState<Error | null>(null);\n const { fetchUrl } = useUrl();\n const supabase = useSupabase();\n\n // Fetch signed URL\n useEffect(() => {\n if (!isUsable(storagePath) || !isUsable(bucketId)) {\n setUrl(null);\n return;\n }\n\n let cancelled = false;\n\n fetchUrl({ bucketId, path: storagePath }, { transform, download })\n .then((signedUrl) => {\n if (!cancelled) {\n setUrl(signedUrl ?? null);\n setError(null);\n }\n })\n .catch((err) => {\n if (!cancelled) {\n setError(err instanceof Error ? err : new Error(String(err)));\n setUrl(null);\n }\n });\n\n return () => {\n cancelled = true;\n };\n }, [storagePath, bucketId, JSON.stringify(transform), download]);\n\n // Fetch metadata from storage.objects if requested\n const metadataRequest = useQuery(\n (supabase\n .schema(\"storage\" as any) as any)\n .from(\"objects\")\n .select(\"metadata\")\n .eq(\"bucket_id\", bucketId)\n .eq(\"name\", storagePath ?? \"\")\n .maybeSingle(),\n {\n enabled: fetchMetadata && isUsable(storagePath) && isUsable(bucketId),\n }\n );\n\n const metadata = ((metadataRequest.data as any)?.metadata as StorageObjectMetadata) ?? null;\n const contentType = getContentType(metadata, storagePath ?? 
\"\");\n const isLoading =\n (isUsable(storagePath) && url === null && error === null) ||\n (fetchMetadata && metadataRequest.isFetching);\n\n return {\n url,\n metadata,\n contentType,\n isLoading,\n error,\n };\n}\n","import { useCallback, useEffect, useMemo, useState } from \"react\";\nimport {\n type FileError,\n type FileRejection,\n useDropzone,\n} from \"react-dropzone\";\nimport { useSupabase } from \"@pol-studios/db/client\";\n\ninterface FileWithPreview extends File {\n preview?: string;\n errors: readonly FileError[];\n}\n\nexport type UseSupabaseUploadOptions = {\n /**\n * Name of bucket to upload files to in your Supabase project\n */\n bucketName: string;\n /**\n * Folder to upload files to in the specified bucket within your Supabase project.\n *\n * Defaults to uploading files to the root of the bucket\n *\n * e.g If specified path is `test`, your file will be uploaded as `test/file_name`\n */\n path?: string;\n /**\n * Allowed MIME types for each file upload (e.g `image/png`, `text/html`, etc). Wildcards are also supported (e.g `image/*`).\n *\n * Defaults to allowing uploading of all MIME types.\n */\n allowedMimeTypes?: string[];\n /**\n * Maximum upload size of each file allowed in bytes. (e.g 1000 bytes = 1 KB)\n */\n maxFileSize?: number;\n /**\n * Maximum number of files allowed per upload.\n */\n maxFiles?: number;\n /**\n * The number of seconds the asset is cached in the browser and in the Supabase CDN.\n *\n * This is set in the Cache-Control: max-age=<seconds> header. Defaults to 3600 seconds.\n */\n cacheControl?: number;\n /**\n * When set to true, the file is overwritten if it exists.\n *\n * When set to false, an error is thrown if the object already exists. 
Defaults to `false`\n */\n upsert?: boolean;\n};\n\nexport type UseSupabaseUploadReturn = ReturnType<typeof useDropzoneUpload>;\n\nexport const useDropzoneUpload = (options: UseSupabaseUploadOptions) => {\n const {\n bucketName,\n path,\n allowedMimeTypes = [],\n maxFileSize = Number.POSITIVE_INFINITY,\n maxFiles = 1,\n cacheControl = 3600,\n upsert = false,\n } = options;\n\n const [files, setFiles] = useState<FileWithPreview[]>([]);\n const [loading, setLoading] = useState<boolean>(false);\n const [errors, setErrors] = useState<{ name: string; message: string }[]>([]);\n const [successes, setSuccesses] = useState<string[]>([]);\n\n const isSuccess = useMemo(() => {\n if (errors.length === 0 && successes.length === 0) {\n return false;\n }\n if (errors.length === 0 && successes.length === files.length) {\n return true;\n }\n return false;\n }, [errors.length, successes.length, files.length]);\n\n const onDrop = useCallback(\n (acceptedFiles: File[], fileRejections: FileRejection[]) => {\n const validFiles = acceptedFiles\n .filter((file) => !files.find((x) => x.name === file.name))\n .map((file) => {\n (file as FileWithPreview).preview = URL.createObjectURL(file);\n (file as FileWithPreview).errors = [];\n return file as FileWithPreview;\n });\n\n const invalidFiles = fileRejections.map(({ file, errors }) => {\n (file as FileWithPreview).preview = URL.createObjectURL(file);\n (file as FileWithPreview).errors = errors;\n return file as FileWithPreview;\n });\n\n const newFiles = [...files, ...validFiles, ...invalidFiles];\n\n setFiles(newFiles);\n },\n [files, setFiles]\n );\n\n const dropzoneProps = useDropzone({\n onDrop,\n noClick: true,\n accept: allowedMimeTypes.reduce(\n (acc, type) => ({ ...acc, [type]: [] }),\n {}\n ),\n maxSize: maxFileSize,\n maxFiles: maxFiles,\n multiple: maxFiles !== 1,\n });\n\n const supabase = useSupabase();\n\n const onUpload = useCallback(async () => {\n setLoading(true);\n\n // [Joshen] This is to support handling partial 
successes\n // If any files didn't upload for any reason, hitting \"Upload\" again will only upload the files that had errors\n const filesWithErrors = errors.map((x) => x.name);\n const filesToUpload =\n filesWithErrors.length > 0\n ? [\n ...files.filter((f) => filesWithErrors.includes(f.name)),\n ...files.filter((f) => !successes.includes(f.name)),\n ]\n : files;\n\n const responses = await Promise.all(\n filesToUpload.map(async (file) => {\n const { error } = await supabase.storage\n .from(bucketName)\n .upload(!!path ? path + \"/\" + file.name : file.name, file, {\n cacheControl: cacheControl.toString(),\n upsert,\n });\n if (error) {\n return { name: file.name, message: error.message };\n } else {\n return { name: file.name, message: undefined };\n }\n })\n );\n\n const responseErrors = responses.filter((x) => x.message !== undefined);\n // if there were errors previously, this function tried to upload the files again so we should clear/overwrite the existing errors.\n setErrors(responseErrors);\n\n const responseSuccesses = responses.filter((x) => x.message === undefined);\n const newSuccesses = Array.from(\n new Set([...successes, ...responseSuccesses.map((x) => x.name)])\n );\n setSuccesses(newSuccesses);\n\n setLoading(false);\n }, [files, path, bucketName, errors, successes]);\n\n useEffect(() => {\n if (files.length === 0) {\n setErrors([]);\n }\n\n // If the number of files doesn't exceed the maxFiles parameter, remove the error 'Too many files' from each file\n if (files.length <= maxFiles) {\n let changed = false;\n const newFiles = files.map((file) => {\n if (file.errors.some((e) => e.code === \"too-many-files\")) {\n file.errors = file.errors.filter((e) => e.code !== \"too-many-files\");\n changed = true;\n }\n return file;\n });\n if (changed) {\n setFiles(newFiles);\n }\n }\n }, [files.length, setFiles, maxFiles]);\n\n return {\n files,\n setFiles,\n successes,\n isSuccess,\n loading,\n errors,\n setErrors,\n onUpload,\n maxFileSize: 
maxFileSize,\n maxFiles: maxFiles,\n allowedMimeTypes,\n ...dropzoneProps,\n };\n};\n"],"mappings":";AAAA,SAAS,mBAAmB;AAC5B,SAAS,cAAc;AAEvB,SAAS,eAAe;AACxB,SAAS,eAAe;AACxB,SAAS,aAAa,sBAAsB;AAQ5C,eAAe,uBACb,UACA,YACA,UACA,MACA,UACA,YACe;AACf,QAAM;AAAA,IACJ,MAAM,EAAE,QAAQ;AAAA,EAClB,IAAI,MAAM,SAAS,KAAK,WAAW;AAEnC,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,wCAAwC;AAAA,EAC1D;AAEA,QAAM,cACJ,UAAU,OAAO,KAAK,OAAO,SAAS,eAAe;AAEvD,QAAM,cAAc,eAAe;AAEnC,SAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,UAAM,SAAS,IAAI,OAAO,MAAM;AAAA,MAC9B,UAAU,cAAc;AAAA,MACxB,aAAa,CAAC,GAAG,KAAM,KAAM,KAAO,GAAK;AAAA,MACzC,SAAS;AAAA,QACP,eAAe,YAAY,QAAQ;AAAA,QACnC,YAAY;AAAA,MACd;AAAA,MACA,0BAA0B;AAAA,MAC1B,4BAA4B;AAAA,MAC5B,UAAU;AAAA,QACR;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,QACA,cAAc;AAAA;AAAA,QAEd,GAAG,OAAO;AAAA,UACR,OAAO,QAAQ,QAAQ,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC;AAAA,QACzD;AAAA,MACF;AAAA,MACA,WAAW,IAAI,OAAO;AAAA;AAAA,MACtB,SAAS,CAAC,UAAU;AAClB,gBAAQ,MAAM,kBAAkB,KAAK;AACrC,eAAO,KAAK;AAAA,MACd;AAAA,MACA;AAAA,MACA,WAAW,MAAM;AACf,gBAAQ;AAAA,MACV;AAAA,IACF,CAAC;AAED,WAAO,oBAAoB,EAAE,KAAK,CAAC,oBAAoB;AACrD,UAAI,gBAAgB,QAAQ;AAC1B,eAAO,yBAAyB,gBAAgB,CAAC,CAAC;AAAA,MACpD;AACA,aAAO,MAAM;AAAA,IACf,CAAC;AAAA,EACH,CAAC;AACH;AAwBe,SAAR,UACL,SACkE;AAClE,QAAM,EAAE,SAAS,IAAI;AACrB,QAAM,WAAW,YAAY;AAE7B,SAAO,YAAY;AAAA,IACjB,YAAY,OAAO,UAA8C;AAC/D,YAAM,EAAE,MAAM,UAAU,WAAW,WAAW,CAAC,GAAG,iBAAiB,IAAI;AAGvE,YAAM,mBACJ,aACC,UAAU,OAAO,KAAK,MAAM,IAAI,QAChC,cAAc,OAAQ,KAAa,UAAU,IAAI;AAEpD,UAAI,QAAQ,gBAAgB,GAAG;AAC7B,cAAM,IAAI,MAAM,6BAA6B;AAAA,MAC/C;AAGA,YAAM,YAAY,iBAAiB,MAAM,GAAG,EAAE,IAAI,KAAK;AACvD,YAAM,iBAAiB,QAAQ,IAAI,MAAM;AAGzC,YAAM,OAAO,YACT,YAAY,SAAS,iBACrB,QAAQ;AAGZ,YAAM,cACJ,UAAU,OACN,KAAK,KAAK,MAAM,GAAG,EAAE,IAAI,GAAG,YAAY,KAAK,YAC7C,UAAU,YAAY;AAG5B,YAAM,iBAAwC;AAAA,QAC5C,kBAAkB;AAAA,QAClB;AAAA,QACA;AAAA,QACA,GAAG;AAAA,MACL;AAGA,YAAM;AAAA,QACJ;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAEA,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAMO,SAAS,oBACd,SAM
A;AACA,QAAM,EAAE,WAAW,cAAc,GAAG,cAAc,IAAI;AACtD,QAAM,iBAAiB,UAAU,aAAa;AAE9C,SAAO,YAAY;AAAA,IACjB,YAAY,OAAO,UAA8D;AAC/E,YAAM,EAAE,QAAQ,GAAG,YAAY,IAAI;AACnC,YAAM,SAAS,MAAM,eAAe,YAAY,WAAW;AAC3D,YAAM,aAAa,QAAQ,OAAO,IAAI;AACtC,aAAO;AAAA,IACT;AAAA,EACF,CAAC;AACH;;;ACtLA,SAAS,gBAAgB;AACzB,SAAS,oBAAoB;AAC7B,SAAS,eAAAA,oBAAmB;AAC5B,OAAO,YAAY;AAEnB,SAAS,cAAc;AAIvB,IAAM,gBAAgB,oBAAI,IAAoB;AAE9C,IAAM,wBAAwB,oBAAI,IAA6B;AAE/D,IAAM,gBAAgB,oBAAI,IAAoB;AAE/B,SAAR,SAA0B;AAC/B,QAAM,KAAK,aAAa;AAAA,IACtB,QAAQ;AAAA,IACR,WAAW;AAAA,EACb,CAAC;AACD,QAAM,aAAa,OAAO,oBAAI,IAAuB,CAAC;AAEtD,QAAM,WAAWA,aAAY;AAE7B,WAAS,gBAAgB,QAAoD;AAC3E,UAAM,MAAM,OAAO,WAAW,OAAO;AACrC,WAAO,cAAc,IAAI,GAAG,KAAK;AAAA,EACnC;AAEA,WAAS,wBACP,QACA,UACY;AACZ,UAAM,MAAM,OAAO,WAAW,OAAO;AACrC,QAAI,CAAC,sBAAsB,IAAI,GAAG,GAAG;AACnC,4BAAsB,IAAI,KAAK,oBAAI,IAAI,CAAC;AAAA,IAC1C;AACA,0BAAsB,IAAI,GAAG,EAAG,IAAI,QAAQ;AAE5C,WAAO,MAAM;AACX,4BAAsB,IAAI,GAAG,GAAG,OAAO,QAAQ;AAAA,IACjD;AAAA,EACF;AAEA,iBAAe,YACb,QACA;AACA,UAAM,MAAM,OAAO,WAAW,OAAO;AAErC,UAAM,SAAS,MAAM,GAAG,WAAW;AACnC,UAAM,eAAe,MAAM;AAAA,MACzB,IAAI;AAAA,QACF,CAAC,GAAG,MAAM,KAAK,WAAW,QAAQ,KAAK,CAAC,GAAG,GAAG,MAAM,EAAE;AAAA,UAAO,CAAC,UAC5D,MAAM,WAAW,GAAG;AAAA,QACtB;AAAA,MACF;AAAA,IACF;AACA,UAAM,QAAQ;AAAA,MACZ,aAAa;AAAA,QACX,OAAO,MAAM;AACX,qBAAW,QAAQ,OAAO,CAAC;AAC3B,gBAAM,GAAG,WAAW,CAAC;AAAA,QACvB;AAAA,MACF;AAAA,IACF;AAGA,UAAM,iBAAiB,cAAc,IAAI,GAAG,KAAK;AACjD,kBAAc,IAAI,KAAK,iBAAiB,CAAC;AAGzC,UAAM,YAAY,sBAAsB,IAAI,GAAG;AAC/C,QAAI,WAAW;AACb,gBAAU,QAAQ,cAAY;AAC5B,iBAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AACA,iBAAe,aACb,QACA,SAIA,WAAoB,OACpB;AACA,QAAI,SAAS,MAAM,MAAM,MAAO;AAChC,UAAM,gBAAgB,KAAK,UAAU,OAAO;AAC5C,QAAI,SAAS,OAAO,QAAQ,MAAM,OAAO;AACvC;AAAA,IACF;AAEA,QAAI,SAAS,OAAO,IAAI,MAAM,OAAO;AACnC;AAAA,IACF;AACA,UAAM,MAAM,OAAO,WAAW,OAAO,QAAQ,iBAAiB,MAAM;AAEpE,UAAM,eAAe,WAAW,QAAQ,IAAI,GAAG;AAE/C,QAAI,OAAO;AACX,QAAI,SAAS,YAAY,MAAM,OAAO;AACpC,aAAO,MAAM,GAAG,QAAQ,GAAG;AAAA,IAC7B;AAEA,QACE,SAAS,IAAI,KACb,OAAO,KAAK,SAAS,EAAE,QAAQ,OAAO,EAAE,IAAI,KAAK,IAAI,SAAS,CAAC,GAC/D;AACA,aAAO,KAAK;AAAA,
IACd;AAEA,UAAM,OAAO,SAAS,QAAQ,KAAK,OAAO,QAAQ;AAClD,QAAI,WAAW,SAAS;AAExB,QAAI,OAAO,SAAS,aAAa,UAAU;AACzC,YAAM,MAAM,OAAO,KAAK,MAAM,GAAG,EAAE,IAAI,KAAK;AAC5C,iBAAW,SAAS,SAAS,SAAS,GAAG,IACrC,SAAS,WACT,SAAS,WAAW,MAAM;AAAA,IAChC;AAEA,UAAM,aAAa,UAAU,EAAE,GAAG,SAAS,SAAmB,IAAI;AAGlE,UAAM,WAAW,OAAO,WAAW,MAAM,OAAO;AAChD,UAAM,iBAAiB,cAAc,IAAI,QAAQ,KAAK;AAEtD,QAAI;AAEJ,QAAI,UAAU;AACZ,YAAM,KAAK,aAAa,OAAO,MAAM,UAAU,EAAE,KAAK;AAAA,IACxD,OAAO;AAEL,UAAI;AACF,gBAAQ,IAAI,2BAA2B,OAAO,IAAI;AAClD,cAAM,SAAS,MAAM,KAAK,gBAAgB,OAAO,MAAM,KAAK,KAAK,UAAU;AAC3E,cAAM,OAAO,MAAM;AAEnB,YAAI,SAAS,GAAG,GAAG;AAEjB,wBAAc,OAAO,QAAQ;AAAA,QAC/B,WAAW,iBAAiB,GAAG;AAE7B,wBAAc,IAAI,UAAU,iBAAiB,CAAC;AAC9C,gBAAM,IAAI,MAAM,0BAA0B;AAAA,QAC5C;AAAA,MACF,SAAS,OAAO;AACd,YAAI,iBAAiB,GAAG;AAEtB,wBAAc,IAAI,UAAU,iBAAiB,CAAC;AAC9C,gBAAM,QAAQ,KAAK,IAAI,MAAO,KAAK,IAAI,GAAG,cAAc,GAAG,GAAI;AAC/D,gBAAM,IAAI,QAAQ,aAAW,WAAW,SAAS,KAAK,CAAC;AACvD,iBAAO,aAAa,QAAQ,SAAS,QAAQ;AAAA,QAC/C;AAEA,sBAAc,OAAO,QAAQ;AAC7B,eAAO;AAAA,MACT;AAAA,IACF;AAEA,QAAI,SAAS,GAAG,MAAM,MAAO,QAAO;AAEpC,UAAM,YAAY;AAAA,MAChB;AAAA,MACA;AAAA,MACA,WAAW,OAAO,EACf,IAAI,KAAK,KAAK,SAAS,EACvB,YAAY,IAAI;AAAA,IACrB;AAEA,eAAW,QAAQ,IAAI,KAAK,SAAS;AAErC,OAAG,QAAQ,KAAK,SAAS;AACzB,WAAO;AAAA,EACT;AACA,iBAAe,SACb,QACA,SAIA;AACA,WAAO,aAAa,QAAQ,SAAS,KAAK;AAAA,EAC5C;AAEA,iBAAe,eACb,QACA,SAIA;AACA,WAAO,aAAa,QAAQ,SAAS,IAAI;AAAA,EAC3C;AAEA,iBAAe,cACb,QACA,SAIA;AACA,UAAM,MAAM,MAAM,SAAS,QAAQ,OAAO;AAC1C,QAAI,KAAK;AACP,UAAI,MAAM,EAAE,MAAM;AAAA,IACpB;AAAA,EACF;AAOA,iBAAe,UACb,UACA,SAI0C;AAC1C,UAAM,UAAU,oBAAI,IAAgC;AAEpD,QAAI,SAAS,WAAW,EAAG,QAAO;AAElC,UAAM,gBAAgB,KAAK,UAAU,OAAO;AAC5C,UAAM,gBAAgB,KAAK;AAG3B,UAAM,WAAW,oBAAI,IAA0B;AAC/C,eAAW,UAAU,UAAU;AAC7B,UAAI,CAAC,SAAS,OAAO,QAAQ,KAAK,CAAC,SAAS,OAAO,IAAI,EAAG;AAC1D,YAAM,OAAO,SAAS,IAAI,OAAO,QAAQ,KAAK,CAAC;AAC/C,WAAK,KAAK,MAAM;AAChB,eAAS,IAAI,OAAO,UAAU,IAAI;AAAA,IACpC;AAGA,eAAW,CAAC,UAAU,cAAc,KAAK,UAAU;AACjD,YAAM,WAAyB,CAAC;AAGhC,iBAAW,UAAU,gBAAgB;AACnC,cAAM,MAAM,OAAO,WAAW,OAAO,QAAQ,iBAAiB,MAAM;AACpE,cAAM,YAAY,OAAO,WAAW,MAAM,OAAO;AAGjD,YAAI,OAAO
,WAAW,QAAQ,IAAI,GAAG;AAGrC,YAAI,CAAC,SAAS,IAAI,GAAG;AACnB,iBAAO,MAAM,GAAG,QAAQ,GAAG;AAAA,QAC7B;AAGA,YACE,SAAS,IAAI,KACb,OAAO,KAAK,SAAS,EAAE,QAAQ,OAAO,EAAE,IAAI,KAAK,IAAI,SAAS,CAAC,GAC/D;AACA,kBAAQ,IAAI,WAAW,KAAK,GAAG;AAAA,QACjC,OAAO;AACL,mBAAS,KAAK,MAAM;AAAA,QACtB;AAAA,MACF;AAGA,UAAI,SAAS,SAAS,GAAG;AACvB,cAAM,QAAQ,SAAS,IAAI,OAAK,EAAE,IAAI;AACtC,cAAM,OAAO,SAAS,QAAQ,KAAK,QAAQ;AAE3C,YAAI;AACF,kBAAQ,IAAI,qBAAqB;AACjC,gBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,KAAK,iBAAiB,OAAO,aAAa;AACxE,cAAI,CAAC,SAAS,MAAM;AAClB,kBAAM,YAAY,OAAO,EAAE,IAAI,eAAe,SAAS,EAAE,YAAY,IAAI;AAEzE,qBAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AACxC,oBAAM,SAAS,SAAS,CAAC;AACzB,oBAAM,UAAU,KAAK,CAAC;AACtB,oBAAM,YAAY,OAAO,WAAW,MAAM,OAAO;AACjD,oBAAM,WAAW,OAAO,WAAW,OAAO,QAAQ,iBAAiB,MAAM;AAEzE,kBAAI,SAAS,WAAW;AACtB,wBAAQ,IAAI,WAAW,QAAQ,SAAS;AAGxC,sBAAM,YAAY;AAAA,kBAChB,KAAK;AAAA,kBACL,KAAK,QAAQ;AAAA,kBACb;AAAA,gBACF;AACA,2BAAW,QAAQ,IAAI,UAAU,SAAS;AAC1C,mBAAG,QAAQ,UAAU,SAAS;AAAA,cAChC,OAAO;AACL,wBAAQ,IAAI,WAAW,MAAS;AAAA,cAClC;AAAA,YACF;AAAA,UACF,OAAO;AAEL,uBAAW,UAAU,UAAU;AAC7B,oBAAM,YAAY,OAAO,WAAW,MAAM,OAAO;AACjD,oBAAM,MAAM,MAAM,SAAS,QAAQ,OAAO;AAC1C,sBAAQ,IAAI,WAAW,GAAG;AAAA,YAC5B;AAAA,UACF;AAAA,QACF,QAAQ;AAEN,qBAAW,UAAU,UAAU;AAC7B,kBAAM,YAAY,OAAO,WAAW,MAAM,OAAO;AACjD,kBAAM,MAAM,MAAM,SAAS,QAAQ,OAAO;AAC1C,oBAAQ,IAAI,WAAW,GAAG;AAAA,UAC5B;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAEA,SAAO,EAAE,UAAU,WAAW,eAAe,gBAAgB,aAAa,iBAAiB,wBAAwB;AACrH;;;AC/TA,SAAS,WAAW,gBAAgB;AACpC,SAAS,YAAAC,iBAAgB;AACzB,SAAS,gBAAgB;AACzB,SAAS,eAAAC,oBAAmB;AAO5B,SAAS,eACP,UACA,MACQ;AACR,MAAI,UAAU,aAAa;AACzB,WAAO,SAAS;AAAA,EAClB;AACA,QAAM,MAAM,KAAK,MAAM,GAAG,EAAE,IAAI,GAAG,YAAY;AAC/C,SAAO,OAAO;AAChB;AAqBe,SAAR,QACL,aACA,UACA,UAA0B,CAAC,GACZ;AACf,QAAM,EAAE,gBAAgB,MAAM,WAAW,SAAS,IAAI;AAEtD,QAAM,CAAC,KAAK,MAAM,IAAI,SAAwB,IAAI;AAClD,QAAM,CAAC,OAAO,QAAQ,IAAI,SAAuB,IAAI;AACrD,QAAM,EAAE,SAAS,IAAI,OAAO;AAC5B,QAAM,WAAWC,aAAY;AAG7B,YAAU,MAAM;AACd,QAAI,CAACC,UAAS,WAAW,KAAK,CAACA,UAAS,QAAQ,GAAG;AACjD,aAAO,IAAI;AACX;AAAA,IACF;AAEA,QAAI,YAAY;AAEhB,aAAS,EAAE,UAAU,MAA
M,YAAY,GAAG,EAAE,WAAW,SAAS,CAAC,EAC9D,KAAK,CAAC,cAAc;AACnB,UAAI,CAAC,WAAW;AACd,eAAO,aAAa,IAAI;AACxB,iBAAS,IAAI;AAAA,MACf;AAAA,IACF,CAAC,EACA,MAAM,CAAC,QAAQ;AACd,UAAI,CAAC,WAAW;AACd,iBAAS,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AAC5D,eAAO,IAAI;AAAA,MACb;AAAA,IACF,CAAC;AAEH,WAAO,MAAM;AACX,kBAAY;AAAA,IACd;AAAA,EACF,GAAG,CAAC,aAAa,UAAU,KAAK,UAAU,SAAS,GAAG,QAAQ,CAAC;AAG/D,QAAM,kBAAkB;AAAA,IACrB,SACE,OAAO,SAAgB,EACvB,KAAK,SAAS,EACd,OAAO,UAAU,EACjB,GAAG,aAAa,QAAQ,EACxB,GAAG,QAAQ,eAAe,EAAE,EAC5B,YAAY;AAAA,IACf;AAAA,MACE,SAAS,iBAAiBA,UAAS,WAAW,KAAKA,UAAS,QAAQ;AAAA,IACtE;AAAA,EACF;AAEA,QAAM,WAAa,gBAAgB,MAAc,YAAsC;AACvF,QAAM,cAAc,eAAe,UAAU,eAAe,EAAE;AAC9D,QAAM,YACHA,UAAS,WAAW,KAAK,QAAQ,QAAQ,UAAU,QACnD,iBAAiB,gBAAgB;AAEpC,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;AC3GA,SAAS,aAAa,aAAAC,YAAW,SAAS,YAAAC,iBAAgB;AAC1D;AAAA,EAGE;AAAA,OACK;AACP,SAAS,eAAAC,oBAAmB;AAkDrB,IAAM,oBAAoB,CAAC,YAAsC;AACtE,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,mBAAmB,CAAC;AAAA,IACpB,cAAc,OAAO;AAAA,IACrB,WAAW;AAAA,IACX,eAAe;AAAA,IACf,SAAS;AAAA,EACX,IAAI;AAEJ,QAAM,CAAC,OAAO,QAAQ,IAAID,UAA4B,CAAC,CAAC;AACxD,QAAM,CAAC,SAAS,UAAU,IAAIA,UAAkB,KAAK;AACrD,QAAM,CAAC,QAAQ,SAAS,IAAIA,UAA8C,CAAC,CAAC;AAC5E,QAAM,CAAC,WAAW,YAAY,IAAIA,UAAmB,CAAC,CAAC;AAEvD,QAAM,YAAY,QAAQ,MAAM;AAC9B,QAAI,OAAO,WAAW,KAAK,UAAU,WAAW,GAAG;AACjD,aAAO;AAAA,IACT;AACA,QAAI,OAAO,WAAW,KAAK,UAAU,WAAW,MAAM,QAAQ;AAC5D,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT,GAAG,CAAC,OAAO,QAAQ,UAAU,QAAQ,MAAM,MAAM,CAAC;AAElD,QAAM,SAAS;AAAA,IACb,CAAC,eAAuB,mBAAoC;AAC1D,YAAM,aAAa,cAChB,OAAO,CAAC,SAAS,CAAC,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,KAAK,IAAI,CAAC,EACzD,IAAI,CAAC,SAAS;AACb,QAAC,KAAyB,UAAU,IAAI,gBAAgB,IAAI;AAC5D,QAAC,KAAyB,SAAS,CAAC;AACpC,eAAO;AAAA,MACT,CAAC;AAEH,YAAM,eAAe,eAAe,IAAI,CAAC,EAAE,MAAM,QAAAE,QAAO,MAAM;AAC5D,QAAC,KAAyB,UAAU,IAAI,gBAAgB,IAAI;AAC5D,QAAC,KAAyB,SAASA;AACnC,eAAO;AAAA,MACT,CAAC;AAED,YAAM,WAAW,CAAC,GAAG,OAAO,GAAG,YAAY,GAAG,YAAY;AAE1D,eAAS,QAAQ;AAAA,IACnB;AAAA,IACA,CAAC,OAAO,QAAQ;AAAA,EAClB;AAEA,QAAM,gBAAgB,YAAY;AAAA,IAChC;AAAA,I
ACA,SAAS;AAAA,IACT,QAAQ,iBAAiB;AAAA,MACvB,CAAC,KAAK,UAAU,EAAE,GAAG,KAAK,CAAC,IAAI,GAAG,CAAC,EAAE;AAAA,MACrC,CAAC;AAAA,IACH;AAAA,IACA,SAAS;AAAA,IACT;AAAA,IACA,UAAU,aAAa;AAAA,EACzB,CAAC;AAED,QAAM,WAAWD,aAAY;AAE7B,QAAM,WAAW,YAAY,YAAY;AACvC,eAAW,IAAI;AAIf,UAAM,kBAAkB,OAAO,IAAI,CAAC,MAAM,EAAE,IAAI;AAChD,UAAM,gBACJ,gBAAgB,SAAS,IACrB;AAAA,MACE,GAAG,MAAM,OAAO,CAAC,MAAM,gBAAgB,SAAS,EAAE,IAAI,CAAC;AAAA,MACvD,GAAG,MAAM,OAAO,CAAC,MAAM,CAAC,UAAU,SAAS,EAAE,IAAI,CAAC;AAAA,IACpD,IACA;AAEN,UAAM,YAAY,MAAM,QAAQ;AAAA,MAC9B,cAAc,IAAI,OAAO,SAAS;AAChC,cAAM,EAAE,MAAM,IAAI,MAAM,SAAS,QAC9B,KAAK,UAAU,EACf,OAAO,CAAC,CAAC,OAAO,OAAO,MAAM,KAAK,OAAO,KAAK,MAAM,MAAM;AAAA,UACzD,cAAc,aAAa,SAAS;AAAA,UACpC;AAAA,QACF,CAAC;AACH,YAAI,OAAO;AACT,iBAAO,EAAE,MAAM,KAAK,MAAM,SAAS,MAAM,QAAQ;AAAA,QACnD,OAAO;AACL,iBAAO,EAAE,MAAM,KAAK,MAAM,SAAS,OAAU;AAAA,QAC/C;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,iBAAiB,UAAU,OAAO,CAAC,MAAM,EAAE,YAAY,MAAS;AAEtE,cAAU,cAAc;AAExB,UAAM,oBAAoB,UAAU,OAAO,CAAC,MAAM,EAAE,YAAY,MAAS;AACzE,UAAM,eAAe,MAAM;AAAA,MACzB,oBAAI,IAAI,CAAC,GAAG,WAAW,GAAG,kBAAkB,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;AAAA,IACjE;AACA,iBAAa,YAAY;AAEzB,eAAW,KAAK;AAAA,EAClB,GAAG,CAAC,OAAO,MAAM,YAAY,QAAQ,SAAS,CAAC;AAE/C,EAAAF,WAAU,MAAM;AACd,QAAI,MAAM,WAAW,GAAG;AACtB,gBAAU,CAAC,CAAC;AAAA,IACd;AAGA,QAAI,MAAM,UAAU,UAAU;AAC5B,UAAI,UAAU;AACd,YAAM,WAAW,MAAM,IAAI,CAAC,SAAS;AACnC,YAAI,KAAK,OAAO,KAAK,CAAC,MAAM,EAAE,SAAS,gBAAgB,GAAG;AACxD,eAAK,SAAS,KAAK,OAAO,OAAO,CAAC,MAAM,EAAE,SAAS,gBAAgB;AACnE,oBAAU;AAAA,QACZ;AACA,eAAO;AAAA,MACT,CAAC;AACD,UAAI,SAAS;AACX,iBAAS,QAAQ;AAAA,MACnB;AAAA,IACF;AAAA,EACF,GAAG,CAAC,MAAM,QAAQ,UAAU,QAAQ,CAAC;AAErC,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,GAAG;AAAA,EACL;AACF;","names":["useSupabase","isUsable","useSupabase","useSupabase","isUsable","useEffect","useState","useSupabase","errors"]}
package/dist/index.js ADDED
@@ -0,0 +1,512 @@
1
+ // src/hooks/useUpload.ts
2
+ import { useMutation } from "@tanstack/react-query";
3
+ import { Upload } from "tus-js-client";
4
+ import { newUuid } from "@pol-studios/utils/uuid";
5
+ import { isBlank } from "@pol-studios/utils/string";
6
+ import { useSupabase, getSupabaseUrl } from "@pol-studios/db/client";
7
/**
 * Uploads one file to Supabase Storage through the resumable (tus) endpoint,
 * attaching custom metadata that Supabase stores on storage.objects.metadata.
 *
 * Requires an authenticated session (the access token is sent as a Bearer
 * header). Resolves when the upload completes; rejects on tus error.
 *
 * @param supabase   Supabase client (used only for auth.getSession()).
 * @param bucketName Target storage bucket id.
 * @param filePath   Object name (path) inside the bucket.
 * @param file       File or Blob to upload.
 * @param metadata   Custom metadata; every value is stringified.
 * @param onProgress Optional tus progress callback (bytesUploaded, bytesTotal).
 */
async function uploadFileWithMetadata(supabase, bucketName, filePath, file, metadata, onProgress) {
  const sessionResult = await supabase.auth.getSession();
  const session = sessionResult.data.session;
  if (!session) {
    throw new Error("User must be logged in to upload files");
  }

  // File objects carry their own MIME type; for plain Blobs fall back to the
  // caller-provided metadata.contentType, then a generic octet-stream.
  let contentType;
  if ("type" in file) {
    contentType = file.type;
  } else {
    contentType = metadata.contentType ?? "application/octet-stream";
  }

  const endpoint = getSupabaseUrl() + "/storage/v1/upload/resumable";

  // Custom metadata gets stored in storage.objects.metadata; tus metadata
  // values must be strings, so coerce everything.
  const customMetadata = {};
  for (const [key, value] of Object.entries(metadata)) {
    customMetadata[key] = String(value);
  }

  return new Promise((resolve, reject) => {
    const tusUpload = new Upload(file, {
      endpoint,
      retryDelays: [0, 3000, 5000, 10000, 20000],
      headers: {
        authorization: "Bearer " + session.access_token,
        "x-upsert": "true"
      },
      uploadDataDuringCreation: true,
      removeFingerprintOnSuccess: true,
      metadata: {
        bucketName,
        objectName: filePath,
        contentType,
        cacheControl: "3600",
        ...customMetadata
      },
      // 6MB chunk size is required by Supabase's resumable upload endpoint.
      chunkSize: 6 * 1024 * 1024,
      onError: (error) => {
        console.error("Upload failed:", error);
        reject(error);
      },
      onProgress,
      onSuccess: () => {
        resolve();
      }
    });

    // If tus finds a fingerprint for an interrupted upload of this file,
    // resume it instead of starting over.
    tusUpload.findPreviousUploads().then((previousUploads) => {
      if (previousUploads.length) {
        tusUpload.resumeFromPreviousUpload(previousUploads[0]);
      }
      tusUpload.start();
    });
  });
}
55
/**
 * Hook for uploading files to Supabase Storage with path-based references.
 *
 * Does NOT create attachment records; it uploads via the resumable endpoint
 * and returns the generated storage path for the caller to persist.
 *
 * @param options.bucketId Target bucket id.
 * @returns react-query mutation whose result is { path, bucketId, originalFileName, contentType }.
 */
function useUpload(options) {
  const { bucketId } = options;
  const supabase = useSupabase();
  return useMutation({
    mutationFn: async (input) => {
      const { file, fileName, directory, metadata = {}, onUploadProgress } = input;
      // Resolve a display name: explicit fileName, then a `name` property
      // (File), then a `fileName` property, finally the literal "file".
      // Fix: the previous middle operand returned "" when `name` was absent;
      // "" is not nullish, so the later fallbacks were unreachable and a
      // nameless Blob always threw.
      const originalFileName = fileName ?? ("name" in file ? file["name"] : void 0) ?? ("fileName" in file ? file["fileName"] : "file");
      if (isBlank(originalFileName)) {
        throw new Error("File must have a valid name");
      }
      // Keep the original extension so viewers/downloads behave correctly;
      // the stored object itself gets a collision-free uuid name.
      const extension = originalFileName.split(".").pop() ?? "";
      const uniqueFileName = newUuid() + "." + extension;
      // NOTE(review): "14" is a hard-coded path segment inherited from the
      // original implementation (tenant/org id?) -- confirm before changing;
      // storage RLS policies may depend on this prefix.
      const path = directory ? directory + "/14/" + uniqueFileName : "14/" + uniqueFileName;
      // Display content type: MIME subtype upper-cased (e.g. "PNG"), or the
      // extension when the input is a typeless Blob.
      const contentType = "type" in file ? file.type.split("/").pop()?.toUpperCase() ?? "Unknown" : extension.toUpperCase();
      const uploadMetadata = {
        processingStatus: "None",
        contentType,
        originalFileName,
        ...metadata
      };
      await uploadFileWithMetadata(
        supabase,
        bucketId,
        path,
        file,
        uploadMetadata,
        onUploadProgress
      );
      return {
        path,
        bucketId,
        originalFileName,
        contentType
      };
    }
  });
}
92
/**
 * Composes useUpload with a caller-supplied entity updater: after a
 * successful upload, `updateEntity(entity, path)` persists the storage path
 * on the entity. Resolves with the plain upload result.
 */
function useUploadWithEntity(options) {
  // `pathField` is accepted (and stripped) but not used here; only the
  // remaining options are forwarded to useUpload.
  const { pathField, updateEntity, ...uploadOptions } = options;
  const uploadMutation = useUpload(uploadOptions);

  const mutationFn = async (input) => {
    const { entity, ...uploadInput } = input;
    const uploadResult = await uploadMutation.mutateAsync(uploadInput);
    await updateEntity(entity, uploadResult.path);
    return uploadResult;
  };

  return useMutation({ mutationFn });
}
104
+
105
+ // src/hooks/useUrl.ts
106
+ import { isUsable } from "@pol-studios/utils/types";
107
+ import { useIndexedDB } from "@pol-studios/hooks/storage";
108
+ import { useSupabase as useSupabase2 } from "@pol-studios/db/client";
109
+ import moment from "moment";
110
+ import { useRef } from "react";
111
// Module-level state shared by every useUrl() caller in the app:
// cacheVersions: per-object version counter, bumped by expireCache so
// subscribers know to re-fetch.
var cacheVersions = /* @__PURE__ */ new Map();
// cacheVersionListeners: object key -> Set of callbacks invoked on bump.
var cacheVersionListeners = /* @__PURE__ */ new Map();
// retryAttempts: object key -> failed signed-URL attempts (capped at 3).
var retryAttempts = /* @__PURE__ */ new Map();
114
/**
 * Hook providing signed/public URL resolution for storage objects, with a
 * two-layer cache (in-memory Map via useRef + IndexedDB), retry with
 * exponential backoff for signed-URL creation, explicit cache expiry, and a
 * version/listener mechanism so consumers can react to expiry.
 */
function useUrl() {
  const db = useIndexedDB({
    dbName: "polstudios",
    storeName: "cached-urls"
  });
  // First-level cache: key -> { key, url, expiresOn } for this hook instance.
  const storedUrls = useRef(/* @__PURE__ */ new Map());
  const supabase = useSupabase2();
  // Current cache version for one object (0 until first expiry).
  function getCacheVersion(entity) {
    const key = entity.bucketId + entity.path;
    return cacheVersions.get(key) || 0;
  }
  // Register a callback fired whenever this object's cache is expired.
  // Returns an unsubscribe function.
  function subscribeToCacheVersion(entity, callback) {
    const key = entity.bucketId + entity.path;
    if (!cacheVersionListeners.has(key)) {
      cacheVersionListeners.set(key, /* @__PURE__ */ new Set());
    }
    cacheVersionListeners.get(key).add(callback);
    return () => {
      cacheVersionListeners.get(key)?.delete(callback);
    };
  }
  // Drop every cached URL (memory + IndexedDB) whose key starts with
  // bucketId+path -- this matches all option variants, since cache keys are
  // bucketId + path + optionsString + "-cached-url". Then bump the version
  // and notify subscribers.
  async function expireCache(entity) {
    const key = entity.bucketId + entity.path;
    const dbKeys = await db.getAllKeys();
    const keysToDelete = Array.from(
      new Set(
        [...Array.from(storedUrls.current.keys()), ...dbKeys].filter(
          (value) => value.startsWith(key)
        )
      )
    );
    await Promise.all(
      keysToDelete.map(
        async (x) => {
          storedUrls.current.delete(x);
          await db.removeItem(x);
        }
      )
    );
    const currentVersion = cacheVersions.get(key) || 0;
    cacheVersions.set(key, currentVersion + 1);
    const listeners = cacheVersionListeners.get(key);
    if (listeners) {
      listeners.forEach((callback) => {
        callback();
      });
    }
  }
  // Core resolver: returns a cached URL when still valid (with a 60-second
  // safety margin before expiry), otherwise creates a public or signed URL.
  // Signed-URL failures retry up to 3 times with exponential backoff
  // (1s, 2s, 4s, capped at 5s) via recursion; returns undefined on give-up
  // or unusable input.
  async function baseFetchUrl(entity, options, isPublic = false) {
    if (isUsable(entity) === false) return;
    const optionsString = JSON.stringify(options);
    if (isUsable(entity.bucketId) === false) {
      return;
    }
    if (isUsable(entity.path) === false) {
      return;
    }
    const key = entity.bucketId + entity.path + (optionsString ?? "") + "-cached-url";
    const inMemoryItem = storedUrls.current.get(key);
    let item = inMemoryItem;
    if (isUsable(inMemoryItem) === false) {
      item = await db.getItem(key);
    }
    // Treat an entry as fresh only if it expires more than 60s from now.
    if (isUsable(item) && moment(item.expiresOn).isAfter(moment().add(-1 * 60, "seconds"))) {
      return item.url;
    }
    const base = supabase.storage.from(entity.bucketId);
    // When `download` is a string filename, make sure it carries the object's
    // extension so the browser saves it with the right suffix.
    let download = options?.download;
    if (typeof options?.download === "string") {
      const ext = entity.path.split(".").pop() ?? "";
      download = options?.download.endsWith(ext) ? options?.download : options?.download + "." + ext;
    }
    const newOptions = options ? { ...options, download } : void 0;
    const retryKey = entity.bucketId + "/" + entity.path;
    const currentRetries = retryAttempts.get(retryKey) || 0;
    let url;
    if (isPublic) {
      url = base.getPublicUrl(entity.path, newOptions).data.publicUrl;
    } else {
      try {
        // NOTE(review): leftover debug logging in the published bundle.
        console.log("Creating signed URL for", entity.path);
        // URLs are signed for 6000 seconds (60 * 100).
        const result = await base.createSignedUrl(entity.path, 60 * 100, newOptions);
        url = result.data?.signedUrl;
        if (isUsable(url)) {
          retryAttempts.delete(retryKey);
        } else if (currentRetries < 3) {
          // No URL but no thrown error either: count it as a failure and
          // funnel into the catch block's retry path.
          retryAttempts.set(retryKey, currentRetries + 1);
          throw new Error("Failed to get signed URL");
        }
      } catch (error) {
        if (currentRetries < 3) {
          retryAttempts.set(retryKey, currentRetries + 1);
          const delay = Math.min(1e3 * Math.pow(2, currentRetries), 5e3);
          await new Promise((resolve) => setTimeout(resolve, delay));
          return baseFetchUrl(entity, options, isPublic);
        }
        retryAttempts.delete(retryKey);
        return void 0;
      }
    }
    if (isUsable(url) === false) return url;
    const cachedUrl = {
      key,
      url,
      expiresOn: moment().add(60 * 100, "seconds").toISOString(true)
    };
    storedUrls.current.set(key, cachedUrl);
    // IndexedDB write is fire-and-forget; the in-memory cache already holds it.
    db.setItem(key, cachedUrl);
    return url;
  }
  // Signed (authenticated) URL for a private object.
  async function fetchUrl(entity, options) {
    return baseFetchUrl(entity, options, false);
  }
  // Public URL for an object in a public bucket (no signing round-trip).
  async function fetchPublicUrl(entity, options) {
    return baseFetchUrl(entity, options, true);
  }
  // Warm the browser image cache by assigning the resolved URL to an Image.
  async function prefetchImage(entity, options) {
    const url = await fetchUrl(entity, options);
    if (url) {
      new Image().src = url;
    }
  }
  // Batch resolver: groups entities by bucket, serves fresh cache hits, and
  // signs the remaining paths in one createSignedUrls call per bucket.
  // Falls back to per-entity fetchUrl (which retries) when the batch call
  // errors. Result map is keyed by bucketId + "/" + path.
  async function fetchUrls(entities, options) {
    const results = /* @__PURE__ */ new Map();
    if (entities.length === 0) return results;
    const optionsString = JSON.stringify(options);
    const expirySeconds = 60 * 100;
    const byBucket = /* @__PURE__ */ new Map();
    for (const entity of entities) {
      if (!isUsable(entity.bucketId) || !isUsable(entity.path)) continue;
      const list = byBucket.get(entity.bucketId) ?? [];
      list.push(entity);
      byBucket.set(entity.bucketId, list);
    }
    for (const [bucketId, bucketEntities] of byBucket) {
      const uncached = [];
      for (const entity of bucketEntities) {
        // Same cache-key scheme as baseFetchUrl so both paths share entries.
        const key = entity.bucketId + entity.path + (optionsString ?? "") + "-cached-url";
        const entityKey = entity.bucketId + "/" + entity.path;
        let item = storedUrls.current.get(key);
        if (!isUsable(item)) {
          item = await db.getItem(key);
        }
        if (isUsable(item) && moment(item.expiresOn).isAfter(moment().add(-1 * 60, "seconds"))) {
          results.set(entityKey, item.url);
        } else {
          uncached.push(entity);
        }
      }
      if (uncached.length > 0) {
        const paths = uncached.map((e) => e.path);
        const base = supabase.storage.from(bucketId);
        try {
          // NOTE(review): leftover debug logging (and it logs before the call).
          console.log("Signed URLs created");
          const { data, error } = await base.createSignedUrls(paths, expirySeconds);
          if (!error && data) {
            const expiresOn = moment().add(expirySeconds, "seconds").toISOString(true);
            // data[i] corresponds to paths[i] / uncached[i] positionally.
            for (let i = 0; i < uncached.length; i++) {
              const entity = uncached[i];
              const urlData = data[i];
              const entityKey = entity.bucketId + "/" + entity.path;
              const cacheKey = entity.bucketId + entity.path + (optionsString ?? "") + "-cached-url";
              if (urlData?.signedUrl) {
                results.set(entityKey, urlData.signedUrl);
                const cachedUrl = {
                  key: cacheKey,
                  url: urlData.signedUrl,
                  expiresOn
                };
                storedUrls.current.set(cacheKey, cachedUrl);
                db.setItem(cacheKey, cachedUrl);
              } else {
                results.set(entityKey, void 0);
              }
            }
          } else {
            // Batch signing failed as a whole: resolve each one individually.
            for (const entity of uncached) {
              const entityKey = entity.bucketId + "/" + entity.path;
              const url = await fetchUrl(entity, options);
              results.set(entityKey, url);
            }
          }
        } catch {
          for (const entity of uncached) {
            const entityKey = entity.bucketId + "/" + entity.path;
            const url = await fetchUrl(entity, options);
            results.set(entityKey, url);
          }
        }
      }
    }
    return results;
  }
  return { fetchUrl, fetchUrls, prefetchImage, fetchPublicUrl, expireCache, getCacheVersion, subscribeToCacheVersion };
}
309
+
310
+ // src/hooks/usePath.ts
311
+ import { useEffect, useState } from "react";
312
+ import { isUsable as isUsable2 } from "@pol-studios/utils/types";
313
+ import { useQuery } from "@pol-studios/db/query";
314
+ import { useSupabase as useSupabase3 } from "@pol-studios/db/client";
315
/**
 * Derives a display content type for a storage object: the explicit
 * `contentType` from object metadata when present, otherwise the upper-cased
 * file extension, or "Unknown" when the path yields no usable extension.
 */
function getContentType(metadata, path) {
  const stored = metadata?.contentType;
  if (stored) {
    return stored;
  }
  const extension = path.split(".").pop()?.toUpperCase();
  return extension || "Unknown";
}
322
/**
 * Resolves a signed URL (and optionally storage.objects metadata) for a
 * storage path. Returns { url, metadata, contentType, isLoading, error }.
 *
 * Fix: previously isLoading was derived from `url === null && error === null`,
 * so it stayed true forever when the URL request settled with no URL
 * (fetchUrl resolving undefined) or when bucketId was unusable. A settled
 * flag now tracks request completion explicitly.
 */
function usePath(storagePath, bucketId, options = {}) {
  const { fetchMetadata = true, transform, download } = options;
  const [url, setUrl] = useState(null);
  const [error, setError] = useState(null);
  // True once the current signed-URL request has completed (success or failure).
  const [settled, setSettled] = useState(false);
  const { fetchUrl } = useUrl();
  const supabase = useSupabase3();
  useEffect(() => {
    if (!isUsable2(storagePath) || !isUsable2(bucketId)) {
      setUrl(null);
      return;
    }
    let cancelled = false;
    setSettled(false);
    fetchUrl({ bucketId, path: storagePath }, { transform, download }).then((signedUrl) => {
      if (!cancelled) {
        setUrl(signedUrl ?? null);
        setError(null);
        setSettled(true);
      }
    }).catch((err) => {
      if (!cancelled) {
        setError(err instanceof Error ? err : new Error(String(err)));
        setUrl(null);
        setSettled(true);
      }
    });
    // Ignore late resolutions after the inputs change or the component unmounts.
    return () => {
      cancelled = true;
    };
    // transform is an object option, so it is serialized for dependency comparison.
  }, [storagePath, bucketId, JSON.stringify(transform), download]);
  const metadataRequest = useQuery(
    supabase.schema("storage").from("objects").select("metadata").eq("bucket_id", bucketId).eq("name", storagePath ?? "").maybeSingle(),
    {
      enabled: fetchMetadata && isUsable2(storagePath) && isUsable2(bucketId)
    }
  );
  const metadata = metadataRequest.data?.metadata ?? null;
  const contentType = getContentType(metadata, storagePath ?? "");
  // Loading while the URL request is in flight, or while metadata is fetching.
  const isLoading = isUsable2(storagePath) && isUsable2(bucketId) && !settled && error === null || fetchMetadata && metadataRequest.isFetching;
  return {
    url,
    metadata,
    contentType,
    isLoading,
    error
  };
}
366
+
367
+ // src/hooks/useDropzoneUpload.ts
368
+ import { useCallback, useEffect as useEffect2, useMemo, useState as useState2 } from "react";
369
+ import {
370
+ useDropzone
371
+ } from "react-dropzone";
372
+ import { useSupabase as useSupabase4 } from "@pol-studios/db/client";
373
/**
 * Dropzone-backed multi-file upload hook for a Supabase Storage bucket.
 * Tracks per-file preview/errors, overall loading state, per-name successes
 * and upload errors, and exposes the react-dropzone props alongside them.
 *
 * Fixes in onUpload: previously, when any errors existed, errored files
 * matched BOTH retry filters and were uploaded twice in the same batch; and
 * with no errors, already-succeeded files were re-uploaded (which fails when
 * upsert is false). Now only not-yet-successful files are uploaded, which
 * covers both new files and retries exactly once each.
 */
var useDropzoneUpload = (options) => {
  const {
    bucketName,
    path,
    allowedMimeTypes = [],
    maxFileSize = Number.POSITIVE_INFINITY,
    maxFiles = 1,
    cacheControl = 3600,
    upsert = false
  } = options;
  const [files, setFiles] = useState2([]);
  const [loading, setLoading] = useState2(false);
  const [errors, setErrors] = useState2([]);
  const [successes, setSuccesses] = useState2([]);
  // Success only when at least one upload happened, none errored, and every
  // selected file succeeded.
  const isSuccess = useMemo(() => {
    if (errors.length === 0 && successes.length === 0) {
      return false;
    }
    if (errors.length === 0 && successes.length === files.length) {
      return true;
    }
    return false;
  }, [errors.length, successes.length, files.length]);
  const onDrop = useCallback(
    (acceptedFiles, fileRejections) => {
      // De-dupe by name against already-selected files; attach a preview URL
      // and an (initially empty) errors array to each accepted file.
      const validFiles = acceptedFiles.filter((file) => !files.find((x) => x.name === file.name)).map((file) => {
        file.preview = URL.createObjectURL(file);
        file.errors = [];
        return file;
      });
      // Rejected files are kept too, carrying their rejection errors so the
      // UI can display them.
      const invalidFiles = fileRejections.map(({ file, errors: rejectionErrors }) => {
        file.preview = URL.createObjectURL(file);
        file.errors = rejectionErrors;
        return file;
      });
      setFiles([...files, ...validFiles, ...invalidFiles]);
    },
    [files, setFiles]
  );
  const dropzoneProps = useDropzone({
    onDrop,
    noClick: true,
    accept: allowedMimeTypes.reduce(
      (acc, type) => ({ ...acc, [type]: [] }),
      {}
    ),
    maxSize: maxFileSize,
    maxFiles,
    multiple: maxFiles !== 1
  });
  const supabase = useSupabase4();
  const onUpload = useCallback(async () => {
    setLoading(true);
    // Upload every file that has not already succeeded: first-time files and
    // previously-errored retries alike, each exactly once.
    const filesToUpload = files.filter((f) => !successes.includes(f.name));
    const responses = await Promise.all(
      filesToUpload.map(async (file) => {
        const { error } = await supabase.storage.from(bucketName).upload(!!path ? path + "/" + file.name : file.name, file, {
          cacheControl: cacheControl.toString(),
          upsert
        });
        if (error) {
          return { name: file.name, message: error.message };
        } else {
          return { name: file.name, message: void 0 };
        }
      })
    );
    const responseErrors = responses.filter((x) => x.message !== void 0);
    setErrors(responseErrors);
    const responseSuccesses = responses.filter((x) => x.message === void 0);
    const newSuccesses = Array.from(
      /* @__PURE__ */ new Set([...successes, ...responseSuccesses.map((x) => x.name)])
    );
    setSuccesses(newSuccesses);
    setLoading(false);
  }, [files, path, bucketName, successes, cacheControl, upsert, supabase]);
  useEffect2(() => {
    if (files.length === 0) {
      setErrors([]);
    }
    // Once the selection is back within limits, clear stale "too-many-files"
    // rejection errors left on individual files.
    if (files.length <= maxFiles) {
      let changed = false;
      const newFiles = files.map((file) => {
        if (file.errors.some((e) => e.code === "too-many-files")) {
          file.errors = file.errors.filter((e) => e.code !== "too-many-files");
          changed = true;
        }
        return file;
      });
      if (changed) {
        setFiles(newFiles);
      }
    }
  }, [files.length, setFiles, maxFiles]);
  return {
    files,
    setFiles,
    successes,
    isSuccess,
    loading,
    errors,
    setErrors,
    onUpload,
    maxFileSize,
    maxFiles,
    allowedMimeTypes,
    ...dropzoneProps
  };
};
487
+
488
// src/config/buckets.ts
/**
 * Storage bucket names for Supabase storage.
 * Use these constants when calling storage APIs.
 */
var BUCKETS = {
  AVATARS: "avatars",
  FIXTURE_CATALOG_COVER: "fixture-catalog-cover",
  FIXTURE_ATTACHMENTS: "fixture-attachments",
  UNIT_DOCUMENTATION: "unit-documentation-attachments",
  DATASHEETS: "datasheets",
  PATTERNS: "patterns",
  LOGOS: "logos",
  RECEIPTS: "receipts",
  // NOTE(review): singular "ticket-attachment" while the key is plural --
  // presumably matches the bucket as provisioned; confirm before "fixing",
  // since renaming would break existing objects and policies.
  TICKET_ATTACHMENTS: "ticket-attachment",
  PROCESS_RESULTS: "process-results",
  DATA_EXCHANGE_IMPORTS: "data-exchange-imports",
  EMAIL_TEMPLATES: "email-templates",
  EMAIL_TEMPLATES_ASSETS: "email-templates-assets"
};
// Public surface of the bundle: bucket constants plus the storage hooks.
export {
  BUCKETS,
  useDropzoneUpload,
  usePath,
  useUpload,
  useUploadWithEntity,
  useUrl
};
512
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/hooks/useUpload.ts","../src/hooks/useUrl.ts","../src/hooks/usePath.ts","../src/hooks/useDropzoneUpload.ts","../src/config/buckets.ts"],"sourcesContent":["import { useMutation } from \"@tanstack/react-query\";\nimport { Upload } from \"tus-js-client\";\nimport { SupabaseClient } from \"@supabase/supabase-js\";\nimport { newUuid } from \"@pol-studios/utils/uuid\";\nimport { isBlank } from \"@pol-studios/utils/string\";\nimport { useSupabase, getSupabaseUrl } from \"@pol-studios/db/client\";\nimport type {\n StorageUploadMetadata,\n UploadInput,\n UploadResult,\n UseUploadOptions,\n} from \"../types\";\n\nasync function uploadFileWithMetadata(\n supabase: SupabaseClient,\n bucketName: string,\n filePath: string,\n file: File | Blob,\n metadata: StorageUploadMetadata,\n onProgress?: (bytesUploaded: number, bytesTotal: number) => void\n): Promise<void> {\n const {\n data: { session },\n } = await supabase.auth.getSession();\n\n if (!session) {\n throw new Error(\"User must be logged in to upload files\");\n }\n\n const contentType =\n \"type\" in file ? file.type : metadata.contentType ?? 
\"application/octet-stream\";\n\n const supabaseUrl = getSupabaseUrl();\n\n return new Promise((resolve, reject) => {\n const upload = new Upload(file, {\n endpoint: supabaseUrl + \"/storage/v1/upload/resumable\",\n retryDelays: [0, 3000, 5000, 10000, 20000],\n headers: {\n authorization: \"Bearer \" + session.access_token,\n \"x-upsert\": \"true\",\n },\n uploadDataDuringCreation: true,\n removeFingerprintOnSuccess: true,\n metadata: {\n bucketName: bucketName,\n objectName: filePath,\n contentType: contentType,\n cacheControl: \"3600\",\n // Custom metadata gets stored in storage.objects.metadata\n ...Object.fromEntries(\n Object.entries(metadata).map(([k, v]) => [k, String(v)])\n ),\n },\n chunkSize: 6 * 1024 * 1024, // 6MB - required by Supabase\n onError: (error) => {\n console.error(\"Upload failed:\", error);\n reject(error);\n },\n onProgress: onProgress,\n onSuccess: () => {\n resolve();\n },\n });\n\n upload.findPreviousUploads().then((previousUploads) => {\n if (previousUploads.length) {\n upload.resumeFromPreviousUpload(previousUploads[0]);\n }\n upload.start();\n });\n });\n}\n\n/**\n * Hook for uploading files to Supabase Storage with path-based references.\n *\n * Unlike useDbAttachmentUpload, this hook:\n * - Does NOT create Attachment records\n * - Returns the storage path for the caller to store\n * - Sets metadata directly on storage.objects\n *\n * @example\n * ```tsx\n * import { useUpload } from \"@pol-studios/storage/hooks\";\n * import { BUCKETS } from \"@pol-studios/db\";\n *\n * const upload = useUpload({ bucketId: BUCKETS.AVATARS });\n *\n * const handleUpload = async (file: File) => {\n * const result = await upload.mutateAsync({ file });\n * // Store result.path on the entity\n * await updateProfile({ profilePath: result.path });\n * };\n * ```\n */\nexport default function useUpload(\n options: UseUploadOptions\n): ReturnType<typeof useMutation<UploadResult, Error, UploadInput>> {\n const { bucketId } = options;\n const supabase = 
useSupabase();\n\n return useMutation({\n mutationFn: async (input: UploadInput): Promise<UploadResult> => {\n const { file, fileName, directory, metadata = {}, onUploadProgress } = input;\n\n // Get original filename\n const originalFileName =\n fileName ??\n (\"name\" in file ? file[\"name\"] : \"\") ??\n (\"fileName\" in file ? (file as any)[\"fileName\"] : \"file\");\n\n if (isBlank(originalFileName)) {\n throw new Error(\"File must have a valid name\");\n }\n\n // Generate unique filename with original extension\n const extension = originalFileName.split(\".\").pop() ?? \"\";\n const uniqueFileName = newUuid() + \".\" + extension;\n\n // Build path with optional directory\n const path = directory\n ? directory + \"/14/\" + uniqueFileName\n : \"14/\" + uniqueFileName;\n\n // Derive content type from file\n const contentType =\n \"type\" in file\n ? file.type.split(\"/\").pop()?.toUpperCase() ?? \"Unknown\"\n : extension.toUpperCase();\n\n // Build metadata\n const uploadMetadata: StorageUploadMetadata = {\n processingStatus: \"None\",\n contentType,\n originalFileName,\n ...metadata,\n };\n\n // Upload with metadata\n await uploadFileWithMetadata(\n supabase,\n bucketId,\n path,\n file,\n uploadMetadata,\n onUploadProgress\n );\n\n return {\n path,\n bucketId,\n originalFileName,\n contentType,\n };\n },\n });\n}\n\n/**\n * Convenience hook that combines upload with entity update.\n * Uploads the file and then updates the specified entity with the path.\n */\nexport function useUploadWithEntity<T extends Record<string, unknown>>(\n options: UseUploadOptions & {\n /** Field name on the entity to store the path */\n pathField: string;\n /** Function to update the entity after upload */\n updateEntity: (entity: T, path: string) => Promise<void>;\n }\n) {\n const { pathField, updateEntity, ...uploadOptions } = options;\n const uploadMutation = useUpload(uploadOptions);\n\n return useMutation({\n mutationFn: async (input: UploadInput & { entity: T }): 
Promise<UploadResult> => {\n const { entity, ...uploadInput } = input;\n const result = await uploadMutation.mutateAsync(uploadInput);\n await updateEntity(entity, result.path);\n return result;\n },\n });\n}\n","import { isUsable } from \"@pol-studios/utils/types\";\nimport { useIndexedDB } from \"@pol-studios/hooks/storage\";\nimport { useSupabase } from \"@pol-studios/db/client\";\nimport moment from \"moment\";\nimport { TransformOptions } from \"@supabase/storage-js\";\nimport { useRef } from \"react\";\nimport type { Attachment, CachedUrl } from \"../types\";\n\n// Global cache version map to track when attachments are expired\nconst cacheVersions = new Map<string, number>();\n// Listeners for cache version changes\nconst cacheVersionListeners = new Map<string, Set<() => void>>();\n// Track retry attempts per entity\nconst retryAttempts = new Map<string, number>();\n\nexport default function useUrl() {\n const db = useIndexedDB({\n dbName: \"polstudios\",\n storeName: \"cached-urls\",\n });\n const storedUrls = useRef(new Map<string, CachedUrl>());\n\n const supabase = useSupabase();\n\n function getCacheVersion(entity: { bucketId: string; path: string }): number {\n const key = entity.bucketId + entity.path;\n return cacheVersions.get(key) || 0;\n }\n\n function subscribeToCacheVersion(\n entity: { bucketId: string; path: string },\n callback: () => void\n ): () => void {\n const key = entity.bucketId + entity.path;\n if (!cacheVersionListeners.has(key)) {\n cacheVersionListeners.set(key, new Set());\n }\n cacheVersionListeners.get(key)!.add(callback);\n \n return () => {\n cacheVersionListeners.get(key)?.delete(callback);\n };\n }\n\n async function expireCache(\n entity: { bucketId: string; path: string },\n ) {\n const key = entity.bucketId + entity.path;\n\n const dbKeys = await db.getAllKeys();\n const keysToDelete = Array.from(\n new Set(\n [...Array.from(storedUrls.current.keys()), ...dbKeys].filter((value) =>\n value.startsWith(key)\n ),\n ),\n );\n 
await Promise.all(\n keysToDelete.map(\n async (x) => {\n storedUrls.current.delete(x);\n await db.removeItem(x);\n },\n ),\n );\n \n // Increment cache version to force image refetch\n const currentVersion = cacheVersions.get(key) || 0;\n cacheVersions.set(key, currentVersion + 1);\n \n // Notify all listeners that cache version changed\n const listeners = cacheVersionListeners.get(key);\n if (listeners) {\n listeners.forEach(callback => {\n callback();\n });\n }\n }\n async function baseFetchUrl(\n entity: Attachment,\n options?: {\n download?: string | boolean;\n transform?: TransformOptions;\n },\n isPublic: boolean = false,\n ) {\n if (isUsable(entity) === false) return;\n const optionsString = JSON.stringify(options);\n if (isUsable(entity.bucketId) === false) {\n return;\n }\n\n if (isUsable(entity.path) === false) {\n return;\n }\n const key = entity.bucketId + entity.path + (optionsString ?? \"\") + \"-cached-url\";\n\n const inMemoryItem = storedUrls.current.get(key);\n\n let item = inMemoryItem;\n if (isUsable(inMemoryItem) === false) {\n item = await db.getItem(key);\n }\n\n if (\n isUsable(item) &&\n moment(item.expiresOn).isAfter(moment().add(-1 * 60, \"seconds\"))\n ) {\n return item.url;\n }\n\n const base = supabase.storage.from(entity.bucketId);\n let download = options?.download;\n\n if (typeof options?.download === \"string\") {\n const ext = entity.path.split(\".\").pop() ?? \"\";\n download = options?.download.endsWith(ext)\n ? options?.download\n : options?.download + \".\" + ext;\n }\n\n const newOptions = options ? 
{ ...options, download: download } : undefined;\n\n // Track retries per entity\n const retryKey = entity.bucketId + \"/\" + entity.path;\n const currentRetries = retryAttempts.get(retryKey) || 0;\n\n let url: string | undefined;\n\n if (isPublic) {\n url = base.getPublicUrl(entity.path, newOptions).data.publicUrl;\n } else {\n // Try to get signed URL with retry logic\n try {\n console.log(\"Creating signed URL for\", entity.path);\n const result = await base.createSignedUrl(entity.path, 60 * 100, newOptions);\n url = result.data?.signedUrl;\n\n if (isUsable(url)) {\n // Success - reset retry count\n retryAttempts.delete(retryKey);\n } else if (currentRetries < 3) {\n // Failed but can retry\n retryAttempts.set(retryKey, currentRetries + 1);\n throw new Error(\"Failed to get signed URL\");\n }\n } catch (error) {\n if (currentRetries < 3) {\n // Retry with exponential backoff\n retryAttempts.set(retryKey, currentRetries + 1);\n const delay = Math.min(1000 * Math.pow(2, currentRetries), 5000);\n await new Promise(resolve => setTimeout(resolve, delay));\n return baseFetchUrl(entity, options, isPublic);\n }\n // Max retries reached\n retryAttempts.delete(retryKey);\n return undefined;\n }\n }\n\n if (isUsable(url) === false) return url;\n\n const cachedUrl = {\n key: key,\n url: url,\n expiresOn: moment()\n .add(60 * 100, \"seconds\")\n .toISOString(true),\n };\n\n storedUrls.current.set(key, cachedUrl);\n\n db.setItem(key, cachedUrl);\n return url;\n }\n async function fetchUrl(\n entity: { bucketId: string; path: string },\n options?: {\n download?: string | boolean;\n transform?: TransformOptions;\n },\n ) {\n return baseFetchUrl(entity, options, false);\n }\n\n async function fetchPublicUrl(\n entity: { bucketId: string; path: string },\n options?: {\n download?: string | boolean;\n transform?: TransformOptions;\n },\n ) {\n return baseFetchUrl(entity, options, true);\n }\n\n async function prefetchImage(\n entity: Attachment,\n options?: {\n download?: string | 
boolean;\n transform?: TransformOptions;\n },\n ) {\n const url = await fetchUrl(entity, options);\n if (url) {\n new Image().src = url;\n }\n }\n\n /**\n * Batch fetch signed URLs for multiple attachments in a single API call.\n * Falls back to individual fetches if attachments span multiple buckets.\n * Results are cached the same way as individual fetchUrl calls.\n */\n async function fetchUrls(\n entities: Attachment[],\n options?: {\n download?: string | boolean;\n transform?: TransformOptions;\n },\n ): Promise<Map<string, string | undefined>> {\n const results = new Map<string, string | undefined>();\n \n if (entities.length === 0) return results;\n\n const optionsString = JSON.stringify(options);\n const expirySeconds = 60 * 100;\n \n // Group entities by bucket\n const byBucket = new Map<string, Attachment[]>();\n for (const entity of entities) {\n if (!isUsable(entity.bucketId) || !isUsable(entity.path)) continue;\n const list = byBucket.get(entity.bucketId) ?? [];\n list.push(entity);\n byBucket.set(entity.bucketId, list);\n }\n\n // For each bucket, check cache first, then batch fetch uncached\n for (const [bucketId, bucketEntities] of byBucket) {\n const uncached: Attachment[] = [];\n \n // Check cache for each entity\n for (const entity of bucketEntities) {\n const key = entity.bucketId + entity.path + (optionsString ?? 
\"\") + \"-cached-url\";\n const entityKey = entity.bucketId + \"/\" + entity.path;\n \n // Check in-memory cache first\n let item = storedUrls.current.get(key);\n \n // Then check IndexDB\n if (!isUsable(item)) {\n item = await db.getItem(key);\n }\n \n // If cached and not expired, use it\n if (\n isUsable(item) &&\n moment(item.expiresOn).isAfter(moment().add(-1 * 60, \"seconds\"))\n ) {\n results.set(entityKey, item.url);\n } else {\n uncached.push(entity);\n }\n }\n\n // Batch fetch uncached URLs\n if (uncached.length > 0) {\n const paths = uncached.map(e => e.path);\n const base = supabase.storage.from(bucketId);\n \n try {\n console.log(\"Signed URLs created\");\n const { data, error } = await base.createSignedUrls(paths, expirySeconds);\n if (!error && data) {\n const expiresOn = moment().add(expirySeconds, \"seconds\").toISOString(true);\n \n for (let i = 0; i < uncached.length; i++) {\n const entity = uncached[i];\n const urlData = data[i];\n const entityKey = entity.bucketId + \"/\" + entity.path;\n const cacheKey = entity.bucketId + entity.path + (optionsString ?? 
\"\") + \"-cached-url\";\n \n if (urlData?.signedUrl) {\n results.set(entityKey, urlData.signedUrl);\n \n // Cache the URL\n const cachedUrl = {\n key: cacheKey,\n url: urlData.signedUrl,\n expiresOn,\n };\n storedUrls.current.set(cacheKey, cachedUrl);\n db.setItem(cacheKey, cachedUrl);\n } else {\n results.set(entityKey, undefined);\n }\n }\n } else {\n // Fall back to individual fetches on error\n for (const entity of uncached) {\n const entityKey = entity.bucketId + \"/\" + entity.path;\n const url = await fetchUrl(entity, options);\n results.set(entityKey, url);\n }\n }\n } catch {\n // Fall back to individual fetches on error\n for (const entity of uncached) {\n const entityKey = entity.bucketId + \"/\" + entity.path;\n const url = await fetchUrl(entity, options);\n results.set(entityKey, url);\n }\n }\n }\n }\n\n return results;\n }\n\n return { fetchUrl, fetchUrls, prefetchImage, fetchPublicUrl, expireCache, getCacheVersion, subscribeToCacheVersion };\n}\n","import { useEffect, useState } from \"react\";\nimport { isUsable } from \"@pol-studios/utils/types\";\nimport { useQuery } from \"@pol-studios/db/query\";\nimport { useSupabase } from \"@pol-studios/db/client\";\nimport useUrl from \"./useUrl\";\nimport type { StorageObjectMetadata, UsePathOptions, UsePathResult } from \"../types\";\n\n/**\n * Get content type from metadata or infer from file path extension\n */\nfunction getContentType(\n metadata: StorageObjectMetadata | null | undefined,\n path: string\n): string {\n if (metadata?.contentType) {\n return metadata.contentType;\n }\n const ext = path.split(\".\").pop()?.toUpperCase();\n return ext || \"Unknown\";\n}\n\n/**\n * Hook to get a signed URL and metadata for a storage path.\n * Replaces useAttachment for the path-based storage approach.\n *\n * @param storagePath - The path within the bucket (e.g., \"14/abc-123.jpg\")\n * @param bucketId - The storage bucket ID\n * @param options - Optional configuration\n *\n * @example\n * ```tsx\n * const 
{ url, contentType, isLoading } = usePath(\n * profile.profilePath,\n * 'attachments'\n * );\n *\n * if (isLoading) return <Spinner />;\n * return <img src={url} />;\n * ```\n */\nexport default function usePath(\n storagePath: string | undefined | null,\n bucketId: string,\n options: UsePathOptions = {}\n): UsePathResult {\n const { fetchMetadata = true, transform, download } = options;\n\n const [url, setUrl] = useState<string | null>(null);\n const [error, setError] = useState<Error | null>(null);\n const { fetchUrl } = useUrl();\n const supabase = useSupabase();\n\n // Fetch signed URL\n useEffect(() => {\n if (!isUsable(storagePath) || !isUsable(bucketId)) {\n setUrl(null);\n return;\n }\n\n let cancelled = false;\n\n fetchUrl({ bucketId, path: storagePath }, { transform, download })\n .then((signedUrl) => {\n if (!cancelled) {\n setUrl(signedUrl ?? null);\n setError(null);\n }\n })\n .catch((err) => {\n if (!cancelled) {\n setError(err instanceof Error ? err : new Error(String(err)));\n setUrl(null);\n }\n });\n\n return () => {\n cancelled = true;\n };\n }, [storagePath, bucketId, JSON.stringify(transform), download]);\n\n // Fetch metadata from storage.objects if requested\n const metadataRequest = useQuery(\n (supabase\n .schema(\"storage\" as any) as any)\n .from(\"objects\")\n .select(\"metadata\")\n .eq(\"bucket_id\", bucketId)\n .eq(\"name\", storagePath ?? \"\")\n .maybeSingle(),\n {\n enabled: fetchMetadata && isUsable(storagePath) && isUsable(bucketId),\n }\n );\n\n const metadata = ((metadataRequest.data as any)?.metadata as StorageObjectMetadata) ?? null;\n const contentType = getContentType(metadata, storagePath ?? 
\"\");\n const isLoading =\n (isUsable(storagePath) && url === null && error === null) ||\n (fetchMetadata && metadataRequest.isFetching);\n\n return {\n url,\n metadata,\n contentType,\n isLoading,\n error,\n };\n}\n","import { useCallback, useEffect, useMemo, useState } from \"react\";\nimport {\n type FileError,\n type FileRejection,\n useDropzone,\n} from \"react-dropzone\";\nimport { useSupabase } from \"@pol-studios/db/client\";\n\ninterface FileWithPreview extends File {\n preview?: string;\n errors: readonly FileError[];\n}\n\nexport type UseSupabaseUploadOptions = {\n /**\n * Name of bucket to upload files to in your Supabase project\n */\n bucketName: string;\n /**\n * Folder to upload files to in the specified bucket within your Supabase project.\n *\n * Defaults to uploading files to the root of the bucket\n *\n * e.g If specified path is `test`, your file will be uploaded as `test/file_name`\n */\n path?: string;\n /**\n * Allowed MIME types for each file upload (e.g `image/png`, `text/html`, etc). Wildcards are also supported (e.g `image/*`).\n *\n * Defaults to allowing uploading of all MIME types.\n */\n allowedMimeTypes?: string[];\n /**\n * Maximum upload size of each file allowed in bytes. (e.g 1000 bytes = 1 KB)\n */\n maxFileSize?: number;\n /**\n * Maximum number of files allowed per upload.\n */\n maxFiles?: number;\n /**\n * The number of seconds the asset is cached in the browser and in the Supabase CDN.\n *\n * This is set in the Cache-Control: max-age=<seconds> header. Defaults to 3600 seconds.\n */\n cacheControl?: number;\n /**\n * When set to true, the file is overwritten if it exists.\n *\n * When set to false, an error is thrown if the object already exists. 
Defaults to `false`\n */\n upsert?: boolean;\n};\n\nexport type UseSupabaseUploadReturn = ReturnType<typeof useDropzoneUpload>;\n\nexport const useDropzoneUpload = (options: UseSupabaseUploadOptions) => {\n const {\n bucketName,\n path,\n allowedMimeTypes = [],\n maxFileSize = Number.POSITIVE_INFINITY,\n maxFiles = 1,\n cacheControl = 3600,\n upsert = false,\n } = options;\n\n const [files, setFiles] = useState<FileWithPreview[]>([]);\n const [loading, setLoading] = useState<boolean>(false);\n const [errors, setErrors] = useState<{ name: string; message: string }[]>([]);\n const [successes, setSuccesses] = useState<string[]>([]);\n\n const isSuccess = useMemo(() => {\n if (errors.length === 0 && successes.length === 0) {\n return false;\n }\n if (errors.length === 0 && successes.length === files.length) {\n return true;\n }\n return false;\n }, [errors.length, successes.length, files.length]);\n\n const onDrop = useCallback(\n (acceptedFiles: File[], fileRejections: FileRejection[]) => {\n const validFiles = acceptedFiles\n .filter((file) => !files.find((x) => x.name === file.name))\n .map((file) => {\n (file as FileWithPreview).preview = URL.createObjectURL(file);\n (file as FileWithPreview).errors = [];\n return file as FileWithPreview;\n });\n\n const invalidFiles = fileRejections.map(({ file, errors }) => {\n (file as FileWithPreview).preview = URL.createObjectURL(file);\n (file as FileWithPreview).errors = errors;\n return file as FileWithPreview;\n });\n\n const newFiles = [...files, ...validFiles, ...invalidFiles];\n\n setFiles(newFiles);\n },\n [files, setFiles]\n );\n\n const dropzoneProps = useDropzone({\n onDrop,\n noClick: true,\n accept: allowedMimeTypes.reduce(\n (acc, type) => ({ ...acc, [type]: [] }),\n {}\n ),\n maxSize: maxFileSize,\n maxFiles: maxFiles,\n multiple: maxFiles !== 1,\n });\n\n const supabase = useSupabase();\n\n const onUpload = useCallback(async () => {\n setLoading(true);\n\n // [Joshen] This is to support handling partial 
successes\n // If any files didn't upload for any reason, hitting \"Upload\" again will only upload the files that had errors\n const filesWithErrors = errors.map((x) => x.name);\n const filesToUpload =\n filesWithErrors.length > 0\n ? [\n ...files.filter((f) => filesWithErrors.includes(f.name)),\n ...files.filter((f) => !successes.includes(f.name)),\n ]\n : files;\n\n const responses = await Promise.all(\n filesToUpload.map(async (file) => {\n const { error } = await supabase.storage\n .from(bucketName)\n .upload(!!path ? path + \"/\" + file.name : file.name, file, {\n cacheControl: cacheControl.toString(),\n upsert,\n });\n if (error) {\n return { name: file.name, message: error.message };\n } else {\n return { name: file.name, message: undefined };\n }\n })\n );\n\n const responseErrors = responses.filter((x) => x.message !== undefined);\n // if there were errors previously, this function tried to upload the files again so we should clear/overwrite the existing errors.\n setErrors(responseErrors);\n\n const responseSuccesses = responses.filter((x) => x.message === undefined);\n const newSuccesses = Array.from(\n new Set([...successes, ...responseSuccesses.map((x) => x.name)])\n );\n setSuccesses(newSuccesses);\n\n setLoading(false);\n }, [files, path, bucketName, errors, successes]);\n\n useEffect(() => {\n if (files.length === 0) {\n setErrors([]);\n }\n\n // If the number of files doesn't exceed the maxFiles parameter, remove the error 'Too many files' from each file\n if (files.length <= maxFiles) {\n let changed = false;\n const newFiles = files.map((file) => {\n if (file.errors.some((e) => e.code === \"too-many-files\")) {\n file.errors = file.errors.filter((e) => e.code !== \"too-many-files\");\n changed = true;\n }\n return file;\n });\n if (changed) {\n setFiles(newFiles);\n }\n }\n }, [files.length, setFiles, maxFiles]);\n\n return {\n files,\n setFiles,\n successes,\n isSuccess,\n loading,\n errors,\n setErrors,\n onUpload,\n maxFileSize: 
maxFileSize,\n maxFiles: maxFiles,\n allowedMimeTypes,\n ...dropzoneProps,\n };\n};\n","/**\n * Storage bucket names for Supabase storage.\n * Use these constants when calling storage APIs.\n */\nexport const BUCKETS = {\n AVATARS: \"avatars\",\n FIXTURE_CATALOG_COVER: \"fixture-catalog-cover\",\n FIXTURE_ATTACHMENTS: \"fixture-attachments\",\n UNIT_DOCUMENTATION: \"unit-documentation-attachments\",\n DATASHEETS: \"datasheets\",\n PATTERNS: \"patterns\",\n LOGOS: \"logos\",\n RECEIPTS: \"receipts\",\n TICKET_ATTACHMENTS: \"ticket-attachment\",\n PROCESS_RESULTS: \"process-results\",\n DATA_EXCHANGE_IMPORTS: \"data-exchange-imports\",\n EMAIL_TEMPLATES: \"email-templates\",\n EMAIL_TEMPLATES_ASSETS: \"email-templates-assets\",\n} as const;\n\nexport type BucketName = (typeof BUCKETS)[keyof typeof BUCKETS];\n"],"mappings":";AAAA,SAAS,mBAAmB;AAC5B,SAAS,cAAc;AAEvB,SAAS,eAAe;AACxB,SAAS,eAAe;AACxB,SAAS,aAAa,sBAAsB;AAQ5C,eAAe,uBACb,UACA,YACA,UACA,MACA,UACA,YACe;AACf,QAAM;AAAA,IACJ,MAAM,EAAE,QAAQ;AAAA,EAClB,IAAI,MAAM,SAAS,KAAK,WAAW;AAEnC,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,wCAAwC;AAAA,EAC1D;AAEA,QAAM,cACJ,UAAU,OAAO,KAAK,OAAO,SAAS,eAAe;AAEvD,QAAM,cAAc,eAAe;AAEnC,SAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,UAAM,SAAS,IAAI,OAAO,MAAM;AAAA,MAC9B,UAAU,cAAc;AAAA,MACxB,aAAa,CAAC,GAAG,KAAM,KAAM,KAAO,GAAK;AAAA,MACzC,SAAS;AAAA,QACP,eAAe,YAAY,QAAQ;AAAA,QACnC,YAAY;AAAA,MACd;AAAA,MACA,0BAA0B;AAAA,MAC1B,4BAA4B;AAAA,MAC5B,UAAU;AAAA,QACR;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,QACA,cAAc;AAAA;AAAA,QAEd,GAAG,OAAO;AAAA,UACR,OAAO,QAAQ,QAAQ,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC;AAAA,QACzD;AAAA,MACF;AAAA,MACA,WAAW,IAAI,OAAO;AAAA;AAAA,MACtB,SAAS,CAAC,UAAU;AAClB,gBAAQ,MAAM,kBAAkB,KAAK;AACrC,eAAO,KAAK;AAAA,MACd;AAAA,MACA;AAAA,MACA,WAAW,MAAM;AACf,gBAAQ;AAAA,MACV;AAAA,IACF,CAAC;AAED,WAAO,oBAAoB,EAAE,KAAK,CAAC,oBAAoB;AACrD,UAAI,gBAAgB,QAAQ;AAC1B,eAAO,yBAAyB,gBAAgB,CAAC,CAAC;AAAA,MACpD;AACA,aAAO,MAAM;AAAA,IACf,CAAC;AAAA,EACH,CAAC;AACH;AAwBe,SAAR,UACL,SACkE;AAClE,QAAM,EAAE,SAAS,IAAI;AACrB,QAAM,WAA
W,YAAY;AAE7B,SAAO,YAAY;AAAA,IACjB,YAAY,OAAO,UAA8C;AAC/D,YAAM,EAAE,MAAM,UAAU,WAAW,WAAW,CAAC,GAAG,iBAAiB,IAAI;AAGvE,YAAM,mBACJ,aACC,UAAU,OAAO,KAAK,MAAM,IAAI,QAChC,cAAc,OAAQ,KAAa,UAAU,IAAI;AAEpD,UAAI,QAAQ,gBAAgB,GAAG;AAC7B,cAAM,IAAI,MAAM,6BAA6B;AAAA,MAC/C;AAGA,YAAM,YAAY,iBAAiB,MAAM,GAAG,EAAE,IAAI,KAAK;AACvD,YAAM,iBAAiB,QAAQ,IAAI,MAAM;AAGzC,YAAM,OAAO,YACT,YAAY,SAAS,iBACrB,QAAQ;AAGZ,YAAM,cACJ,UAAU,OACN,KAAK,KAAK,MAAM,GAAG,EAAE,IAAI,GAAG,YAAY,KAAK,YAC7C,UAAU,YAAY;AAG5B,YAAM,iBAAwC;AAAA,QAC5C,kBAAkB;AAAA,QAClB;AAAA,QACA;AAAA,QACA,GAAG;AAAA,MACL;AAGA,YAAM;AAAA,QACJ;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAEA,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAMO,SAAS,oBACd,SAMA;AACA,QAAM,EAAE,WAAW,cAAc,GAAG,cAAc,IAAI;AACtD,QAAM,iBAAiB,UAAU,aAAa;AAE9C,SAAO,YAAY;AAAA,IACjB,YAAY,OAAO,UAA8D;AAC/E,YAAM,EAAE,QAAQ,GAAG,YAAY,IAAI;AACnC,YAAM,SAAS,MAAM,eAAe,YAAY,WAAW;AAC3D,YAAM,aAAa,QAAQ,OAAO,IAAI;AACtC,aAAO;AAAA,IACT;AAAA,EACF,CAAC;AACH;;;ACtLA,SAAS,gBAAgB;AACzB,SAAS,oBAAoB;AAC7B,SAAS,eAAAA,oBAAmB;AAC5B,OAAO,YAAY;AAEnB,SAAS,cAAc;AAIvB,IAAM,gBAAgB,oBAAI,IAAoB;AAE9C,IAAM,wBAAwB,oBAAI,IAA6B;AAE/D,IAAM,gBAAgB,oBAAI,IAAoB;AAE/B,SAAR,SAA0B;AAC/B,QAAM,KAAK,aAAa;AAAA,IACtB,QAAQ;AAAA,IACR,WAAW;AAAA,EACb,CAAC;AACD,QAAM,aAAa,OAAO,oBAAI,IAAuB,CAAC;AAEtD,QAAM,WAAWA,aAAY;AAE7B,WAAS,gBAAgB,QAAoD;AAC3E,UAAM,MAAM,OAAO,WAAW,OAAO;AACrC,WAAO,cAAc,IAAI,GAAG,KAAK;AAAA,EACnC;AAEA,WAAS,wBACP,QACA,UACY;AACZ,UAAM,MAAM,OAAO,WAAW,OAAO;AACrC,QAAI,CAAC,sBAAsB,IAAI,GAAG,GAAG;AACnC,4BAAsB,IAAI,KAAK,oBAAI,IAAI,CAAC;AAAA,IAC1C;AACA,0BAAsB,IAAI,GAAG,EAAG,IAAI,QAAQ;AAE5C,WAAO,MAAM;AACX,4BAAsB,IAAI,GAAG,GAAG,OAAO,QAAQ;AAAA,IACjD;AAAA,EACF;AAEA,iBAAe,YACb,QACA;AACA,UAAM,MAAM,OAAO,WAAW,OAAO;AAErC,UAAM,SAAS,MAAM,GAAG,WAAW;AACnC,UAAM,eAAe,MAAM;AAAA,MACzB,IAAI;AAAA,QACF,CAAC,GAAG,MAAM,KAAK,WAAW,QAAQ,KAAK,CAAC,GAAG,GAAG,MAAM,EAAE;AAAA,UAAO,CAAC,UAC5D,MAAM,WAAW,GAAG;AAAA,QACtB;AAAA,MACF;AAAA,IACF;AACA,UAAM,QAAQ;AAAA,MACZ,aAAa;AAAA,QACX,OAAO,MAAM;AACX,qBAAW,QAAQ,OAAO,CAAC
;AAC3B,gBAAM,GAAG,WAAW,CAAC;AAAA,QACvB;AAAA,MACF;AAAA,IACF;AAGA,UAAM,iBAAiB,cAAc,IAAI,GAAG,KAAK;AACjD,kBAAc,IAAI,KAAK,iBAAiB,CAAC;AAGzC,UAAM,YAAY,sBAAsB,IAAI,GAAG;AAC/C,QAAI,WAAW;AACb,gBAAU,QAAQ,cAAY;AAC5B,iBAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AACA,iBAAe,aACb,QACA,SAIA,WAAoB,OACpB;AACA,QAAI,SAAS,MAAM,MAAM,MAAO;AAChC,UAAM,gBAAgB,KAAK,UAAU,OAAO;AAC5C,QAAI,SAAS,OAAO,QAAQ,MAAM,OAAO;AACvC;AAAA,IACF;AAEA,QAAI,SAAS,OAAO,IAAI,MAAM,OAAO;AACnC;AAAA,IACF;AACA,UAAM,MAAM,OAAO,WAAW,OAAO,QAAQ,iBAAiB,MAAM;AAEpE,UAAM,eAAe,WAAW,QAAQ,IAAI,GAAG;AAE/C,QAAI,OAAO;AACX,QAAI,SAAS,YAAY,MAAM,OAAO;AACpC,aAAO,MAAM,GAAG,QAAQ,GAAG;AAAA,IAC7B;AAEA,QACE,SAAS,IAAI,KACb,OAAO,KAAK,SAAS,EAAE,QAAQ,OAAO,EAAE,IAAI,KAAK,IAAI,SAAS,CAAC,GAC/D;AACA,aAAO,KAAK;AAAA,IACd;AAEA,UAAM,OAAO,SAAS,QAAQ,KAAK,OAAO,QAAQ;AAClD,QAAI,WAAW,SAAS;AAExB,QAAI,OAAO,SAAS,aAAa,UAAU;AACzC,YAAM,MAAM,OAAO,KAAK,MAAM,GAAG,EAAE,IAAI,KAAK;AAC5C,iBAAW,SAAS,SAAS,SAAS,GAAG,IACrC,SAAS,WACT,SAAS,WAAW,MAAM;AAAA,IAChC;AAEA,UAAM,aAAa,UAAU,EAAE,GAAG,SAAS,SAAmB,IAAI;AAGlE,UAAM,WAAW,OAAO,WAAW,MAAM,OAAO;AAChD,UAAM,iBAAiB,cAAc,IAAI,QAAQ,KAAK;AAEtD,QAAI;AAEJ,QAAI,UAAU;AACZ,YAAM,KAAK,aAAa,OAAO,MAAM,UAAU,EAAE,KAAK;AAAA,IACxD,OAAO;AAEL,UAAI;AACF,gBAAQ,IAAI,2BAA2B,OAAO,IAAI;AAClD,cAAM,SAAS,MAAM,KAAK,gBAAgB,OAAO,MAAM,KAAK,KAAK,UAAU;AAC3E,cAAM,OAAO,MAAM;AAEnB,YAAI,SAAS,GAAG,GAAG;AAEjB,wBAAc,OAAO,QAAQ;AAAA,QAC/B,WAAW,iBAAiB,GAAG;AAE7B,wBAAc,IAAI,UAAU,iBAAiB,CAAC;AAC9C,gBAAM,IAAI,MAAM,0BAA0B;AAAA,QAC5C;AAAA,MACF,SAAS,OAAO;AACd,YAAI,iBAAiB,GAAG;AAEtB,wBAAc,IAAI,UAAU,iBAAiB,CAAC;AAC9C,gBAAM,QAAQ,KAAK,IAAI,MAAO,KAAK,IAAI,GAAG,cAAc,GAAG,GAAI;AAC/D,gBAAM,IAAI,QAAQ,aAAW,WAAW,SAAS,KAAK,CAAC;AACvD,iBAAO,aAAa,QAAQ,SAAS,QAAQ;AAAA,QAC/C;AAEA,sBAAc,OAAO,QAAQ;AAC7B,eAAO;AAAA,MACT;AAAA,IACF;AAEA,QAAI,SAAS,GAAG,MAAM,MAAO,QAAO;AAEpC,UAAM,YAAY;AAAA,MAChB;AAAA,MACA;AAAA,MACA,WAAW,OAAO,EACf,IAAI,KAAK,KAAK,SAAS,EACvB,YAAY,IAAI;AAAA,IACrB;AAEA,eAAW,QAAQ,IAAI,KAAK,SAAS;AAErC,OAAG,QAAQ,KAAK,SAAS;AACzB,WAAO;AAAA,EACT;AACA,iBAAe,SACb,QACA,SAIA;AACA,WAAO,aAAa,QAAQ,SAAS,KAAK;AAAA,EAC5
C;AAEA,iBAAe,eACb,QACA,SAIA;AACA,WAAO,aAAa,QAAQ,SAAS,IAAI;AAAA,EAC3C;AAEA,iBAAe,cACb,QACA,SAIA;AACA,UAAM,MAAM,MAAM,SAAS,QAAQ,OAAO;AAC1C,QAAI,KAAK;AACP,UAAI,MAAM,EAAE,MAAM;AAAA,IACpB;AAAA,EACF;AAOA,iBAAe,UACb,UACA,SAI0C;AAC1C,UAAM,UAAU,oBAAI,IAAgC;AAEpD,QAAI,SAAS,WAAW,EAAG,QAAO;AAElC,UAAM,gBAAgB,KAAK,UAAU,OAAO;AAC5C,UAAM,gBAAgB,KAAK;AAG3B,UAAM,WAAW,oBAAI,IAA0B;AAC/C,eAAW,UAAU,UAAU;AAC7B,UAAI,CAAC,SAAS,OAAO,QAAQ,KAAK,CAAC,SAAS,OAAO,IAAI,EAAG;AAC1D,YAAM,OAAO,SAAS,IAAI,OAAO,QAAQ,KAAK,CAAC;AAC/C,WAAK,KAAK,MAAM;AAChB,eAAS,IAAI,OAAO,UAAU,IAAI;AAAA,IACpC;AAGA,eAAW,CAAC,UAAU,cAAc,KAAK,UAAU;AACjD,YAAM,WAAyB,CAAC;AAGhC,iBAAW,UAAU,gBAAgB;AACnC,cAAM,MAAM,OAAO,WAAW,OAAO,QAAQ,iBAAiB,MAAM;AACpE,cAAM,YAAY,OAAO,WAAW,MAAM,OAAO;AAGjD,YAAI,OAAO,WAAW,QAAQ,IAAI,GAAG;AAGrC,YAAI,CAAC,SAAS,IAAI,GAAG;AACnB,iBAAO,MAAM,GAAG,QAAQ,GAAG;AAAA,QAC7B;AAGA,YACE,SAAS,IAAI,KACb,OAAO,KAAK,SAAS,EAAE,QAAQ,OAAO,EAAE,IAAI,KAAK,IAAI,SAAS,CAAC,GAC/D;AACA,kBAAQ,IAAI,WAAW,KAAK,GAAG;AAAA,QACjC,OAAO;AACL,mBAAS,KAAK,MAAM;AAAA,QACtB;AAAA,MACF;AAGA,UAAI,SAAS,SAAS,GAAG;AACvB,cAAM,QAAQ,SAAS,IAAI,OAAK,EAAE,IAAI;AACtC,cAAM,OAAO,SAAS,QAAQ,KAAK,QAAQ;AAE3C,YAAI;AACF,kBAAQ,IAAI,qBAAqB;AACjC,gBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,KAAK,iBAAiB,OAAO,aAAa;AACxE,cAAI,CAAC,SAAS,MAAM;AAClB,kBAAM,YAAY,OAAO,EAAE,IAAI,eAAe,SAAS,EAAE,YAAY,IAAI;AAEzE,qBAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AACxC,oBAAM,SAAS,SAAS,CAAC;AACzB,oBAAM,UAAU,KAAK,CAAC;AACtB,oBAAM,YAAY,OAAO,WAAW,MAAM,OAAO;AACjD,oBAAM,WAAW,OAAO,WAAW,OAAO,QAAQ,iBAAiB,MAAM;AAEzE,kBAAI,SAAS,WAAW;AACtB,wBAAQ,IAAI,WAAW,QAAQ,SAAS;AAGxC,sBAAM,YAAY;AAAA,kBAChB,KAAK;AAAA,kBACL,KAAK,QAAQ;AAAA,kBACb;AAAA,gBACF;AACA,2BAAW,QAAQ,IAAI,UAAU,SAAS;AAC1C,mBAAG,QAAQ,UAAU,SAAS;AAAA,cAChC,OAAO;AACL,wBAAQ,IAAI,WAAW,MAAS;AAAA,cAClC;AAAA,YACF;AAAA,UACF,OAAO;AAEL,uBAAW,UAAU,UAAU;AAC7B,oBAAM,YAAY,OAAO,WAAW,MAAM,OAAO;AACjD,oBAAM,MAAM,MAAM,SAAS,QAAQ,OAAO;AAC1C,sBAAQ,IAAI,WAAW,GAAG;AAAA,YAC5B;AAAA,UACF;AAAA,QACF,QAAQ;AAEN,qBAAW,UAAU,UAAU;AAC7B,kBAAM,YAAY,OAAO,WAAW,MAAM,OAAO;AACjD,kBAAM,MAAM,MAAM,SAAS,QAAQ,OAAO;AAC1C,oBAAQ,
IAAI,WAAW,GAAG;AAAA,UAC5B;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAEA,SAAO,EAAE,UAAU,WAAW,eAAe,gBAAgB,aAAa,iBAAiB,wBAAwB;AACrH;;;AC/TA,SAAS,WAAW,gBAAgB;AACpC,SAAS,YAAAC,iBAAgB;AACzB,SAAS,gBAAgB;AACzB,SAAS,eAAAC,oBAAmB;AAO5B,SAAS,eACP,UACA,MACQ;AACR,MAAI,UAAU,aAAa;AACzB,WAAO,SAAS;AAAA,EAClB;AACA,QAAM,MAAM,KAAK,MAAM,GAAG,EAAE,IAAI,GAAG,YAAY;AAC/C,SAAO,OAAO;AAChB;AAqBe,SAAR,QACL,aACA,UACA,UAA0B,CAAC,GACZ;AACf,QAAM,EAAE,gBAAgB,MAAM,WAAW,SAAS,IAAI;AAEtD,QAAM,CAAC,KAAK,MAAM,IAAI,SAAwB,IAAI;AAClD,QAAM,CAAC,OAAO,QAAQ,IAAI,SAAuB,IAAI;AACrD,QAAM,EAAE,SAAS,IAAI,OAAO;AAC5B,QAAM,WAAWC,aAAY;AAG7B,YAAU,MAAM;AACd,QAAI,CAACC,UAAS,WAAW,KAAK,CAACA,UAAS,QAAQ,GAAG;AACjD,aAAO,IAAI;AACX;AAAA,IACF;AAEA,QAAI,YAAY;AAEhB,aAAS,EAAE,UAAU,MAAM,YAAY,GAAG,EAAE,WAAW,SAAS,CAAC,EAC9D,KAAK,CAAC,cAAc;AACnB,UAAI,CAAC,WAAW;AACd,eAAO,aAAa,IAAI;AACxB,iBAAS,IAAI;AAAA,MACf;AAAA,IACF,CAAC,EACA,MAAM,CAAC,QAAQ;AACd,UAAI,CAAC,WAAW;AACd,iBAAS,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AAC5D,eAAO,IAAI;AAAA,MACb;AAAA,IACF,CAAC;AAEH,WAAO,MAAM;AACX,kBAAY;AAAA,IACd;AAAA,EACF,GAAG,CAAC,aAAa,UAAU,KAAK,UAAU,SAAS,GAAG,QAAQ,CAAC;AAG/D,QAAM,kBAAkB;AAAA,IACrB,SACE,OAAO,SAAgB,EACvB,KAAK,SAAS,EACd,OAAO,UAAU,EACjB,GAAG,aAAa,QAAQ,EACxB,GAAG,QAAQ,eAAe,EAAE,EAC5B,YAAY;AAAA,IACf;AAAA,MACE,SAAS,iBAAiBA,UAAS,WAAW,KAAKA,UAAS,QAAQ;AAAA,IACtE;AAAA,EACF;AAEA,QAAM,WAAa,gBAAgB,MAAc,YAAsC;AACvF,QAAM,cAAc,eAAe,UAAU,eAAe,EAAE;AAC9D,QAAM,YACHA,UAAS,WAAW,KAAK,QAAQ,QAAQ,UAAU,QACnD,iBAAiB,gBAAgB;AAEpC,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;AC3GA,SAAS,aAAa,aAAAC,YAAW,SAAS,YAAAC,iBAAgB;AAC1D;AAAA,EAGE;AAAA,OACK;AACP,SAAS,eAAAC,oBAAmB;AAkDrB,IAAM,oBAAoB,CAAC,YAAsC;AACtE,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,mBAAmB,CAAC;AAAA,IACpB,cAAc,OAAO;AAAA,IACrB,WAAW;AAAA,IACX,eAAe;AAAA,IACf,SAAS;AAAA,EACX,IAAI;AAEJ,QAAM,CAAC,OAAO,QAAQ,IAAID,UAA4B,CAAC,CAAC;AACxD,QAAM,CAAC,SAAS,UAAU,IAAIA,UAAkB,KAAK;AACrD,QAAM,CAAC,QAAQ,SAAS,IAAIA,UAA8C,CAAC,CAAC;AAC5E,QAAM,CAAC,WAAW,YAAY,IAAIA,UAAmB,CAAC,CAAC;AAEvD,QAAM,YAAY,QAAQ,MAAM
;AAC9B,QAAI,OAAO,WAAW,KAAK,UAAU,WAAW,GAAG;AACjD,aAAO;AAAA,IACT;AACA,QAAI,OAAO,WAAW,KAAK,UAAU,WAAW,MAAM,QAAQ;AAC5D,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT,GAAG,CAAC,OAAO,QAAQ,UAAU,QAAQ,MAAM,MAAM,CAAC;AAElD,QAAM,SAAS;AAAA,IACb,CAAC,eAAuB,mBAAoC;AAC1D,YAAM,aAAa,cAChB,OAAO,CAAC,SAAS,CAAC,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,KAAK,IAAI,CAAC,EACzD,IAAI,CAAC,SAAS;AACb,QAAC,KAAyB,UAAU,IAAI,gBAAgB,IAAI;AAC5D,QAAC,KAAyB,SAAS,CAAC;AACpC,eAAO;AAAA,MACT,CAAC;AAEH,YAAM,eAAe,eAAe,IAAI,CAAC,EAAE,MAAM,QAAAE,QAAO,MAAM;AAC5D,QAAC,KAAyB,UAAU,IAAI,gBAAgB,IAAI;AAC5D,QAAC,KAAyB,SAASA;AACnC,eAAO;AAAA,MACT,CAAC;AAED,YAAM,WAAW,CAAC,GAAG,OAAO,GAAG,YAAY,GAAG,YAAY;AAE1D,eAAS,QAAQ;AAAA,IACnB;AAAA,IACA,CAAC,OAAO,QAAQ;AAAA,EAClB;AAEA,QAAM,gBAAgB,YAAY;AAAA,IAChC;AAAA,IACA,SAAS;AAAA,IACT,QAAQ,iBAAiB;AAAA,MACvB,CAAC,KAAK,UAAU,EAAE,GAAG,KAAK,CAAC,IAAI,GAAG,CAAC,EAAE;AAAA,MACrC,CAAC;AAAA,IACH;AAAA,IACA,SAAS;AAAA,IACT;AAAA,IACA,UAAU,aAAa;AAAA,EACzB,CAAC;AAED,QAAM,WAAWD,aAAY;AAE7B,QAAM,WAAW,YAAY,YAAY;AACvC,eAAW,IAAI;AAIf,UAAM,kBAAkB,OAAO,IAAI,CAAC,MAAM,EAAE,IAAI;AAChD,UAAM,gBACJ,gBAAgB,SAAS,IACrB;AAAA,MACE,GAAG,MAAM,OAAO,CAAC,MAAM,gBAAgB,SAAS,EAAE,IAAI,CAAC;AAAA,MACvD,GAAG,MAAM,OAAO,CAAC,MAAM,CAAC,UAAU,SAAS,EAAE,IAAI,CAAC;AAAA,IACpD,IACA;AAEN,UAAM,YAAY,MAAM,QAAQ;AAAA,MAC9B,cAAc,IAAI,OAAO,SAAS;AAChC,cAAM,EAAE,MAAM,IAAI,MAAM,SAAS,QAC9B,KAAK,UAAU,EACf,OAAO,CAAC,CAAC,OAAO,OAAO,MAAM,KAAK,OAAO,KAAK,MAAM,MAAM;AAAA,UACzD,cAAc,aAAa,SAAS;AAAA,UACpC;AAAA,QACF,CAAC;AACH,YAAI,OAAO;AACT,iBAAO,EAAE,MAAM,KAAK,MAAM,SAAS,MAAM,QAAQ;AAAA,QACnD,OAAO;AACL,iBAAO,EAAE,MAAM,KAAK,MAAM,SAAS,OAAU;AAAA,QAC/C;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,iBAAiB,UAAU,OAAO,CAAC,MAAM,EAAE,YAAY,MAAS;AAEtE,cAAU,cAAc;AAExB,UAAM,oBAAoB,UAAU,OAAO,CAAC,MAAM,EAAE,YAAY,MAAS;AACzE,UAAM,eAAe,MAAM;AAAA,MACzB,oBAAI,IAAI,CAAC,GAAG,WAAW,GAAG,kBAAkB,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;AAAA,IACjE;AACA,iBAAa,YAAY;AAEzB,eAAW,KAAK;AAAA,EAClB,GAAG,CAAC,OAAO,MAAM,YAAY,QAAQ,SAAS,CAAC;AAE/C,EAAAF,WAAU,MAAM;AACd,QAAI,MAAM,WAAW,GAAG;AACtB,gBAAU,CAAC,CAAC;AAAA,IACd;AAGA,QAAI,MAAM,UAAU,U
AAU;AAC5B,UAAI,UAAU;AACd,YAAM,WAAW,MAAM,IAAI,CAAC,SAAS;AACnC,YAAI,KAAK,OAAO,KAAK,CAAC,MAAM,EAAE,SAAS,gBAAgB,GAAG;AACxD,eAAK,SAAS,KAAK,OAAO,OAAO,CAAC,MAAM,EAAE,SAAS,gBAAgB;AACnE,oBAAU;AAAA,QACZ;AACA,eAAO;AAAA,MACT,CAAC;AACD,UAAI,SAAS;AACX,iBAAS,QAAQ;AAAA,MACnB;AAAA,IACF;AAAA,EACF,GAAG,CAAC,MAAM,QAAQ,UAAU,QAAQ,CAAC;AAErC,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,GAAG;AAAA,EACL;AACF;;;ACjMO,IAAM,UAAU;AAAA,EACrB,SAAS;AAAA,EACT,uBAAuB;AAAA,EACvB,qBAAqB;AAAA,EACrB,oBAAoB;AAAA,EACpB,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,OAAO;AAAA,EACP,UAAU;AAAA,EACV,oBAAoB;AAAA,EACpB,iBAAiB;AAAA,EACjB,uBAAuB;AAAA,EACvB,iBAAiB;AAAA,EACjB,wBAAwB;AAC1B;","names":["useSupabase","isUsable","useSupabase","useSupabase","isUsable","useEffect","useState","useSupabase","errors"]}
@@ -0,0 +1 @@
1
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
package/package.json ADDED
@@ -0,0 +1,44 @@
1
+ {
2
+ "name": "@pol-studios/storage",
3
+ "version": "1.0.0",
4
+ "description": "Storage utilities for POL applications",
5
+ "license": "UNLICENSED",
6
+ "type": "module",
7
+ "main": "./dist/index.js",
8
+ "types": "./dist/index.d.ts",
9
+ "files": ["dist"],
10
+ "keywords": ["storage", "supabase", "upload", "files"],
11
+ "exports": {
12
+ ".": { "import": "./dist/index.js", "types": "./dist/index.d.ts" },
13
+ "./hooks": { "import": "./dist/hooks/index.js", "types": "./dist/hooks/index.d.ts" },
14
+ "./types": { "import": "./dist/types/index.js", "types": "./dist/types/index.d.ts" },
15
+ "./config": { "import": "./dist/config/index.js", "types": "./dist/config/index.d.ts" }
16
+ },
17
+ "scripts": {
18
+ "build": "tsup",
19
+ "dev": "tsup --watch",
20
+ "typecheck": "tsc --noEmit",
21
+ "prepublishOnly": "pnpm build"
22
+ },
23
+ "publishConfig": {
24
+ "access": "public"
25
+ },
26
+ "peerDependencies": {
27
+ "react": ">=18.0.0",
28
+ "@tanstack/react-query": ">=5.0.0",
29
+ "@supabase/supabase-js": ">=2.0.0",
30
+ "react-dropzone": ">=14.0.0",
31
+ "@pol-studios/db": ">=1.0.0",
32
+ "@pol-studios/utils": ">=1.0.0"
33
+ },
34
+ "dependencies": {
35
+ "tus-js-client": "^4.0.0",
36
+ "moment": "^2.29.4"
37
+ },
38
+ "devDependencies": {
39
+ "@pol-studios/db": "workspace:*",
40
+ "@pol-studios/utils": "workspace:*",
41
+ "tsup": "^8.0.0",
42
+ "typescript": "^5.0.0"
43
+ }
44
+ }