@supabase/storage-js 2.95.2 → 2.95.3
This diff shows the changes between publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- package/dist/index.cjs +23 -3
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +101 -71
- package/dist/index.d.cts.map +1 -1
- package/dist/index.d.mts +101 -71
- package/dist/index.d.mts.map +1 -1
- package/dist/index.mjs +23 -3
- package/dist/index.mjs.map +1 -1
- package/dist/umd/supabase.js +1 -1
- package/package.json +1 -1
- package/src/lib/common/fetch.ts +1 -7
- package/src/lib/types.ts +11 -0
- package/src/lib/version.ts +1 -1
- package/src/packages/StorageFileApi.ts +31 -5
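The source-map diff below embeds the TypeScript sources for the download builders (`BlobDownloadBuilder`, `StreamDownloadBuilder`) that back `StorageFileApi.download()`. As a point of reference, here is a minimal usage sketch against that API; the URL and key are placeholder values, not taken from this diff:

```ts
// Minimal usage sketch of the download builder API embedded in the sources below.
// STORAGE_URL and SERVICE_KEY are placeholders, not values from this diff.
import { StorageClient } from '@supabase/storage-js'

const STORAGE_URL = 'https://project-ref.supabase.co/storage/v1' // placeholder
const SERVICE_KEY = 'service-role-key' // placeholder

const storage = new StorageClient(STORAGE_URL, {
  apikey: SERVICE_KEY,
  Authorization: `Bearer ${SERVICE_KEY}`,
})

async function main() {
  // download() returns a BlobDownloadBuilder; awaiting it resolves to
  // { data: Blob, error: null } or { data: null, error: StorageError }.
  const { data: blob, error } = await storage.from('avatars').download('folder/avatar1.png')
  if (error) throw error

  // asStream() switches to a StreamDownloadBuilder that resolves to a ReadableStream instead.
  const { data: stream } = await storage
    .from('avatars')
    .download('folder/avatar1.png')
    .asStream()

  console.log(blob?.size, stream !== null)
}

main()
```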
package/dist/index.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.cjs","names":["result: Record<string, any>","params: { [k: string]: any }","fetch","this","downloadFn: () => Promise<Response>","shouldThrowOnError: boolean","this","downloadFn: () => Promise<Response>","shouldThrowOnError: boolean","this","DEFAULT_FILE_OPTIONS: FileOptions","fetch","this","headers: Record<string, string>","_queryString: string[]","params: string[]","fetch","this","params: Record<string, string>","fetch","this","IcebergRestCatalog","fetch","this","fetch","this","fetch","this","fetch","this","fetch"],"sources":["../src/lib/common/errors.ts","../src/lib/common/helpers.ts","../src/lib/common/fetch.ts","../src/lib/common/BaseApiClient.ts","../src/packages/StreamDownloadBuilder.ts","../src/packages/BlobDownloadBuilder.ts","../src/packages/StorageFileApi.ts","../src/lib/version.ts","../src/lib/constants.ts","../src/packages/StorageBucketApi.ts","../src/packages/StorageAnalyticsClient.ts","../src/packages/VectorIndexApi.ts","../src/packages/VectorDataApi.ts","../src/packages/VectorBucketApi.ts","../src/packages/StorageVectorsClient.ts","../src/StorageClient.ts"],"sourcesContent":["/**\n * Namespace type for error classes\n * Determines the error class names and type guards\n */\nexport type ErrorNamespace = 'storage' | 'vectors'\n\n/**\n * Base error class for all Storage errors\n * Supports both 'storage' and 'vectors' namespaces\n */\nexport class StorageError extends Error {\n protected __isStorageError = true\n protected namespace: ErrorNamespace\n status?: number\n statusCode?: string\n\n constructor(\n message: string,\n namespace: ErrorNamespace = 'storage',\n status?: number,\n statusCode?: string\n ) {\n super(message)\n this.namespace = namespace\n this.name = namespace === 'vectors' ? 'StorageVectorsError' : 'StorageError'\n this.status = status\n this.statusCode = statusCode\n }\n}\n\n/**\n * Type guard to check if an error is a StorageError\n * @param error - The error to check\n * @returns True if the error is a StorageError\n */\nexport function isStorageError(error: unknown): error is StorageError {\n return typeof error === 'object' && error !== null && '__isStorageError' in error\n}\n\n/**\n * API error returned from Storage service\n * Includes HTTP status code and service-specific error code\n */\nexport class StorageApiError extends StorageError {\n override status: number\n override statusCode: string\n\n constructor(\n message: string,\n status: number,\n statusCode: string,\n namespace: ErrorNamespace = 'storage'\n ) {\n super(message, namespace, status, statusCode)\n this.name = namespace === 'vectors' ? 'StorageVectorsApiError' : 'StorageApiError'\n this.status = status\n this.statusCode = statusCode\n }\n\n toJSON() {\n return {\n name: this.name,\n message: this.message,\n status: this.status,\n statusCode: this.statusCode,\n }\n }\n}\n\n/**\n * Unknown error that doesn't match expected error patterns\n * Wraps the original error for debugging\n */\nexport class StorageUnknownError extends StorageError {\n originalError: unknown\n\n constructor(message: string, originalError: unknown, namespace: ErrorNamespace = 'storage') {\n super(message, namespace)\n this.name = namespace === 'vectors' ? 
'StorageVectorsUnknownError' : 'StorageUnknownError'\n this.originalError = originalError\n }\n}\n\n// ============================================================================\n// Backward Compatibility Exports for Vectors\n// ============================================================================\n\n/**\n * @deprecated Use StorageError with namespace='vectors' instead\n * Alias for backward compatibility with existing vector storage code\n */\nexport class StorageVectorsError extends StorageError {\n constructor(message: string) {\n super(message, 'vectors')\n }\n}\n\n/**\n * Type guard to check if an error is a StorageVectorsError\n * @param error - The error to check\n * @returns True if the error is a StorageVectorsError\n */\nexport function isStorageVectorsError(error: unknown): error is StorageVectorsError {\n return isStorageError(error) && (error as StorageError)['namespace'] === 'vectors'\n}\n\n/**\n * @deprecated Use StorageApiError with namespace='vectors' instead\n * Alias for backward compatibility with existing vector storage code\n */\nexport class StorageVectorsApiError extends StorageApiError {\n constructor(message: string, status: number, statusCode: string) {\n super(message, status, statusCode, 'vectors')\n }\n}\n\n/**\n * @deprecated Use StorageUnknownError with namespace='vectors' instead\n * Alias for backward compatibility with existing vector storage code\n */\nexport class StorageVectorsUnknownError extends StorageUnknownError {\n constructor(message: string, originalError: unknown) {\n super(message, originalError, 'vectors')\n }\n}\n\n/**\n * Error codes specific to S3 Vectors API\n * Maps AWS service errors to application-friendly error codes\n */\nexport enum StorageVectorsErrorCode {\n /** Internal server fault (HTTP 500) */\n InternalError = 'InternalError',\n /** Resource already exists / conflict (HTTP 409) */\n S3VectorConflictException = 'S3VectorConflictException',\n /** Resource not found (HTTP 404) */\n S3VectorNotFoundException = 'S3VectorNotFoundException',\n /** Delete bucket while not empty (HTTP 400) */\n S3VectorBucketNotEmpty = 'S3VectorBucketNotEmpty',\n /** Exceeds bucket quota/limit (HTTP 400) */\n S3VectorMaxBucketsExceeded = 'S3VectorMaxBucketsExceeded',\n /** Exceeds index quota/limit (HTTP 400) */\n S3VectorMaxIndexesExceeded = 'S3VectorMaxIndexesExceeded',\n}\n","type Fetch = typeof fetch\n\n/**\n * Resolves the fetch implementation to use\n * Uses custom fetch if provided, otherwise uses native fetch\n *\n * @param customFetch - Optional custom fetch implementation\n * @returns Resolved fetch function\n */\nexport const resolveFetch = (customFetch?: Fetch): Fetch => {\n if (customFetch) {\n return (...args) => customFetch(...args)\n }\n return (...args) => fetch(...args)\n}\n\n/**\n * Resolves the Response constructor to use\n * Returns native Response constructor\n *\n * @returns Response constructor\n */\nexport const resolveResponse = (): typeof Response => {\n return Response\n}\n\n/**\n * Determine if input is a plain object\n * An object is plain if it's created by either {}, new Object(), or Object.create(null)\n *\n * @param value - Value to check\n * @returns True if value is a plain object\n * @source https://github.com/sindresorhus/is-plain-obj\n */\nexport const isPlainObject = (value: object): boolean => {\n if (typeof value !== 'object' || value === null) {\n return false\n }\n\n const prototype = Object.getPrototypeOf(value)\n return (\n (prototype === null ||\n prototype === Object.prototype ||\n 
Object.getPrototypeOf(prototype) === null) &&\n !(Symbol.toStringTag in value) &&\n !(Symbol.iterator in value)\n )\n}\n\n/**\n * Recursively converts object keys from snake_case to camelCase\n * Used for normalizing API responses\n *\n * @param item - Object to convert\n * @returns Converted object with camelCase keys\n */\nexport const recursiveToCamel = (item: Record<string, any>): unknown => {\n if (Array.isArray(item)) {\n return item.map((el) => recursiveToCamel(el))\n } else if (typeof item === 'function' || item !== Object(item)) {\n return item\n }\n\n const result: Record<string, any> = {}\n Object.entries(item).forEach(([key, value]) => {\n const newKey = key.replace(/([-_][a-z])/gi, (c) => c.toUpperCase().replace(/[-_]/g, ''))\n result[newKey] = recursiveToCamel(value)\n })\n\n return result\n}\n\n/**\n * Validates if a given bucket name is valid according to Supabase Storage API rules\n * Mirrors backend validation from: storage/src/storage/limits.ts:isValidBucketName()\n *\n * Rules:\n * - Length: 1-100 characters\n * - Allowed characters: alphanumeric (a-z, A-Z, 0-9), underscore (_), and safe special characters\n * - Safe special characters: ! - . * ' ( ) space & $ @ = ; : + , ?\n * - Forbidden: path separators (/, \\), path traversal (..), leading/trailing whitespace\n *\n * AWS S3 Reference: https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html\n *\n * @param bucketName - The bucket name to validate\n * @returns true if valid, false otherwise\n */\nexport const isValidBucketName = (bucketName: string): boolean => {\n if (!bucketName || typeof bucketName !== 'string') {\n return false\n }\n\n // Check length constraints (1-100 characters)\n if (bucketName.length === 0 || bucketName.length > 100) {\n return false\n }\n\n // Check for leading/trailing whitespace\n if (bucketName.trim() !== bucketName) {\n return false\n }\n\n // Explicitly reject path separators (security)\n // Note: Consecutive periods (..) 
are allowed by backend - the AWS restriction\n // on relative paths applies to object keys, not bucket names\n if (bucketName.includes('/') || bucketName.includes('\\\\')) {\n return false\n }\n\n // Validate against allowed character set\n // Pattern matches backend regex: /^(\\w|!|-|\\.|\\*|'|\\(|\\)| |&|\\$|@|=|;|:|\\+|,|\\?)*$/\n // This explicitly excludes path separators (/, \\) and other problematic characters\n const bucketNameRegex = /^[\\w!.\\*'() &$@=;:+,?-]+$/\n return bucketNameRegex.test(bucketName)\n}\n\n/**\n * Normalizes a number array to float32 format\n * Ensures all vector values are valid 32-bit floats\n *\n * @param values - Array of numbers to normalize\n * @returns Normalized float32 array\n */\nexport const normalizeToFloat32 = (values: number[]): number[] => {\n // Use Float32Array to ensure proper precision\n return Array.from(new Float32Array(values))\n}\n\n/**\n * Validates vector dimensions match expected dimension\n * Throws error if dimensions don't match\n *\n * @param vector - Vector data to validate\n * @param expectedDimension - Expected vector dimension\n * @throws Error if dimensions don't match\n */\nexport const validateVectorDimension = (\n vector: { float32: number[] },\n expectedDimension?: number\n): void => {\n if (expectedDimension !== undefined && vector.float32.length !== expectedDimension) {\n throw new Error(\n `Vector dimension mismatch: expected ${expectedDimension}, got ${vector.float32.length}`\n )\n }\n}\n","import { StorageApiError, StorageUnknownError, ErrorNamespace } from './errors'\nimport { isPlainObject, resolveResponse } from './helpers'\n\nexport type Fetch = typeof fetch\n\n/**\n * Options for fetch requests\n */\nexport interface FetchOptions {\n headers?: {\n [key: string]: string\n }\n duplex?: string\n noResolveJson?: boolean\n}\n\n/**\n * Additional fetch parameters (e.g., signal for cancellation)\n */\nexport interface FetchParameters {\n signal?: AbortSignal\n}\n\n/**\n * HTTP methods supported by the API\n */\nexport type RequestMethodType = 'GET' | 'POST' | 'PUT' | 'DELETE' | 'HEAD'\n\n/**\n * Extracts error message from various error response formats\n * @param err - Error object from API\n * @returns Human-readable error message\n */\nconst _getErrorMessage = (err: any): string =>\n err.msg ||\n err.message ||\n err.error_description ||\n (typeof err.error === 'string' ? 
err.error : err.error?.message) ||\n JSON.stringify(err)\n\n/**\n * Handles fetch errors and converts them to Storage error types\n * @param error - The error caught from fetch\n * @param reject - Promise rejection function\n * @param options - Fetch options that may affect error handling\n * @param namespace - Error namespace ('storage' or 'vectors')\n */\nconst handleError = async (\n error: unknown,\n reject: (reason?: any) => void,\n options: FetchOptions | undefined,\n namespace: ErrorNamespace\n) => {\n // Check if error is a Response-like object (has status and ok properties)\n // This is more reliable than instanceof which can fail across realms\n const isResponseLike =\n error &&\n typeof error === 'object' &&\n 'status' in error &&\n 'ok' in error &&\n typeof (error as any).status === 'number'\n\n if (isResponseLike && !options?.noResolveJson) {\n const responseError = error as any\n const status = responseError.status || 500\n\n // Try to parse JSON body if available\n if (typeof responseError.json === 'function') {\n responseError\n .json()\n .then((err: any) => {\n const statusCode = err?.statusCode || err?.code || status + ''\n reject(new StorageApiError(_getErrorMessage(err), status, statusCode, namespace))\n })\n .catch(() => {\n // If JSON parsing fails for vectors, create ApiError with HTTP status\n if (namespace === 'vectors') {\n const statusCode = status + ''\n const message = responseError.statusText || `HTTP ${status} error`\n reject(new StorageApiError(message, status, statusCode, namespace))\n } else {\n const statusCode = status + ''\n const message = responseError.statusText || `HTTP ${status} error`\n reject(new StorageApiError(message, status, statusCode, namespace))\n }\n })\n } else {\n // No json() method available, create error from status\n const statusCode = status + ''\n const message = responseError.statusText || `HTTP ${status} error`\n reject(new StorageApiError(message, status, statusCode, namespace))\n }\n } else {\n reject(new StorageUnknownError(_getErrorMessage(error), error, namespace))\n }\n}\n\n/**\n * Builds request parameters for fetch calls\n * @param method - HTTP method\n * @param options - Custom fetch options\n * @param parameters - Additional fetch parameters like AbortSignal\n * @param body - Request body (will be JSON stringified if plain object)\n * @returns Complete fetch request parameters\n */\nconst _getRequestParams = (\n method: RequestMethodType,\n options?: FetchOptions,\n parameters?: FetchParameters,\n body?: object\n) => {\n const params: { [k: string]: any } = { method, headers: options?.headers || {} }\n\n if (method === 'GET' || method === 'HEAD' || !body) {\n return { ...params, ...parameters }\n }\n\n if (isPlainObject(body)) {\n params.headers = { 'Content-Type': 'application/json', ...options?.headers }\n params.body = JSON.stringify(body)\n } else {\n params.body = body\n }\n\n if (options?.duplex) {\n params.duplex = options.duplex\n }\n\n return { ...params, ...parameters }\n}\n\n/**\n * Internal request handler that wraps fetch with error handling\n * @param fetcher - Fetch function to use\n * @param method - HTTP method\n * @param url - Request URL\n * @param options - Custom fetch options\n * @param parameters - Additional fetch parameters\n * @param body - Request body\n * @param namespace - Error namespace ('storage' or 'vectors')\n * @returns Promise with parsed response or error\n */\nasync function _handleRequest(\n fetcher: Fetch,\n method: RequestMethodType,\n url: string,\n options: FetchOptions | 
undefined,\n parameters: FetchParameters | undefined,\n body: object | undefined,\n namespace: ErrorNamespace\n): Promise<any> {\n return new Promise((resolve, reject) => {\n fetcher(url, _getRequestParams(method, options, parameters, body))\n .then((result) => {\n if (!result.ok) throw result\n if (options?.noResolveJson) return result\n\n // AWS S3 Vectors API returns 200 OK with content-length: 0 for successful mutations\n // (putVectors, deleteVectors) instead of 204 or JSON response. This is AWS's design choice\n // for performance optimization of bulk operations (up to 500 vectors per request).\n // We handle this to prevent \"Unexpected end of JSON input\" errors when calling result.json()\n if (namespace === 'vectors') {\n const contentType = result.headers.get('content-type')\n const contentLength = result.headers.get('content-length')\n\n // Return empty object for explicitly empty responses\n if (contentLength === '0' || result.status === 204) {\n return {}\n }\n\n // Return empty object if no JSON content type\n if (!contentType || !contentType.includes('application/json')) {\n return {}\n }\n }\n\n return result.json()\n })\n .then((data) => resolve(data))\n .catch((error) => handleError(error, reject, options, namespace))\n })\n}\n\n/**\n * Creates a fetch API with the specified namespace\n * @param namespace - Error namespace ('storage' or 'vectors')\n * @returns Object with HTTP method functions\n */\nexport function createFetchApi(namespace: ErrorNamespace = 'storage') {\n return {\n /**\n * Performs a GET request\n * @param fetcher - Fetch function to use\n * @param url - Request URL\n * @param options - Custom fetch options\n * @param parameters - Additional fetch parameters\n * @returns Promise with parsed response\n */\n get: async (\n fetcher: Fetch,\n url: string,\n options?: FetchOptions,\n parameters?: FetchParameters\n ): Promise<any> => {\n return _handleRequest(fetcher, 'GET', url, options, parameters, undefined, namespace)\n },\n\n /**\n * Performs a POST request\n * @param fetcher - Fetch function to use\n * @param url - Request URL\n * @param body - Request body to be JSON stringified\n * @param options - Custom fetch options\n * @param parameters - Additional fetch parameters\n * @returns Promise with parsed response\n */\n post: async (\n fetcher: Fetch,\n url: string,\n body: object,\n options?: FetchOptions,\n parameters?: FetchParameters\n ): Promise<any> => {\n return _handleRequest(fetcher, 'POST', url, options, parameters, body, namespace)\n },\n\n /**\n * Performs a PUT request\n * @param fetcher - Fetch function to use\n * @param url - Request URL\n * @param body - Request body to be JSON stringified\n * @param options - Custom fetch options\n * @param parameters - Additional fetch parameters\n * @returns Promise with parsed response\n */\n put: async (\n fetcher: Fetch,\n url: string,\n body: object,\n options?: FetchOptions,\n parameters?: FetchParameters\n ): Promise<any> => {\n return _handleRequest(fetcher, 'PUT', url, options, parameters, body, namespace)\n },\n\n /**\n * Performs a HEAD request\n * @param fetcher - Fetch function to use\n * @param url - Request URL\n * @param options - Custom fetch options\n * @param parameters - Additional fetch parameters\n * @returns Promise with Response object (not JSON parsed)\n */\n head: async (\n fetcher: Fetch,\n url: string,\n options?: FetchOptions,\n parameters?: FetchParameters\n ): Promise<any> => {\n return _handleRequest(\n fetcher,\n 'HEAD',\n url,\n {\n ...options,\n noResolveJson: true,\n 
},\n parameters,\n undefined,\n namespace\n )\n },\n\n /**\n * Performs a DELETE request\n * @param fetcher - Fetch function to use\n * @param url - Request URL\n * @param body - Request body to be JSON stringified\n * @param options - Custom fetch options\n * @param parameters - Additional fetch parameters\n * @returns Promise with parsed response\n */\n remove: async (\n fetcher: Fetch,\n url: string,\n body: object,\n options?: FetchOptions,\n parameters?: FetchParameters\n ): Promise<any> => {\n return _handleRequest(fetcher, 'DELETE', url, options, parameters, body, namespace)\n },\n }\n}\n\n// Default exports for backward compatibility with 'storage' namespace\nconst defaultApi = createFetchApi('storage')\nexport const { get, post, put, head, remove } = defaultApi\n\n// Vectors API with 'vectors' namespace for proper error handling\nexport const vectorsApi = createFetchApi('vectors')\n","import { ErrorNamespace, isStorageError, StorageError } from './errors'\nimport { Fetch } from './fetch'\nimport { resolveFetch } from './helpers'\n\n/**\n * @ignore\n * Base API client class for all Storage API classes\n * Provides common infrastructure for error handling and configuration\n *\n * @typeParam TError - The error type (StorageError or subclass)\n */\nexport default abstract class BaseApiClient<TError extends StorageError = StorageError> {\n protected url: string\n protected headers: { [key: string]: string }\n protected fetch: Fetch\n protected shouldThrowOnError = false\n protected namespace: ErrorNamespace\n\n /**\n * Creates a new BaseApiClient instance\n * @param url - Base URL for API requests\n * @param headers - Default headers for API requests\n * @param fetch - Optional custom fetch implementation\n * @param namespace - Error namespace ('storage' or 'vectors')\n */\n constructor(\n url: string,\n headers: { [key: string]: string } = {},\n fetch?: Fetch,\n namespace: ErrorNamespace = 'storage'\n ) {\n this.url = url\n this.headers = headers\n this.fetch = resolveFetch(fetch)\n this.namespace = namespace\n }\n\n /**\n * Enable throwing errors instead of returning them.\n * When enabled, errors are thrown instead of returned in { data, error } format.\n *\n * @returns this - For method chaining\n */\n public throwOnError(): this {\n this.shouldThrowOnError = true\n return this\n }\n\n /**\n * Handles API operation with standardized error handling\n * Eliminates repetitive try-catch blocks across all API methods\n *\n * This wrapper:\n * 1. Executes the operation\n * 2. Returns { data, error: null } on success\n * 3. Returns { data: null, error } on failure (if shouldThrowOnError is false)\n * 4. 
Throws error on failure (if shouldThrowOnError is true)\n *\n * @typeParam T - The expected data type from the operation\n * @param operation - Async function that performs the API call\n * @returns Promise with { data, error } tuple\n *\n * @example\n * ```typescript\n * async listBuckets() {\n * return this.handleOperation(async () => {\n * return await get(this.fetch, `${this.url}/bucket`, {\n * headers: this.headers,\n * })\n * })\n * }\n * ```\n */\n protected async handleOperation<T>(\n operation: () => Promise<T>\n ): Promise<{ data: T; error: null } | { data: null; error: TError }> {\n try {\n const data = await operation()\n return { data, error: null }\n } catch (error) {\n if (this.shouldThrowOnError) {\n throw error\n }\n if (isStorageError(error)) {\n return { data: null, error: error as TError }\n }\n throw error\n }\n }\n}\n","import { isStorageError } from '../lib/common/errors'\nimport { DownloadResult } from '../lib/types'\n\nexport default class StreamDownloadBuilder implements PromiseLike<DownloadResult<ReadableStream>> {\n constructor(\n private downloadFn: () => Promise<Response>,\n private shouldThrowOnError: boolean\n ) {}\n\n then<TResult1 = DownloadResult<ReadableStream>, TResult2 = never>(\n onfulfilled?:\n | ((value: DownloadResult<ReadableStream>) => TResult1 | PromiseLike<TResult1>)\n | null,\n onrejected?: ((reason: any) => TResult2 | PromiseLike<TResult2>) | null\n ): Promise<TResult1 | TResult2> {\n return this.execute().then(onfulfilled, onrejected)\n }\n\n private async execute(): Promise<DownloadResult<ReadableStream>> {\n try {\n const result = await this.downloadFn()\n\n return {\n data: result.body as ReadableStream,\n error: null,\n }\n } catch (error) {\n if (this.shouldThrowOnError) {\n throw error\n }\n\n if (isStorageError(error)) {\n return { data: null, error }\n }\n\n throw error\n }\n }\n}\n","import { isStorageError } from '../lib/common/errors'\nimport { DownloadResult } from '../lib/types'\nimport StreamDownloadBuilder from './StreamDownloadBuilder'\n\nexport default class BlobDownloadBuilder implements Promise<DownloadResult<Blob>> {\n readonly [Symbol.toStringTag]: string = 'BlobDownloadBuilder'\n private promise: Promise<DownloadResult<Blob>> | null = null\n\n constructor(\n private downloadFn: () => Promise<Response>,\n private shouldThrowOnError: boolean\n ) {}\n\n asStream(): StreamDownloadBuilder {\n return new StreamDownloadBuilder(this.downloadFn, this.shouldThrowOnError)\n }\n\n then<TResult1 = DownloadResult<Blob>, TResult2 = never>(\n onfulfilled?: ((value: DownloadResult<Blob>) => TResult1 | PromiseLike<TResult1>) | null,\n onrejected?: ((reason: any) => TResult2 | PromiseLike<TResult2>) | null\n ): Promise<TResult1 | TResult2> {\n return this.getPromise().then(onfulfilled, onrejected)\n }\n\n catch<TResult = never>(\n onrejected?: ((reason: any) => TResult | PromiseLike<TResult>) | null\n ): Promise<DownloadResult<Blob> | TResult> {\n return this.getPromise().catch(onrejected)\n }\n\n finally(onfinally?: (() => void) | null): Promise<DownloadResult<Blob>> {\n return this.getPromise().finally(onfinally)\n }\n\n private getPromise(): Promise<DownloadResult<Blob>> {\n if (!this.promise) {\n this.promise = this.execute()\n }\n return this.promise\n }\n\n private async execute(): Promise<DownloadResult<Blob>> {\n try {\n const result = await this.downloadFn()\n\n return {\n data: await result.blob(),\n error: null,\n }\n } catch (error) {\n if (this.shouldThrowOnError) {\n throw error\n }\n\n if (isStorageError(error)) {\n return 
{ data: null, error }\n }\n\n throw error\n }\n }\n}\n","import { StorageError, StorageUnknownError, isStorageError } from '../lib/common/errors'\nimport { get, head, post, put, remove, Fetch } from '../lib/common/fetch'\nimport { recursiveToCamel } from '../lib/common/helpers'\nimport BaseApiClient from '../lib/common/BaseApiClient'\nimport {\n FileObject,\n FileOptions,\n SearchOptions,\n FetchParameters,\n TransformOptions,\n DestinationOptions,\n FileObjectV2,\n Camelize,\n SearchV2Options,\n SearchV2Result,\n} from '../lib/types'\nimport BlobDownloadBuilder from './BlobDownloadBuilder'\n\nconst DEFAULT_SEARCH_OPTIONS = {\n limit: 100,\n offset: 0,\n sortBy: {\n column: 'name',\n order: 'asc',\n },\n}\n\nconst DEFAULT_FILE_OPTIONS: FileOptions = {\n cacheControl: '3600',\n contentType: 'text/plain;charset=UTF-8',\n upsert: false,\n}\n\ntype FileBody =\n | ArrayBuffer\n | ArrayBufferView\n | Blob\n | Buffer\n | File\n | FormData\n | NodeJS.ReadableStream\n | ReadableStream<Uint8Array>\n | URLSearchParams\n | string\n\nexport default class StorageFileApi extends BaseApiClient<StorageError> {\n protected bucketId?: string\n\n constructor(\n url: string,\n headers: { [key: string]: string } = {},\n bucketId?: string,\n fetch?: Fetch\n ) {\n super(url, headers, fetch, 'storage')\n this.bucketId = bucketId\n }\n\n /**\n * Uploads a file to an existing bucket or replaces an existing file at the specified path with a new one.\n *\n * @param method HTTP method.\n * @param path The relative file path. Should be of the format `folder/subfolder/filename.png`. The bucket must already exist before attempting to upload.\n * @param fileBody The body of the file to be stored in the bucket.\n */\n private async uploadOrUpdate(\n method: 'POST' | 'PUT',\n path: string,\n fileBody: FileBody,\n fileOptions?: FileOptions\n ): Promise<\n | {\n data: { id: string; path: string; fullPath: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n let body\n const options = { ...DEFAULT_FILE_OPTIONS, ...fileOptions }\n let headers: Record<string, string> = {\n ...this.headers,\n ...(method === 'POST' && { 'x-upsert': String(options.upsert as boolean) }),\n }\n\n const metadata = options.metadata\n\n if (typeof Blob !== 'undefined' && fileBody instanceof Blob) {\n body = new FormData()\n body.append('cacheControl', options.cacheControl as string)\n if (metadata) {\n body.append('metadata', this.encodeMetadata(metadata))\n }\n body.append('', fileBody)\n } else if (typeof FormData !== 'undefined' && fileBody instanceof FormData) {\n body = fileBody\n // Only append if not already present\n if (!body.has('cacheControl')) {\n body.append('cacheControl', options.cacheControl as string)\n }\n if (metadata && !body.has('metadata')) {\n body.append('metadata', this.encodeMetadata(metadata))\n }\n } else {\n body = fileBody\n headers['cache-control'] = `max-age=${options.cacheControl}`\n headers['content-type'] = options.contentType as string\n\n if (metadata) {\n headers['x-metadata'] = this.toBase64(this.encodeMetadata(metadata))\n }\n\n // Node.js streams require duplex option for fetch in Node 20+\n // Check for both web ReadableStream and Node.js streams\n const isStream =\n (typeof ReadableStream !== 'undefined' && body instanceof ReadableStream) ||\n (body && typeof body === 'object' && 'pipe' in body && typeof body.pipe === 'function')\n\n if (isStream && !options.duplex) {\n options.duplex = 'half'\n }\n }\n\n if (fileOptions?.headers) {\n headers 
= { ...headers, ...fileOptions.headers }\n }\n\n const cleanPath = this._removeEmptyFolders(path)\n const _path = this._getFinalPath(cleanPath)\n const data = await (method == 'PUT' ? put : post)(\n this.fetch,\n `${this.url}/object/${_path}`,\n body as object,\n { headers, ...(options?.duplex ? { duplex: options.duplex } : {}) }\n )\n\n return { path: cleanPath, id: data.Id, fullPath: data.Key }\n })\n }\n\n /**\n * Uploads a file to an existing bucket.\n *\n * @category File Buckets\n * @param path The file path, including the file name. Should be of the format `folder/subfolder/filename.png`. The bucket must already exist before attempting to upload.\n * @param fileBody The body of the file to be stored in the bucket.\n * @param fileOptions Optional file upload options including cacheControl, contentType, upsert, and metadata.\n * @returns Promise with response containing file path, id, and fullPath or error\n *\n * @example Upload file\n * ```js\n * const avatarFile = event.target.files[0]\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .upload('public/avatar1.png', avatarFile, {\n * cacheControl: '3600',\n * upsert: false\n * })\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"path\": \"public/avatar1.png\",\n * \"fullPath\": \"avatars/public/avatar1.png\"\n * },\n * \"error\": null\n * }\n * ```\n *\n * @example Upload file using `ArrayBuffer` from base64 file data\n * ```js\n * import { decode } from 'base64-arraybuffer'\n *\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .upload('public/avatar1.png', decode('base64FileData'), {\n * contentType: 'image/png'\n * })\n * ```\n */\n async upload(\n path: string,\n fileBody: FileBody,\n fileOptions?: FileOptions\n ): Promise<\n | {\n data: { id: string; path: string; fullPath: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.uploadOrUpdate('POST', path, fileBody, fileOptions)\n }\n\n /**\n * Upload a file with a token generated from `createSignedUploadUrl`.\n *\n * @category File Buckets\n * @param path The file path, including the file name. Should be of the format `folder/subfolder/filename.png`. The bucket must already exist before attempting to upload.\n * @param token The token generated from `createSignedUploadUrl`\n * @param fileBody The body of the file to be stored in the bucket.\n * @param fileOptions HTTP headers (cacheControl, contentType, etc.).\n * **Note:** The `upsert` option has no effect here. 
To enable upsert behavior,\n * pass `{ upsert: true }` when calling `createSignedUploadUrl()` instead.\n * @returns Promise with response containing file path and fullPath or error\n *\n * @example Upload to a signed URL\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .uploadToSignedUrl('folder/cat.jpg', 'token-from-createSignedUploadUrl', file)\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"path\": \"folder/cat.jpg\",\n * \"fullPath\": \"avatars/folder/cat.jpg\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async uploadToSignedUrl(\n path: string,\n token: string,\n fileBody: FileBody,\n fileOptions?: FileOptions\n ) {\n const cleanPath = this._removeEmptyFolders(path)\n const _path = this._getFinalPath(cleanPath)\n\n const url = new URL(this.url + `/object/upload/sign/${_path}`)\n url.searchParams.set('token', token)\n\n return this.handleOperation(async () => {\n let body\n const options = { upsert: DEFAULT_FILE_OPTIONS.upsert, ...fileOptions }\n const headers: Record<string, string> = {\n ...this.headers,\n ...{ 'x-upsert': String(options.upsert as boolean) },\n }\n\n if (typeof Blob !== 'undefined' && fileBody instanceof Blob) {\n body = new FormData()\n body.append('cacheControl', options.cacheControl as string)\n body.append('', fileBody)\n } else if (typeof FormData !== 'undefined' && fileBody instanceof FormData) {\n body = fileBody\n body.append('cacheControl', options.cacheControl as string)\n } else {\n body = fileBody\n headers['cache-control'] = `max-age=${options.cacheControl}`\n headers['content-type'] = options.contentType as string\n }\n\n const data = await put(this.fetch, url.toString(), body as object, { headers })\n\n return { path: cleanPath, fullPath: data.Key }\n })\n }\n\n /**\n * Creates a signed upload URL.\n * Signed upload URLs can be used to upload files to the bucket without further authentication.\n * They are valid for 2 hours.\n *\n * @category File Buckets\n * @param path The file path, including the current file name. 
For example `folder/image.png`.\n * @param options.upsert If set to true, allows the file to be overwritten if it already exists.\n * @returns Promise with response containing signed upload URL, token, and path or error\n *\n * @example Create Signed Upload URL\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .createSignedUploadUrl('folder/cat.jpg')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"signedUrl\": \"https://example.supabase.co/storage/v1/object/upload/sign/avatars/folder/cat.jpg?token=<TOKEN>\",\n * \"path\": \"folder/cat.jpg\",\n * \"token\": \"<TOKEN>\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async createSignedUploadUrl(\n path: string,\n options?: { upsert: boolean }\n ): Promise<\n | {\n data: { signedUrl: string; token: string; path: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n let _path = this._getFinalPath(path)\n\n const headers = { ...this.headers }\n\n if (options?.upsert) {\n headers['x-upsert'] = 'true'\n }\n\n const data = await post(\n this.fetch,\n `${this.url}/object/upload/sign/${_path}`,\n {},\n { headers }\n )\n\n const url = new URL(this.url + data.url)\n\n const token = url.searchParams.get('token')\n\n if (!token) {\n throw new StorageError('No token returned by API')\n }\n\n return { signedUrl: url.toString(), path, token }\n })\n }\n\n /**\n * Replaces an existing file at the specified path with a new one.\n *\n * @category File Buckets\n * @param path The relative file path. Should be of the format `folder/subfolder/filename.png`. The bucket must already exist before attempting to update.\n * @param fileBody The body of the file to be stored in the bucket.\n * @param fileOptions Optional file upload options including cacheControl, contentType, upsert, and metadata.\n * @returns Promise with response containing file path, id, and fullPath or error\n *\n * @example Update file\n * ```js\n * const avatarFile = event.target.files[0]\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .update('public/avatar1.png', avatarFile, {\n * cacheControl: '3600',\n * upsert: true\n * })\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"path\": \"public/avatar1.png\",\n * \"fullPath\": \"avatars/public/avatar1.png\"\n * },\n * \"error\": null\n * }\n * ```\n *\n * @example Update file using `ArrayBuffer` from base64 file data\n * ```js\n * import {decode} from 'base64-arraybuffer'\n *\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .update('public/avatar1.png', decode('base64FileData'), {\n * contentType: 'image/png'\n * })\n * ```\n */\n async update(\n path: string,\n fileBody:\n | ArrayBuffer\n | ArrayBufferView\n | Blob\n | Buffer\n | File\n | FormData\n | NodeJS.ReadableStream\n | ReadableStream<Uint8Array>\n | URLSearchParams\n | string,\n fileOptions?: FileOptions\n ): Promise<\n | {\n data: { id: string; path: string; fullPath: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.uploadOrUpdate('PUT', path, fileBody, fileOptions)\n }\n\n /**\n * Moves an existing file to a new path in the same bucket.\n *\n * @category File Buckets\n * @param fromPath The original file path, including the current file name. For example `folder/image.png`.\n * @param toPath The new file path, including the new file name. 
For example `folder/image-new.png`.\n * @param options The destination options.\n * @returns Promise with response containing success message or error\n *\n * @example Move file\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .move('public/avatar1.png', 'private/avatar2.png')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"message\": \"Successfully moved\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async move(\n fromPath: string,\n toPath: string,\n options?: DestinationOptions\n ): Promise<\n | {\n data: { message: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n return await post(\n this.fetch,\n `${this.url}/object/move`,\n {\n bucketId: this.bucketId,\n sourceKey: fromPath,\n destinationKey: toPath,\n destinationBucket: options?.destinationBucket,\n },\n { headers: this.headers }\n )\n })\n }\n\n /**\n * Copies an existing file to a new path in the same bucket.\n *\n * @category File Buckets\n * @param fromPath The original file path, including the current file name. For example `folder/image.png`.\n * @param toPath The new file path, including the new file name. For example `folder/image-copy.png`.\n * @param options The destination options.\n * @returns Promise with response containing copied file path or error\n *\n * @example Copy file\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .copy('public/avatar1.png', 'private/avatar2.png')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"path\": \"avatars/private/avatar2.png\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async copy(\n fromPath: string,\n toPath: string,\n options?: DestinationOptions\n ): Promise<\n | {\n data: { path: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n const data = await post(\n this.fetch,\n `${this.url}/object/copy`,\n {\n bucketId: this.bucketId,\n sourceKey: fromPath,\n destinationKey: toPath,\n destinationBucket: options?.destinationBucket,\n },\n { headers: this.headers }\n )\n return { path: data.Key }\n })\n }\n\n /**\n * Creates a signed URL. Use a signed URL to share a file for a fixed amount of time.\n *\n * @category File Buckets\n * @param path The file path, including the current file name. For example `folder/image.png`.\n * @param expiresIn The number of seconds until the signed URL expires. For example, `60` for a URL which is valid for one minute.\n * @param options.download triggers the file as a download if set to true. 
Set this parameter as the name of the file if you want to trigger the download with a different filename.\n * @param options.transform Transform the asset before serving it to the client.\n * @returns Promise with response containing signed URL or error\n *\n * @example Create Signed URL\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .createSignedUrl('folder/avatar1.png', 60)\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"signedUrl\": \"https://example.supabase.co/storage/v1/object/sign/avatars/folder/avatar1.png?token=<TOKEN>\"\n * },\n * \"error\": null\n * }\n * ```\n *\n * @example Create a signed URL for an asset with transformations\n * ```js\n * const { data } = await supabase\n * .storage\n * .from('avatars')\n * .createSignedUrl('folder/avatar1.png', 60, {\n * transform: {\n * width: 100,\n * height: 100,\n * }\n * })\n * ```\n *\n * @example Create a signed URL which triggers the download of the asset\n * ```js\n * const { data } = await supabase\n * .storage\n * .from('avatars')\n * .createSignedUrl('folder/avatar1.png', 60, {\n * download: true,\n * })\n * ```\n */\n async createSignedUrl(\n path: string,\n expiresIn: number,\n options?: { download?: string | boolean; transform?: TransformOptions }\n ): Promise<\n | {\n data: { signedUrl: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n let _path = this._getFinalPath(path)\n\n let data = await post(\n this.fetch,\n `${this.url}/object/sign/${_path}`,\n { expiresIn, ...(options?.transform ? { transform: options.transform } : {}) },\n { headers: this.headers }\n )\n const downloadQueryParam = options?.download\n ? `&download=${options.download === true ? '' : options.download}`\n : ''\n const signedUrl = encodeURI(`${this.url}${data.signedURL}${downloadQueryParam}`)\n return { signedUrl }\n })\n }\n\n /**\n * Creates multiple signed URLs. Use a signed URL to share a file for a fixed amount of time.\n *\n * @category File Buckets\n * @param paths The file paths to be downloaded, including the current file names. For example `['folder/image.png', 'folder2/image2.png']`.\n * @param expiresIn The number of seconds until the signed URLs expire. For example, `60` for URLs which are valid for one minute.\n * @param options.download triggers the file as a download if set to true. 
Set this parameter as the name of the file if you want to trigger the download with a different filename.\n * @returns Promise with response containing array of objects with signedUrl, path, and error or error\n *\n * @example Create Signed URLs\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .createSignedUrls(['folder/avatar1.png', 'folder/avatar2.png'], 60)\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": [\n * {\n * \"error\": null,\n * \"path\": \"folder/avatar1.png\",\n * \"signedURL\": \"/object/sign/avatars/folder/avatar1.png?token=<TOKEN>\",\n * \"signedUrl\": \"https://example.supabase.co/storage/v1/object/sign/avatars/folder/avatar1.png?token=<TOKEN>\"\n * },\n * {\n * \"error\": null,\n * \"path\": \"folder/avatar2.png\",\n * \"signedURL\": \"/object/sign/avatars/folder/avatar2.png?token=<TOKEN>\",\n * \"signedUrl\": \"https://example.supabase.co/storage/v1/object/sign/avatars/folder/avatar2.png?token=<TOKEN>\"\n * }\n * ],\n * \"error\": null\n * }\n * ```\n */\n async createSignedUrls(\n paths: string[],\n expiresIn: number,\n options?: { download: string | boolean }\n ): Promise<\n | {\n data: { error: string | null; path: string | null; signedUrl: string }[]\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n const data = await post(\n this.fetch,\n `${this.url}/object/sign/${this.bucketId}`,\n { expiresIn, paths },\n { headers: this.headers }\n )\n\n const downloadQueryParam = options?.download\n ? `&download=${options.download === true ? '' : options.download}`\n : ''\n return data.map((datum: { signedURL: string }) => ({\n ...datum,\n signedUrl: datum.signedURL\n ? encodeURI(`${this.url}${datum.signedURL}${downloadQueryParam}`)\n : null,\n }))\n })\n }\n\n /**\n * Downloads a file from a private bucket. For public buckets, make a request to the URL returned from `getPublicUrl` instead.\n *\n * @category File Buckets\n * @param path The full path and file name of the file to be downloaded. For example `folder/image.png`.\n * @param options.transform Transform the asset before serving it to the client.\n * @returns BlobDownloadBuilder instance for downloading the file\n *\n * @example Download file\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .download('folder/avatar1.png')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": <BLOB>,\n * \"error\": null\n * }\n * ```\n *\n * @example Download file with transformations\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .download('folder/avatar1.png', {\n * transform: {\n * width: 100,\n * height: 100,\n * quality: 80\n * }\n * })\n * ```\n */\n download<Options extends { transform?: TransformOptions }>(\n path: string,\n options?: Options\n ): BlobDownloadBuilder {\n const wantsTransformation = typeof options?.transform !== 'undefined'\n const renderPath = wantsTransformation ? 'render/image/authenticated' : 'object'\n const transformationQuery = this.transformOptsToQueryString(options?.transform || {})\n const queryString = transformationQuery ? 
`?${transformationQuery}` : ''\n const _path = this._getFinalPath(path)\n const downloadFn = () =>\n get(this.fetch, `${this.url}/${renderPath}/${_path}${queryString}`, {\n headers: this.headers,\n noResolveJson: true,\n })\n return new BlobDownloadBuilder(downloadFn, this.shouldThrowOnError)\n }\n\n /**\n * Retrieves the details of an existing file.\n *\n * @category File Buckets\n * @param path The file path, including the file name. For example `folder/image.png`.\n * @returns Promise with response containing file metadata or error\n *\n * @example Get file info\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .info('folder/avatar1.png')\n * ```\n */\n async info(path: string): Promise<\n | {\n data: Camelize<FileObjectV2>\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n const _path = this._getFinalPath(path)\n\n return this.handleOperation(async () => {\n const data = await get(this.fetch, `${this.url}/object/info/${_path}`, {\n headers: this.headers,\n })\n\n return recursiveToCamel(data) as Camelize<FileObjectV2>\n })\n }\n\n /**\n * Checks the existence of a file.\n *\n * @category File Buckets\n * @param path The file path, including the file name. For example `folder/image.png`.\n * @returns Promise with response containing boolean indicating file existence or error\n *\n * @example Check file existence\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .exists('folder/avatar1.png')\n * ```\n */\n async exists(path: string): Promise<\n | {\n data: boolean\n error: null\n }\n | {\n data: boolean\n error: StorageError\n }\n > {\n const _path = this._getFinalPath(path)\n\n try {\n await head(this.fetch, `${this.url}/object/${_path}`, {\n headers: this.headers,\n })\n\n return { data: true, error: null }\n } catch (error) {\n if (this.shouldThrowOnError) {\n throw error\n }\n if (isStorageError(error) && error instanceof StorageUnknownError) {\n const originalError = error.originalError as unknown as { status: number }\n\n if ([400, 404].includes(originalError?.status)) {\n return { data: false, error }\n }\n }\n\n throw error\n }\n }\n\n /**\n * A simple convenience function to get the URL for an asset in a public bucket. If you do not want to use this function, you can construct the public URL by concatenating the bucket URL with the path to the asset.\n * This function does not verify if the bucket is public. If a public URL is created for a bucket which is not public, you will not be able to download the asset.\n *\n * @category File Buckets\n * @param path The path and name of the file to generate the public URL for. For example `folder/image.png`.\n * @param options.download Triggers the file as a download if set to true. 
Set this parameter as the name of the file if you want to trigger the download with a different filename.\n * @param options.transform Transform the asset before serving it to the client.\n * @returns Object with public URL\n *\n * @example Returns the URL for an asset in a public bucket\n * ```js\n * const { data } = supabase\n * .storage\n * .from('public-bucket')\n * .getPublicUrl('folder/avatar1.png')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"publicUrl\": \"https://example.supabase.co/storage/v1/object/public/public-bucket/folder/avatar1.png\"\n * }\n * }\n * ```\n *\n * @example Returns the URL for an asset in a public bucket with transformations\n * ```js\n * const { data } = supabase\n * .storage\n * .from('public-bucket')\n * .getPublicUrl('folder/avatar1.png', {\n * transform: {\n * width: 100,\n * height: 100,\n * }\n * })\n * ```\n *\n * @example Returns the URL which triggers the download of an asset in a public bucket\n * ```js\n * const { data } = supabase\n * .storage\n * .from('public-bucket')\n * .getPublicUrl('folder/avatar1.png', {\n * download: true,\n * })\n * ```\n */\n getPublicUrl(\n path: string,\n options?: { download?: string | boolean; transform?: TransformOptions }\n ): { data: { publicUrl: string } } {\n const _path = this._getFinalPath(path)\n const _queryString: string[] = []\n\n const downloadQueryParam = options?.download\n ? `download=${options.download === true ? '' : options.download}`\n : ''\n\n if (downloadQueryParam !== '') {\n _queryString.push(downloadQueryParam)\n }\n\n const wantsTransformation = typeof options?.transform !== 'undefined'\n const renderPath = wantsTransformation ? 'render/image' : 'object'\n const transformationQuery = this.transformOptsToQueryString(options?.transform || {})\n\n if (transformationQuery !== '') {\n _queryString.push(transformationQuery)\n }\n\n let queryString = _queryString.join('&')\n if (queryString !== '') {\n queryString = `?${queryString}`\n }\n\n return {\n data: { publicUrl: encodeURI(`${this.url}/${renderPath}/public/${_path}${queryString}`) },\n }\n }\n\n /**\n * Deletes files within the same bucket\n *\n * @category File Buckets\n * @param paths An array of files to delete, including the path and file name. 
For example [`'folder/image.png'`].\n * @returns Promise with response containing array of deleted file objects or error\n *\n * @example Delete file\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .remove(['folder/avatar1.png'])\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": [],\n * \"error\": null\n * }\n * ```\n */\n async remove(paths: string[]): Promise<\n | {\n data: FileObject[]\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n return await remove(\n this.fetch,\n `${this.url}/object/${this.bucketId}`,\n { prefixes: paths },\n { headers: this.headers }\n )\n })\n }\n\n /**\n * Get file metadata\n * @param id the file id to retrieve metadata\n */\n // async getMetadata(\n // id: string\n // ): Promise<\n // | {\n // data: Metadata\n // error: null\n // }\n // | {\n // data: null\n // error: StorageError\n // }\n // > {\n // try {\n // const data = await get(this.fetch, `${this.url}/metadata/${id}`, { headers: this.headers })\n // return { data, error: null }\n // } catch (error) {\n // if (isStorageError(error)) {\n // return { data: null, error }\n // }\n\n // throw error\n // }\n // }\n\n /**\n * Update file metadata\n * @param id the file id to update metadata\n * @param meta the new file metadata\n */\n // async updateMetadata(\n // id: string,\n // meta: Metadata\n // ): Promise<\n // | {\n // data: Metadata\n // error: null\n // }\n // | {\n // data: null\n // error: StorageError\n // }\n // > {\n // try {\n // const data = await post(\n // this.fetch,\n // `${this.url}/metadata/${id}`,\n // { ...meta },\n // { headers: this.headers }\n // )\n // return { data, error: null }\n // } catch (error) {\n // if (isStorageError(error)) {\n // return { data: null, error }\n // }\n\n // throw error\n // }\n // }\n\n /**\n * Lists all the files and folders within a path of the bucket.\n *\n * @category File Buckets\n * @param path The folder path.\n * @param options Search options including limit (defaults to 100), offset, sortBy, and search\n * @param parameters Optional fetch parameters including signal for cancellation\n * @returns Promise with response containing array of files or error\n *\n * @example List files in a bucket\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .list('folder', {\n * limit: 100,\n * offset: 0,\n * sortBy: { column: 'name', order: 'asc' },\n * })\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": [\n * {\n * \"name\": \"avatar1.png\",\n * \"id\": \"e668cf7f-821b-4a2f-9dce-7dfa5dd1cfd2\",\n * \"updated_at\": \"2024-05-22T23:06:05.580Z\",\n * \"created_at\": \"2024-05-22T23:04:34.443Z\",\n * \"last_accessed_at\": \"2024-05-22T23:04:34.443Z\",\n * \"metadata\": {\n * \"eTag\": \"\\\"c5e8c553235d9af30ef4f6e280790b92\\\"\",\n * \"size\": 32175,\n * \"mimetype\": \"image/png\",\n * \"cacheControl\": \"max-age=3600\",\n * \"lastModified\": \"2024-05-22T23:06:05.574Z\",\n * \"contentLength\": 32175,\n * \"httpStatusCode\": 200\n * }\n * }\n * ],\n * \"error\": null\n * }\n * ```\n *\n * @example Search files in a bucket\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .list('folder', {\n * limit: 100,\n * offset: 0,\n * sortBy: { column: 'name', order: 'asc' },\n * search: 'jon'\n * })\n * ```\n */\n async list(\n path?: string,\n options?: SearchOptions,\n parameters?: FetchParameters\n ): Promise<\n | {\n data: FileObject[]\n error: null\n }\n | {\n data: 
null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n const body = { ...DEFAULT_SEARCH_OPTIONS, ...options, prefix: path || '' }\n return await post(\n this.fetch,\n `${this.url}/object/list/${this.bucketId}`,\n body,\n { headers: this.headers },\n parameters\n )\n })\n }\n\n /**\n * @experimental this method signature might change in the future\n *\n * @category File Buckets\n * @param options search options\n * @param parameters\n */\n async listV2(\n options?: SearchV2Options,\n parameters?: FetchParameters\n ): Promise<\n | {\n data: SearchV2Result\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n const body = { ...options }\n return await post(\n this.fetch,\n `${this.url}/object/list-v2/${this.bucketId}`,\n body,\n { headers: this.headers },\n parameters\n )\n })\n }\n\n protected encodeMetadata(metadata: Record<string, any>) {\n return JSON.stringify(metadata)\n }\n\n toBase64(data: string) {\n if (typeof Buffer !== 'undefined') {\n return Buffer.from(data).toString('base64')\n }\n return btoa(data)\n }\n\n private _getFinalPath(path: string) {\n return `${this.bucketId}/${path.replace(/^\\/+/, '')}`\n }\n\n private _removeEmptyFolders(path: string) {\n return path.replace(/^\\/|\\/$/g, '').replace(/\\/+/g, '/')\n }\n\n private transformOptsToQueryString(transform: TransformOptions) {\n const params: string[] = []\n if (transform.width) {\n params.push(`width=${transform.width}`)\n }\n\n if (transform.height) {\n params.push(`height=${transform.height}`)\n }\n\n if (transform.resize) {\n params.push(`resize=${transform.resize}`)\n }\n\n if (transform.format) {\n params.push(`format=${transform.format}`)\n }\n\n if (transform.quality) {\n params.push(`quality=${transform.quality}`)\n }\n\n return params.join('&')\n }\n}\n","// Generated automatically during releases by scripts/update-version-files.ts\n// This file provides runtime access to the package version for:\n// - HTTP request headers (e.g., X-Client-Info header for API requests)\n// - Debugging and support (identifying which version is running)\n// - Telemetry and logging (version reporting in errors/analytics)\n// - Ensuring build artifacts match the published package version\nexport const version = '2.95.2'\n","import { version } from './version'\nexport const DEFAULT_HEADERS = {\n 'X-Client-Info': `storage-js/${version}`,\n}\n","import { DEFAULT_HEADERS } from '../lib/constants'\nimport { StorageError } from '../lib/common/errors'\nimport { Fetch, get, post, put, remove } from '../lib/common/fetch'\nimport BaseApiClient from '../lib/common/BaseApiClient'\nimport { Bucket, BucketType, ListBucketOptions } from '../lib/types'\nimport { StorageClientOptions } from '../StorageClient'\n\nexport default class StorageBucketApi extends BaseApiClient<StorageError> {\n constructor(\n url: string,\n headers: { [key: string]: string } = {},\n fetch?: Fetch,\n opts?: StorageClientOptions\n ) {\n const baseUrl = new URL(url)\n\n // if legacy uri is used, replace with new storage host (disables request buffering to allow > 50GB uploads)\n // \"project-ref.supabase.co\" becomes \"project-ref.storage.supabase.co\"\n if (opts?.useNewHostname) {\n const isSupabaseHost = /supabase\\.(co|in|red)$/.test(baseUrl.hostname)\n if (isSupabaseHost && !baseUrl.hostname.includes('storage.supabase.')) {\n baseUrl.hostname = baseUrl.hostname.replace('supabase.', 'storage.supabase.')\n }\n }\n\n const finalUrl = baseUrl.href.replace(/\\/$/, '')\n const 
finalHeaders = { ...DEFAULT_HEADERS, ...headers }\n\n super(finalUrl, finalHeaders, fetch, 'storage')\n }\n\n /**\n * Retrieves the details of all Storage buckets within an existing project.\n *\n * @category File Buckets\n * @param options Query parameters for listing buckets\n * @param options.limit Maximum number of buckets to return\n * @param options.offset Number of buckets to skip\n * @param options.sortColumn Column to sort by ('id', 'name', 'created_at', 'updated_at')\n * @param options.sortOrder Sort order ('asc' or 'desc')\n * @param options.search Search term to filter bucket names\n * @returns Promise with response containing array of buckets or error\n *\n * @example List buckets\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .listBuckets()\n * ```\n *\n * @example List buckets with options\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .listBuckets({\n * limit: 10,\n * offset: 0,\n * sortColumn: 'created_at',\n * sortOrder: 'desc',\n * search: 'prod'\n * })\n * ```\n */\n async listBuckets(options?: ListBucketOptions): Promise<\n | {\n data: Bucket[]\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n const queryString = this.listBucketOptionsToQueryString(options)\n return await get(this.fetch, `${this.url}/bucket${queryString}`, {\n headers: this.headers,\n })\n })\n }\n\n /**\n * Retrieves the details of an existing Storage bucket.\n *\n * @category File Buckets\n * @param id The unique identifier of the bucket you would like to retrieve.\n * @returns Promise with response containing bucket details or error\n *\n * @example Get bucket\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .getBucket('avatars')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"id\": \"avatars\",\n * \"name\": \"avatars\",\n * \"owner\": \"\",\n * \"public\": false,\n * \"file_size_limit\": 1024,\n * \"allowed_mime_types\": [\n * \"image/png\"\n * ],\n * \"created_at\": \"2024-05-22T22:26:05.100Z\",\n * \"updated_at\": \"2024-05-22T22:26:05.100Z\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async getBucket(id: string): Promise<\n | {\n data: Bucket\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n return await get(this.fetch, `${this.url}/bucket/${id}`, { headers: this.headers })\n })\n }\n\n /**\n * Creates a new Storage bucket\n *\n * @category File Buckets\n * @param id A unique identifier for the bucket you are creating.\n * @param options.public The visibility of the bucket. Public buckets don't require an authorization token to download objects, but still require a valid token for all other operations. By default, buckets are private.\n * @param options.fileSizeLimit specifies the max file size in bytes that can be uploaded to this bucket.\n * The global file size limit takes precedence over this value.\n * The default value is null, which doesn't set a per bucket file size limit.\n * @param options.allowedMimeTypes specifies the allowed mime types that this bucket can accept during upload.\n * The default value is null, which allows files with all mime types to be uploaded.\n * Each mime type specified can be a wildcard, e.g. image/*, or a specific mime type, e.g. image/png.\n * @param options.type (private-beta) specifies the bucket type. 
see `BucketType` for more details.\n * - default bucket type is `STANDARD`\n * @returns Promise with response containing newly created bucket name or error\n *\n * @example Create bucket\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .createBucket('avatars', {\n * public: false,\n * allowedMimeTypes: ['image/png'],\n * fileSizeLimit: 1024\n * })\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"name\": \"avatars\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async createBucket(\n id: string,\n options: {\n public: boolean\n fileSizeLimit?: number | string | null\n allowedMimeTypes?: string[] | null\n type?: BucketType\n } = {\n public: false,\n }\n ): Promise<\n | {\n data: Pick<Bucket, 'name'>\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n return await post(\n this.fetch,\n `${this.url}/bucket`,\n {\n id,\n name: id,\n type: options.type,\n public: options.public,\n file_size_limit: options.fileSizeLimit,\n allowed_mime_types: options.allowedMimeTypes,\n },\n { headers: this.headers }\n )\n })\n }\n\n /**\n * Updates a Storage bucket\n *\n * @category File Buckets\n * @param id A unique identifier for the bucket you are updating.\n * @param options.public The visibility of the bucket. Public buckets don't require an authorization token to download objects, but still require a valid token for all other operations.\n * @param options.fileSizeLimit specifies the max file size in bytes that can be uploaded to this bucket.\n * The global file size limit takes precedence over this value.\n * The default value is null, which doesn't set a per bucket file size limit.\n * @param options.allowedMimeTypes specifies the allowed mime types that this bucket can accept during upload.\n * The default value is null, which allows files with all mime types to be uploaded.\n * Each mime type specified can be a wildcard, e.g. image/*, or a specific mime type, e.g. 
image/png.\n * @returns Promise with response containing success message or error\n *\n * @example Update bucket\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .updateBucket('avatars', {\n * public: false,\n * allowedMimeTypes: ['image/png'],\n * fileSizeLimit: 1024\n * })\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"message\": \"Successfully updated\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async updateBucket(\n id: string,\n options: {\n public: boolean\n fileSizeLimit?: number | string | null\n allowedMimeTypes?: string[] | null\n }\n ): Promise<\n | {\n data: { message: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n return await put(\n this.fetch,\n `${this.url}/bucket/${id}`,\n {\n id,\n name: id,\n public: options.public,\n file_size_limit: options.fileSizeLimit,\n allowed_mime_types: options.allowedMimeTypes,\n },\n { headers: this.headers }\n )\n })\n }\n\n /**\n * Removes all objects inside a single bucket.\n *\n * @category File Buckets\n * @param id The unique identifier of the bucket you would like to empty.\n * @returns Promise with success message or error\n *\n * @example Empty bucket\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .emptyBucket('avatars')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"message\": \"Successfully emptied\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async emptyBucket(id: string): Promise<\n | {\n data: { message: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n return await post(this.fetch, `${this.url}/bucket/${id}/empty`, {}, { headers: this.headers })\n })\n }\n\n /**\n * Deletes an existing bucket. A bucket can't be deleted with existing objects inside it.\n * You must first `empty()` the bucket.\n *\n * @category File Buckets\n * @param id The unique identifier of the bucket you would like to delete.\n * @returns Promise with success message or error\n *\n * @example Delete bucket\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .deleteBucket('avatars')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"message\": \"Successfully deleted\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async deleteBucket(id: string): Promise<\n | {\n data: { message: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n return await remove(this.fetch, `${this.url}/bucket/${id}`, {}, { headers: this.headers })\n })\n }\n\n private listBucketOptionsToQueryString(options?: ListBucketOptions): string {\n const params: Record<string, string> = {}\n if (options) {\n if ('limit' in options) {\n params.limit = String(options.limit)\n }\n if ('offset' in options) {\n params.offset = String(options.offset)\n }\n if (options.search) {\n params.search = options.search\n }\n if (options.sortColumn) {\n params.sortColumn = options.sortColumn\n }\n if (options.sortOrder) {\n params.sortOrder = options.sortOrder\n }\n }\n return Object.keys(params).length > 0 ? '?' 
+ new URLSearchParams(params).toString() : ''\n }\n}\n","import { IcebergRestCatalog, IcebergError } from 'iceberg-js'\nimport { DEFAULT_HEADERS } from '../lib/constants'\nimport { StorageError } from '../lib/common/errors'\nimport { Fetch, get, post, remove } from '../lib/common/fetch'\nimport { isValidBucketName } from '../lib/common/helpers'\nimport BaseApiClient from '../lib/common/BaseApiClient'\nimport { AnalyticBucket } from '../lib/types'\n\ntype WrapAsyncMethod<T> = T extends (...args: infer A) => Promise<infer R>\n ? (...args: A) => Promise<{ data: R; error: null } | { data: null; error: IcebergError }>\n : T\n\nexport type WrappedIcebergRestCatalog = {\n [K in keyof IcebergRestCatalog]: WrapAsyncMethod<IcebergRestCatalog[K]>\n}\n\n/**\n * Client class for managing Analytics Buckets using Iceberg tables\n * Provides methods for creating, listing, and deleting analytics buckets\n */\nexport default class StorageAnalyticsClient extends BaseApiClient<StorageError> {\n /**\n * @alpha\n *\n * Creates a new StorageAnalyticsClient instance\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Analytics Buckets\n * @param url - The base URL for the storage API\n * @param headers - HTTP headers to include in requests\n * @param fetch - Optional custom fetch implementation\n *\n * @example\n * ```typescript\n * const client = new StorageAnalyticsClient(url, headers)\n * ```\n */\n constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) {\n const finalUrl = url.replace(/\\/$/, '')\n const finalHeaders = { ...DEFAULT_HEADERS, ...headers }\n super(finalUrl, finalHeaders, fetch, 'storage')\n }\n\n /**\n * @alpha\n *\n * Creates a new analytics bucket using Iceberg tables\n * Analytics buckets are optimized for analytical queries and data processing\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Analytics Buckets\n * @param name A unique name for the bucket you are creating\n * @returns Promise with response containing newly created analytics bucket or error\n *\n * @example Create analytics bucket\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .analytics\n * .createBucket('analytics-data')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"name\": \"analytics-data\",\n * \"type\": \"ANALYTICS\",\n * \"format\": \"iceberg\",\n * \"created_at\": \"2024-05-22T22:26:05.100Z\",\n * \"updated_at\": \"2024-05-22T22:26:05.100Z\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async createBucket(name: string): Promise<\n | {\n data: AnalyticBucket\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n return await post(this.fetch, `${this.url}/bucket`, { name }, { headers: this.headers })\n })\n }\n\n /**\n * @alpha\n *\n * Retrieves the details of all Analytics Storage buckets within an existing project\n * Only returns buckets of type 'ANALYTICS'\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Analytics Buckets\n * @param options Query parameters for listing buckets\n * @param options.limit Maximum number of buckets to return\n * @param options.offset Number of buckets to skip\n * @param options.sortColumn Column to sort by ('name', 'created_at', 'updated_at')\n * @param options.sortOrder Sort order ('asc' or 'desc')\n * @param 
options.search Search term to filter bucket names\n * @returns Promise with response containing array of analytics buckets or error\n *\n * @example List analytics buckets\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .analytics\n * .listBuckets({\n * limit: 10,\n * offset: 0,\n * sortColumn: 'created_at',\n * sortOrder: 'desc'\n * })\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": [\n * {\n * \"name\": \"analytics-data\",\n * \"type\": \"ANALYTICS\",\n * \"format\": \"iceberg\",\n * \"created_at\": \"2024-05-22T22:26:05.100Z\",\n * \"updated_at\": \"2024-05-22T22:26:05.100Z\"\n * }\n * ],\n * \"error\": null\n * }\n * ```\n */\n async listBuckets(options?: {\n limit?: number\n offset?: number\n sortColumn?: 'name' | 'created_at' | 'updated_at'\n sortOrder?: 'asc' | 'desc'\n search?: string\n }): Promise<\n | {\n data: AnalyticBucket[]\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n // Build query string from options\n const queryParams = new URLSearchParams()\n if (options?.limit !== undefined) queryParams.set('limit', options.limit.toString())\n if (options?.offset !== undefined) queryParams.set('offset', options.offset.toString())\n if (options?.sortColumn) queryParams.set('sortColumn', options.sortColumn)\n if (options?.sortOrder) queryParams.set('sortOrder', options.sortOrder)\n if (options?.search) queryParams.set('search', options.search)\n\n const queryString = queryParams.toString()\n const url = queryString ? `${this.url}/bucket?${queryString}` : `${this.url}/bucket`\n\n return await get(this.fetch, url, { headers: this.headers })\n })\n }\n\n /**\n * @alpha\n *\n * Deletes an existing analytics bucket\n * A bucket can't be deleted with existing objects inside it\n * You must first empty the bucket before deletion\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Analytics Buckets\n * @param bucketName The unique identifier of the bucket you would like to delete\n * @returns Promise with response containing success message or error\n *\n * @example Delete analytics bucket\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .analytics\n * .deleteBucket('analytics-data')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"message\": \"Successfully deleted\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async deleteBucket(bucketName: string): Promise<\n | {\n data: { message: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n return await remove(\n this.fetch,\n `${this.url}/bucket/${bucketName}`,\n {},\n { headers: this.headers }\n )\n })\n }\n\n /**\n * @alpha\n *\n * Get an Iceberg REST Catalog client configured for a specific analytics bucket\n * Use this to perform advanced table and namespace operations within the bucket\n * The returned client provides full access to the Apache Iceberg REST Catalog API\n * with the Supabase `{ data, error }` pattern for consistent error handling on all operations.\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Analytics Buckets\n * @param bucketName - The name of the analytics bucket (warehouse) to connect to\n * @returns The wrapped Iceberg catalog client\n * @throws {StorageError} If the bucket name is invalid\n *\n * @example Get catalog and create table\n * ```js\n * 
// First, create an analytics bucket\n * const { data: bucket, error: bucketError } = await supabase\n * .storage\n * .analytics\n * .createBucket('analytics-data')\n *\n * // Get the Iceberg catalog for that bucket\n * const catalog = supabase.storage.analytics.from('analytics-data')\n *\n * // Create a namespace\n * const { error: nsError } = await catalog.createNamespace({ namespace: ['default'] })\n *\n * // Create a table with schema\n * const { data: tableMetadata, error: tableError } = await catalog.createTable(\n * { namespace: ['default'] },\n * {\n * name: 'events',\n * schema: {\n * type: 'struct',\n * fields: [\n * { id: 1, name: 'id', type: 'long', required: true },\n * { id: 2, name: 'timestamp', type: 'timestamp', required: true },\n * { id: 3, name: 'user_id', type: 'string', required: false }\n * ],\n * 'schema-id': 0,\n * 'identifier-field-ids': [1]\n * },\n * 'partition-spec': {\n * 'spec-id': 0,\n * fields: []\n * },\n * 'write-order': {\n * 'order-id': 0,\n * fields: []\n * },\n * properties: {\n * 'write.format.default': 'parquet'\n * }\n * }\n * )\n * ```\n *\n * @example List tables in namespace\n * ```js\n * const catalog = supabase.storage.analytics.from('analytics-data')\n *\n * // List all tables in the default namespace\n * const { data: tables, error: listError } = await catalog.listTables({ namespace: ['default'] })\n * if (listError) {\n * if (listError.isNotFound()) {\n * console.log('Namespace not found')\n * }\n * return\n * }\n * console.log(tables) // [{ namespace: ['default'], name: 'events' }]\n * ```\n *\n * @example Working with namespaces\n * ```js\n * const catalog = supabase.storage.analytics.from('analytics-data')\n *\n * // List all namespaces\n * const { data: namespaces } = await catalog.listNamespaces()\n *\n * // Create namespace with properties\n * await catalog.createNamespace(\n * { namespace: ['production'] },\n * { properties: { owner: 'data-team', env: 'prod' } }\n * )\n * ```\n *\n * @example Cleanup operations\n * ```js\n * const catalog = supabase.storage.analytics.from('analytics-data')\n *\n * // Drop table with purge option (removes all data)\n * const { error: dropError } = await catalog.dropTable(\n * { namespace: ['default'], name: 'events' },\n * { purge: true }\n * )\n *\n * if (dropError?.isNotFound()) {\n * console.log('Table does not exist')\n * }\n *\n * // Drop namespace (must be empty)\n * await catalog.dropNamespace({ namespace: ['default'] })\n * ```\n *\n * @remarks\n * This method provides a bridge between Supabase's bucket management and the standard\n * Apache Iceberg REST Catalog API. The bucket name maps to the Iceberg warehouse parameter.\n * All authentication and configuration is handled automatically using your Supabase credentials.\n *\n * **Error Handling**: Invalid bucket names throw immediately. All catalog\n * operations return `{ data, error }` where errors are `IcebergError` instances from iceberg-js.\n * Use helper methods like `error.isNotFound()` or check `error.status` for specific error handling.\n * Use `.throwOnError()` on the analytics client if you prefer exceptions for catalog operations.\n *\n * **Cleanup Operations**: When using `dropTable`, the `purge: true` option permanently\n * deletes all table data. 
Without it, the table is marked as deleted but data remains.\n *\n * **Library Dependency**: The returned catalog wraps `IcebergRestCatalog` from iceberg-js.\n * For complete API documentation and advanced usage, refer to the\n * [iceberg-js documentation](https://supabase.github.io/iceberg-js/).\n */\n from(bucketName: string): WrappedIcebergRestCatalog {\n // Validate bucket name using same rules as Supabase Storage API backend\n if (!isValidBucketName(bucketName)) {\n throw new StorageError(\n 'Invalid bucket name: File, folder, and bucket names must follow AWS object key naming guidelines ' +\n 'and should avoid the use of any other characters.'\n )\n }\n\n // Construct the Iceberg REST Catalog URL\n // The base URL is /storage/v1/iceberg\n // Note: IcebergRestCatalog from iceberg-js automatically adds /v1/ prefix to API paths\n // so we should NOT append /v1 here (it would cause double /v1/v1/ in the URL)\n const catalog = new IcebergRestCatalog({\n baseUrl: this.url,\n catalogName: bucketName, // Maps to the warehouse parameter in Supabase's implementation\n auth: {\n type: 'custom',\n getHeaders: async () => this.headers,\n },\n fetch: this.fetch,\n })\n\n const shouldThrowOnError = this.shouldThrowOnError\n\n const wrappedCatalog = new Proxy(catalog, {\n get(target, prop: keyof IcebergRestCatalog) {\n const value = target[prop]\n if (typeof value !== 'function') {\n return value\n }\n\n return async (...args: unknown[]) => {\n try {\n const data = await (value as Function).apply(target, args)\n return { data, error: null }\n } catch (error) {\n if (shouldThrowOnError) {\n throw error\n }\n return { data: null, error: error as IcebergError }\n }\n }\n },\n }) as unknown as WrappedIcebergRestCatalog\n\n return wrappedCatalog\n }\n}\n","import { DEFAULT_HEADERS } from '../lib/constants'\nimport { StorageError } from '../lib/common/errors'\nimport { Fetch, vectorsApi } from '../lib/common/fetch'\nimport BaseApiClient from '../lib/common/BaseApiClient'\nimport {\n ApiResponse,\n VectorIndex,\n ListIndexesOptions,\n ListIndexesResponse,\n VectorDataType,\n DistanceMetric,\n MetadataConfiguration,\n} from '../lib/types'\n\n/**\n * @alpha\n *\n * Options for creating a vector index\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n */\nexport interface CreateIndexOptions {\n vectorBucketName: string\n indexName: string\n dataType: VectorDataType\n dimension: number\n distanceMetric: DistanceMetric\n metadataConfiguration?: MetadataConfiguration\n}\n\n/**\n * @hidden\n * Base implementation for vector index operations.\n * Use {@link VectorBucketScope} via `supabase.storage.vectors.from('bucket')` instead.\n */\nexport default class VectorIndexApi extends BaseApiClient<StorageError> {\n /** Creates a new VectorIndexApi instance */\n constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) {\n const finalUrl = url.replace(/\\/$/, '')\n const finalHeaders = { ...DEFAULT_HEADERS, 'Content-Type': 'application/json', ...headers }\n super(finalUrl, finalHeaders, fetch, 'vectors')\n }\n\n /** Creates a new vector index within a bucket */\n async createIndex(options: CreateIndexOptions): Promise<ApiResponse<undefined>> {\n return this.handleOperation(async () => {\n const data = await vectorsApi.post(this.fetch, `${this.url}/CreateIndex`, options, {\n headers: this.headers,\n })\n return data || {}\n })\n }\n\n /** Retrieves metadata for a specific vector index */\n async getIndex(\n vectorBucketName: 
string,\n indexName: string\n ): Promise<ApiResponse<{ index: VectorIndex }>> {\n return this.handleOperation(async () => {\n return await vectorsApi.post(\n this.fetch,\n `${this.url}/GetIndex`,\n { vectorBucketName, indexName },\n { headers: this.headers }\n )\n })\n }\n\n /** Lists vector indexes within a bucket with optional filtering and pagination */\n async listIndexes(options: ListIndexesOptions): Promise<ApiResponse<ListIndexesResponse>> {\n return this.handleOperation(async () => {\n return await vectorsApi.post(this.fetch, `${this.url}/ListIndexes`, options, {\n headers: this.headers,\n })\n })\n }\n\n /** Deletes a vector index and all its data */\n async deleteIndex(vectorBucketName: string, indexName: string): Promise<ApiResponse<undefined>> {\n return this.handleOperation(async () => {\n const data = await vectorsApi.post(\n this.fetch,\n `${this.url}/DeleteIndex`,\n { vectorBucketName, indexName },\n { headers: this.headers }\n )\n return data || {}\n })\n }\n}\n","import { DEFAULT_HEADERS } from '../lib/constants'\nimport { StorageError } from '../lib/common/errors'\nimport { Fetch, vectorsApi } from '../lib/common/fetch'\nimport BaseApiClient from '../lib/common/BaseApiClient'\nimport {\n ApiResponse,\n PutVectorsOptions,\n GetVectorsOptions,\n GetVectorsResponse,\n DeleteVectorsOptions,\n ListVectorsOptions,\n ListVectorsResponse,\n QueryVectorsOptions,\n QueryVectorsResponse,\n} from '../lib/types'\n\n/**\n * @hidden\n * Base implementation for vector data operations.\n * Use {@link VectorIndexScope} via `supabase.storage.vectors.from('bucket').index('idx')` instead.\n */\nexport default class VectorDataApi extends BaseApiClient<StorageError> {\n /** Creates a new VectorDataApi instance */\n constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) {\n const finalUrl = url.replace(/\\/$/, '')\n const finalHeaders = { ...DEFAULT_HEADERS, 'Content-Type': 'application/json', ...headers }\n super(finalUrl, finalHeaders, fetch, 'vectors')\n }\n\n /** Inserts or updates vectors in batch (1-500 per request) */\n async putVectors(options: PutVectorsOptions): Promise<ApiResponse<undefined>> {\n // Validate batch size\n if (options.vectors.length < 1 || options.vectors.length > 500) {\n throw new Error('Vector batch size must be between 1 and 500 items')\n }\n\n return this.handleOperation(async () => {\n const data = await vectorsApi.post(this.fetch, `${this.url}/PutVectors`, options, {\n headers: this.headers,\n })\n return data || {}\n })\n }\n\n /** Retrieves vectors by their keys in batch */\n async getVectors(options: GetVectorsOptions): Promise<ApiResponse<GetVectorsResponse>> {\n return this.handleOperation(async () => {\n return await vectorsApi.post(this.fetch, `${this.url}/GetVectors`, options, {\n headers: this.headers,\n })\n })\n }\n\n /** Lists vectors in an index with pagination */\n async listVectors(options: ListVectorsOptions): Promise<ApiResponse<ListVectorsResponse>> {\n // Validate segment configuration\n if (options.segmentCount !== undefined) {\n if (options.segmentCount < 1 || options.segmentCount > 16) {\n throw new Error('segmentCount must be between 1 and 16')\n }\n if (options.segmentIndex !== undefined) {\n if (options.segmentIndex < 0 || options.segmentIndex >= options.segmentCount) {\n throw new Error(`segmentIndex must be between 0 and ${options.segmentCount - 1}`)\n }\n }\n }\n\n return this.handleOperation(async () => {\n return await vectorsApi.post(this.fetch, `${this.url}/ListVectors`, options, {\n headers: 
this.headers,\n })\n })\n }\n\n /** Queries for similar vectors using approximate nearest neighbor search */\n async queryVectors(options: QueryVectorsOptions): Promise<ApiResponse<QueryVectorsResponse>> {\n return this.handleOperation(async () => {\n return await vectorsApi.post(this.fetch, `${this.url}/QueryVectors`, options, {\n headers: this.headers,\n })\n })\n }\n\n /** Deletes vectors by their keys in batch (1-500 per request) */\n async deleteVectors(options: DeleteVectorsOptions): Promise<ApiResponse<undefined>> {\n // Validate batch size\n if (options.keys.length < 1 || options.keys.length > 500) {\n throw new Error('Keys batch size must be between 1 and 500 items')\n }\n\n return this.handleOperation(async () => {\n const data = await vectorsApi.post(this.fetch, `${this.url}/DeleteVectors`, options, {\n headers: this.headers,\n })\n return data || {}\n })\n }\n}\n","import { DEFAULT_HEADERS } from '../lib/constants'\nimport { StorageError } from '../lib/common/errors'\nimport { Fetch, vectorsApi } from '../lib/common/fetch'\nimport BaseApiClient from '../lib/common/BaseApiClient'\nimport {\n ApiResponse,\n VectorBucket,\n ListVectorBucketsOptions,\n ListVectorBucketsResponse,\n} from '../lib/types'\n\n/**\n * @hidden\n * Base implementation for vector bucket operations.\n * Use {@link StorageVectorsClient} via `supabase.storage.vectors` instead.\n */\nexport default class VectorBucketApi extends BaseApiClient<StorageError> {\n /** Creates a new VectorBucketApi instance */\n constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) {\n const finalUrl = url.replace(/\\/$/, '')\n const finalHeaders = { ...DEFAULT_HEADERS, 'Content-Type': 'application/json', ...headers }\n super(finalUrl, finalHeaders, fetch, 'vectors')\n }\n\n /** Creates a new vector bucket */\n async createBucket(vectorBucketName: string): Promise<ApiResponse<undefined>> {\n return this.handleOperation(async () => {\n const data = await vectorsApi.post(\n this.fetch,\n `${this.url}/CreateVectorBucket`,\n { vectorBucketName },\n { headers: this.headers }\n )\n return data || {}\n })\n }\n\n /** Retrieves metadata for a specific vector bucket */\n async getBucket(vectorBucketName: string): Promise<ApiResponse<{ vectorBucket: VectorBucket }>> {\n return this.handleOperation(async () => {\n return await vectorsApi.post(\n this.fetch,\n `${this.url}/GetVectorBucket`,\n { vectorBucketName },\n { headers: this.headers }\n )\n })\n }\n\n /** Lists vector buckets with optional filtering and pagination */\n async listBuckets(\n options: ListVectorBucketsOptions = {}\n ): Promise<ApiResponse<ListVectorBucketsResponse>> {\n return this.handleOperation(async () => {\n return await vectorsApi.post(this.fetch, `${this.url}/ListVectorBuckets`, options, {\n headers: this.headers,\n })\n })\n }\n\n /** Deletes a vector bucket (must be empty first) */\n async deleteBucket(vectorBucketName: string): Promise<ApiResponse<undefined>> {\n return this.handleOperation(async () => {\n const data = await vectorsApi.post(\n this.fetch,\n `${this.url}/DeleteVectorBucket`,\n { vectorBucketName },\n { headers: this.headers }\n )\n return data || {}\n })\n }\n}\n","import VectorIndexApi, { CreateIndexOptions } from './VectorIndexApi'\nimport VectorDataApi from './VectorDataApi'\nimport { Fetch } from '../lib/common/fetch'\nimport VectorBucketApi from './VectorBucketApi'\nimport {\n ApiResponse,\n DeleteVectorsOptions,\n GetVectorsOptions,\n ListIndexesOptions,\n ListVectorsOptions,\n ListVectorBucketsOptions,\n 
ListVectorBucketsResponse,\n PutVectorsOptions,\n QueryVectorsOptions,\n VectorBucket,\n} from '../lib/types'\n\n/**\n *\n * @alpha\n *\n * Configuration options for the Storage Vectors client\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n */\nexport interface StorageVectorsClientOptions {\n /**\n * Custom headers to include in all requests\n */\n headers?: { [key: string]: string }\n /**\n * Custom fetch implementation (optional)\n * Useful for testing or custom request handling\n */\n fetch?: Fetch\n}\n\n/**\n *\n * @alpha\n *\n * Main client for interacting with S3 Vectors API\n * Provides access to bucket, index, and vector data operations\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * **Usage Patterns:**\n *\n * ```typescript\n * const { data, error } = await supabase\n * .storage\n * .vectors\n * .createBucket('embeddings-prod')\n *\n * // Access index operations via buckets\n * const bucket = supabase.storage.vectors.from('embeddings-prod')\n * await bucket.createIndex({\n * indexName: 'documents',\n * dataType: 'float32',\n * dimension: 1536,\n * distanceMetric: 'cosine'\n * })\n *\n * // Access vector operations via index\n * const index = bucket.index('documents')\n * await index.putVectors({\n * vectors: [\n * { key: 'doc-1', data: { float32: [...] }, metadata: { title: 'Intro' } }\n * ]\n * })\n *\n * // Query similar vectors\n * const { data } = await index.queryVectors({\n * queryVector: { float32: [...] },\n * topK: 5,\n * returnDistance: true\n * })\n * ```\n */\nexport class StorageVectorsClient extends VectorBucketApi {\n /**\n * @alpha\n *\n * Creates a StorageVectorsClient that can manage buckets, indexes, and vectors.\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param url - Base URL of the Storage Vectors REST API.\n * @param options.headers - Optional headers (for example `Authorization`) applied to every request.\n * @param options.fetch - Optional custom `fetch` implementation for non-browser runtimes.\n *\n * @example\n * ```typescript\n * const client = new StorageVectorsClient(url, options)\n * ```\n */\n constructor(url: string, options: StorageVectorsClientOptions = {}) {\n super(url, options.headers || {}, options.fetch)\n }\n\n /**\n *\n * @alpha\n *\n * Access operations for a specific vector bucket\n * Returns a scoped client for index and vector operations within the bucket\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param vectorBucketName - Name of the vector bucket\n * @returns Bucket-scoped client with index and vector operations\n *\n * @example\n * ```typescript\n * const bucket = supabase.storage.vectors.from('embeddings-prod')\n * ```\n */\n from(vectorBucketName: string): VectorBucketScope {\n return new VectorBucketScope(this.url, this.headers, vectorBucketName, this.fetch)\n }\n\n /**\n *\n * @alpha\n *\n * Creates a new vector bucket\n * Vector buckets are containers for vector indexes and their data\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param vectorBucketName - Unique name for the vector bucket\n * @returns Promise with empty response on success or error\n *\n * 
@example\n * ```typescript\n * const { data, error } = await supabase\n * .storage\n * .vectors\n * .createBucket('embeddings-prod')\n * ```\n */\n async createBucket(vectorBucketName: string): Promise<ApiResponse<undefined>> {\n return super.createBucket(vectorBucketName)\n }\n\n /**\n *\n * @alpha\n *\n * Retrieves metadata for a specific vector bucket\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param vectorBucketName - Name of the vector bucket\n * @returns Promise with bucket metadata or error\n *\n * @example\n * ```typescript\n * const { data, error } = await supabase\n * .storage\n * .vectors\n * .getBucket('embeddings-prod')\n *\n * console.log('Bucket created:', data?.vectorBucket.creationTime)\n * ```\n */\n async getBucket(vectorBucketName: string): Promise<ApiResponse<{ vectorBucket: VectorBucket }>> {\n return super.getBucket(vectorBucketName)\n }\n\n /**\n *\n * @alpha\n *\n * Lists all vector buckets with optional filtering and pagination\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param options - Optional filters (prefix, maxResults, nextToken)\n * @returns Promise with list of buckets or error\n *\n * @example\n * ```typescript\n * const { data, error } = await supabase\n * .storage\n * .vectors\n * .listBuckets({ prefix: 'embeddings-' })\n *\n * data?.vectorBuckets.forEach(bucket => {\n * console.log(bucket.vectorBucketName)\n * })\n * ```\n */\n async listBuckets(\n options: ListVectorBucketsOptions = {}\n ): Promise<ApiResponse<ListVectorBucketsResponse>> {\n return super.listBuckets(options)\n }\n\n /**\n *\n * @alpha\n *\n * Deletes a vector bucket (bucket must be empty)\n * All indexes must be deleted before deleting the bucket\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param vectorBucketName - Name of the vector bucket to delete\n * @returns Promise with empty response on success or error\n *\n * @example\n * ```typescript\n * const { data, error } = await supabase\n * .storage\n * .vectors\n * .deleteBucket('embeddings-old')\n * ```\n */\n async deleteBucket(vectorBucketName: string): Promise<ApiResponse<undefined>> {\n return super.deleteBucket(vectorBucketName)\n }\n}\n\n/**\n *\n * @alpha\n *\n * Scoped client for operations within a specific vector bucket\n * Provides index management and access to vector operations\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n */\nexport class VectorBucketScope extends VectorIndexApi {\n private vectorBucketName: string\n\n /**\n * @alpha\n *\n * Creates a helper that automatically scopes all index operations to the provided bucket.\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @example\n * ```typescript\n * const bucket = supabase.storage.vectors.from('embeddings-prod')\n * ```\n */\n constructor(\n url: string,\n headers: { [key: string]: string },\n vectorBucketName: string,\n fetch?: Fetch\n ) {\n super(url, headers, fetch)\n this.vectorBucketName = vectorBucketName\n }\n\n /**\n *\n * @alpha\n *\n * Creates a new vector index in this bucket\n * Convenience method that automatically includes the bucket name\n *\n * 
**Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param options - Index configuration (vectorBucketName is automatically set)\n * @returns Promise with empty response on success or error\n *\n * @example\n * ```typescript\n * const bucket = supabase.storage.vectors.from('embeddings-prod')\n * await bucket.createIndex({\n * indexName: 'documents-openai',\n * dataType: 'float32',\n * dimension: 1536,\n * distanceMetric: 'cosine',\n * metadataConfiguration: {\n * nonFilterableMetadataKeys: ['raw_text']\n * }\n * })\n * ```\n */\n override async createIndex(options: Omit<CreateIndexOptions, 'vectorBucketName'>) {\n return super.createIndex({\n ...options,\n vectorBucketName: this.vectorBucketName,\n })\n }\n\n /**\n *\n * @alpha\n *\n * Lists indexes in this bucket\n * Convenience method that automatically includes the bucket name\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param options - Listing options (vectorBucketName is automatically set)\n * @returns Promise with response containing indexes array and pagination token or error\n *\n * @example\n * ```typescript\n * const bucket = supabase.storage.vectors.from('embeddings-prod')\n * const { data } = await bucket.listIndexes({ prefix: 'documents-' })\n * ```\n */\n override async listIndexes(options: Omit<ListIndexesOptions, 'vectorBucketName'> = {}) {\n return super.listIndexes({\n ...options,\n vectorBucketName: this.vectorBucketName,\n })\n }\n\n /**\n *\n * @alpha\n *\n * Retrieves metadata for a specific index in this bucket\n * Convenience method that automatically includes the bucket name\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param indexName - Name of the index to retrieve\n * @returns Promise with index metadata or error\n *\n * @example\n * ```typescript\n * const bucket = supabase.storage.vectors.from('embeddings-prod')\n * const { data } = await bucket.getIndex('documents-openai')\n * console.log('Dimension:', data?.index.dimension)\n * ```\n */\n override async getIndex(indexName: string) {\n return super.getIndex(this.vectorBucketName, indexName)\n }\n\n /**\n *\n * @alpha\n *\n * Deletes an index from this bucket\n * Convenience method that automatically includes the bucket name\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param indexName - Name of the index to delete\n * @returns Promise with empty response on success or error\n *\n * @example\n * ```typescript\n * const bucket = supabase.storage.vectors.from('embeddings-prod')\n * await bucket.deleteIndex('old-index')\n * ```\n */\n override async deleteIndex(indexName: string) {\n return super.deleteIndex(this.vectorBucketName, indexName)\n }\n\n /**\n *\n * @alpha\n *\n * Access operations for a specific index within this bucket\n * Returns a scoped client for vector data operations\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param indexName - Name of the index\n * @returns Index-scoped client with vector data operations\n *\n * @example\n * ```typescript\n * const index = 
supabase.storage.vectors.from('embeddings-prod').index('documents-openai')\n *\n * // Insert vectors\n * await index.putVectors({\n * vectors: [\n * { key: 'doc-1', data: { float32: [...] }, metadata: { title: 'Intro' } }\n * ]\n * })\n *\n * // Query similar vectors\n * const { data } = await index.queryVectors({\n * queryVector: { float32: [...] },\n * topK: 5\n * })\n * ```\n */\n index(indexName: string): VectorIndexScope {\n return new VectorIndexScope(\n this.url,\n this.headers,\n this.vectorBucketName,\n indexName,\n this.fetch\n )\n }\n}\n\n/**\n *\n * @alpha\n *\n * Scoped client for operations within a specific vector index\n * Provides vector data operations (put, get, list, query, delete)\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n */\nexport class VectorIndexScope extends VectorDataApi {\n private vectorBucketName: string\n private indexName: string\n\n /**\n *\n * @alpha\n *\n * Creates a helper that automatically scopes all vector operations to the provided bucket/index names.\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @example\n * ```typescript\n * const index = supabase.storage.vectors.from('embeddings-prod').index('documents-openai')\n * ```\n */\n constructor(\n url: string,\n headers: { [key: string]: string },\n vectorBucketName: string,\n indexName: string,\n fetch?: Fetch\n ) {\n super(url, headers, fetch)\n this.vectorBucketName = vectorBucketName\n this.indexName = indexName\n }\n\n /**\n *\n * @alpha\n *\n * Inserts or updates vectors in this index\n * Convenience method that automatically includes bucket and index names\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param options - Vector insertion options (bucket and index names automatically set)\n * @returns Promise with empty response on success or error\n *\n * @example\n * ```typescript\n * const index = supabase.storage.vectors.from('embeddings-prod').index('documents-openai')\n * await index.putVectors({\n * vectors: [\n * {\n * key: 'doc-1',\n * data: { float32: [0.1, 0.2, ...] 
},\n * metadata: { title: 'Introduction', page: 1 }\n * }\n * ]\n * })\n * ```\n */\n override async putVectors(options: Omit<PutVectorsOptions, 'vectorBucketName' | 'indexName'>) {\n return super.putVectors({\n ...options,\n vectorBucketName: this.vectorBucketName,\n indexName: this.indexName,\n })\n }\n\n /**\n *\n * @alpha\n *\n * Retrieves vectors by keys from this index\n * Convenience method that automatically includes bucket and index names\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param options - Vector retrieval options (bucket and index names automatically set)\n * @returns Promise with response containing vectors array or error\n *\n * @example\n * ```typescript\n * const index = supabase.storage.vectors.from('embeddings-prod').index('documents-openai')\n * const { data } = await index.getVectors({\n * keys: ['doc-1', 'doc-2'],\n * returnMetadata: true\n * })\n * ```\n */\n override async getVectors(options: Omit<GetVectorsOptions, 'vectorBucketName' | 'indexName'>) {\n return super.getVectors({\n ...options,\n vectorBucketName: this.vectorBucketName,\n indexName: this.indexName,\n })\n }\n\n /**\n *\n * @alpha\n *\n * Lists vectors in this index with pagination\n * Convenience method that automatically includes bucket and index names\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param options - Listing options (bucket and index names automatically set)\n * @returns Promise with response containing vectors array and pagination token or error\n *\n * @example\n * ```typescript\n * const index = supabase.storage.vectors.from('embeddings-prod').index('documents-openai')\n * const { data } = await index.listVectors({\n * maxResults: 500,\n * returnMetadata: true\n * })\n * ```\n */\n override async listVectors(\n options: Omit<ListVectorsOptions, 'vectorBucketName' | 'indexName'> = {}\n ) {\n return super.listVectors({\n ...options,\n vectorBucketName: this.vectorBucketName,\n indexName: this.indexName,\n })\n }\n\n /**\n *\n * @alpha\n *\n * Queries for similar vectors in this index\n * Convenience method that automatically includes bucket and index names\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param options - Query options (bucket and index names automatically set)\n * @returns Promise with response containing matches array of similar vectors ordered by distance or error\n *\n * @example\n * ```typescript\n * const index = supabase.storage.vectors.from('embeddings-prod').index('documents-openai')\n * const { data } = await index.queryVectors({\n * queryVector: { float32: [0.1, 0.2, ...] 
},\n * topK: 5,\n * filter: { category: 'technical' },\n * returnDistance: true,\n * returnMetadata: true\n * })\n * ```\n */\n override async queryVectors(\n options: Omit<QueryVectorsOptions, 'vectorBucketName' | 'indexName'>\n ) {\n return super.queryVectors({\n ...options,\n vectorBucketName: this.vectorBucketName,\n indexName: this.indexName,\n })\n }\n\n /**\n *\n * @alpha\n *\n * Deletes vectors by keys from this index\n * Convenience method that automatically includes bucket and index names\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param options - Deletion options (bucket and index names automatically set)\n * @returns Promise with empty response on success or error\n *\n * @example\n * ```typescript\n * const index = supabase.storage.vectors.from('embeddings-prod').index('documents-openai')\n * await index.deleteVectors({\n * keys: ['doc-1', 'doc-2', 'doc-3']\n * })\n * ```\n */\n override async deleteVectors(\n options: Omit<DeleteVectorsOptions, 'vectorBucketName' | 'indexName'>\n ) {\n return super.deleteVectors({\n ...options,\n vectorBucketName: this.vectorBucketName,\n indexName: this.indexName,\n })\n }\n}\n","import StorageFileApi from './packages/StorageFileApi'\nimport StorageBucketApi from './packages/StorageBucketApi'\nimport StorageAnalyticsClient from './packages/StorageAnalyticsClient'\nimport { Fetch } from './lib/common/fetch'\nimport { StorageVectorsClient } from './packages/StorageVectorsClient'\n\nexport interface StorageClientOptions {\n useNewHostname?: boolean\n}\n\nexport class StorageClient extends StorageBucketApi {\n /**\n * Creates a client for Storage buckets, files, analytics, and vectors.\n *\n * @category File Buckets\n * @example\n * ```ts\n * import { StorageClient } from '@supabase/storage-js'\n *\n * const storage = new StorageClient('https://xyzcompany.supabase.co/storage/v1', {\n * apikey: 'public-anon-key',\n * })\n * const avatars = storage.from('avatars')\n * ```\n */\n constructor(\n url: string,\n headers: { [key: string]: string } = {},\n fetch?: Fetch,\n opts?: StorageClientOptions\n ) {\n super(url, headers, fetch, opts)\n }\n\n /**\n * Perform file operation in a bucket.\n *\n * @category File Buckets\n * @param id The bucket id to operate on.\n *\n * @example\n * ```typescript\n * const avatars = supabase.storage.from('avatars')\n * ```\n */\n from(id: string): StorageFileApi {\n return new StorageFileApi(this.url, this.headers, id, this.fetch)\n }\n\n /**\n *\n * @alpha\n *\n * Access vector storage operations.\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @returns A StorageVectorsClient instance configured with the current storage settings.\n */\n get vectors(): StorageVectorsClient {\n return new StorageVectorsClient(this.url + '/vector', {\n headers: this.headers,\n fetch: this.fetch,\n })\n }\n\n /**\n *\n * @alpha\n *\n * Access analytics storage operations using Iceberg tables.\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Analytics Buckets\n * @returns A StorageAnalyticsClient instance configured with the current storage settings.\n */\n get analytics(): StorageAnalyticsClient {\n return new StorageAnalyticsClient(this.url + '/iceberg', this.headers, this.fetch)\n 
}\n}\n"],"mappings":";;;;;;;AAUA,IAAa,eAAb,cAAkC,MAAM;CAMtC,YACE,SACA,YAA4B,WAC5B,QACA,YACA;AACA,QAAM,QAAQ;OAXN,mBAAmB;AAY3B,OAAK,YAAY;AACjB,OAAK,OAAO,cAAc,YAAY,wBAAwB;AAC9D,OAAK,SAAS;AACd,OAAK,aAAa;;;;;;;;AAStB,SAAgB,eAAe,OAAuC;AACpE,QAAO,OAAO,UAAU,YAAY,UAAU,QAAQ,sBAAsB;;;;;;AAO9E,IAAa,kBAAb,cAAqC,aAAa;CAIhD,YACE,SACA,QACA,YACA,YAA4B,WAC5B;AACA,QAAM,SAAS,WAAW,QAAQ,WAAW;AAC7C,OAAK,OAAO,cAAc,YAAY,2BAA2B;AACjE,OAAK,SAAS;AACd,OAAK,aAAa;;CAGpB,SAAS;AACP,SAAO;GACL,MAAM,KAAK;GACX,SAAS,KAAK;GACd,QAAQ,KAAK;GACb,YAAY,KAAK;GAClB;;;;;;;AAQL,IAAa,sBAAb,cAAyC,aAAa;CAGpD,YAAY,SAAiB,eAAwB,YAA4B,WAAW;AAC1F,QAAM,SAAS,UAAU;AACzB,OAAK,OAAO,cAAc,YAAY,+BAA+B;AACrE,OAAK,gBAAgB;;;;;;;AAYzB,IAAa,sBAAb,cAAyC,aAAa;CACpD,YAAY,SAAiB;AAC3B,QAAM,SAAS,UAAU;;;;;;;;AAS7B,SAAgB,sBAAsB,OAA8C;AAClF,QAAO,eAAe,MAAM,IAAK,MAAuB,iBAAiB;;;;;;AAO3E,IAAa,yBAAb,cAA4C,gBAAgB;CAC1D,YAAY,SAAiB,QAAgB,YAAoB;AAC/D,QAAM,SAAS,QAAQ,YAAY,UAAU;;;;;;;AAQjD,IAAa,6BAAb,cAAgD,oBAAoB;CAClE,YAAY,SAAiB,eAAwB;AACnD,QAAM,SAAS,eAAe,UAAU;;;;;;;AAQ5C,IAAY,8EAAL;;AAEL;;AAEA;;AAEA;;AAEA;;AAEA;;AAEA;;;;;;;;;;;;;ACrIF,MAAa,gBAAgB,gBAA+B;AAC1D,KAAI,YACF,SAAQ,GAAG,SAAS,YAAY,GAAG,KAAK;AAE1C,SAAQ,GAAG,SAAS,MAAM,GAAG,KAAK;;;;;;;;;;AAqBpC,MAAa,iBAAiB,UAA2B;AACvD,KAAI,OAAO,UAAU,YAAY,UAAU,KACzC,QAAO;CAGT,MAAM,YAAY,OAAO,eAAe,MAAM;AAC9C,SACG,cAAc,QACb,cAAc,OAAO,aACrB,OAAO,eAAe,UAAU,KAAK,SACvC,EAAE,OAAO,eAAe,UACxB,EAAE,OAAO,YAAY;;;;;;;;;AAWzB,MAAa,oBAAoB,SAAuC;AACtE,KAAI,MAAM,QAAQ,KAAK,CACrB,QAAO,KAAK,KAAK,OAAO,iBAAiB,GAAG,CAAC;UACpC,OAAO,SAAS,cAAc,SAAS,OAAO,KAAK,CAC5D,QAAO;CAGT,MAAMA,SAA8B,EAAE;AACtC,QAAO,QAAQ,KAAK,CAAC,SAAS,CAAC,KAAK,WAAW;EAC7C,MAAM,SAAS,IAAI,QAAQ,kBAAkB,MAAM,EAAE,aAAa,CAAC,QAAQ,SAAS,GAAG,CAAC;AACxF,SAAO,UAAU,iBAAiB,MAAM;GACxC;AAEF,QAAO;;;;;;;;;;;;;;;;;AAkBT,MAAa,qBAAqB,eAAgC;AAChE,KAAI,CAAC,cAAc,OAAO,eAAe,SACvC,QAAO;AAIT,KAAI,WAAW,WAAW,KAAK,WAAW,SAAS,IACjD,QAAO;AAIT,KAAI,WAAW,MAAM,KAAK,WACxB,QAAO;AAMT,KAAI,WAAW,SAAS,IAAI,IAAI,WAAW,SAAS,KAAK,CACvD,QAAO;AAOT,QADwB,4BACD,KAAK,WAAW;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AChFzC,MAAM,oBAAoB,QACxB;;YAAI,OACJ,IAAI,WACJ,IAAI,sBACH,OAAO,IAAI,UAAU,WAAW,IAAI,sBAAQ,IAAI,+DAAO,YACxD,KAAK,UAAU,IAAI;;;;;;;;;AASrB,MAAM,cAAc,OAClB,OACA,QACA,SACA,cACG;AAUH,KANE,SACA,OAAO,UAAU,YACjB,YAAY,SACZ,QAAQ,SACR,OAAQ,MAAc,WAAW,YAEb,oDAAC,QAAS,gBAAe;EAC7C,MAAM,gBAAgB;EACtB,MAAM,SAAS,cAAc,UAAU;AAGvC,MAAI,OAAO,cAAc,SAAS,WAChC,eACG,MAAM,CACN,MAAM,QAAa;GAClB,MAAM,wDAAa,IAAK,0DAAc,IAAK,SAAQ,SAAS;AAC5D,UAAO,IAAI,gBAAgB,iBAAiB,IAAI,EAAE,QAAQ,YAAY,UAAU,CAAC;IACjF,CACD,YAAY;AAEX,OAAI,cAAc,WAAW;IAC3B,MAAM,aAAa,SAAS;AAE5B,WAAO,IAAI,gBADK,cAAc,cAAc,QAAQ,OAAO,SACvB,QAAQ,YAAY,UAAU,CAAC;UAC9D;IACL,MAAM,aAAa,SAAS;AAE5B,WAAO,IAAI,gBADK,cAAc,cAAc,QAAQ,OAAO,SACvB,QAAQ,YAAY,UAAU,CAAC;;IAErE;OACC;GAEL,MAAM,aAAa,SAAS;AAE5B,UAAO,IAAI,gBADK,cAAc,cAAc,QAAQ,OAAO,SACvB,QAAQ,YAAY,UAAU,CAAC;;OAGrE,QAAO,IAAI,oBAAoB,iBAAiB,MAAM,EAAE,OAAO,UAAU,CAAC;;;;;;;;;;AAY9E,MAAM,qBACJ,QACA,SACA,YACA,SACG;CACH,MAAMC,SAA+B;EAAE;EAAQ,4DAAS,QAAS,YAAW,EAAE;EAAE;AAEhF,KAAI,WAAW,SAAS,WAAW,UAAU,CAAC,KAC5C,0CAAY,SAAW;AAGzB,KAAI,cAAc,KAAK,EAAE;AACvB,SAAO,2BAAY,gBAAgB,wEAAuB,QAAS;AACnE,SAAO,OAAO,KAAK,UAAU,KAAK;OAElC,QAAO,OAAO;AAGhB,uDAAI,QAAS,OACX,QAAO,SAAS,QAAQ;AAG1B,0CAAY,SAAW;;;;;;;;;;;;;AAczB,eAAe,eACb,SACA,QACA,KACA,SACA,YACA,MACA,WACc;AACd,QAAO,IAAI,SAAS,SAAS,WAAW;AACtC,UAAQ,KAAK,kBAAkB,QAAQ,SAAS,YAAY,KAAK,CAAC,CAC/D,MAAM,WAAW;AAChB,OAAI,CAAC,OAAO,GAAI,OAAM;AACtB,yDAAI,QAAS,cAAe,QAAO;AAMnC,OAAI,cAAc,WAAW;IAC3B,MAAM,cAAc,OAAO,QAAQ,IAAI,eAAe;AAItD,QAHsB,OAAO,QAAQ,IAAI,iBAAiB,KAGpC,OAAO,OAAO,WAAW,IAC7C,QAAO,EAAE;AAIX,QAAI,CAAC,eAAe,CAAC,YAAY,SAAS,
mBAAmB,CAC3D,QAAO,EAAE;;AAIb,UAAO,OAAO,MAAM;IACpB,CACD,MAAM,SAAS,QAAQ,KAAK,CAAC,CAC7B,OAAO,UAAU,YAAY,OAAO,QAAQ,SAAS,UAAU,CAAC;GACnE;;;;;;;AAQJ,SAAgB,eAAe,YAA4B,WAAW;AACpE,QAAO;EASL,KAAK,OACH,SACA,KACA,SACA,eACiB;AACjB,UAAO,eAAe,SAAS,OAAO,KAAK,SAAS,YAAY,QAAW,UAAU;;EAYvF,MAAM,OACJ,SACA,KACA,MACA,SACA,eACiB;AACjB,UAAO,eAAe,SAAS,QAAQ,KAAK,SAAS,YAAY,MAAM,UAAU;;EAYnF,KAAK,OACH,SACA,KACA,MACA,SACA,eACiB;AACjB,UAAO,eAAe,SAAS,OAAO,KAAK,SAAS,YAAY,MAAM,UAAU;;EAWlF,MAAM,OACJ,SACA,KACA,SACA,eACiB;AACjB,UAAO,eACL,SACA,QACA,uCAEK,gBACH,eAAe,SAEjB,YACA,QACA,UACD;;EAYH,QAAQ,OACN,SACA,KACA,MACA,SACA,eACiB;AACjB,UAAO,eAAe,SAAS,UAAU,KAAK,SAAS,YAAY,MAAM,UAAU;;EAEtF;;AAIH,MAAM,aAAa,eAAe,UAAU;AAC5C,MAAa,EAAE,KAAK,MAAM,KAAK,MAAM,WAAW;AAGhD,MAAa,aAAa,eAAe,UAAU;;;;;;;;;;;AChSnD,IAA8B,gBAA9B,MAAwF;;;;;;;;CActF,YACE,KACA,UAAqC,EAAE,EACvC,SACA,YAA4B,WAC5B;OAfQ,qBAAqB;AAgB7B,OAAK,MAAM;AACX,OAAK,UAAU;AACf,OAAK,QAAQ,aAAaC,QAAM;AAChC,OAAK,YAAY;;;;;;;;CASnB,AAAO,eAAqB;AAC1B,OAAK,qBAAqB;AAC1B,SAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;CA4BT,MAAgB,gBACd,WACmE;;AACnE,MAAI;AAEF,UAAO;IAAE,MADI,MAAM,WAAW;IACf,OAAO;IAAM;WACrB,OAAO;AACd,OAAIC,MAAK,mBACP,OAAM;AAER,OAAI,eAAe,MAAM,CACvB,QAAO;IAAE,MAAM;IAAa;IAAiB;AAE/C,SAAM;;;;;;;ACnFZ,IAAqB,wBAArB,MAAkG;CAChG,YACE,AAAQC,YACR,AAAQC,oBACR;EAFQ;EACA;;CAGV,KACE,aAGA,YAC8B;AAC9B,SAAO,KAAK,SAAS,CAAC,KAAK,aAAa,WAAW;;CAGrD,MAAc,UAAmD;;AAC/D,MAAI;AAGF,UAAO;IACL,OAHa,MAAMC,MAAK,YAAY,EAGvB;IACb,OAAO;IACR;WACM,OAAO;AACd,OAAIA,MAAK,mBACP,OAAM;AAGR,OAAI,eAAe,MAAM,CACvB,QAAO;IAAE,MAAM;IAAM;IAAO;AAG9B,SAAM;;;;;;;;sBC9BA,OAAO;AADnB,IAAqB,sBAArB,MAAkF;CAIhF,YACE,AAAQC,YACR,AAAQC,oBACR;EAFQ;EACA;8BAL8B;OAChC,UAAgD;;CAOxD,WAAkC;AAChC,SAAO,IAAI,sBAAsB,KAAK,YAAY,KAAK,mBAAmB;;CAG5E,KACE,aACA,YAC8B;AAC9B,SAAO,KAAK,YAAY,CAAC,KAAK,aAAa,WAAW;;CAGxD,MACE,YACyC;AACzC,SAAO,KAAK,YAAY,CAAC,MAAM,WAAW;;CAG5C,QAAQ,WAAgE;AACtE,SAAO,KAAK,YAAY,CAAC,QAAQ,UAAU;;CAG7C,AAAQ,aAA4C;AAClD,MAAI,CAAC,KAAK,QACR,MAAK,UAAU,KAAK,SAAS;AAE/B,SAAO,KAAK;;CAGd,MAAc,UAAyC;;AACrD,MAAI;AAGF,UAAO;IACL,MAAM,OAHO,MAAMC,MAAK,YAAY,EAGjB,MAAM;IACzB,OAAO;IACR;WACM,OAAO;AACd,OAAIA,MAAK,mBACP,OAAM;AAGR,OAAI,eAAe,MAAM,CACvB,QAAO;IAAE,MAAM;IAAM;IAAO;AAG9B,SAAM;;;;;;;ACxCZ,MAAM,yBAAyB;CAC7B,OAAO;CACP,QAAQ;CACR,QAAQ;EACN,QAAQ;EACR,OAAO;EACR;CACF;AAED,MAAMC,uBAAoC;CACxC,cAAc;CACd,aAAa;CACb,QAAQ;CACT;AAcD,IAAqB,iBAArB,cAA4C,cAA4B;CAGtE,YACE,KACA,UAAqC,EAAE,EACvC,UACA,SACA;AACA,QAAM,KAAK,SAASC,SAAO,UAAU;AACrC,OAAK,WAAW;;;;;;;;;CAUlB,MAAc,eACZ,QACA,MACA,UACA,aAUA;;AACA,SAAOC,MAAK,gBAAgB,YAAY;GACtC,IAAI;GACJ,MAAM,4CAAe,uBAAyB;GAC9C,IAAIC,4CACCD,MAAK,UACJ,WAAW,UAAU,EAAE,YAAY,OAAO,QAAQ,OAAkB,EAAE;GAG5E,MAAM,WAAW,QAAQ;AAEzB,OAAI,OAAO,SAAS,eAAe,oBAAoB,MAAM;AAC3D,WAAO,IAAI,UAAU;AACrB,SAAK,OAAO,gBAAgB,QAAQ,aAAuB;AAC3D,QAAI,SACF,MAAK,OAAO,YAAYA,MAAK,eAAe,SAAS,CAAC;AAExD,SAAK,OAAO,IAAI,SAAS;cAChB,OAAO,aAAa,eAAe,oBAAoB,UAAU;AAC1E,WAAO;AAEP,QAAI,CAAC,KAAK,IAAI,eAAe,CAC3B,MAAK,OAAO,gBAAgB,QAAQ,aAAuB;AAE7D,QAAI,YAAY,CAAC,KAAK,IAAI,WAAW,CACnC,MAAK,OAAO,YAAYA,MAAK,eAAe,SAAS,CAAC;UAEnD;AACL,WAAO;AACP,YAAQ,mBAAmB,WAAW,QAAQ;AAC9C,YAAQ,kBAAkB,QAAQ;AAElC,QAAI,SACF,SAAQ,gBAAgBA,MAAK,SAASA,MAAK,eAAe,SAAS,CAAC;AAStE,SAHG,OAAO,mBAAmB,eAAe,gBAAgB,kBACzD,QAAQ,OAAO,SAAS,YAAY,UAAU,QAAQ,OAAO,KAAK,SAAS,eAE9D,CAAC,QAAQ,OACvB,SAAQ,SAAS;;AAIrB,iEAAI,YAAa,QACf,6CAAe,UAAY,YAAY;GAGzC,MAAM,YAAYA,MAAK,oBAAoB,KAAK;GAChD,MAAM,QAAQA,MAAK,cAAc,UAAU;GAC3C,MAAM,OAAO,OAAO,UAAU,QAAQ,MAAM,MAC1CA,MAAK,OACL,GAAGA,MAAK,IAAI,UAAU,SACtB,uBACE,8DAAa,QAAS,UAAS,EAAE,QAAQ,QAAQ,QAAQ,GAAG,EAAE,EACjE;AAED,UAAO;IAAE,MAAM;IAAW,IAAI,KAAK;IAAI,UAAU,KAAK;IAAK;IAC3D;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA+CJ,MAAM,OACJ,MACA,UAC
A,aAUA;AACA,cAAY,eAAe,QAAQ,MAAM,UAAU,YAAY;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAkCjE,MAAM,kBACJ,MACA,OACA,UACA,aACA;;EACA,MAAM,YAAYA,OAAK,oBAAoB,KAAK;EAChD,MAAM,QAAQA,OAAK,cAAc,UAAU;EAE3C,MAAM,MAAM,IAAI,IAAIA,OAAK,MAAM,uBAAuB,QAAQ;AAC9D,MAAI,aAAa,IAAI,SAAS,MAAM;AAEpC,SAAOA,OAAK,gBAAgB,YAAY;GACtC,IAAI;GACJ,MAAM,2BAAY,QAAQ,qBAAqB,UAAW;GAC1D,MAAMC,4CACDD,OAAK,UACL,EAAE,YAAY,OAAO,QAAQ,OAAkB,EAAE;AAGtD,OAAI,OAAO,SAAS,eAAe,oBAAoB,MAAM;AAC3D,WAAO,IAAI,UAAU;AACrB,SAAK,OAAO,gBAAgB,QAAQ,aAAuB;AAC3D,SAAK,OAAO,IAAI,SAAS;cAChB,OAAO,aAAa,eAAe,oBAAoB,UAAU;AAC1E,WAAO;AACP,SAAK,OAAO,gBAAgB,QAAQ,aAAuB;UACtD;AACL,WAAO;AACP,YAAQ,mBAAmB,WAAW,QAAQ;AAC9C,YAAQ,kBAAkB,QAAQ;;AAKpC,UAAO;IAAE,MAAM;IAAW,WAFb,MAAM,IAAIA,OAAK,OAAO,IAAI,UAAU,EAAE,MAAgB,EAAE,SAAS,CAAC,EAEtC;IAAK;IAC9C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAiCJ,MAAM,sBACJ,MACA,SAUA;;AACA,SAAOA,OAAK,gBAAgB,YAAY;GACtC,IAAI,QAAQA,OAAK,cAAc,KAAK;GAEpC,MAAM,6BAAeA,OAAK;AAE1B,yDAAI,QAAS,OACX,SAAQ,cAAc;GAGxB,MAAM,OAAO,MAAM,KACjBA,OAAK,OACL,GAAGA,OAAK,IAAI,sBAAsB,SAClC,EAAE,EACF,EAAE,SAAS,CACZ;GAED,MAAM,MAAM,IAAI,IAAIA,OAAK,MAAM,KAAK,IAAI;GAExC,MAAM,QAAQ,IAAI,aAAa,IAAI,QAAQ;AAE3C,OAAI,CAAC,MACH,OAAM,IAAI,aAAa,2BAA2B;AAGpD,UAAO;IAAE,WAAW,IAAI,UAAU;IAAE;IAAM;IAAO;IACjD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA+CJ,MAAM,OACJ,MACA,UAWA,aAUA;AACA,cAAY,eAAe,OAAO,MAAM,UAAU,YAAY;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA8BhE,MAAM,KACJ,UACA,QACA,SAUA;;AACA,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,KACXA,OAAK,OACL,GAAGA,OAAK,IAAI,eACZ;IACE,UAAUA,OAAK;IACf,WAAW;IACX,gBAAgB;IAChB,qEAAmB,QAAS;IAC7B,EACD,EAAE,SAASA,OAAK,SAAS,CAC1B;IACD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA8BJ,MAAM,KACJ,UACA,QACA,SAUA;;AACA,SAAOA,OAAK,gBAAgB,YAAY;AAYtC,UAAO,EAAE,OAXI,MAAM,KACjBA,OAAK,OACL,GAAGA,OAAK,IAAI,eACZ;IACE,UAAUA,OAAK;IACf,WAAW;IACX,gBAAgB;IAChB,qEAAmB,QAAS;IAC7B,EACD,EAAE,SAASA,OAAK,SAAS,CAC1B,EACmB,KAAK;IACzB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAsDJ,MAAM,gBACJ,MACA,WACA,SAUA;;AACA,SAAOA,OAAK,gBAAgB,YAAY;GACtC,IAAI,QAAQA,OAAK,cAAc,KAAK;GAEpC,IAAI,OAAO,MAAM,KACfA,OAAK,OACL,GAAGA,OAAK,IAAI,eAAe,0BACzB,gEAAe,QAAS,aAAY,EAAE,WAAW,QAAQ,WAAW,GAAG,EAAE,GAC3E,EAAE,SAASA,OAAK,SAAS,CAC1B;GACD,MAAM,wEAAqB,QAAS,YAChC,aAAa,QAAQ,aAAa,OAAO,KAAK,QAAQ,aACtD;AAEJ,UAAO,EAAE,WADS,UAAU,GAAGA,OAAK,MAAM,KAAK,YAAY,qBAAqB,EAC5D;IACpB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAyCJ,MAAM,iBACJ,OACA,WACA,SAUA;;AACA,SAAOA,OAAK,gBAAgB,YAAY;GACtC,MAAM,OAAO,MAAM,KACjBA,OAAK,OACL,GAAGA,OAAK,IAAI,eAAeA,OAAK,YAChC;IAAE;IAAW;IAAO,EACpB,EAAE,SAASA,OAAK,SAAS,CAC1B;GAED,MAAM,wEAAqB,QAAS,YAChC,aAAa,QAAQ,aAAa,OAAO,KAAK,QAAQ,aACtD;AACJ,UAAO,KAAK,KAAK,4CACZ,cACH,WAAW,MAAM,YACb,UAAU,GAAGA,OAAK,MAAM,MAAM,YAAY,qBAAqB,GAC/D,QACH;IACH;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAyCJ,SACE,MACA,SACqB;EAErB,MAAM,aADsB,0DAAO,QAAS,eAAc,cACjB,+BAA+B;EACxE,MAAM,sBAAsB,KAAK,8EAA2B,QAAS,cAAa,EAAE,CAAC;EACrF,MAAM,cAAc,sBAAsB,IAAI,wBAAwB;EACtE,MAAM,QAAQ,KAAK,cAAc,KAAK;EACtC,MAAM,mBACJ,IAAI,KAAK,OAAO,GAAG,KAAK,IAAI,GAAG,WAAW,GAAG,QAAQ,eAAe;GAClE,SAAS,KAAK;GACd,eAAe;GAChB,CAAC;AACJ,SAAO,IAAI,oBAAoB,YAAY,KAAK,mBAAmB;;;;;;;;;;;;;;;;;CAkBrE,MAAM,KAAK,MAST;;EACA,MAAM,QAAQA,QAAK,cAAc,KAAK;AAEtC,SAAOA,QAAK,gBAAgB,YAAY;AAKtC,UAAO,iBAJM,MAAM,IAAIA,QAAK,OAAO,GAAGA,QAAK,IAAI,eAAe,SAAS,EACrE,SAASA,QAAK,SACf,CAAC,CAE2B;IAC7B;;;;;;;;;;;;;;;;;CAkBJ,MAAM,OAAO,MASX;;EACA,MAAM,QAAQA,QAAK,cAAc,KAAK;AAEtC,MAAI;AACF,SAAM,KAAKA,QAAK,OAAO,GAAGA,QAAK,IAAI,UAAU,SAAS,EACpD,SAASA,QAAK,SACf,CAAC;AAEF,UAAO;IAAE,MAAM;IAAM,OAAO;IAAM;WAC3B,OAAO;AACd,OAAIA,QAAK,mBACP,OAAM;AAER,OAAI,eAAe,MAAM,IAAI,iBAAiB,qBAAqB;IACjE,MAAM,gBAAgB,MAAM;AAE5B,QAAI,CAAC,KAAK,IAAI,CAAC,uEAAS,cAAe,OAAO,CAC5C,QAAO;KAAE,MA
AM;KAAO;KAAO;;AAIjC,SAAM;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAsDV,aACE,MACA,SACiC;EACjC,MAAM,QAAQ,KAAK,cAAc,KAAK;EACtC,MAAME,eAAyB,EAAE;EAEjC,MAAM,wEAAqB,QAAS,YAChC,YAAY,QAAQ,aAAa,OAAO,KAAK,QAAQ,aACrD;AAEJ,MAAI,uBAAuB,GACzB,cAAa,KAAK,mBAAmB;EAIvC,MAAM,aADsB,0DAAO,QAAS,eAAc,cACjB,iBAAiB;EAC1D,MAAM,sBAAsB,KAAK,8EAA2B,QAAS,cAAa,EAAE,CAAC;AAErF,MAAI,wBAAwB,GAC1B,cAAa,KAAK,oBAAoB;EAGxC,IAAI,cAAc,aAAa,KAAK,IAAI;AACxC,MAAI,gBAAgB,GAClB,eAAc,IAAI;AAGpB,SAAO,EACL,MAAM,EAAE,WAAW,UAAU,GAAG,KAAK,IAAI,GAAG,WAAW,UAAU,QAAQ,cAAc,EAAE,EAC1F;;;;;;;;;;;;;;;;;;;;;;;;;CA0BH,MAAM,OAAO,OASX;;AACA,SAAOF,QAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,OACXA,QAAK,OACL,GAAGA,QAAK,IAAI,UAAUA,QAAK,YAC3B,EAAE,UAAU,OAAO,EACnB,EAAE,SAASA,QAAK,SAAS,CAC1B;IACD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA6HJ,MAAM,KACJ,MACA,SACA,YAUA;;AACA,SAAOA,QAAK,gBAAgB,YAAY;GACtC,MAAM,wDAAY,yBAA2B,gBAAS,QAAQ,QAAQ;AACtE,UAAO,MAAM,KACXA,QAAK,OACL,GAAGA,QAAK,IAAI,eAAeA,QAAK,YAChC,MACA,EAAE,SAASA,QAAK,SAAS,EACzB,WACD;IACD;;;;;;;;;CAUJ,MAAM,OACJ,SACA,YAUA;;AACA,SAAOA,QAAK,gBAAgB,YAAY;GACtC,MAAM,0BAAY;AAClB,UAAO,MAAM,KACXA,QAAK,OACL,GAAGA,QAAK,IAAI,kBAAkBA,QAAK,YACnC,MACA,EAAE,SAASA,QAAK,SAAS,EACzB,WACD;IACD;;CAGJ,AAAU,eAAe,UAA+B;AACtD,SAAO,KAAK,UAAU,SAAS;;CAGjC,SAAS,MAAc;AACrB,MAAI,OAAO,WAAW,YACpB,QAAO,OAAO,KAAK,KAAK,CAAC,SAAS,SAAS;AAE7C,SAAO,KAAK,KAAK;;CAGnB,AAAQ,cAAc,MAAc;AAClC,SAAO,GAAG,KAAK,SAAS,GAAG,KAAK,QAAQ,QAAQ,GAAG;;CAGrD,AAAQ,oBAAoB,MAAc;AACxC,SAAO,KAAK,QAAQ,YAAY,GAAG,CAAC,QAAQ,QAAQ,IAAI;;CAG1D,AAAQ,2BAA2B,WAA6B;EAC9D,MAAMG,SAAmB,EAAE;AAC3B,MAAI,UAAU,MACZ,QAAO,KAAK,SAAS,UAAU,QAAQ;AAGzC,MAAI,UAAU,OACZ,QAAO,KAAK,UAAU,UAAU,SAAS;AAG3C,MAAI,UAAU,OACZ,QAAO,KAAK,UAAU,UAAU,SAAS;AAG3C,MAAI,UAAU,OACZ,QAAO,KAAK,UAAU,UAAU,SAAS;AAG3C,MAAI,UAAU,QACZ,QAAO,KAAK,WAAW,UAAU,UAAU;AAG7C,SAAO,OAAO,KAAK,IAAI;;;;;;AC5oC3B,MAAa,UAAU;;;;ACLvB,MAAa,kBAAkB,EAC7B,iBAAiB,cAAc,WAChC;;;;ACID,IAAqB,mBAArB,cAA8C,cAA4B;CACxE,YACE,KACA,UAAqC,EAAE,EACvC,SACA,MACA;EACA,MAAM,UAAU,IAAI,IAAI,IAAI;AAI5B,kDAAI,KAAM,gBAER;OADuB,yBAAyB,KAAK,QAAQ,SAAS,IAChD,CAAC,QAAQ,SAAS,SAAS,oBAAoB,CACnE,SAAQ,WAAW,QAAQ,SAAS,QAAQ,aAAa,oBAAoB;;EAIjF,MAAM,WAAW,QAAQ,KAAK,QAAQ,OAAO,GAAG;EAChD,MAAM,iDAAoB,kBAAoB;AAE9C,QAAM,UAAU,cAAcC,SAAO,UAAU;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAmCjD,MAAM,YAAY,SAShB;;AACA,SAAOC,MAAK,gBAAgB,YAAY;GACtC,MAAM,cAAcA,MAAK,+BAA+B,QAAQ;AAChE,UAAO,MAAM,IAAIA,MAAK,OAAO,GAAGA,MAAK,IAAI,SAAS,eAAe,EAC/D,SAASA,MAAK,SACf,CAAC;IACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAoCJ,MAAM,UAAU,IASd;;AACA,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,IAAIA,OAAK,OAAO,GAAGA,OAAK,IAAI,UAAU,MAAM,EAAE,SAASA,OAAK,SAAS,CAAC;IACnF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAwCJ,MAAM,aACJ,IACA,UAKI,EACF,QAAQ,OACT,EAUD;;AACA,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,KACXA,OAAK,OACL,GAAGA,OAAK,IAAI,UACZ;IACE;IACA,MAAM;IACN,MAAM,QAAQ;IACd,QAAQ,QAAQ;IAChB,iBAAiB,QAAQ;IACzB,oBAAoB,QAAQ;IAC7B,EACD,EAAE,SAASA,OAAK,SAAS,CAC1B;IACD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAsCJ,MAAM,aACJ,IACA,SAcA;;AACA,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,IACXA,OAAK,OACL,GAAGA,OAAK,IAAI,UAAU,MACtB;IACE;IACA,MAAM;IACN,QAAQ,QAAQ;IAChB,iBAAiB,QAAQ;IACzB,oBAAoB,QAAQ;IAC7B,EACD,EAAE,SAASA,OAAK,SAAS,CAC1B;IACD;;;;;;;;;;;;;;;;;;;;;;;;;;CA2BJ,MAAM,YAAY,IAShB;;AACA,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,KAAKA,OAAK,OAAO,GAAGA,OAAK,IAAI,UAAU,GAAG,SAAS,EAAE,EAAE,EAAE,SAASA,OAAK,SAAS,CAAC;IAC9F;;;;;;;;;;;;;;;;;;;;;;;;;;;CA4BJ,MAAM,aAAa,IASjB;;AACA,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,OAAOA,OAAK,OAAO,GAAGA,OAAK,IAAI,UAAU,MAAM,EAAE,EAAE,EAAE,SAASA,OAAK,SAAS,CAAC;IAC1F;;CAGJ,AAAQ,+BAA+B,SAAqC;EAC1E,MAAMC,SAAiC,EAAE;AACzC,MAAI,SAAS
;AACX,OAAI,WAAW,QACb,QAAO,QAAQ,OAAO,QAAQ,MAAM;AAEtC,OAAI,YAAY,QACd,QAAO,SAAS,OAAO,QAAQ,OAAO;AAExC,OAAI,QAAQ,OACV,QAAO,SAAS,QAAQ;AAE1B,OAAI,QAAQ,WACV,QAAO,aAAa,QAAQ;AAE9B,OAAI,QAAQ,UACV,QAAO,YAAY,QAAQ;;AAG/B,SAAO,OAAO,KAAK,OAAO,CAAC,SAAS,IAAI,MAAM,IAAI,gBAAgB,OAAO,CAAC,UAAU,GAAG;;;;;;;;;;AC7V3F,IAAqB,yBAArB,cAAoD,cAA4B;;;;;;;;;;;;;;;;;;CAkB9E,YAAY,KAAa,UAAqC,EAAE,EAAE,SAAe;EAC/E,MAAM,WAAW,IAAI,QAAQ,OAAO,GAAG;EACvC,MAAM,iDAAoB,kBAAoB;AAC9C,QAAM,UAAU,cAAcC,SAAO,UAAU;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAqCjD,MAAM,aAAa,MASjB;;AACA,SAAOC,MAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,KAAKA,MAAK,OAAO,GAAGA,MAAK,IAAI,UAAU,EAAE,MAAM,EAAE,EAAE,SAASA,MAAK,SAAS,CAAC;IACxF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAiDJ,MAAM,YAAY,SAehB;;AACA,SAAOA,OAAK,gBAAgB,YAAY;GAEtC,MAAM,cAAc,IAAI,iBAAiB;AACzC,0DAAI,QAAS,WAAU,OAAW,aAAY,IAAI,SAAS,QAAQ,MAAM,UAAU,CAAC;AACpF,0DAAI,QAAS,YAAW,OAAW,aAAY,IAAI,UAAU,QAAQ,OAAO,UAAU,CAAC;AACvF,yDAAI,QAAS,WAAY,aAAY,IAAI,cAAc,QAAQ,WAAW;AAC1E,yDAAI,QAAS,UAAW,aAAY,IAAI,aAAa,QAAQ,UAAU;AACvE,yDAAI,QAAS,OAAQ,aAAY,IAAI,UAAU,QAAQ,OAAO;GAE9D,MAAM,cAAc,YAAY,UAAU;GAC1C,MAAM,MAAM,cAAc,GAAGA,OAAK,IAAI,UAAU,gBAAgB,GAAGA,OAAK,IAAI;AAE5E,UAAO,MAAM,IAAIA,OAAK,OAAO,KAAK,EAAE,SAASA,OAAK,SAAS,CAAC;IAC5D;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAkCJ,MAAM,aAAa,YASjB;;AACA,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,OACXA,OAAK,OACL,GAAGA,OAAK,IAAI,UAAU,cACtB,EAAE,EACF,EAAE,SAASA,OAAK,SAAS,CAC1B;IACD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA8HJ,KAAK,YAA+C;;AAElD,MAAI,CAAC,kBAAkB,WAAW,CAChC,OAAM,IAAI,aACR,qJAED;EAOH,MAAM,UAAU,IAAIC,8BAAmB;GACrC,SAAS,KAAK;GACd,aAAa;GACb,MAAM;IACJ,MAAM;IACN,YAAY,YAAYD,OAAK;IAC9B;GACD,OAAO,KAAK;GACb,CAAC;EAEF,MAAM,qBAAqB,KAAK;AAuBhC,SArBuB,IAAI,MAAM,SAAS,EACxC,IAAI,QAAQ,MAAgC;GAC1C,MAAM,QAAQ,OAAO;AACrB,OAAI,OAAO,UAAU,WACnB,QAAO;AAGT,UAAO,OAAO,GAAG,SAAoB;AACnC,QAAI;AAEF,YAAO;MAAE,MADI,MAAO,MAAmB,MAAM,QAAQ,KAAK;MAC3C,OAAO;MAAM;aACrB,OAAO;AACd,SAAI,mBACF,OAAM;AAER,YAAO;MAAE,MAAM;MAAa;MAAuB;;;KAI1D,CAAC;;;;;;;;;;;AClWN,IAAqB,iBAArB,cAA4C,cAA4B;;CAEtE,YAAY,KAAa,UAAqC,EAAE,EAAE,SAAe;EAC/E,MAAM,WAAW,IAAI,QAAQ,OAAO,GAAG;EACvC,MAAM,iDAAoB,wBAAiB,gBAAgB,sBAAuB;AAClF,QAAM,UAAU,cAAcE,SAAO,UAAU;;;CAIjD,MAAM,YAAY,SAA8D;;AAC9E,SAAOC,MAAK,gBAAgB,YAAY;AAItC,UAHa,MAAM,WAAW,KAAKA,MAAK,OAAO,GAAGA,MAAK,IAAI,eAAe,SAAS,EACjF,SAASA,MAAK,SACf,CAAC,IACa,EAAE;IACjB;;;CAIJ,MAAM,SACJ,kBACA,WAC8C;;AAC9C,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,WAAW,KACtBA,OAAK,OACL,GAAGA,OAAK,IAAI,YACZ;IAAE;IAAkB;IAAW,EAC/B,EAAE,SAASA,OAAK,SAAS,CAC1B;IACD;;;CAIJ,MAAM,YAAY,SAAwE;;AACxF,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,WAAW,KAAKA,OAAK,OAAO,GAAGA,OAAK,IAAI,eAAe,SAAS,EAC3E,SAASA,OAAK,SACf,CAAC;IACF;;;CAIJ,MAAM,YAAY,kBAA0B,WAAoD;;AAC9F,SAAOA,OAAK,gBAAgB,YAAY;AAOtC,UANa,MAAM,WAAW,KAC5BA,OAAK,OACL,GAAGA,OAAK,IAAI,eACZ;IAAE;IAAkB;IAAW,EAC/B,EAAE,SAASA,OAAK,SAAS,CAC1B,IACc,EAAE;IACjB;;;;;;;;;;;AClEN,IAAqB,gBAArB,cAA2C,cAA4B;;CAErE,YAAY,KAAa,UAAqC,EAAE,EAAE,SAAe;EAC/E,MAAM,WAAW,IAAI,QAAQ,OAAO,GAAG;EACvC,MAAM,iDAAoB,wBAAiB,gBAAgB,sBAAuB;AAClF,QAAM,UAAU,cAAcC,SAAO,UAAU;;;CAIjD,MAAM,WAAW,SAA6D;;AAE5E,MAAI,QAAQ,QAAQ,SAAS,KAAK,QAAQ,QAAQ,SAAS,IACzD,OAAM,IAAI,MAAM,oDAAoD;AAGtE,SAAOC,MAAK,gBAAgB,YAAY;AAItC,UAHa,MAAM,WAAW,KAAKA,MAAK,OAAO,GAAGA,MAAK,IAAI,cAAc,SAAS,EAChF,SAASA,MAAK,SACf,CAAC,IACa,EAAE;IACjB;;;CAIJ,MAAM,WAAW,SAAsE;;AACrF,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,WAAW,KAAKA,OAAK,OAAO,GAAGA,OAAK,IAAI,cAAc,SAAS,EAC1E,SAASA,OAAK,SACf,CAAC;IACF;;;CAIJ,MAAM,YAAY,SAAwE;;AAExF,MAAI,QAAQ,iBAAiB,QAAW;AACtC,OAAI,QAAQ,eAAe,KAAK,QAAQ,eAAe,GACrD,OAAM,IAAI,MAAM,wCAAwC;AAE1D,O
AAI,QAAQ,iBAAiB,QAC3B;QAAI,QAAQ,eAAe,KAAK,QAAQ,gBAAgB,QAAQ,aAC9D,OAAM,IAAI,MAAM,sCAAsC,QAAQ,eAAe,IAAI;;;AAKvF,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,WAAW,KAAKA,OAAK,OAAO,GAAGA,OAAK,IAAI,eAAe,SAAS,EAC3E,SAASA,OAAK,SACf,CAAC;IACF;;;CAIJ,MAAM,aAAa,SAA0E;;AAC3F,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,WAAW,KAAKA,OAAK,OAAO,GAAGA,OAAK,IAAI,gBAAgB,SAAS,EAC5E,SAASA,OAAK,SACf,CAAC;IACF;;;CAIJ,MAAM,cAAc,SAAgE;;AAElF,MAAI,QAAQ,KAAK,SAAS,KAAK,QAAQ,KAAK,SAAS,IACnD,OAAM,IAAI,MAAM,kDAAkD;AAGpE,SAAOA,OAAK,gBAAgB,YAAY;AAItC,UAHa,MAAM,WAAW,KAAKA,OAAK,OAAO,GAAGA,OAAK,IAAI,iBAAiB,SAAS,EACnF,SAASA,OAAK,SACf,CAAC,IACa,EAAE;IACjB;;;;;;;;;;;AC/EN,IAAqB,kBAArB,cAA6C,cAA4B;;CAEvE,YAAY,KAAa,UAAqC,EAAE,EAAE,SAAe;EAC/E,MAAM,WAAW,IAAI,QAAQ,OAAO,GAAG;EACvC,MAAM,iDAAoB,wBAAiB,gBAAgB,sBAAuB;AAClF,QAAM,UAAU,cAAcC,SAAO,UAAU;;;CAIjD,MAAM,aAAa,kBAA2D;;AAC5E,SAAOC,MAAK,gBAAgB,YAAY;AAOtC,UANa,MAAM,WAAW,KAC5BA,MAAK,OACL,GAAGA,MAAK,IAAI,sBACZ,EAAE,kBAAkB,EACpB,EAAE,SAASA,MAAK,SAAS,CAC1B,IACc,EAAE;IACjB;;;CAIJ,MAAM,UAAU,kBAAgF;;AAC9F,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,WAAW,KACtBA,OAAK,OACL,GAAGA,OAAK,IAAI,mBACZ,EAAE,kBAAkB,EACpB,EAAE,SAASA,OAAK,SAAS,CAC1B;IACD;;;CAIJ,MAAM,YACJ,UAAoC,EAAE,EACW;;AACjD,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,WAAW,KAAKA,OAAK,OAAO,GAAGA,OAAK,IAAI,qBAAqB,SAAS,EACjF,SAASA,OAAK,SACf,CAAC;IACF;;;CAIJ,MAAM,aAAa,kBAA2D;;AAC5E,SAAOA,OAAK,gBAAgB,YAAY;AAOtC,UANa,MAAM,WAAW,KAC5BA,OAAK,OACL,GAAGA,OAAK,IAAI,sBACZ,EAAE,kBAAkB,EACpB,EAAE,SAASA,OAAK,SAAS,CAC1B,IACc,EAAE;IACjB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACSN,IAAa,uBAAb,cAA0C,gBAAgB;;;;;;;;;;;;;;;;;;CAkBxD,YAAY,KAAa,UAAuC,EAAE,EAAE;AAClE,QAAM,KAAK,QAAQ,WAAW,EAAE,EAAE,QAAQ,MAAM;;;;;;;;;;;;;;;;;;;;CAqBlD,KAAK,kBAA6C;AAChD,SAAO,IAAI,kBAAkB,KAAK,KAAK,KAAK,SAAS,kBAAkB,KAAK,MAAM;;;;;;;;;;;;;;;;;;;;;;;CAwBpF,MAAM,aAAa,kBAA2D;yCACrE,MAAM;AAAb,kDAA0B;;;;;;;;;;;;;;;;;;;;;;;;CAyB5B,MAAM,UAAU,kBAAgF;sCACvF,MAAM;AAAb,gDAAuB;;;;;;;;;;;;;;;;;;;;;;;;;;CA2BzB,MAAM,YACJ,UAAoC,EAAE,EACW;wCAC1C,MAAM;AAAb,kDAAyB;;;;;;;;;;;;;;;;;;;;;;;CAwB3B,MAAM,aAAa,kBAA2D;yCACrE,MAAM;AAAb,mDAA0B;;;;;;;;;;;;AAa9B,IAAa,oBAAb,cAAuC,eAAe;;;;;;;;;;;;;;CAgBpD,YACE,KACA,SACA,kBACA,SACA;AACA,QAAM,KAAK,SAASC,QAAM;AAC1B,OAAK,mBAAmB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA8B1B,MAAe,YAAY,SAAuD;wCACzE,MAAM;AAAb,oFACK,gBACH,kBAAkBC,OAAK;;;;;;;;;;;;;;;;;;;;;CAuB3B,MAAe,YAAY,UAAwD,EAAE,EAAE;wCAC9E,MAAM;AAAb,oFACK,gBACH,kBAAkBA,OAAK;;;;;;;;;;;;;;;;;;;;;;CAwB3B,MAAe,SAAS,WAAmB;qCAClC,MAAM;AAAb,+CAAsBA,OAAK,kBAAkB;;;;;;;;;;;;;;;;;;;;;CAsB/C,MAAe,YAAY,WAAmB;wCACrC,MAAM;AAAb,kDAAyBA,OAAK,kBAAkB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAkClD,MAAM,WAAqC;AACzC,SAAO,IAAI,iBACT,KAAK,KACL,KAAK,SACL,KAAK,kBACL,WACA,KAAK,MACN;;;;;;;;;;;;AAaL,IAAa,mBAAb,cAAsC,cAAc;;;;;;;;;;;;;;;CAkBlD,YACE,KACA,SACA,kBACA,WACA,SACA;AACA,QAAM,KAAK,SAASD,QAAM;AAC1B,OAAK,mBAAmB;AACxB,OAAK,YAAY;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA8BnB,MAAe,WAAW,SAAoE;uCACrF,MAAM;AAAb,mFACK;GACH,kBAAkBC,OAAK;GACvB,WAAWA,OAAK;;;;;;;;;;;;;;;;;;;;;;;;;CA0BpB,MAAe,WAAW,SAAoE;uCACrF,MAAM;AAAb,oFACK;GACH,kBAAkBA,QAAK;GACvB,WAAWA,QAAK;;;;;;;;;;;;;;;;;;;;;;;;;CA0BpB,MAAe,YACb,UAAsE,EAAE,EACxE;wCACO,MAAM;AAAb,qFACK;GACH,kBAAkBA,QAAK;GACvB,WAAWA,QAAK;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA6BpB,MAAe,aACb,SACA;yCACO,MAAM;AAAb,sFACK;GACH,kBAAkBA,QAAK;GACvB,WAAWA,QAAK;;;;;;;;;;;;;;;;;;;;;;;;CAyBpB,MAAe,cACb,SACA;0CACO,MAAM;AAAb,uFACK;GACH,kBAAkBA,QAAK;GACvB,WAAWA,QAAK;;;;;;;AC1lBtB,IAAa,gBAAb,cAAmC,iBAAiB;;;;;;;;;;;;;;;CAelD,YACE,KACA,UAAqC,EAAE,EACvC,SACA,MACA;AACA,QAAM,KAAK,SAASC,SAAO,KAAK;;;;;;;;;;;;;CAclC,KAAK,IAA4B;AAC/B,SAAO,IAAI,eAAe,KAAK,KAAK,KAAK,SAAS,IAAI,KAAK,MAAM;;;;;;;;;;;;;CAcnE,IAAI,U
AAgC;AAClC,SAAO,IAAI,qBAAqB,KAAK,MAAM,WAAW;GACpD,SAAS,KAAK;GACd,OAAO,KAAK;GACb,CAAC;;;;;;;;;;;;;CAcJ,IAAI,YAAoC;AACtC,SAAO,IAAI,uBAAuB,KAAK,MAAM,YAAY,KAAK,SAAS,KAAK,MAAM"}
+
{"version":3,"file":"index.cjs","names":["result: Record<string, any>","params: { [k: string]: any }","fetch","this","downloadFn: () => Promise<Response>","shouldThrowOnError: boolean","this","downloadFn: () => Promise<Response>","shouldThrowOnError: boolean","this","DEFAULT_FILE_OPTIONS: FileOptions","fetch","this","headers: Record<string, string>","_queryString: string[]","params: string[]","fetch","this","params: Record<string, string>","fetch","this","IcebergRestCatalog","fetch","this","fetch","this","fetch","this","fetch","this","fetch"],"sources":["../src/lib/common/errors.ts","../src/lib/common/helpers.ts","../src/lib/common/fetch.ts","../src/lib/common/BaseApiClient.ts","../src/packages/StreamDownloadBuilder.ts","../src/packages/BlobDownloadBuilder.ts","../src/packages/StorageFileApi.ts","../src/lib/version.ts","../src/lib/constants.ts","../src/packages/StorageBucketApi.ts","../src/packages/StorageAnalyticsClient.ts","../src/packages/VectorIndexApi.ts","../src/packages/VectorDataApi.ts","../src/packages/VectorBucketApi.ts","../src/packages/StorageVectorsClient.ts","../src/StorageClient.ts"],"sourcesContent":["/**\n * Namespace type for error classes\n * Determines the error class names and type guards\n */\nexport type ErrorNamespace = 'storage' | 'vectors'\n\n/**\n * Base error class for all Storage errors\n * Supports both 'storage' and 'vectors' namespaces\n */\nexport class StorageError extends Error {\n protected __isStorageError = true\n protected namespace: ErrorNamespace\n status?: number\n statusCode?: string\n\n constructor(\n message: string,\n namespace: ErrorNamespace = 'storage',\n status?: number,\n statusCode?: string\n ) {\n super(message)\n this.namespace = namespace\n this.name = namespace === 'vectors' ? 'StorageVectorsError' : 'StorageError'\n this.status = status\n this.statusCode = statusCode\n }\n}\n\n/**\n * Type guard to check if an error is a StorageError\n * @param error - The error to check\n * @returns True if the error is a StorageError\n */\nexport function isStorageError(error: unknown): error is StorageError {\n return typeof error === 'object' && error !== null && '__isStorageError' in error\n}\n\n/**\n * API error returned from Storage service\n * Includes HTTP status code and service-specific error code\n */\nexport class StorageApiError extends StorageError {\n override status: number\n override statusCode: string\n\n constructor(\n message: string,\n status: number,\n statusCode: string,\n namespace: ErrorNamespace = 'storage'\n ) {\n super(message, namespace, status, statusCode)\n this.name = namespace === 'vectors' ? 'StorageVectorsApiError' : 'StorageApiError'\n this.status = status\n this.statusCode = statusCode\n }\n\n toJSON() {\n return {\n name: this.name,\n message: this.message,\n status: this.status,\n statusCode: this.statusCode,\n }\n }\n}\n\n/**\n * Unknown error that doesn't match expected error patterns\n * Wraps the original error for debugging\n */\nexport class StorageUnknownError extends StorageError {\n originalError: unknown\n\n constructor(message: string, originalError: unknown, namespace: ErrorNamespace = 'storage') {\n super(message, namespace)\n this.name = namespace === 'vectors' ? 
'StorageVectorsUnknownError' : 'StorageUnknownError'\n this.originalError = originalError\n }\n}\n\n// ============================================================================\n// Backward Compatibility Exports for Vectors\n// ============================================================================\n\n/**\n * @deprecated Use StorageError with namespace='vectors' instead\n * Alias for backward compatibility with existing vector storage code\n */\nexport class StorageVectorsError extends StorageError {\n constructor(message: string) {\n super(message, 'vectors')\n }\n}\n\n/**\n * Type guard to check if an error is a StorageVectorsError\n * @param error - The error to check\n * @returns True if the error is a StorageVectorsError\n */\nexport function isStorageVectorsError(error: unknown): error is StorageVectorsError {\n return isStorageError(error) && (error as StorageError)['namespace'] === 'vectors'\n}\n\n/**\n * @deprecated Use StorageApiError with namespace='vectors' instead\n * Alias for backward compatibility with existing vector storage code\n */\nexport class StorageVectorsApiError extends StorageApiError {\n constructor(message: string, status: number, statusCode: string) {\n super(message, status, statusCode, 'vectors')\n }\n}\n\n/**\n * @deprecated Use StorageUnknownError with namespace='vectors' instead\n * Alias for backward compatibility with existing vector storage code\n */\nexport class StorageVectorsUnknownError extends StorageUnknownError {\n constructor(message: string, originalError: unknown) {\n super(message, originalError, 'vectors')\n }\n}\n\n/**\n * Error codes specific to S3 Vectors API\n * Maps AWS service errors to application-friendly error codes\n */\nexport enum StorageVectorsErrorCode {\n /** Internal server fault (HTTP 500) */\n InternalError = 'InternalError',\n /** Resource already exists / conflict (HTTP 409) */\n S3VectorConflictException = 'S3VectorConflictException',\n /** Resource not found (HTTP 404) */\n S3VectorNotFoundException = 'S3VectorNotFoundException',\n /** Delete bucket while not empty (HTTP 400) */\n S3VectorBucketNotEmpty = 'S3VectorBucketNotEmpty',\n /** Exceeds bucket quota/limit (HTTP 400) */\n S3VectorMaxBucketsExceeded = 'S3VectorMaxBucketsExceeded',\n /** Exceeds index quota/limit (HTTP 400) */\n S3VectorMaxIndexesExceeded = 'S3VectorMaxIndexesExceeded',\n}\n","type Fetch = typeof fetch\n\n/**\n * Resolves the fetch implementation to use\n * Uses custom fetch if provided, otherwise uses native fetch\n *\n * @param customFetch - Optional custom fetch implementation\n * @returns Resolved fetch function\n */\nexport const resolveFetch = (customFetch?: Fetch): Fetch => {\n if (customFetch) {\n return (...args) => customFetch(...args)\n }\n return (...args) => fetch(...args)\n}\n\n/**\n * Resolves the Response constructor to use\n * Returns native Response constructor\n *\n * @returns Response constructor\n */\nexport const resolveResponse = (): typeof Response => {\n return Response\n}\n\n/**\n * Determine if input is a plain object\n * An object is plain if it's created by either {}, new Object(), or Object.create(null)\n *\n * @param value - Value to check\n * @returns True if value is a plain object\n * @source https://github.com/sindresorhus/is-plain-obj\n */\nexport const isPlainObject = (value: object): boolean => {\n if (typeof value !== 'object' || value === null) {\n return false\n }\n\n const prototype = Object.getPrototypeOf(value)\n return (\n (prototype === null ||\n prototype === Object.prototype ||\n 
Object.getPrototypeOf(prototype) === null) &&\n !(Symbol.toStringTag in value) &&\n !(Symbol.iterator in value)\n )\n}\n\n/**\n * Recursively converts object keys from snake_case to camelCase\n * Used for normalizing API responses\n *\n * @param item - Object to convert\n * @returns Converted object with camelCase keys\n */\nexport const recursiveToCamel = (item: Record<string, any>): unknown => {\n if (Array.isArray(item)) {\n return item.map((el) => recursiveToCamel(el))\n } else if (typeof item === 'function' || item !== Object(item)) {\n return item\n }\n\n const result: Record<string, any> = {}\n Object.entries(item).forEach(([key, value]) => {\n const newKey = key.replace(/([-_][a-z])/gi, (c) => c.toUpperCase().replace(/[-_]/g, ''))\n result[newKey] = recursiveToCamel(value)\n })\n\n return result\n}\n\n/**\n * Validates if a given bucket name is valid according to Supabase Storage API rules\n * Mirrors backend validation from: storage/src/storage/limits.ts:isValidBucketName()\n *\n * Rules:\n * - Length: 1-100 characters\n * - Allowed characters: alphanumeric (a-z, A-Z, 0-9), underscore (_), and safe special characters\n * - Safe special characters: ! - . * ' ( ) space & $ @ = ; : + , ?\n * - Forbidden: path separators (/, \\), path traversal (..), leading/trailing whitespace\n *\n * AWS S3 Reference: https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html\n *\n * @param bucketName - The bucket name to validate\n * @returns true if valid, false otherwise\n */\nexport const isValidBucketName = (bucketName: string): boolean => {\n if (!bucketName || typeof bucketName !== 'string') {\n return false\n }\n\n // Check length constraints (1-100 characters)\n if (bucketName.length === 0 || bucketName.length > 100) {\n return false\n }\n\n // Check for leading/trailing whitespace\n if (bucketName.trim() !== bucketName) {\n return false\n }\n\n // Explicitly reject path separators (security)\n // Note: Consecutive periods (..) 
are allowed by backend - the AWS restriction\n // on relative paths applies to object keys, not bucket names\n if (bucketName.includes('/') || bucketName.includes('\\\\')) {\n return false\n }\n\n // Validate against allowed character set\n // Pattern matches backend regex: /^(\\w|!|-|\\.|\\*|'|\\(|\\)| |&|\\$|@|=|;|:|\\+|,|\\?)*$/\n // This explicitly excludes path separators (/, \\) and other problematic characters\n const bucketNameRegex = /^[\\w!.\\*'() &$@=;:+,?-]+$/\n return bucketNameRegex.test(bucketName)\n}\n\n/**\n * Normalizes a number array to float32 format\n * Ensures all vector values are valid 32-bit floats\n *\n * @param values - Array of numbers to normalize\n * @returns Normalized float32 array\n */\nexport const normalizeToFloat32 = (values: number[]): number[] => {\n // Use Float32Array to ensure proper precision\n return Array.from(new Float32Array(values))\n}\n\n/**\n * Validates vector dimensions match expected dimension\n * Throws error if dimensions don't match\n *\n * @param vector - Vector data to validate\n * @param expectedDimension - Expected vector dimension\n * @throws Error if dimensions don't match\n */\nexport const validateVectorDimension = (\n vector: { float32: number[] },\n expectedDimension?: number\n): void => {\n if (expectedDimension !== undefined && vector.float32.length !== expectedDimension) {\n throw new Error(\n `Vector dimension mismatch: expected ${expectedDimension}, got ${vector.float32.length}`\n )\n }\n}\n","import { StorageApiError, StorageUnknownError, ErrorNamespace } from './errors'\nimport { isPlainObject, resolveResponse } from './helpers'\nimport { FetchParameters } from '../types'\n\nexport type Fetch = typeof fetch\n\n/**\n * Options for fetch requests\n */\nexport interface FetchOptions {\n headers?: {\n [key: string]: string\n }\n duplex?: string\n noResolveJson?: boolean\n}\n\n/**\n * HTTP methods supported by the API\n */\nexport type RequestMethodType = 'GET' | 'POST' | 'PUT' | 'DELETE' | 'HEAD'\n\n/**\n * Extracts error message from various error response formats\n * @param err - Error object from API\n * @returns Human-readable error message\n */\nconst _getErrorMessage = (err: any): string =>\n err.msg ||\n err.message ||\n err.error_description ||\n (typeof err.error === 'string' ? 
err.error : err.error?.message) ||\n JSON.stringify(err)\n\n/**\n * Handles fetch errors and converts them to Storage error types\n * @param error - The error caught from fetch\n * @param reject - Promise rejection function\n * @param options - Fetch options that may affect error handling\n * @param namespace - Error namespace ('storage' or 'vectors')\n */\nconst handleError = async (\n error: unknown,\n reject: (reason?: any) => void,\n options: FetchOptions | undefined,\n namespace: ErrorNamespace\n) => {\n // Check if error is a Response-like object (has status and ok properties)\n // This is more reliable than instanceof which can fail across realms\n const isResponseLike =\n error &&\n typeof error === 'object' &&\n 'status' in error &&\n 'ok' in error &&\n typeof (error as any).status === 'number'\n\n if (isResponseLike && !options?.noResolveJson) {\n const responseError = error as any\n const status = responseError.status || 500\n\n // Try to parse JSON body if available\n if (typeof responseError.json === 'function') {\n responseError\n .json()\n .then((err: any) => {\n const statusCode = err?.statusCode || err?.code || status + ''\n reject(new StorageApiError(_getErrorMessage(err), status, statusCode, namespace))\n })\n .catch(() => {\n // If JSON parsing fails for vectors, create ApiError with HTTP status\n if (namespace === 'vectors') {\n const statusCode = status + ''\n const message = responseError.statusText || `HTTP ${status} error`\n reject(new StorageApiError(message, status, statusCode, namespace))\n } else {\n const statusCode = status + ''\n const message = responseError.statusText || `HTTP ${status} error`\n reject(new StorageApiError(message, status, statusCode, namespace))\n }\n })\n } else {\n // No json() method available, create error from status\n const statusCode = status + ''\n const message = responseError.statusText || `HTTP ${status} error`\n reject(new StorageApiError(message, status, statusCode, namespace))\n }\n } else {\n reject(new StorageUnknownError(_getErrorMessage(error), error, namespace))\n }\n}\n\n/**\n * Builds request parameters for fetch calls\n * @param method - HTTP method\n * @param options - Custom fetch options\n * @param parameters - Additional fetch parameters like AbortSignal\n * @param body - Request body (will be JSON stringified if plain object)\n * @returns Complete fetch request parameters\n */\nconst _getRequestParams = (\n method: RequestMethodType,\n options?: FetchOptions,\n parameters?: FetchParameters,\n body?: object\n) => {\n const params: { [k: string]: any } = { method, headers: options?.headers || {} }\n\n if (method === 'GET' || method === 'HEAD' || !body) {\n return { ...params, ...parameters }\n }\n\n if (isPlainObject(body)) {\n params.headers = { 'Content-Type': 'application/json', ...options?.headers }\n params.body = JSON.stringify(body)\n } else {\n params.body = body\n }\n\n if (options?.duplex) {\n params.duplex = options.duplex\n }\n\n return { ...params, ...parameters }\n}\n\n/**\n * Internal request handler that wraps fetch with error handling\n * @param fetcher - Fetch function to use\n * @param method - HTTP method\n * @param url - Request URL\n * @param options - Custom fetch options\n * @param parameters - Additional fetch parameters\n * @param body - Request body\n * @param namespace - Error namespace ('storage' or 'vectors')\n * @returns Promise with parsed response or error\n */\nasync function _handleRequest(\n fetcher: Fetch,\n method: RequestMethodType,\n url: string,\n options: FetchOptions | 
undefined,\n parameters: FetchParameters | undefined,\n body: object | undefined,\n namespace: ErrorNamespace\n): Promise<any> {\n return new Promise((resolve, reject) => {\n fetcher(url, _getRequestParams(method, options, parameters, body))\n .then((result) => {\n if (!result.ok) throw result\n if (options?.noResolveJson) return result\n\n // AWS S3 Vectors API returns 200 OK with content-length: 0 for successful mutations\n // (putVectors, deleteVectors) instead of 204 or JSON response. This is AWS's design choice\n // for performance optimization of bulk operations (up to 500 vectors per request).\n // We handle this to prevent \"Unexpected end of JSON input\" errors when calling result.json()\n if (namespace === 'vectors') {\n const contentType = result.headers.get('content-type')\n const contentLength = result.headers.get('content-length')\n\n // Return empty object for explicitly empty responses\n if (contentLength === '0' || result.status === 204) {\n return {}\n }\n\n // Return empty object if no JSON content type\n if (!contentType || !contentType.includes('application/json')) {\n return {}\n }\n }\n\n return result.json()\n })\n .then((data) => resolve(data))\n .catch((error) => handleError(error, reject, options, namespace))\n })\n}\n\n/**\n * Creates a fetch API with the specified namespace\n * @param namespace - Error namespace ('storage' or 'vectors')\n * @returns Object with HTTP method functions\n */\nexport function createFetchApi(namespace: ErrorNamespace = 'storage') {\n return {\n /**\n * Performs a GET request\n * @param fetcher - Fetch function to use\n * @param url - Request URL\n * @param options - Custom fetch options\n * @param parameters - Additional fetch parameters\n * @returns Promise with parsed response\n */\n get: async (\n fetcher: Fetch,\n url: string,\n options?: FetchOptions,\n parameters?: FetchParameters\n ): Promise<any> => {\n return _handleRequest(fetcher, 'GET', url, options, parameters, undefined, namespace)\n },\n\n /**\n * Performs a POST request\n * @param fetcher - Fetch function to use\n * @param url - Request URL\n * @param body - Request body to be JSON stringified\n * @param options - Custom fetch options\n * @param parameters - Additional fetch parameters\n * @returns Promise with parsed response\n */\n post: async (\n fetcher: Fetch,\n url: string,\n body: object,\n options?: FetchOptions,\n parameters?: FetchParameters\n ): Promise<any> => {\n return _handleRequest(fetcher, 'POST', url, options, parameters, body, namespace)\n },\n\n /**\n * Performs a PUT request\n * @param fetcher - Fetch function to use\n * @param url - Request URL\n * @param body - Request body to be JSON stringified\n * @param options - Custom fetch options\n * @param parameters - Additional fetch parameters\n * @returns Promise with parsed response\n */\n put: async (\n fetcher: Fetch,\n url: string,\n body: object,\n options?: FetchOptions,\n parameters?: FetchParameters\n ): Promise<any> => {\n return _handleRequest(fetcher, 'PUT', url, options, parameters, body, namespace)\n },\n\n /**\n * Performs a HEAD request\n * @param fetcher - Fetch function to use\n * @param url - Request URL\n * @param options - Custom fetch options\n * @param parameters - Additional fetch parameters\n * @returns Promise with Response object (not JSON parsed)\n */\n head: async (\n fetcher: Fetch,\n url: string,\n options?: FetchOptions,\n parameters?: FetchParameters\n ): Promise<any> => {\n return _handleRequest(\n fetcher,\n 'HEAD',\n url,\n {\n ...options,\n noResolveJson: true,\n 
},\n parameters,\n undefined,\n namespace\n )\n },\n\n /**\n * Performs a DELETE request\n * @param fetcher - Fetch function to use\n * @param url - Request URL\n * @param body - Request body to be JSON stringified\n * @param options - Custom fetch options\n * @param parameters - Additional fetch parameters\n * @returns Promise with parsed response\n */\n remove: async (\n fetcher: Fetch,\n url: string,\n body: object,\n options?: FetchOptions,\n parameters?: FetchParameters\n ): Promise<any> => {\n return _handleRequest(fetcher, 'DELETE', url, options, parameters, body, namespace)\n },\n }\n}\n\n// Default exports for backward compatibility with 'storage' namespace\nconst defaultApi = createFetchApi('storage')\nexport const { get, post, put, head, remove } = defaultApi\n\n// Vectors API with 'vectors' namespace for proper error handling\nexport const vectorsApi = createFetchApi('vectors')\n","import { ErrorNamespace, isStorageError, StorageError } from './errors'\nimport { Fetch } from './fetch'\nimport { resolveFetch } from './helpers'\n\n/**\n * @ignore\n * Base API client class for all Storage API classes\n * Provides common infrastructure for error handling and configuration\n *\n * @typeParam TError - The error type (StorageError or subclass)\n */\nexport default abstract class BaseApiClient<TError extends StorageError = StorageError> {\n protected url: string\n protected headers: { [key: string]: string }\n protected fetch: Fetch\n protected shouldThrowOnError = false\n protected namespace: ErrorNamespace\n\n /**\n * Creates a new BaseApiClient instance\n * @param url - Base URL for API requests\n * @param headers - Default headers for API requests\n * @param fetch - Optional custom fetch implementation\n * @param namespace - Error namespace ('storage' or 'vectors')\n */\n constructor(\n url: string,\n headers: { [key: string]: string } = {},\n fetch?: Fetch,\n namespace: ErrorNamespace = 'storage'\n ) {\n this.url = url\n this.headers = headers\n this.fetch = resolveFetch(fetch)\n this.namespace = namespace\n }\n\n /**\n * Enable throwing errors instead of returning them.\n * When enabled, errors are thrown instead of returned in { data, error } format.\n *\n * @returns this - For method chaining\n */\n public throwOnError(): this {\n this.shouldThrowOnError = true\n return this\n }\n\n /**\n * Handles API operation with standardized error handling\n * Eliminates repetitive try-catch blocks across all API methods\n *\n * This wrapper:\n * 1. Executes the operation\n * 2. Returns { data, error: null } on success\n * 3. Returns { data: null, error } on failure (if shouldThrowOnError is false)\n * 4. 
Throws error on failure (if shouldThrowOnError is true)\n *\n * @typeParam T - The expected data type from the operation\n * @param operation - Async function that performs the API call\n * @returns Promise with { data, error } tuple\n *\n * @example\n * ```typescript\n * async listBuckets() {\n * return this.handleOperation(async () => {\n * return await get(this.fetch, `${this.url}/bucket`, {\n * headers: this.headers,\n * })\n * })\n * }\n * ```\n */\n protected async handleOperation<T>(\n operation: () => Promise<T>\n ): Promise<{ data: T; error: null } | { data: null; error: TError }> {\n try {\n const data = await operation()\n return { data, error: null }\n } catch (error) {\n if (this.shouldThrowOnError) {\n throw error\n }\n if (isStorageError(error)) {\n return { data: null, error: error as TError }\n }\n throw error\n }\n }\n}\n","import { isStorageError } from '../lib/common/errors'\nimport { DownloadResult } from '../lib/types'\n\nexport default class StreamDownloadBuilder implements PromiseLike<DownloadResult<ReadableStream>> {\n constructor(\n private downloadFn: () => Promise<Response>,\n private shouldThrowOnError: boolean\n ) {}\n\n then<TResult1 = DownloadResult<ReadableStream>, TResult2 = never>(\n onfulfilled?:\n | ((value: DownloadResult<ReadableStream>) => TResult1 | PromiseLike<TResult1>)\n | null,\n onrejected?: ((reason: any) => TResult2 | PromiseLike<TResult2>) | null\n ): Promise<TResult1 | TResult2> {\n return this.execute().then(onfulfilled, onrejected)\n }\n\n private async execute(): Promise<DownloadResult<ReadableStream>> {\n try {\n const result = await this.downloadFn()\n\n return {\n data: result.body as ReadableStream,\n error: null,\n }\n } catch (error) {\n if (this.shouldThrowOnError) {\n throw error\n }\n\n if (isStorageError(error)) {\n return { data: null, error }\n }\n\n throw error\n }\n }\n}\n","import { isStorageError } from '../lib/common/errors'\nimport { DownloadResult } from '../lib/types'\nimport StreamDownloadBuilder from './StreamDownloadBuilder'\n\nexport default class BlobDownloadBuilder implements Promise<DownloadResult<Blob>> {\n readonly [Symbol.toStringTag]: string = 'BlobDownloadBuilder'\n private promise: Promise<DownloadResult<Blob>> | null = null\n\n constructor(\n private downloadFn: () => Promise<Response>,\n private shouldThrowOnError: boolean\n ) {}\n\n asStream(): StreamDownloadBuilder {\n return new StreamDownloadBuilder(this.downloadFn, this.shouldThrowOnError)\n }\n\n then<TResult1 = DownloadResult<Blob>, TResult2 = never>(\n onfulfilled?: ((value: DownloadResult<Blob>) => TResult1 | PromiseLike<TResult1>) | null,\n onrejected?: ((reason: any) => TResult2 | PromiseLike<TResult2>) | null\n ): Promise<TResult1 | TResult2> {\n return this.getPromise().then(onfulfilled, onrejected)\n }\n\n catch<TResult = never>(\n onrejected?: ((reason: any) => TResult | PromiseLike<TResult>) | null\n ): Promise<DownloadResult<Blob> | TResult> {\n return this.getPromise().catch(onrejected)\n }\n\n finally(onfinally?: (() => void) | null): Promise<DownloadResult<Blob>> {\n return this.getPromise().finally(onfinally)\n }\n\n private getPromise(): Promise<DownloadResult<Blob>> {\n if (!this.promise) {\n this.promise = this.execute()\n }\n return this.promise\n }\n\n private async execute(): Promise<DownloadResult<Blob>> {\n try {\n const result = await this.downloadFn()\n\n return {\n data: await result.blob(),\n error: null,\n }\n } catch (error) {\n if (this.shouldThrowOnError) {\n throw error\n }\n\n if (isStorageError(error)) {\n return 
{ data: null, error }\n }\n\n throw error\n }\n }\n}\n","import { StorageError, StorageUnknownError, isStorageError } from '../lib/common/errors'\nimport { get, head, post, put, remove, Fetch } from '../lib/common/fetch'\nimport { recursiveToCamel } from '../lib/common/helpers'\nimport BaseApiClient from '../lib/common/BaseApiClient'\nimport {\n FileObject,\n FileOptions,\n SearchOptions,\n FetchParameters,\n TransformOptions,\n DestinationOptions,\n FileObjectV2,\n Camelize,\n SearchV2Options,\n SearchV2Result,\n} from '../lib/types'\nimport BlobDownloadBuilder from './BlobDownloadBuilder'\n\nconst DEFAULT_SEARCH_OPTIONS = {\n limit: 100,\n offset: 0,\n sortBy: {\n column: 'name',\n order: 'asc',\n },\n}\n\nconst DEFAULT_FILE_OPTIONS: FileOptions = {\n cacheControl: '3600',\n contentType: 'text/plain;charset=UTF-8',\n upsert: false,\n}\n\ntype FileBody =\n | ArrayBuffer\n | ArrayBufferView\n | Blob\n | Buffer\n | File\n | FormData\n | NodeJS.ReadableStream\n | ReadableStream<Uint8Array>\n | URLSearchParams\n | string\n\nexport default class StorageFileApi extends BaseApiClient<StorageError> {\n protected bucketId?: string\n\n constructor(\n url: string,\n headers: { [key: string]: string } = {},\n bucketId?: string,\n fetch?: Fetch\n ) {\n super(url, headers, fetch, 'storage')\n this.bucketId = bucketId\n }\n\n /**\n * Uploads a file to an existing bucket or replaces an existing file at the specified path with a new one.\n *\n * @param method HTTP method.\n * @param path The relative file path. Should be of the format `folder/subfolder/filename.png`. The bucket must already exist before attempting to upload.\n * @param fileBody The body of the file to be stored in the bucket.\n */\n private async uploadOrUpdate(\n method: 'POST' | 'PUT',\n path: string,\n fileBody: FileBody,\n fileOptions?: FileOptions\n ): Promise<\n | {\n data: { id: string; path: string; fullPath: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n let body\n const options = { ...DEFAULT_FILE_OPTIONS, ...fileOptions }\n let headers: Record<string, string> = {\n ...this.headers,\n ...(method === 'POST' && { 'x-upsert': String(options.upsert as boolean) }),\n }\n\n const metadata = options.metadata\n\n if (typeof Blob !== 'undefined' && fileBody instanceof Blob) {\n body = new FormData()\n body.append('cacheControl', options.cacheControl as string)\n if (metadata) {\n body.append('metadata', this.encodeMetadata(metadata))\n }\n body.append('', fileBody)\n } else if (typeof FormData !== 'undefined' && fileBody instanceof FormData) {\n body = fileBody\n // Only append if not already present\n if (!body.has('cacheControl')) {\n body.append('cacheControl', options.cacheControl as string)\n }\n if (metadata && !body.has('metadata')) {\n body.append('metadata', this.encodeMetadata(metadata))\n }\n } else {\n body = fileBody\n headers['cache-control'] = `max-age=${options.cacheControl}`\n headers['content-type'] = options.contentType as string\n\n if (metadata) {\n headers['x-metadata'] = this.toBase64(this.encodeMetadata(metadata))\n }\n\n // Node.js streams require duplex option for fetch in Node 20+\n // Check for both web ReadableStream and Node.js streams\n const isStream =\n (typeof ReadableStream !== 'undefined' && body instanceof ReadableStream) ||\n (body && typeof body === 'object' && 'pipe' in body && typeof body.pipe === 'function')\n\n if (isStream && !options.duplex) {\n options.duplex = 'half'\n }\n }\n\n if (fileOptions?.headers) {\n headers 
= { ...headers, ...fileOptions.headers }\n }\n\n const cleanPath = this._removeEmptyFolders(path)\n const _path = this._getFinalPath(cleanPath)\n const data = await (method == 'PUT' ? put : post)(\n this.fetch,\n `${this.url}/object/${_path}`,\n body as object,\n { headers, ...(options?.duplex ? { duplex: options.duplex } : {}) }\n )\n\n return { path: cleanPath, id: data.Id, fullPath: data.Key }\n })\n }\n\n /**\n * Uploads a file to an existing bucket.\n *\n * @category File Buckets\n * @param path The file path, including the file name. Should be of the format `folder/subfolder/filename.png`. The bucket must already exist before attempting to upload.\n * @param fileBody The body of the file to be stored in the bucket.\n * @param fileOptions Optional file upload options including cacheControl, contentType, upsert, and metadata.\n * @returns Promise with response containing file path, id, and fullPath or error\n *\n * @example Upload file\n * ```js\n * const avatarFile = event.target.files[0]\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .upload('public/avatar1.png', avatarFile, {\n * cacheControl: '3600',\n * upsert: false\n * })\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"path\": \"public/avatar1.png\",\n * \"fullPath\": \"avatars/public/avatar1.png\"\n * },\n * \"error\": null\n * }\n * ```\n *\n * @example Upload file using `ArrayBuffer` from base64 file data\n * ```js\n * import { decode } from 'base64-arraybuffer'\n *\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .upload('public/avatar1.png', decode('base64FileData'), {\n * contentType: 'image/png'\n * })\n * ```\n */\n async upload(\n path: string,\n fileBody: FileBody,\n fileOptions?: FileOptions\n ): Promise<\n | {\n data: { id: string; path: string; fullPath: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.uploadOrUpdate('POST', path, fileBody, fileOptions)\n }\n\n /**\n * Upload a file with a token generated from `createSignedUploadUrl`.\n *\n * @category File Buckets\n * @param path The file path, including the file name. Should be of the format `folder/subfolder/filename.png`. The bucket must already exist before attempting to upload.\n * @param token The token generated from `createSignedUploadUrl`\n * @param fileBody The body of the file to be stored in the bucket.\n * @param fileOptions HTTP headers (cacheControl, contentType, etc.).\n * **Note:** The `upsert` option has no effect here. 
To enable upsert behavior,\n * pass `{ upsert: true }` when calling `createSignedUploadUrl()` instead.\n * @returns Promise with response containing file path and fullPath or error\n *\n * @example Upload to a signed URL\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .uploadToSignedUrl('folder/cat.jpg', 'token-from-createSignedUploadUrl', file)\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"path\": \"folder/cat.jpg\",\n * \"fullPath\": \"avatars/folder/cat.jpg\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async uploadToSignedUrl(\n path: string,\n token: string,\n fileBody: FileBody,\n fileOptions?: FileOptions\n ) {\n const cleanPath = this._removeEmptyFolders(path)\n const _path = this._getFinalPath(cleanPath)\n\n const url = new URL(this.url + `/object/upload/sign/${_path}`)\n url.searchParams.set('token', token)\n\n return this.handleOperation(async () => {\n let body\n const options = { upsert: DEFAULT_FILE_OPTIONS.upsert, ...fileOptions }\n const headers: Record<string, string> = {\n ...this.headers,\n ...{ 'x-upsert': String(options.upsert as boolean) },\n }\n\n if (typeof Blob !== 'undefined' && fileBody instanceof Blob) {\n body = new FormData()\n body.append('cacheControl', options.cacheControl as string)\n body.append('', fileBody)\n } else if (typeof FormData !== 'undefined' && fileBody instanceof FormData) {\n body = fileBody\n body.append('cacheControl', options.cacheControl as string)\n } else {\n body = fileBody\n headers['cache-control'] = `max-age=${options.cacheControl}`\n headers['content-type'] = options.contentType as string\n }\n\n const data = await put(this.fetch, url.toString(), body as object, { headers })\n\n return { path: cleanPath, fullPath: data.Key }\n })\n }\n\n /**\n * Creates a signed upload URL.\n * Signed upload URLs can be used to upload files to the bucket without further authentication.\n * They are valid for 2 hours.\n *\n * @category File Buckets\n * @param path The file path, including the current file name. 
For example `folder/image.png`.\n * @param options.upsert If set to true, allows the file to be overwritten if it already exists.\n * @returns Promise with response containing signed upload URL, token, and path or error\n *\n * @example Create Signed Upload URL\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .createSignedUploadUrl('folder/cat.jpg')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"signedUrl\": \"https://example.supabase.co/storage/v1/object/upload/sign/avatars/folder/cat.jpg?token=<TOKEN>\",\n * \"path\": \"folder/cat.jpg\",\n * \"token\": \"<TOKEN>\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async createSignedUploadUrl(\n path: string,\n options?: { upsert: boolean }\n ): Promise<\n | {\n data: { signedUrl: string; token: string; path: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n let _path = this._getFinalPath(path)\n\n const headers = { ...this.headers }\n\n if (options?.upsert) {\n headers['x-upsert'] = 'true'\n }\n\n const data = await post(\n this.fetch,\n `${this.url}/object/upload/sign/${_path}`,\n {},\n { headers }\n )\n\n const url = new URL(this.url + data.url)\n\n const token = url.searchParams.get('token')\n\n if (!token) {\n throw new StorageError('No token returned by API')\n }\n\n return { signedUrl: url.toString(), path, token }\n })\n }\n\n /**\n * Replaces an existing file at the specified path with a new one.\n *\n * @category File Buckets\n * @param path The relative file path. Should be of the format `folder/subfolder/filename.png`. The bucket must already exist before attempting to update.\n * @param fileBody The body of the file to be stored in the bucket.\n * @param fileOptions Optional file upload options including cacheControl, contentType, upsert, and metadata.\n * @returns Promise with response containing file path, id, and fullPath or error\n *\n * @example Update file\n * ```js\n * const avatarFile = event.target.files[0]\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .update('public/avatar1.png', avatarFile, {\n * cacheControl: '3600',\n * upsert: true\n * })\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"path\": \"public/avatar1.png\",\n * \"fullPath\": \"avatars/public/avatar1.png\"\n * },\n * \"error\": null\n * }\n * ```\n *\n * @example Update file using `ArrayBuffer` from base64 file data\n * ```js\n * import {decode} from 'base64-arraybuffer'\n *\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .update('public/avatar1.png', decode('base64FileData'), {\n * contentType: 'image/png'\n * })\n * ```\n */\n async update(\n path: string,\n fileBody:\n | ArrayBuffer\n | ArrayBufferView\n | Blob\n | Buffer\n | File\n | FormData\n | NodeJS.ReadableStream\n | ReadableStream<Uint8Array>\n | URLSearchParams\n | string,\n fileOptions?: FileOptions\n ): Promise<\n | {\n data: { id: string; path: string; fullPath: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.uploadOrUpdate('PUT', path, fileBody, fileOptions)\n }\n\n /**\n * Moves an existing file to a new path in the same bucket.\n *\n * @category File Buckets\n * @param fromPath The original file path, including the current file name. For example `folder/image.png`.\n * @param toPath The new file path, including the new file name. 
For example `folder/image-new.png`.\n * @param options The destination options.\n * @returns Promise with response containing success message or error\n *\n * @example Move file\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .move('public/avatar1.png', 'private/avatar2.png')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"message\": \"Successfully moved\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async move(\n fromPath: string,\n toPath: string,\n options?: DestinationOptions\n ): Promise<\n | {\n data: { message: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n return await post(\n this.fetch,\n `${this.url}/object/move`,\n {\n bucketId: this.bucketId,\n sourceKey: fromPath,\n destinationKey: toPath,\n destinationBucket: options?.destinationBucket,\n },\n { headers: this.headers }\n )\n })\n }\n\n /**\n * Copies an existing file to a new path in the same bucket.\n *\n * @category File Buckets\n * @param fromPath The original file path, including the current file name. For example `folder/image.png`.\n * @param toPath The new file path, including the new file name. For example `folder/image-copy.png`.\n * @param options The destination options.\n * @returns Promise with response containing copied file path or error\n *\n * @example Copy file\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .copy('public/avatar1.png', 'private/avatar2.png')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"path\": \"avatars/private/avatar2.png\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async copy(\n fromPath: string,\n toPath: string,\n options?: DestinationOptions\n ): Promise<\n | {\n data: { path: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n const data = await post(\n this.fetch,\n `${this.url}/object/copy`,\n {\n bucketId: this.bucketId,\n sourceKey: fromPath,\n destinationKey: toPath,\n destinationBucket: options?.destinationBucket,\n },\n { headers: this.headers }\n )\n return { path: data.Key }\n })\n }\n\n /**\n * Creates a signed URL. Use a signed URL to share a file for a fixed amount of time.\n *\n * @category File Buckets\n * @param path The file path, including the current file name. For example `folder/image.png`.\n * @param expiresIn The number of seconds until the signed URL expires. For example, `60` for a URL which is valid for one minute.\n * @param options.download triggers the file as a download if set to true. 
Set this parameter as the name of the file if you want to trigger the download with a different filename.\n * @param options.transform Transform the asset before serving it to the client.\n * @returns Promise with response containing signed URL or error\n *\n * @example Create Signed URL\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .createSignedUrl('folder/avatar1.png', 60)\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"signedUrl\": \"https://example.supabase.co/storage/v1/object/sign/avatars/folder/avatar1.png?token=<TOKEN>\"\n * },\n * \"error\": null\n * }\n * ```\n *\n * @example Create a signed URL for an asset with transformations\n * ```js\n * const { data } = await supabase\n * .storage\n * .from('avatars')\n * .createSignedUrl('folder/avatar1.png', 60, {\n * transform: {\n * width: 100,\n * height: 100,\n * }\n * })\n * ```\n *\n * @example Create a signed URL which triggers the download of the asset\n * ```js\n * const { data } = await supabase\n * .storage\n * .from('avatars')\n * .createSignedUrl('folder/avatar1.png', 60, {\n * download: true,\n * })\n * ```\n */\n async createSignedUrl(\n path: string,\n expiresIn: number,\n options?: { download?: string | boolean; transform?: TransformOptions }\n ): Promise<\n | {\n data: { signedUrl: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n let _path = this._getFinalPath(path)\n\n let data = await post(\n this.fetch,\n `${this.url}/object/sign/${_path}`,\n { expiresIn, ...(options?.transform ? { transform: options.transform } : {}) },\n { headers: this.headers }\n )\n const downloadQueryParam = options?.download\n ? `&download=${options.download === true ? '' : options.download}`\n : ''\n const signedUrl = encodeURI(`${this.url}${data.signedURL}${downloadQueryParam}`)\n return { signedUrl }\n })\n }\n\n /**\n * Creates multiple signed URLs. Use a signed URL to share a file for a fixed amount of time.\n *\n * @category File Buckets\n * @param paths The file paths to be downloaded, including the current file names. For example `['folder/image.png', 'folder2/image2.png']`.\n * @param expiresIn The number of seconds until the signed URLs expire. For example, `60` for URLs which are valid for one minute.\n * @param options.download triggers the file as a download if set to true. 
Set this parameter as the name of the file if you want to trigger the download with a different filename.\n * @returns Promise with response containing array of objects with signedUrl, path, and error or error\n *\n * @example Create Signed URLs\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .createSignedUrls(['folder/avatar1.png', 'folder/avatar2.png'], 60)\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": [\n * {\n * \"error\": null,\n * \"path\": \"folder/avatar1.png\",\n * \"signedURL\": \"/object/sign/avatars/folder/avatar1.png?token=<TOKEN>\",\n * \"signedUrl\": \"https://example.supabase.co/storage/v1/object/sign/avatars/folder/avatar1.png?token=<TOKEN>\"\n * },\n * {\n * \"error\": null,\n * \"path\": \"folder/avatar2.png\",\n * \"signedURL\": \"/object/sign/avatars/folder/avatar2.png?token=<TOKEN>\",\n * \"signedUrl\": \"https://example.supabase.co/storage/v1/object/sign/avatars/folder/avatar2.png?token=<TOKEN>\"\n * }\n * ],\n * \"error\": null\n * }\n * ```\n */\n async createSignedUrls(\n paths: string[],\n expiresIn: number,\n options?: { download: string | boolean }\n ): Promise<\n | {\n data: { error: string | null; path: string | null; signedUrl: string }[]\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n const data = await post(\n this.fetch,\n `${this.url}/object/sign/${this.bucketId}`,\n { expiresIn, paths },\n { headers: this.headers }\n )\n\n const downloadQueryParam = options?.download\n ? `&download=${options.download === true ? '' : options.download}`\n : ''\n return data.map((datum: { signedURL: string }) => ({\n ...datum,\n signedUrl: datum.signedURL\n ? encodeURI(`${this.url}${datum.signedURL}${downloadQueryParam}`)\n : null,\n }))\n })\n }\n\n /**\n * Downloads a file from a private bucket. For public buckets, make a request to the URL returned from `getPublicUrl` instead.\n *\n * @category File Buckets\n * @param path The full path and file name of the file to be downloaded. For example `folder/image.png`.\n * @param options.transform Transform the asset before serving it to the client.\n * @param parameters Additional fetch parameters like signal for cancellation. 
Supports standard fetch options including cache control.\n * @returns BlobDownloadBuilder instance for downloading the file\n *\n * @example Download file\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .download('folder/avatar1.png')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": <BLOB>,\n * \"error\": null\n * }\n * ```\n *\n * @example Download file with transformations\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .download('folder/avatar1.png', {\n * transform: {\n * width: 100,\n * height: 100,\n * quality: 80\n * }\n * })\n * ```\n *\n * @example Download with cache control (useful in Edge Functions)\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .download('folder/avatar1.png', {}, { cache: 'no-store' })\n * ```\n *\n * @example Download with abort signal\n * ```js\n * const controller = new AbortController()\n * setTimeout(() => controller.abort(), 5000)\n *\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .download('folder/avatar1.png', {}, { signal: controller.signal })\n * ```\n */\n download<Options extends { transform?: TransformOptions }>(\n path: string,\n options?: Options,\n parameters?: FetchParameters\n ): BlobDownloadBuilder {\n const wantsTransformation = typeof options?.transform !== 'undefined'\n const renderPath = wantsTransformation ? 'render/image/authenticated' : 'object'\n const transformationQuery = this.transformOptsToQueryString(options?.transform || {})\n const queryString = transformationQuery ? `?${transformationQuery}` : ''\n const _path = this._getFinalPath(path)\n const downloadFn = () =>\n get(\n this.fetch,\n `${this.url}/${renderPath}/${_path}${queryString}`,\n {\n headers: this.headers,\n noResolveJson: true,\n },\n parameters\n )\n return new BlobDownloadBuilder(downloadFn, this.shouldThrowOnError)\n }\n\n /**\n * Retrieves the details of an existing file.\n *\n * @category File Buckets\n * @param path The file path, including the file name. For example `folder/image.png`.\n * @returns Promise with response containing file metadata or error\n *\n * @example Get file info\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .info('folder/avatar1.png')\n * ```\n */\n async info(path: string): Promise<\n | {\n data: Camelize<FileObjectV2>\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n const _path = this._getFinalPath(path)\n\n return this.handleOperation(async () => {\n const data = await get(this.fetch, `${this.url}/object/info/${_path}`, {\n headers: this.headers,\n })\n\n return recursiveToCamel(data) as Camelize<FileObjectV2>\n })\n }\n\n /**\n * Checks the existence of a file.\n *\n * @category File Buckets\n * @param path The file path, including the file name. 
For example `folder/image.png`.\n * @returns Promise with response containing boolean indicating file existence or error\n *\n * @example Check file existence\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .exists('folder/avatar1.png')\n * ```\n */\n async exists(path: string): Promise<\n | {\n data: boolean\n error: null\n }\n | {\n data: boolean\n error: StorageError\n }\n > {\n const _path = this._getFinalPath(path)\n\n try {\n await head(this.fetch, `${this.url}/object/${_path}`, {\n headers: this.headers,\n })\n\n return { data: true, error: null }\n } catch (error) {\n if (this.shouldThrowOnError) {\n throw error\n }\n if (isStorageError(error) && error instanceof StorageUnknownError) {\n const originalError = error.originalError as unknown as { status: number }\n\n if ([400, 404].includes(originalError?.status)) {\n return { data: false, error }\n }\n }\n\n throw error\n }\n }\n\n /**\n * A simple convenience function to get the URL for an asset in a public bucket. If you do not want to use this function, you can construct the public URL by concatenating the bucket URL with the path to the asset.\n * This function does not verify if the bucket is public. If a public URL is created for a bucket which is not public, you will not be able to download the asset.\n *\n * @category File Buckets\n * @param path The path and name of the file to generate the public URL for. For example `folder/image.png`.\n * @param options.download Triggers the file as a download if set to true. Set this parameter as the name of the file if you want to trigger the download with a different filename.\n * @param options.transform Transform the asset before serving it to the client.\n * @returns Object with public URL\n *\n * @example Returns the URL for an asset in a public bucket\n * ```js\n * const { data } = supabase\n * .storage\n * .from('public-bucket')\n * .getPublicUrl('folder/avatar1.png')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"publicUrl\": \"https://example.supabase.co/storage/v1/object/public/public-bucket/folder/avatar1.png\"\n * }\n * }\n * ```\n *\n * @example Returns the URL for an asset in a public bucket with transformations\n * ```js\n * const { data } = supabase\n * .storage\n * .from('public-bucket')\n * .getPublicUrl('folder/avatar1.png', {\n * transform: {\n * width: 100,\n * height: 100,\n * }\n * })\n * ```\n *\n * @example Returns the URL which triggers the download of an asset in a public bucket\n * ```js\n * const { data } = supabase\n * .storage\n * .from('public-bucket')\n * .getPublicUrl('folder/avatar1.png', {\n * download: true,\n * })\n * ```\n */\n getPublicUrl(\n path: string,\n options?: { download?: string | boolean; transform?: TransformOptions }\n ): { data: { publicUrl: string } } {\n const _path = this._getFinalPath(path)\n const _queryString: string[] = []\n\n const downloadQueryParam = options?.download\n ? `download=${options.download === true ? '' : options.download}`\n : ''\n\n if (downloadQueryParam !== '') {\n _queryString.push(downloadQueryParam)\n }\n\n const wantsTransformation = typeof options?.transform !== 'undefined'\n const renderPath = wantsTransformation ? 
'render/image' : 'object'\n const transformationQuery = this.transformOptsToQueryString(options?.transform || {})\n\n if (transformationQuery !== '') {\n _queryString.push(transformationQuery)\n }\n\n let queryString = _queryString.join('&')\n if (queryString !== '') {\n queryString = `?${queryString}`\n }\n\n return {\n data: { publicUrl: encodeURI(`${this.url}/${renderPath}/public/${_path}${queryString}`) },\n }\n }\n\n /**\n * Deletes files within the same bucket\n *\n * @category File Buckets\n * @param paths An array of files to delete, including the path and file name. For example [`'folder/image.png'`].\n * @returns Promise with response containing array of deleted file objects or error\n *\n * @example Delete file\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .remove(['folder/avatar1.png'])\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": [],\n * \"error\": null\n * }\n * ```\n */\n async remove(paths: string[]): Promise<\n | {\n data: FileObject[]\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n return await remove(\n this.fetch,\n `${this.url}/object/${this.bucketId}`,\n { prefixes: paths },\n { headers: this.headers }\n )\n })\n }\n\n /**\n * Get file metadata\n * @param id the file id to retrieve metadata\n */\n // async getMetadata(\n // id: string\n // ): Promise<\n // | {\n // data: Metadata\n // error: null\n // }\n // | {\n // data: null\n // error: StorageError\n // }\n // > {\n // try {\n // const data = await get(this.fetch, `${this.url}/metadata/${id}`, { headers: this.headers })\n // return { data, error: null }\n // } catch (error) {\n // if (isStorageError(error)) {\n // return { data: null, error }\n // }\n\n // throw error\n // }\n // }\n\n /**\n * Update file metadata\n * @param id the file id to update metadata\n * @param meta the new file metadata\n */\n // async updateMetadata(\n // id: string,\n // meta: Metadata\n // ): Promise<\n // | {\n // data: Metadata\n // error: null\n // }\n // | {\n // data: null\n // error: StorageError\n // }\n // > {\n // try {\n // const data = await post(\n // this.fetch,\n // `${this.url}/metadata/${id}`,\n // { ...meta },\n // { headers: this.headers }\n // )\n // return { data, error: null }\n // } catch (error) {\n // if (isStorageError(error)) {\n // return { data: null, error }\n // }\n\n // throw error\n // }\n // }\n\n /**\n * Lists all the files and folders within a path of the bucket.\n *\n * @category File Buckets\n * @param path The folder path.\n * @param options Search options including limit (defaults to 100), offset, sortBy, and search\n * @param parameters Optional fetch parameters including signal for cancellation\n * @returns Promise with response containing array of files or error\n *\n * @example List files in a bucket\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .list('folder', {\n * limit: 100,\n * offset: 0,\n * sortBy: { column: 'name', order: 'asc' },\n * })\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": [\n * {\n * \"name\": \"avatar1.png\",\n * \"id\": \"e668cf7f-821b-4a2f-9dce-7dfa5dd1cfd2\",\n * \"updated_at\": \"2024-05-22T23:06:05.580Z\",\n * \"created_at\": \"2024-05-22T23:04:34.443Z\",\n * \"last_accessed_at\": \"2024-05-22T23:04:34.443Z\",\n * \"metadata\": {\n * \"eTag\": \"\\\"c5e8c553235d9af30ef4f6e280790b92\\\"\",\n * \"size\": 32175,\n * \"mimetype\": \"image/png\",\n * \"cacheControl\": \"max-age=3600\",\n * 
\"lastModified\": \"2024-05-22T23:06:05.574Z\",\n * \"contentLength\": 32175,\n * \"httpStatusCode\": 200\n * }\n * }\n * ],\n * \"error\": null\n * }\n * ```\n *\n * @example Search files in a bucket\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .from('avatars')\n * .list('folder', {\n * limit: 100,\n * offset: 0,\n * sortBy: { column: 'name', order: 'asc' },\n * search: 'jon'\n * })\n * ```\n */\n async list(\n path?: string,\n options?: SearchOptions,\n parameters?: FetchParameters\n ): Promise<\n | {\n data: FileObject[]\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n const body = { ...DEFAULT_SEARCH_OPTIONS, ...options, prefix: path || '' }\n return await post(\n this.fetch,\n `${this.url}/object/list/${this.bucketId}`,\n body,\n { headers: this.headers },\n parameters\n )\n })\n }\n\n /**\n * @experimental this method signature might change in the future\n *\n * @category File Buckets\n * @param options search options\n * @param parameters\n */\n async listV2(\n options?: SearchV2Options,\n parameters?: FetchParameters\n ): Promise<\n | {\n data: SearchV2Result\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n const body = { ...options }\n return await post(\n this.fetch,\n `${this.url}/object/list-v2/${this.bucketId}`,\n body,\n { headers: this.headers },\n parameters\n )\n })\n }\n\n protected encodeMetadata(metadata: Record<string, any>) {\n return JSON.stringify(metadata)\n }\n\n toBase64(data: string) {\n if (typeof Buffer !== 'undefined') {\n return Buffer.from(data).toString('base64')\n }\n return btoa(data)\n }\n\n private _getFinalPath(path: string) {\n return `${this.bucketId}/${path.replace(/^\\/+/, '')}`\n }\n\n private _removeEmptyFolders(path: string) {\n return path.replace(/^\\/|\\/$/g, '').replace(/\\/+/g, '/')\n }\n\n private transformOptsToQueryString(transform: TransformOptions) {\n const params: string[] = []\n if (transform.width) {\n params.push(`width=${transform.width}`)\n }\n\n if (transform.height) {\n params.push(`height=${transform.height}`)\n }\n\n if (transform.resize) {\n params.push(`resize=${transform.resize}`)\n }\n\n if (transform.format) {\n params.push(`format=${transform.format}`)\n }\n\n if (transform.quality) {\n params.push(`quality=${transform.quality}`)\n }\n\n return params.join('&')\n }\n}\n","// Generated automatically during releases by scripts/update-version-files.ts\n// This file provides runtime access to the package version for:\n// - HTTP request headers (e.g., X-Client-Info header for API requests)\n// - Debugging and support (identifying which version is running)\n// - Telemetry and logging (version reporting in errors/analytics)\n// - Ensuring build artifacts match the published package version\nexport const version = '2.95.3'\n","import { version } from './version'\nexport const DEFAULT_HEADERS = {\n 'X-Client-Info': `storage-js/${version}`,\n}\n","import { DEFAULT_HEADERS } from '../lib/constants'\nimport { StorageError } from '../lib/common/errors'\nimport { Fetch, get, post, put, remove } from '../lib/common/fetch'\nimport BaseApiClient from '../lib/common/BaseApiClient'\nimport { Bucket, BucketType, ListBucketOptions } from '../lib/types'\nimport { StorageClientOptions } from '../StorageClient'\n\nexport default class StorageBucketApi extends BaseApiClient<StorageError> {\n constructor(\n url: string,\n headers: { [key: string]: string } = {},\n fetch?: Fetch,\n opts?: 
StorageClientOptions\n ) {\n const baseUrl = new URL(url)\n\n // if legacy uri is used, replace with new storage host (disables request buffering to allow > 50GB uploads)\n // \"project-ref.supabase.co\" becomes \"project-ref.storage.supabase.co\"\n if (opts?.useNewHostname) {\n const isSupabaseHost = /supabase\\.(co|in|red)$/.test(baseUrl.hostname)\n if (isSupabaseHost && !baseUrl.hostname.includes('storage.supabase.')) {\n baseUrl.hostname = baseUrl.hostname.replace('supabase.', 'storage.supabase.')\n }\n }\n\n const finalUrl = baseUrl.href.replace(/\\/$/, '')\n const finalHeaders = { ...DEFAULT_HEADERS, ...headers }\n\n super(finalUrl, finalHeaders, fetch, 'storage')\n }\n\n /**\n * Retrieves the details of all Storage buckets within an existing project.\n *\n * @category File Buckets\n * @param options Query parameters for listing buckets\n * @param options.limit Maximum number of buckets to return\n * @param options.offset Number of buckets to skip\n * @param options.sortColumn Column to sort by ('id', 'name', 'created_at', 'updated_at')\n * @param options.sortOrder Sort order ('asc' or 'desc')\n * @param options.search Search term to filter bucket names\n * @returns Promise with response containing array of buckets or error\n *\n * @example List buckets\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .listBuckets()\n * ```\n *\n * @example List buckets with options\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .listBuckets({\n * limit: 10,\n * offset: 0,\n * sortColumn: 'created_at',\n * sortOrder: 'desc',\n * search: 'prod'\n * })\n * ```\n */\n async listBuckets(options?: ListBucketOptions): Promise<\n | {\n data: Bucket[]\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n const queryString = this.listBucketOptionsToQueryString(options)\n return await get(this.fetch, `${this.url}/bucket${queryString}`, {\n headers: this.headers,\n })\n })\n }\n\n /**\n * Retrieves the details of an existing Storage bucket.\n *\n * @category File Buckets\n * @param id The unique identifier of the bucket you would like to retrieve.\n * @returns Promise with response containing bucket details or error\n *\n * @example Get bucket\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .getBucket('avatars')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"id\": \"avatars\",\n * \"name\": \"avatars\",\n * \"owner\": \"\",\n * \"public\": false,\n * \"file_size_limit\": 1024,\n * \"allowed_mime_types\": [\n * \"image/png\"\n * ],\n * \"created_at\": \"2024-05-22T22:26:05.100Z\",\n * \"updated_at\": \"2024-05-22T22:26:05.100Z\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async getBucket(id: string): Promise<\n | {\n data: Bucket\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n return await get(this.fetch, `${this.url}/bucket/${id}`, { headers: this.headers })\n })\n }\n\n /**\n * Creates a new Storage bucket\n *\n * @category File Buckets\n * @param id A unique identifier for the bucket you are creating.\n * @param options.public The visibility of the bucket. Public buckets don't require an authorization token to download objects, but still require a valid token for all other operations. 
By default, buckets are private.\n * @param options.fileSizeLimit specifies the max file size in bytes that can be uploaded to this bucket.\n * The global file size limit takes precedence over this value.\n * The default value is null, which doesn't set a per bucket file size limit.\n * @param options.allowedMimeTypes specifies the allowed mime types that this bucket can accept during upload.\n * The default value is null, which allows files with all mime types to be uploaded.\n * Each mime type specified can be a wildcard, e.g. image/*, or a specific mime type, e.g. image/png.\n * @param options.type (private-beta) specifies the bucket type. see `BucketType` for more details.\n * - default bucket type is `STANDARD`\n * @returns Promise with response containing newly created bucket name or error\n *\n * @example Create bucket\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .createBucket('avatars', {\n * public: false,\n * allowedMimeTypes: ['image/png'],\n * fileSizeLimit: 1024\n * })\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"name\": \"avatars\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async createBucket(\n id: string,\n options: {\n public: boolean\n fileSizeLimit?: number | string | null\n allowedMimeTypes?: string[] | null\n type?: BucketType\n } = {\n public: false,\n }\n ): Promise<\n | {\n data: Pick<Bucket, 'name'>\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n return await post(\n this.fetch,\n `${this.url}/bucket`,\n {\n id,\n name: id,\n type: options.type,\n public: options.public,\n file_size_limit: options.fileSizeLimit,\n allowed_mime_types: options.allowedMimeTypes,\n },\n { headers: this.headers }\n )\n })\n }\n\n /**\n * Updates a Storage bucket\n *\n * @category File Buckets\n * @param id A unique identifier for the bucket you are updating.\n * @param options.public The visibility of the bucket. Public buckets don't require an authorization token to download objects, but still require a valid token for all other operations.\n * @param options.fileSizeLimit specifies the max file size in bytes that can be uploaded to this bucket.\n * The global file size limit takes precedence over this value.\n * The default value is null, which doesn't set a per bucket file size limit.\n * @param options.allowedMimeTypes specifies the allowed mime types that this bucket can accept during upload.\n * The default value is null, which allows files with all mime types to be uploaded.\n * Each mime type specified can be a wildcard, e.g. image/*, or a specific mime type, e.g. 
image/png.\n * @returns Promise with response containing success message or error\n *\n * @example Update bucket\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .updateBucket('avatars', {\n * public: false,\n * allowedMimeTypes: ['image/png'],\n * fileSizeLimit: 1024\n * })\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"message\": \"Successfully updated\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async updateBucket(\n id: string,\n options: {\n public: boolean\n fileSizeLimit?: number | string | null\n allowedMimeTypes?: string[] | null\n }\n ): Promise<\n | {\n data: { message: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n return await put(\n this.fetch,\n `${this.url}/bucket/${id}`,\n {\n id,\n name: id,\n public: options.public,\n file_size_limit: options.fileSizeLimit,\n allowed_mime_types: options.allowedMimeTypes,\n },\n { headers: this.headers }\n )\n })\n }\n\n /**\n * Removes all objects inside a single bucket.\n *\n * @category File Buckets\n * @param id The unique identifier of the bucket you would like to empty.\n * @returns Promise with success message or error\n *\n * @example Empty bucket\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .emptyBucket('avatars')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"message\": \"Successfully emptied\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async emptyBucket(id: string): Promise<\n | {\n data: { message: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n return await post(this.fetch, `${this.url}/bucket/${id}/empty`, {}, { headers: this.headers })\n })\n }\n\n /**\n * Deletes an existing bucket. A bucket can't be deleted with existing objects inside it.\n * You must first `empty()` the bucket.\n *\n * @category File Buckets\n * @param id The unique identifier of the bucket you would like to delete.\n * @returns Promise with success message or error\n *\n * @example Delete bucket\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .deleteBucket('avatars')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"message\": \"Successfully deleted\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async deleteBucket(id: string): Promise<\n | {\n data: { message: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n return await remove(this.fetch, `${this.url}/bucket/${id}`, {}, { headers: this.headers })\n })\n }\n\n private listBucketOptionsToQueryString(options?: ListBucketOptions): string {\n const params: Record<string, string> = {}\n if (options) {\n if ('limit' in options) {\n params.limit = String(options.limit)\n }\n if ('offset' in options) {\n params.offset = String(options.offset)\n }\n if (options.search) {\n params.search = options.search\n }\n if (options.sortColumn) {\n params.sortColumn = options.sortColumn\n }\n if (options.sortOrder) {\n params.sortOrder = options.sortOrder\n }\n }\n return Object.keys(params).length > 0 ? '?' 
+ new URLSearchParams(params).toString() : ''\n }\n}\n","import { IcebergRestCatalog, IcebergError } from 'iceberg-js'\nimport { DEFAULT_HEADERS } from '../lib/constants'\nimport { StorageError } from '../lib/common/errors'\nimport { Fetch, get, post, remove } from '../lib/common/fetch'\nimport { isValidBucketName } from '../lib/common/helpers'\nimport BaseApiClient from '../lib/common/BaseApiClient'\nimport { AnalyticBucket } from '../lib/types'\n\ntype WrapAsyncMethod<T> = T extends (...args: infer A) => Promise<infer R>\n ? (...args: A) => Promise<{ data: R; error: null } | { data: null; error: IcebergError }>\n : T\n\nexport type WrappedIcebergRestCatalog = {\n [K in keyof IcebergRestCatalog]: WrapAsyncMethod<IcebergRestCatalog[K]>\n}\n\n/**\n * Client class for managing Analytics Buckets using Iceberg tables\n * Provides methods for creating, listing, and deleting analytics buckets\n */\nexport default class StorageAnalyticsClient extends BaseApiClient<StorageError> {\n /**\n * @alpha\n *\n * Creates a new StorageAnalyticsClient instance\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Analytics Buckets\n * @param url - The base URL for the storage API\n * @param headers - HTTP headers to include in requests\n * @param fetch - Optional custom fetch implementation\n *\n * @example\n * ```typescript\n * const client = new StorageAnalyticsClient(url, headers)\n * ```\n */\n constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) {\n const finalUrl = url.replace(/\\/$/, '')\n const finalHeaders = { ...DEFAULT_HEADERS, ...headers }\n super(finalUrl, finalHeaders, fetch, 'storage')\n }\n\n /**\n * @alpha\n *\n * Creates a new analytics bucket using Iceberg tables\n * Analytics buckets are optimized for analytical queries and data processing\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Analytics Buckets\n * @param name A unique name for the bucket you are creating\n * @returns Promise with response containing newly created analytics bucket or error\n *\n * @example Create analytics bucket\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .analytics\n * .createBucket('analytics-data')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"name\": \"analytics-data\",\n * \"type\": \"ANALYTICS\",\n * \"format\": \"iceberg\",\n * \"created_at\": \"2024-05-22T22:26:05.100Z\",\n * \"updated_at\": \"2024-05-22T22:26:05.100Z\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async createBucket(name: string): Promise<\n | {\n data: AnalyticBucket\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n return await post(this.fetch, `${this.url}/bucket`, { name }, { headers: this.headers })\n })\n }\n\n /**\n * @alpha\n *\n * Retrieves the details of all Analytics Storage buckets within an existing project\n * Only returns buckets of type 'ANALYTICS'\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Analytics Buckets\n * @param options Query parameters for listing buckets\n * @param options.limit Maximum number of buckets to return\n * @param options.offset Number of buckets to skip\n * @param options.sortColumn Column to sort by ('name', 'created_at', 'updated_at')\n * @param options.sortOrder Sort order ('asc' or 'desc')\n * @param 
options.search Search term to filter bucket names\n * @returns Promise with response containing array of analytics buckets or error\n *\n * @example List analytics buckets\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .analytics\n * .listBuckets({\n * limit: 10,\n * offset: 0,\n * sortColumn: 'created_at',\n * sortOrder: 'desc'\n * })\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": [\n * {\n * \"name\": \"analytics-data\",\n * \"type\": \"ANALYTICS\",\n * \"format\": \"iceberg\",\n * \"created_at\": \"2024-05-22T22:26:05.100Z\",\n * \"updated_at\": \"2024-05-22T22:26:05.100Z\"\n * }\n * ],\n * \"error\": null\n * }\n * ```\n */\n async listBuckets(options?: {\n limit?: number\n offset?: number\n sortColumn?: 'name' | 'created_at' | 'updated_at'\n sortOrder?: 'asc' | 'desc'\n search?: string\n }): Promise<\n | {\n data: AnalyticBucket[]\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n // Build query string from options\n const queryParams = new URLSearchParams()\n if (options?.limit !== undefined) queryParams.set('limit', options.limit.toString())\n if (options?.offset !== undefined) queryParams.set('offset', options.offset.toString())\n if (options?.sortColumn) queryParams.set('sortColumn', options.sortColumn)\n if (options?.sortOrder) queryParams.set('sortOrder', options.sortOrder)\n if (options?.search) queryParams.set('search', options.search)\n\n const queryString = queryParams.toString()\n const url = queryString ? `${this.url}/bucket?${queryString}` : `${this.url}/bucket`\n\n return await get(this.fetch, url, { headers: this.headers })\n })\n }\n\n /**\n * @alpha\n *\n * Deletes an existing analytics bucket\n * A bucket can't be deleted with existing objects inside it\n * You must first empty the bucket before deletion\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Analytics Buckets\n * @param bucketName The unique identifier of the bucket you would like to delete\n * @returns Promise with response containing success message or error\n *\n * @example Delete analytics bucket\n * ```js\n * const { data, error } = await supabase\n * .storage\n * .analytics\n * .deleteBucket('analytics-data')\n * ```\n *\n * Response:\n * ```json\n * {\n * \"data\": {\n * \"message\": \"Successfully deleted\"\n * },\n * \"error\": null\n * }\n * ```\n */\n async deleteBucket(bucketName: string): Promise<\n | {\n data: { message: string }\n error: null\n }\n | {\n data: null\n error: StorageError\n }\n > {\n return this.handleOperation(async () => {\n return await remove(\n this.fetch,\n `${this.url}/bucket/${bucketName}`,\n {},\n { headers: this.headers }\n )\n })\n }\n\n /**\n * @alpha\n *\n * Get an Iceberg REST Catalog client configured for a specific analytics bucket\n * Use this to perform advanced table and namespace operations within the bucket\n * The returned client provides full access to the Apache Iceberg REST Catalog API\n * with the Supabase `{ data, error }` pattern for consistent error handling on all operations.\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Analytics Buckets\n * @param bucketName - The name of the analytics bucket (warehouse) to connect to\n * @returns The wrapped Iceberg catalog client\n * @throws {StorageError} If the bucket name is invalid\n *\n * @example Get catalog and create table\n * ```js\n * 
// First, create an analytics bucket\n * const { data: bucket, error: bucketError } = await supabase\n * .storage\n * .analytics\n * .createBucket('analytics-data')\n *\n * // Get the Iceberg catalog for that bucket\n * const catalog = supabase.storage.analytics.from('analytics-data')\n *\n * // Create a namespace\n * const { error: nsError } = await catalog.createNamespace({ namespace: ['default'] })\n *\n * // Create a table with schema\n * const { data: tableMetadata, error: tableError } = await catalog.createTable(\n * { namespace: ['default'] },\n * {\n * name: 'events',\n * schema: {\n * type: 'struct',\n * fields: [\n * { id: 1, name: 'id', type: 'long', required: true },\n * { id: 2, name: 'timestamp', type: 'timestamp', required: true },\n * { id: 3, name: 'user_id', type: 'string', required: false }\n * ],\n * 'schema-id': 0,\n * 'identifier-field-ids': [1]\n * },\n * 'partition-spec': {\n * 'spec-id': 0,\n * fields: []\n * },\n * 'write-order': {\n * 'order-id': 0,\n * fields: []\n * },\n * properties: {\n * 'write.format.default': 'parquet'\n * }\n * }\n * )\n * ```\n *\n * @example List tables in namespace\n * ```js\n * const catalog = supabase.storage.analytics.from('analytics-data')\n *\n * // List all tables in the default namespace\n * const { data: tables, error: listError } = await catalog.listTables({ namespace: ['default'] })\n * if (listError) {\n * if (listError.isNotFound()) {\n * console.log('Namespace not found')\n * }\n * return\n * }\n * console.log(tables) // [{ namespace: ['default'], name: 'events' }]\n * ```\n *\n * @example Working with namespaces\n * ```js\n * const catalog = supabase.storage.analytics.from('analytics-data')\n *\n * // List all namespaces\n * const { data: namespaces } = await catalog.listNamespaces()\n *\n * // Create namespace with properties\n * await catalog.createNamespace(\n * { namespace: ['production'] },\n * { properties: { owner: 'data-team', env: 'prod' } }\n * )\n * ```\n *\n * @example Cleanup operations\n * ```js\n * const catalog = supabase.storage.analytics.from('analytics-data')\n *\n * // Drop table with purge option (removes all data)\n * const { error: dropError } = await catalog.dropTable(\n * { namespace: ['default'], name: 'events' },\n * { purge: true }\n * )\n *\n * if (dropError?.isNotFound()) {\n * console.log('Table does not exist')\n * }\n *\n * // Drop namespace (must be empty)\n * await catalog.dropNamespace({ namespace: ['default'] })\n * ```\n *\n * @remarks\n * This method provides a bridge between Supabase's bucket management and the standard\n * Apache Iceberg REST Catalog API. The bucket name maps to the Iceberg warehouse parameter.\n * All authentication and configuration is handled automatically using your Supabase credentials.\n *\n * **Error Handling**: Invalid bucket names throw immediately. All catalog\n * operations return `{ data, error }` where errors are `IcebergError` instances from iceberg-js.\n * Use helper methods like `error.isNotFound()` or check `error.status` for specific error handling.\n * Use `.throwOnError()` on the analytics client if you prefer exceptions for catalog operations.\n *\n * **Cleanup Operations**: When using `dropTable`, the `purge: true` option permanently\n * deletes all table data. 
Without it, the table is marked as deleted but data remains.\n *\n * **Library Dependency**: The returned catalog wraps `IcebergRestCatalog` from iceberg-js.\n * For complete API documentation and advanced usage, refer to the\n * [iceberg-js documentation](https://supabase.github.io/iceberg-js/).\n */\n from(bucketName: string): WrappedIcebergRestCatalog {\n // Validate bucket name using same rules as Supabase Storage API backend\n if (!isValidBucketName(bucketName)) {\n throw new StorageError(\n 'Invalid bucket name: File, folder, and bucket names must follow AWS object key naming guidelines ' +\n 'and should avoid the use of any other characters.'\n )\n }\n\n // Construct the Iceberg REST Catalog URL\n // The base URL is /storage/v1/iceberg\n // Note: IcebergRestCatalog from iceberg-js automatically adds /v1/ prefix to API paths\n // so we should NOT append /v1 here (it would cause double /v1/v1/ in the URL)\n const catalog = new IcebergRestCatalog({\n baseUrl: this.url,\n catalogName: bucketName, // Maps to the warehouse parameter in Supabase's implementation\n auth: {\n type: 'custom',\n getHeaders: async () => this.headers,\n },\n fetch: this.fetch,\n })\n\n const shouldThrowOnError = this.shouldThrowOnError\n\n const wrappedCatalog = new Proxy(catalog, {\n get(target, prop: keyof IcebergRestCatalog) {\n const value = target[prop]\n if (typeof value !== 'function') {\n return value\n }\n\n return async (...args: unknown[]) => {\n try {\n const data = await (value as Function).apply(target, args)\n return { data, error: null }\n } catch (error) {\n if (shouldThrowOnError) {\n throw error\n }\n return { data: null, error: error as IcebergError }\n }\n }\n },\n }) as unknown as WrappedIcebergRestCatalog\n\n return wrappedCatalog\n }\n}\n","import { DEFAULT_HEADERS } from '../lib/constants'\nimport { StorageError } from '../lib/common/errors'\nimport { Fetch, vectorsApi } from '../lib/common/fetch'\nimport BaseApiClient from '../lib/common/BaseApiClient'\nimport {\n ApiResponse,\n VectorIndex,\n ListIndexesOptions,\n ListIndexesResponse,\n VectorDataType,\n DistanceMetric,\n MetadataConfiguration,\n} from '../lib/types'\n\n/**\n * @alpha\n *\n * Options for creating a vector index\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n */\nexport interface CreateIndexOptions {\n vectorBucketName: string\n indexName: string\n dataType: VectorDataType\n dimension: number\n distanceMetric: DistanceMetric\n metadataConfiguration?: MetadataConfiguration\n}\n\n/**\n * @hidden\n * Base implementation for vector index operations.\n * Use {@link VectorBucketScope} via `supabase.storage.vectors.from('bucket')` instead.\n */\nexport default class VectorIndexApi extends BaseApiClient<StorageError> {\n /** Creates a new VectorIndexApi instance */\n constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) {\n const finalUrl = url.replace(/\\/$/, '')\n const finalHeaders = { ...DEFAULT_HEADERS, 'Content-Type': 'application/json', ...headers }\n super(finalUrl, finalHeaders, fetch, 'vectors')\n }\n\n /** Creates a new vector index within a bucket */\n async createIndex(options: CreateIndexOptions): Promise<ApiResponse<undefined>> {\n return this.handleOperation(async () => {\n const data = await vectorsApi.post(this.fetch, `${this.url}/CreateIndex`, options, {\n headers: this.headers,\n })\n return data || {}\n })\n }\n\n /** Retrieves metadata for a specific vector index */\n async getIndex(\n vectorBucketName: 
string,\n indexName: string\n ): Promise<ApiResponse<{ index: VectorIndex }>> {\n return this.handleOperation(async () => {\n return await vectorsApi.post(\n this.fetch,\n `${this.url}/GetIndex`,\n { vectorBucketName, indexName },\n { headers: this.headers }\n )\n })\n }\n\n /** Lists vector indexes within a bucket with optional filtering and pagination */\n async listIndexes(options: ListIndexesOptions): Promise<ApiResponse<ListIndexesResponse>> {\n return this.handleOperation(async () => {\n return await vectorsApi.post(this.fetch, `${this.url}/ListIndexes`, options, {\n headers: this.headers,\n })\n })\n }\n\n /** Deletes a vector index and all its data */\n async deleteIndex(vectorBucketName: string, indexName: string): Promise<ApiResponse<undefined>> {\n return this.handleOperation(async () => {\n const data = await vectorsApi.post(\n this.fetch,\n `${this.url}/DeleteIndex`,\n { vectorBucketName, indexName },\n { headers: this.headers }\n )\n return data || {}\n })\n }\n}\n","import { DEFAULT_HEADERS } from '../lib/constants'\nimport { StorageError } from '../lib/common/errors'\nimport { Fetch, vectorsApi } from '../lib/common/fetch'\nimport BaseApiClient from '../lib/common/BaseApiClient'\nimport {\n ApiResponse,\n PutVectorsOptions,\n GetVectorsOptions,\n GetVectorsResponse,\n DeleteVectorsOptions,\n ListVectorsOptions,\n ListVectorsResponse,\n QueryVectorsOptions,\n QueryVectorsResponse,\n} from '../lib/types'\n\n/**\n * @hidden\n * Base implementation for vector data operations.\n * Use {@link VectorIndexScope} via `supabase.storage.vectors.from('bucket').index('idx')` instead.\n */\nexport default class VectorDataApi extends BaseApiClient<StorageError> {\n /** Creates a new VectorDataApi instance */\n constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) {\n const finalUrl = url.replace(/\\/$/, '')\n const finalHeaders = { ...DEFAULT_HEADERS, 'Content-Type': 'application/json', ...headers }\n super(finalUrl, finalHeaders, fetch, 'vectors')\n }\n\n /** Inserts or updates vectors in batch (1-500 per request) */\n async putVectors(options: PutVectorsOptions): Promise<ApiResponse<undefined>> {\n // Validate batch size\n if (options.vectors.length < 1 || options.vectors.length > 500) {\n throw new Error('Vector batch size must be between 1 and 500 items')\n }\n\n return this.handleOperation(async () => {\n const data = await vectorsApi.post(this.fetch, `${this.url}/PutVectors`, options, {\n headers: this.headers,\n })\n return data || {}\n })\n }\n\n /** Retrieves vectors by their keys in batch */\n async getVectors(options: GetVectorsOptions): Promise<ApiResponse<GetVectorsResponse>> {\n return this.handleOperation(async () => {\n return await vectorsApi.post(this.fetch, `${this.url}/GetVectors`, options, {\n headers: this.headers,\n })\n })\n }\n\n /** Lists vectors in an index with pagination */\n async listVectors(options: ListVectorsOptions): Promise<ApiResponse<ListVectorsResponse>> {\n // Validate segment configuration\n if (options.segmentCount !== undefined) {\n if (options.segmentCount < 1 || options.segmentCount > 16) {\n throw new Error('segmentCount must be between 1 and 16')\n }\n if (options.segmentIndex !== undefined) {\n if (options.segmentIndex < 0 || options.segmentIndex >= options.segmentCount) {\n throw new Error(`segmentIndex must be between 0 and ${options.segmentCount - 1}`)\n }\n }\n }\n\n return this.handleOperation(async () => {\n return await vectorsApi.post(this.fetch, `${this.url}/ListVectors`, options, {\n headers: 
this.headers,\n })\n })\n }\n\n /** Queries for similar vectors using approximate nearest neighbor search */\n async queryVectors(options: QueryVectorsOptions): Promise<ApiResponse<QueryVectorsResponse>> {\n return this.handleOperation(async () => {\n return await vectorsApi.post(this.fetch, `${this.url}/QueryVectors`, options, {\n headers: this.headers,\n })\n })\n }\n\n /** Deletes vectors by their keys in batch (1-500 per request) */\n async deleteVectors(options: DeleteVectorsOptions): Promise<ApiResponse<undefined>> {\n // Validate batch size\n if (options.keys.length < 1 || options.keys.length > 500) {\n throw new Error('Keys batch size must be between 1 and 500 items')\n }\n\n return this.handleOperation(async () => {\n const data = await vectorsApi.post(this.fetch, `${this.url}/DeleteVectors`, options, {\n headers: this.headers,\n })\n return data || {}\n })\n }\n}\n","import { DEFAULT_HEADERS } from '../lib/constants'\nimport { StorageError } from '../lib/common/errors'\nimport { Fetch, vectorsApi } from '../lib/common/fetch'\nimport BaseApiClient from '../lib/common/BaseApiClient'\nimport {\n ApiResponse,\n VectorBucket,\n ListVectorBucketsOptions,\n ListVectorBucketsResponse,\n} from '../lib/types'\n\n/**\n * @hidden\n * Base implementation for vector bucket operations.\n * Use {@link StorageVectorsClient} via `supabase.storage.vectors` instead.\n */\nexport default class VectorBucketApi extends BaseApiClient<StorageError> {\n /** Creates a new VectorBucketApi instance */\n constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) {\n const finalUrl = url.replace(/\\/$/, '')\n const finalHeaders = { ...DEFAULT_HEADERS, 'Content-Type': 'application/json', ...headers }\n super(finalUrl, finalHeaders, fetch, 'vectors')\n }\n\n /** Creates a new vector bucket */\n async createBucket(vectorBucketName: string): Promise<ApiResponse<undefined>> {\n return this.handleOperation(async () => {\n const data = await vectorsApi.post(\n this.fetch,\n `${this.url}/CreateVectorBucket`,\n { vectorBucketName },\n { headers: this.headers }\n )\n return data || {}\n })\n }\n\n /** Retrieves metadata for a specific vector bucket */\n async getBucket(vectorBucketName: string): Promise<ApiResponse<{ vectorBucket: VectorBucket }>> {\n return this.handleOperation(async () => {\n return await vectorsApi.post(\n this.fetch,\n `${this.url}/GetVectorBucket`,\n { vectorBucketName },\n { headers: this.headers }\n )\n })\n }\n\n /** Lists vector buckets with optional filtering and pagination */\n async listBuckets(\n options: ListVectorBucketsOptions = {}\n ): Promise<ApiResponse<ListVectorBucketsResponse>> {\n return this.handleOperation(async () => {\n return await vectorsApi.post(this.fetch, `${this.url}/ListVectorBuckets`, options, {\n headers: this.headers,\n })\n })\n }\n\n /** Deletes a vector bucket (must be empty first) */\n async deleteBucket(vectorBucketName: string): Promise<ApiResponse<undefined>> {\n return this.handleOperation(async () => {\n const data = await vectorsApi.post(\n this.fetch,\n `${this.url}/DeleteVectorBucket`,\n { vectorBucketName },\n { headers: this.headers }\n )\n return data || {}\n })\n }\n}\n","import VectorIndexApi, { CreateIndexOptions } from './VectorIndexApi'\nimport VectorDataApi from './VectorDataApi'\nimport { Fetch } from '../lib/common/fetch'\nimport VectorBucketApi from './VectorBucketApi'\nimport {\n ApiResponse,\n DeleteVectorsOptions,\n GetVectorsOptions,\n ListIndexesOptions,\n ListVectorsOptions,\n ListVectorBucketsOptions,\n 
ListVectorBucketsResponse,\n PutVectorsOptions,\n QueryVectorsOptions,\n VectorBucket,\n} from '../lib/types'\n\n/**\n *\n * @alpha\n *\n * Configuration options for the Storage Vectors client\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n */\nexport interface StorageVectorsClientOptions {\n /**\n * Custom headers to include in all requests\n */\n headers?: { [key: string]: string }\n /**\n * Custom fetch implementation (optional)\n * Useful for testing or custom request handling\n */\n fetch?: Fetch\n}\n\n/**\n *\n * @alpha\n *\n * Main client for interacting with S3 Vectors API\n * Provides access to bucket, index, and vector data operations\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * **Usage Patterns:**\n *\n * ```typescript\n * const { data, error } = await supabase\n * .storage\n * .vectors\n * .createBucket('embeddings-prod')\n *\n * // Access index operations via buckets\n * const bucket = supabase.storage.vectors.from('embeddings-prod')\n * await bucket.createIndex({\n * indexName: 'documents',\n * dataType: 'float32',\n * dimension: 1536,\n * distanceMetric: 'cosine'\n * })\n *\n * // Access vector operations via index\n * const index = bucket.index('documents')\n * await index.putVectors({\n * vectors: [\n * { key: 'doc-1', data: { float32: [...] }, metadata: { title: 'Intro' } }\n * ]\n * })\n *\n * // Query similar vectors\n * const { data } = await index.queryVectors({\n * queryVector: { float32: [...] },\n * topK: 5,\n * returnDistance: true\n * })\n * ```\n */\nexport class StorageVectorsClient extends VectorBucketApi {\n /**\n * @alpha\n *\n * Creates a StorageVectorsClient that can manage buckets, indexes, and vectors.\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param url - Base URL of the Storage Vectors REST API.\n * @param options.headers - Optional headers (for example `Authorization`) applied to every request.\n * @param options.fetch - Optional custom `fetch` implementation for non-browser runtimes.\n *\n * @example\n * ```typescript\n * const client = new StorageVectorsClient(url, options)\n * ```\n */\n constructor(url: string, options: StorageVectorsClientOptions = {}) {\n super(url, options.headers || {}, options.fetch)\n }\n\n /**\n *\n * @alpha\n *\n * Access operations for a specific vector bucket\n * Returns a scoped client for index and vector operations within the bucket\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param vectorBucketName - Name of the vector bucket\n * @returns Bucket-scoped client with index and vector operations\n *\n * @example\n * ```typescript\n * const bucket = supabase.storage.vectors.from('embeddings-prod')\n * ```\n */\n from(vectorBucketName: string): VectorBucketScope {\n return new VectorBucketScope(this.url, this.headers, vectorBucketName, this.fetch)\n }\n\n /**\n *\n * @alpha\n *\n * Creates a new vector bucket\n * Vector buckets are containers for vector indexes and their data\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param vectorBucketName - Unique name for the vector bucket\n * @returns Promise with empty response on success or error\n *\n * 
@example\n * ```typescript\n * const { data, error } = await supabase\n * .storage\n * .vectors\n * .createBucket('embeddings-prod')\n * ```\n */\n async createBucket(vectorBucketName: string): Promise<ApiResponse<undefined>> {\n return super.createBucket(vectorBucketName)\n }\n\n /**\n *\n * @alpha\n *\n * Retrieves metadata for a specific vector bucket\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param vectorBucketName - Name of the vector bucket\n * @returns Promise with bucket metadata or error\n *\n * @example\n * ```typescript\n * const { data, error } = await supabase\n * .storage\n * .vectors\n * .getBucket('embeddings-prod')\n *\n * console.log('Bucket created:', data?.vectorBucket.creationTime)\n * ```\n */\n async getBucket(vectorBucketName: string): Promise<ApiResponse<{ vectorBucket: VectorBucket }>> {\n return super.getBucket(vectorBucketName)\n }\n\n /**\n *\n * @alpha\n *\n * Lists all vector buckets with optional filtering and pagination\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param options - Optional filters (prefix, maxResults, nextToken)\n * @returns Promise with list of buckets or error\n *\n * @example\n * ```typescript\n * const { data, error } = await supabase\n * .storage\n * .vectors\n * .listBuckets({ prefix: 'embeddings-' })\n *\n * data?.vectorBuckets.forEach(bucket => {\n * console.log(bucket.vectorBucketName)\n * })\n * ```\n */\n async listBuckets(\n options: ListVectorBucketsOptions = {}\n ): Promise<ApiResponse<ListVectorBucketsResponse>> {\n return super.listBuckets(options)\n }\n\n /**\n *\n * @alpha\n *\n * Deletes a vector bucket (bucket must be empty)\n * All indexes must be deleted before deleting the bucket\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param vectorBucketName - Name of the vector bucket to delete\n * @returns Promise with empty response on success or error\n *\n * @example\n * ```typescript\n * const { data, error } = await supabase\n * .storage\n * .vectors\n * .deleteBucket('embeddings-old')\n * ```\n */\n async deleteBucket(vectorBucketName: string): Promise<ApiResponse<undefined>> {\n return super.deleteBucket(vectorBucketName)\n }\n}\n\n/**\n *\n * @alpha\n *\n * Scoped client for operations within a specific vector bucket\n * Provides index management and access to vector operations\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n */\nexport class VectorBucketScope extends VectorIndexApi {\n private vectorBucketName: string\n\n /**\n * @alpha\n *\n * Creates a helper that automatically scopes all index operations to the provided bucket.\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @example\n * ```typescript\n * const bucket = supabase.storage.vectors.from('embeddings-prod')\n * ```\n */\n constructor(\n url: string,\n headers: { [key: string]: string },\n vectorBucketName: string,\n fetch?: Fetch\n ) {\n super(url, headers, fetch)\n this.vectorBucketName = vectorBucketName\n }\n\n /**\n *\n * @alpha\n *\n * Creates a new vector index in this bucket\n * Convenience method that automatically includes the bucket name\n *\n * 
**Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param options - Index configuration (vectorBucketName is automatically set)\n * @returns Promise with empty response on success or error\n *\n * @example\n * ```typescript\n * const bucket = supabase.storage.vectors.from('embeddings-prod')\n * await bucket.createIndex({\n * indexName: 'documents-openai',\n * dataType: 'float32',\n * dimension: 1536,\n * distanceMetric: 'cosine',\n * metadataConfiguration: {\n * nonFilterableMetadataKeys: ['raw_text']\n * }\n * })\n * ```\n */\n override async createIndex(options: Omit<CreateIndexOptions, 'vectorBucketName'>) {\n return super.createIndex({\n ...options,\n vectorBucketName: this.vectorBucketName,\n })\n }\n\n /**\n *\n * @alpha\n *\n * Lists indexes in this bucket\n * Convenience method that automatically includes the bucket name\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param options - Listing options (vectorBucketName is automatically set)\n * @returns Promise with response containing indexes array and pagination token or error\n *\n * @example\n * ```typescript\n * const bucket = supabase.storage.vectors.from('embeddings-prod')\n * const { data } = await bucket.listIndexes({ prefix: 'documents-' })\n * ```\n */\n override async listIndexes(options: Omit<ListIndexesOptions, 'vectorBucketName'> = {}) {\n return super.listIndexes({\n ...options,\n vectorBucketName: this.vectorBucketName,\n })\n }\n\n /**\n *\n * @alpha\n *\n * Retrieves metadata for a specific index in this bucket\n * Convenience method that automatically includes the bucket name\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param indexName - Name of the index to retrieve\n * @returns Promise with index metadata or error\n *\n * @example\n * ```typescript\n * const bucket = supabase.storage.vectors.from('embeddings-prod')\n * const { data } = await bucket.getIndex('documents-openai')\n * console.log('Dimension:', data?.index.dimension)\n * ```\n */\n override async getIndex(indexName: string) {\n return super.getIndex(this.vectorBucketName, indexName)\n }\n\n /**\n *\n * @alpha\n *\n * Deletes an index from this bucket\n * Convenience method that automatically includes the bucket name\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param indexName - Name of the index to delete\n * @returns Promise with empty response on success or error\n *\n * @example\n * ```typescript\n * const bucket = supabase.storage.vectors.from('embeddings-prod')\n * await bucket.deleteIndex('old-index')\n * ```\n */\n override async deleteIndex(indexName: string) {\n return super.deleteIndex(this.vectorBucketName, indexName)\n }\n\n /**\n *\n * @alpha\n *\n * Access operations for a specific index within this bucket\n * Returns a scoped client for vector data operations\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param indexName - Name of the index\n * @returns Index-scoped client with vector data operations\n *\n * @example\n * ```typescript\n * const index = 
supabase.storage.vectors.from('embeddings-prod').index('documents-openai')\n *\n * // Insert vectors\n * await index.putVectors({\n * vectors: [\n * { key: 'doc-1', data: { float32: [...] }, metadata: { title: 'Intro' } }\n * ]\n * })\n *\n * // Query similar vectors\n * const { data } = await index.queryVectors({\n * queryVector: { float32: [...] },\n * topK: 5\n * })\n * ```\n */\n index(indexName: string): VectorIndexScope {\n return new VectorIndexScope(\n this.url,\n this.headers,\n this.vectorBucketName,\n indexName,\n this.fetch\n )\n }\n}\n\n/**\n *\n * @alpha\n *\n * Scoped client for operations within a specific vector index\n * Provides vector data operations (put, get, list, query, delete)\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n */\nexport class VectorIndexScope extends VectorDataApi {\n private vectorBucketName: string\n private indexName: string\n\n /**\n *\n * @alpha\n *\n * Creates a helper that automatically scopes all vector operations to the provided bucket/index names.\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @example\n * ```typescript\n * const index = supabase.storage.vectors.from('embeddings-prod').index('documents-openai')\n * ```\n */\n constructor(\n url: string,\n headers: { [key: string]: string },\n vectorBucketName: string,\n indexName: string,\n fetch?: Fetch\n ) {\n super(url, headers, fetch)\n this.vectorBucketName = vectorBucketName\n this.indexName = indexName\n }\n\n /**\n *\n * @alpha\n *\n * Inserts or updates vectors in this index\n * Convenience method that automatically includes bucket and index names\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param options - Vector insertion options (bucket and index names automatically set)\n * @returns Promise with empty response on success or error\n *\n * @example\n * ```typescript\n * const index = supabase.storage.vectors.from('embeddings-prod').index('documents-openai')\n * await index.putVectors({\n * vectors: [\n * {\n * key: 'doc-1',\n * data: { float32: [0.1, 0.2, ...] 
},\n * metadata: { title: 'Introduction', page: 1 }\n * }\n * ]\n * })\n * ```\n */\n override async putVectors(options: Omit<PutVectorsOptions, 'vectorBucketName' | 'indexName'>) {\n return super.putVectors({\n ...options,\n vectorBucketName: this.vectorBucketName,\n indexName: this.indexName,\n })\n }\n\n /**\n *\n * @alpha\n *\n * Retrieves vectors by keys from this index\n * Convenience method that automatically includes bucket and index names\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param options - Vector retrieval options (bucket and index names automatically set)\n * @returns Promise with response containing vectors array or error\n *\n * @example\n * ```typescript\n * const index = supabase.storage.vectors.from('embeddings-prod').index('documents-openai')\n * const { data } = await index.getVectors({\n * keys: ['doc-1', 'doc-2'],\n * returnMetadata: true\n * })\n * ```\n */\n override async getVectors(options: Omit<GetVectorsOptions, 'vectorBucketName' | 'indexName'>) {\n return super.getVectors({\n ...options,\n vectorBucketName: this.vectorBucketName,\n indexName: this.indexName,\n })\n }\n\n /**\n *\n * @alpha\n *\n * Lists vectors in this index with pagination\n * Convenience method that automatically includes bucket and index names\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param options - Listing options (bucket and index names automatically set)\n * @returns Promise with response containing vectors array and pagination token or error\n *\n * @example\n * ```typescript\n * const index = supabase.storage.vectors.from('embeddings-prod').index('documents-openai')\n * const { data } = await index.listVectors({\n * maxResults: 500,\n * returnMetadata: true\n * })\n * ```\n */\n override async listVectors(\n options: Omit<ListVectorsOptions, 'vectorBucketName' | 'indexName'> = {}\n ) {\n return super.listVectors({\n ...options,\n vectorBucketName: this.vectorBucketName,\n indexName: this.indexName,\n })\n }\n\n /**\n *\n * @alpha\n *\n * Queries for similar vectors in this index\n * Convenience method that automatically includes bucket and index names\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param options - Query options (bucket and index names automatically set)\n * @returns Promise with response containing matches array of similar vectors ordered by distance or error\n *\n * @example\n * ```typescript\n * const index = supabase.storage.vectors.from('embeddings-prod').index('documents-openai')\n * const { data } = await index.queryVectors({\n * queryVector: { float32: [0.1, 0.2, ...] 
},\n * topK: 5,\n * filter: { category: 'technical' },\n * returnDistance: true,\n * returnMetadata: true\n * })\n * ```\n */\n override async queryVectors(\n options: Omit<QueryVectorsOptions, 'vectorBucketName' | 'indexName'>\n ) {\n return super.queryVectors({\n ...options,\n vectorBucketName: this.vectorBucketName,\n indexName: this.indexName,\n })\n }\n\n /**\n *\n * @alpha\n *\n * Deletes vectors by keys from this index\n * Convenience method that automatically includes bucket and index names\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @param options - Deletion options (bucket and index names automatically set)\n * @returns Promise with empty response on success or error\n *\n * @example\n * ```typescript\n * const index = supabase.storage.vectors.from('embeddings-prod').index('documents-openai')\n * await index.deleteVectors({\n * keys: ['doc-1', 'doc-2', 'doc-3']\n * })\n * ```\n */\n override async deleteVectors(\n options: Omit<DeleteVectorsOptions, 'vectorBucketName' | 'indexName'>\n ) {\n return super.deleteVectors({\n ...options,\n vectorBucketName: this.vectorBucketName,\n indexName: this.indexName,\n })\n }\n}\n","import StorageFileApi from './packages/StorageFileApi'\nimport StorageBucketApi from './packages/StorageBucketApi'\nimport StorageAnalyticsClient from './packages/StorageAnalyticsClient'\nimport { Fetch } from './lib/common/fetch'\nimport { StorageVectorsClient } from './packages/StorageVectorsClient'\n\nexport interface StorageClientOptions {\n useNewHostname?: boolean\n}\n\nexport class StorageClient extends StorageBucketApi {\n /**\n * Creates a client for Storage buckets, files, analytics, and vectors.\n *\n * @category File Buckets\n * @example\n * ```ts\n * import { StorageClient } from '@supabase/storage-js'\n *\n * const storage = new StorageClient('https://xyzcompany.supabase.co/storage/v1', {\n * apikey: 'public-anon-key',\n * })\n * const avatars = storage.from('avatars')\n * ```\n */\n constructor(\n url: string,\n headers: { [key: string]: string } = {},\n fetch?: Fetch,\n opts?: StorageClientOptions\n ) {\n super(url, headers, fetch, opts)\n }\n\n /**\n * Perform file operation in a bucket.\n *\n * @category File Buckets\n * @param id The bucket id to operate on.\n *\n * @example\n * ```typescript\n * const avatars = supabase.storage.from('avatars')\n * ```\n */\n from(id: string): StorageFileApi {\n return new StorageFileApi(this.url, this.headers, id, this.fetch)\n }\n\n /**\n *\n * @alpha\n *\n * Access vector storage operations.\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Vector Buckets\n * @returns A StorageVectorsClient instance configured with the current storage settings.\n */\n get vectors(): StorageVectorsClient {\n return new StorageVectorsClient(this.url + '/vector', {\n headers: this.headers,\n fetch: this.fetch,\n })\n }\n\n /**\n *\n * @alpha\n *\n * Access analytics storage operations using Iceberg tables.\n *\n * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.\n *\n * @category Analytics Buckets\n * @returns A StorageAnalyticsClient instance configured with the current storage settings.\n */\n get analytics(): StorageAnalyticsClient {\n return new StorageAnalyticsClient(this.url + '/iceberg', this.headers, this.fetch)\n 
}\n}\n"],"mappings":";;;;;;;AAUA,IAAa,eAAb,cAAkC,MAAM;CAMtC,YACE,SACA,YAA4B,WAC5B,QACA,YACA;AACA,QAAM,QAAQ;OAXN,mBAAmB;AAY3B,OAAK,YAAY;AACjB,OAAK,OAAO,cAAc,YAAY,wBAAwB;AAC9D,OAAK,SAAS;AACd,OAAK,aAAa;;;;;;;;AAStB,SAAgB,eAAe,OAAuC;AACpE,QAAO,OAAO,UAAU,YAAY,UAAU,QAAQ,sBAAsB;;;;;;AAO9E,IAAa,kBAAb,cAAqC,aAAa;CAIhD,YACE,SACA,QACA,YACA,YAA4B,WAC5B;AACA,QAAM,SAAS,WAAW,QAAQ,WAAW;AAC7C,OAAK,OAAO,cAAc,YAAY,2BAA2B;AACjE,OAAK,SAAS;AACd,OAAK,aAAa;;CAGpB,SAAS;AACP,SAAO;GACL,MAAM,KAAK;GACX,SAAS,KAAK;GACd,QAAQ,KAAK;GACb,YAAY,KAAK;GAClB;;;;;;;AAQL,IAAa,sBAAb,cAAyC,aAAa;CAGpD,YAAY,SAAiB,eAAwB,YAA4B,WAAW;AAC1F,QAAM,SAAS,UAAU;AACzB,OAAK,OAAO,cAAc,YAAY,+BAA+B;AACrE,OAAK,gBAAgB;;;;;;;AAYzB,IAAa,sBAAb,cAAyC,aAAa;CACpD,YAAY,SAAiB;AAC3B,QAAM,SAAS,UAAU;;;;;;;;AAS7B,SAAgB,sBAAsB,OAA8C;AAClF,QAAO,eAAe,MAAM,IAAK,MAAuB,iBAAiB;;;;;;AAO3E,IAAa,yBAAb,cAA4C,gBAAgB;CAC1D,YAAY,SAAiB,QAAgB,YAAoB;AAC/D,QAAM,SAAS,QAAQ,YAAY,UAAU;;;;;;;AAQjD,IAAa,6BAAb,cAAgD,oBAAoB;CAClE,YAAY,SAAiB,eAAwB;AACnD,QAAM,SAAS,eAAe,UAAU;;;;;;;AAQ5C,IAAY,8EAAL;;AAEL;;AAEA;;AAEA;;AAEA;;AAEA;;AAEA;;;;;;;;;;;;;ACrIF,MAAa,gBAAgB,gBAA+B;AAC1D,KAAI,YACF,SAAQ,GAAG,SAAS,YAAY,GAAG,KAAK;AAE1C,SAAQ,GAAG,SAAS,MAAM,GAAG,KAAK;;;;;;;;;;AAqBpC,MAAa,iBAAiB,UAA2B;AACvD,KAAI,OAAO,UAAU,YAAY,UAAU,KACzC,QAAO;CAGT,MAAM,YAAY,OAAO,eAAe,MAAM;AAC9C,SACG,cAAc,QACb,cAAc,OAAO,aACrB,OAAO,eAAe,UAAU,KAAK,SACvC,EAAE,OAAO,eAAe,UACxB,EAAE,OAAO,YAAY;;;;;;;;;AAWzB,MAAa,oBAAoB,SAAuC;AACtE,KAAI,MAAM,QAAQ,KAAK,CACrB,QAAO,KAAK,KAAK,OAAO,iBAAiB,GAAG,CAAC;UACpC,OAAO,SAAS,cAAc,SAAS,OAAO,KAAK,CAC5D,QAAO;CAGT,MAAMA,SAA8B,EAAE;AACtC,QAAO,QAAQ,KAAK,CAAC,SAAS,CAAC,KAAK,WAAW;EAC7C,MAAM,SAAS,IAAI,QAAQ,kBAAkB,MAAM,EAAE,aAAa,CAAC,QAAQ,SAAS,GAAG,CAAC;AACxF,SAAO,UAAU,iBAAiB,MAAM;GACxC;AAEF,QAAO;;;;;;;;;;;;;;;;;AAkBT,MAAa,qBAAqB,eAAgC;AAChE,KAAI,CAAC,cAAc,OAAO,eAAe,SACvC,QAAO;AAIT,KAAI,WAAW,WAAW,KAAK,WAAW,SAAS,IACjD,QAAO;AAIT,KAAI,WAAW,MAAM,KAAK,WACxB,QAAO;AAMT,KAAI,WAAW,SAAS,IAAI,IAAI,WAAW,SAAS,KAAK,CACvD,QAAO;AAOT,QADwB,4BACD,KAAK,WAAW;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACtFzC,MAAM,oBAAoB,QACxB;;YAAI,OACJ,IAAI,WACJ,IAAI,sBACH,OAAO,IAAI,UAAU,WAAW,IAAI,sBAAQ,IAAI,+DAAO,YACxD,KAAK,UAAU,IAAI;;;;;;;;;AASrB,MAAM,cAAc,OAClB,OACA,QACA,SACA,cACG;AAUH,KANE,SACA,OAAO,UAAU,YACjB,YAAY,SACZ,QAAQ,SACR,OAAQ,MAAc,WAAW,YAEb,oDAAC,QAAS,gBAAe;EAC7C,MAAM,gBAAgB;EACtB,MAAM,SAAS,cAAc,UAAU;AAGvC,MAAI,OAAO,cAAc,SAAS,WAChC,eACG,MAAM,CACN,MAAM,QAAa;GAClB,MAAM,wDAAa,IAAK,0DAAc,IAAK,SAAQ,SAAS;AAC5D,UAAO,IAAI,gBAAgB,iBAAiB,IAAI,EAAE,QAAQ,YAAY,UAAU,CAAC;IACjF,CACD,YAAY;AAEX,OAAI,cAAc,WAAW;IAC3B,MAAM,aAAa,SAAS;AAE5B,WAAO,IAAI,gBADK,cAAc,cAAc,QAAQ,OAAO,SACvB,QAAQ,YAAY,UAAU,CAAC;UAC9D;IACL,MAAM,aAAa,SAAS;AAE5B,WAAO,IAAI,gBADK,cAAc,cAAc,QAAQ,OAAO,SACvB,QAAQ,YAAY,UAAU,CAAC;;IAErE;OACC;GAEL,MAAM,aAAa,SAAS;AAE5B,UAAO,IAAI,gBADK,cAAc,cAAc,QAAQ,OAAO,SACvB,QAAQ,YAAY,UAAU,CAAC;;OAGrE,QAAO,IAAI,oBAAoB,iBAAiB,MAAM,EAAE,OAAO,UAAU,CAAC;;;;;;;;;;AAY9E,MAAM,qBACJ,QACA,SACA,YACA,SACG;CACH,MAAMC,SAA+B;EAAE;EAAQ,4DAAS,QAAS,YAAW,EAAE;EAAE;AAEhF,KAAI,WAAW,SAAS,WAAW,UAAU,CAAC,KAC5C,0CAAY,SAAW;AAGzB,KAAI,cAAc,KAAK,EAAE;AACvB,SAAO,2BAAY,gBAAgB,wEAAuB,QAAS;AACnE,SAAO,OAAO,KAAK,UAAU,KAAK;OAElC,QAAO,OAAO;AAGhB,uDAAI,QAAS,OACX,QAAO,SAAS,QAAQ;AAG1B,0CAAY,SAAW;;;;;;;;;;;;;AAczB,eAAe,eACb,SACA,QACA,KACA,SACA,YACA,MACA,WACc;AACd,QAAO,IAAI,SAAS,SAAS,WAAW;AACtC,UAAQ,KAAK,kBAAkB,QAAQ,SAAS,YAAY,KAAK,CAAC,CAC/D,MAAM,WAAW;AAChB,OAAI,CAAC,OAAO,GAAI,OAAM;AACtB,yDAAI,QAAS,cAAe,QAAO;AAMnC,OAAI,cAAc,WAAW;IAC3B,MAAM,cAAc,OAAO,QAAQ,IAAI,eAAe;AAItD,QAHsB,OAAO,QAAQ,IAAI,iBAAiB,KAGpC,OAAO,OAAO,WAAW,IAC7C,QAAO,EAAE;AAIX,QAAI,CAAC,eAAe,CAAC,YAAY,SAAS,
mBAAmB,CAC3D,QAAO,EAAE;;AAIb,UAAO,OAAO,MAAM;IACpB,CACD,MAAM,SAAS,QAAQ,KAAK,CAAC,CAC7B,OAAO,UAAU,YAAY,OAAO,QAAQ,SAAS,UAAU,CAAC;GACnE;;;;;;;AAQJ,SAAgB,eAAe,YAA4B,WAAW;AACpE,QAAO;EASL,KAAK,OACH,SACA,KACA,SACA,eACiB;AACjB,UAAO,eAAe,SAAS,OAAO,KAAK,SAAS,YAAY,QAAW,UAAU;;EAYvF,MAAM,OACJ,SACA,KACA,MACA,SACA,eACiB;AACjB,UAAO,eAAe,SAAS,QAAQ,KAAK,SAAS,YAAY,MAAM,UAAU;;EAYnF,KAAK,OACH,SACA,KACA,MACA,SACA,eACiB;AACjB,UAAO,eAAe,SAAS,OAAO,KAAK,SAAS,YAAY,MAAM,UAAU;;EAWlF,MAAM,OACJ,SACA,KACA,SACA,eACiB;AACjB,UAAO,eACL,SACA,QACA,uCAEK,gBACH,eAAe,SAEjB,YACA,QACA,UACD;;EAYH,QAAQ,OACN,SACA,KACA,MACA,SACA,eACiB;AACjB,UAAO,eAAe,SAAS,UAAU,KAAK,SAAS,YAAY,MAAM,UAAU;;EAEtF;;AAIH,MAAM,aAAa,eAAe,UAAU;AAC5C,MAAa,EAAE,KAAK,MAAM,KAAK,MAAM,WAAW;AAGhD,MAAa,aAAa,eAAe,UAAU;;;;;;;;;;;AC1RnD,IAA8B,gBAA9B,MAAwF;;;;;;;;CActF,YACE,KACA,UAAqC,EAAE,EACvC,SACA,YAA4B,WAC5B;OAfQ,qBAAqB;AAgB7B,OAAK,MAAM;AACX,OAAK,UAAU;AACf,OAAK,QAAQ,aAAaC,QAAM;AAChC,OAAK,YAAY;;;;;;;;CASnB,AAAO,eAAqB;AAC1B,OAAK,qBAAqB;AAC1B,SAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;CA4BT,MAAgB,gBACd,WACmE;;AACnE,MAAI;AAEF,UAAO;IAAE,MADI,MAAM,WAAW;IACf,OAAO;IAAM;WACrB,OAAO;AACd,OAAIC,MAAK,mBACP,OAAM;AAER,OAAI,eAAe,MAAM,CACvB,QAAO;IAAE,MAAM;IAAa;IAAiB;AAE/C,SAAM;;;;;;;ACnFZ,IAAqB,wBAArB,MAAkG;CAChG,YACE,AAAQC,YACR,AAAQC,oBACR;EAFQ;EACA;;CAGV,KACE,aAGA,YAC8B;AAC9B,SAAO,KAAK,SAAS,CAAC,KAAK,aAAa,WAAW;;CAGrD,MAAc,UAAmD;;AAC/D,MAAI;AAGF,UAAO;IACL,OAHa,MAAMC,MAAK,YAAY,EAGvB;IACb,OAAO;IACR;WACM,OAAO;AACd,OAAIA,MAAK,mBACP,OAAM;AAGR,OAAI,eAAe,MAAM,CACvB,QAAO;IAAE,MAAM;IAAM;IAAO;AAG9B,SAAM;;;;;;;;sBC9BA,OAAO;AADnB,IAAqB,sBAArB,MAAkF;CAIhF,YACE,AAAQC,YACR,AAAQC,oBACR;EAFQ;EACA;8BAL8B;OAChC,UAAgD;;CAOxD,WAAkC;AAChC,SAAO,IAAI,sBAAsB,KAAK,YAAY,KAAK,mBAAmB;;CAG5E,KACE,aACA,YAC8B;AAC9B,SAAO,KAAK,YAAY,CAAC,KAAK,aAAa,WAAW;;CAGxD,MACE,YACyC;AACzC,SAAO,KAAK,YAAY,CAAC,MAAM,WAAW;;CAG5C,QAAQ,WAAgE;AACtE,SAAO,KAAK,YAAY,CAAC,QAAQ,UAAU;;CAG7C,AAAQ,aAA4C;AAClD,MAAI,CAAC,KAAK,QACR,MAAK,UAAU,KAAK,SAAS;AAE/B,SAAO,KAAK;;CAGd,MAAc,UAAyC;;AACrD,MAAI;AAGF,UAAO;IACL,MAAM,OAHO,MAAMC,MAAK,YAAY,EAGjB,MAAM;IACzB,OAAO;IACR;WACM,OAAO;AACd,OAAIA,MAAK,mBACP,OAAM;AAGR,OAAI,eAAe,MAAM,CACvB,QAAO;IAAE,MAAM;IAAM;IAAO;AAG9B,SAAM;;;;;;;ACxCZ,MAAM,yBAAyB;CAC7B,OAAO;CACP,QAAQ;CACR,QAAQ;EACN,QAAQ;EACR,OAAO;EACR;CACF;AAED,MAAMC,uBAAoC;CACxC,cAAc;CACd,aAAa;CACb,QAAQ;CACT;AAcD,IAAqB,iBAArB,cAA4C,cAA4B;CAGtE,YACE,KACA,UAAqC,EAAE,EACvC,UACA,SACA;AACA,QAAM,KAAK,SAASC,SAAO,UAAU;AACrC,OAAK,WAAW;;;;;;;;;CAUlB,MAAc,eACZ,QACA,MACA,UACA,aAUA;;AACA,SAAOC,MAAK,gBAAgB,YAAY;GACtC,IAAI;GACJ,MAAM,4CAAe,uBAAyB;GAC9C,IAAIC,4CACCD,MAAK,UACJ,WAAW,UAAU,EAAE,YAAY,OAAO,QAAQ,OAAkB,EAAE;GAG5E,MAAM,WAAW,QAAQ;AAEzB,OAAI,OAAO,SAAS,eAAe,oBAAoB,MAAM;AAC3D,WAAO,IAAI,UAAU;AACrB,SAAK,OAAO,gBAAgB,QAAQ,aAAuB;AAC3D,QAAI,SACF,MAAK,OAAO,YAAYA,MAAK,eAAe,SAAS,CAAC;AAExD,SAAK,OAAO,IAAI,SAAS;cAChB,OAAO,aAAa,eAAe,oBAAoB,UAAU;AAC1E,WAAO;AAEP,QAAI,CAAC,KAAK,IAAI,eAAe,CAC3B,MAAK,OAAO,gBAAgB,QAAQ,aAAuB;AAE7D,QAAI,YAAY,CAAC,KAAK,IAAI,WAAW,CACnC,MAAK,OAAO,YAAYA,MAAK,eAAe,SAAS,CAAC;UAEnD;AACL,WAAO;AACP,YAAQ,mBAAmB,WAAW,QAAQ;AAC9C,YAAQ,kBAAkB,QAAQ;AAElC,QAAI,SACF,SAAQ,gBAAgBA,MAAK,SAASA,MAAK,eAAe,SAAS,CAAC;AAStE,SAHG,OAAO,mBAAmB,eAAe,gBAAgB,kBACzD,QAAQ,OAAO,SAAS,YAAY,UAAU,QAAQ,OAAO,KAAK,SAAS,eAE9D,CAAC,QAAQ,OACvB,SAAQ,SAAS;;AAIrB,iEAAI,YAAa,QACf,6CAAe,UAAY,YAAY;GAGzC,MAAM,YAAYA,MAAK,oBAAoB,KAAK;GAChD,MAAM,QAAQA,MAAK,cAAc,UAAU;GAC3C,MAAM,OAAO,OAAO,UAAU,QAAQ,MAAM,MAC1CA,MAAK,OACL,GAAGA,MAAK,IAAI,UAAU,SACtB,uBACE,8DAAa,QAAS,UAAS,EAAE,QAAQ,QAAQ,QAAQ,GAAG,EAAE,EACjE;AAED,UAAO;IAAE,MAAM;IAAW,IAAI,KAAK;IAAI,UAAU,KAAK;IAAK;IAC3D;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA+CJ,MAAM,OACJ,MACA,UAC
A,aAUA;AACA,cAAY,eAAe,QAAQ,MAAM,UAAU,YAAY;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAkCjE,MAAM,kBACJ,MACA,OACA,UACA,aACA;;EACA,MAAM,YAAYA,OAAK,oBAAoB,KAAK;EAChD,MAAM,QAAQA,OAAK,cAAc,UAAU;EAE3C,MAAM,MAAM,IAAI,IAAIA,OAAK,MAAM,uBAAuB,QAAQ;AAC9D,MAAI,aAAa,IAAI,SAAS,MAAM;AAEpC,SAAOA,OAAK,gBAAgB,YAAY;GACtC,IAAI;GACJ,MAAM,2BAAY,QAAQ,qBAAqB,UAAW;GAC1D,MAAMC,4CACDD,OAAK,UACL,EAAE,YAAY,OAAO,QAAQ,OAAkB,EAAE;AAGtD,OAAI,OAAO,SAAS,eAAe,oBAAoB,MAAM;AAC3D,WAAO,IAAI,UAAU;AACrB,SAAK,OAAO,gBAAgB,QAAQ,aAAuB;AAC3D,SAAK,OAAO,IAAI,SAAS;cAChB,OAAO,aAAa,eAAe,oBAAoB,UAAU;AAC1E,WAAO;AACP,SAAK,OAAO,gBAAgB,QAAQ,aAAuB;UACtD;AACL,WAAO;AACP,YAAQ,mBAAmB,WAAW,QAAQ;AAC9C,YAAQ,kBAAkB,QAAQ;;AAKpC,UAAO;IAAE,MAAM;IAAW,WAFb,MAAM,IAAIA,OAAK,OAAO,IAAI,UAAU,EAAE,MAAgB,EAAE,SAAS,CAAC,EAEtC;IAAK;IAC9C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAiCJ,MAAM,sBACJ,MACA,SAUA;;AACA,SAAOA,OAAK,gBAAgB,YAAY;GACtC,IAAI,QAAQA,OAAK,cAAc,KAAK;GAEpC,MAAM,6BAAeA,OAAK;AAE1B,yDAAI,QAAS,OACX,SAAQ,cAAc;GAGxB,MAAM,OAAO,MAAM,KACjBA,OAAK,OACL,GAAGA,OAAK,IAAI,sBAAsB,SAClC,EAAE,EACF,EAAE,SAAS,CACZ;GAED,MAAM,MAAM,IAAI,IAAIA,OAAK,MAAM,KAAK,IAAI;GAExC,MAAM,QAAQ,IAAI,aAAa,IAAI,QAAQ;AAE3C,OAAI,CAAC,MACH,OAAM,IAAI,aAAa,2BAA2B;AAGpD,UAAO;IAAE,WAAW,IAAI,UAAU;IAAE;IAAM;IAAO;IACjD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA+CJ,MAAM,OACJ,MACA,UAWA,aAUA;AACA,cAAY,eAAe,OAAO,MAAM,UAAU,YAAY;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA8BhE,MAAM,KACJ,UACA,QACA,SAUA;;AACA,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,KACXA,OAAK,OACL,GAAGA,OAAK,IAAI,eACZ;IACE,UAAUA,OAAK;IACf,WAAW;IACX,gBAAgB;IAChB,qEAAmB,QAAS;IAC7B,EACD,EAAE,SAASA,OAAK,SAAS,CAC1B;IACD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA8BJ,MAAM,KACJ,UACA,QACA,SAUA;;AACA,SAAOA,OAAK,gBAAgB,YAAY;AAYtC,UAAO,EAAE,OAXI,MAAM,KACjBA,OAAK,OACL,GAAGA,OAAK,IAAI,eACZ;IACE,UAAUA,OAAK;IACf,WAAW;IACX,gBAAgB;IAChB,qEAAmB,QAAS;IAC7B,EACD,EAAE,SAASA,OAAK,SAAS,CAC1B,EACmB,KAAK;IACzB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAsDJ,MAAM,gBACJ,MACA,WACA,SAUA;;AACA,SAAOA,OAAK,gBAAgB,YAAY;GACtC,IAAI,QAAQA,OAAK,cAAc,KAAK;GAEpC,IAAI,OAAO,MAAM,KACfA,OAAK,OACL,GAAGA,OAAK,IAAI,eAAe,0BACzB,gEAAe,QAAS,aAAY,EAAE,WAAW,QAAQ,WAAW,GAAG,EAAE,GAC3E,EAAE,SAASA,OAAK,SAAS,CAC1B;GACD,MAAM,wEAAqB,QAAS,YAChC,aAAa,QAAQ,aAAa,OAAO,KAAK,QAAQ,aACtD;AAEJ,UAAO,EAAE,WADS,UAAU,GAAGA,OAAK,MAAM,KAAK,YAAY,qBAAqB,EAC5D;IACpB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAyCJ,MAAM,iBACJ,OACA,WACA,SAUA;;AACA,SAAOA,OAAK,gBAAgB,YAAY;GACtC,MAAM,OAAO,MAAM,KACjBA,OAAK,OACL,GAAGA,OAAK,IAAI,eAAeA,OAAK,YAChC;IAAE;IAAW;IAAO,EACpB,EAAE,SAASA,OAAK,SAAS,CAC1B;GAED,MAAM,wEAAqB,QAAS,YAChC,aAAa,QAAQ,aAAa,OAAO,KAAK,QAAQ,aACtD;AACJ,UAAO,KAAK,KAAK,4CACZ,cACH,WAAW,MAAM,YACb,UAAU,GAAGA,OAAK,MAAM,MAAM,YAAY,qBAAqB,GAC/D,QACH;IACH;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA6DJ,SACE,MACA,SACA,YACqB;EAErB,MAAM,aADsB,0DAAO,QAAS,eAAc,cACjB,+BAA+B;EACxE,MAAM,sBAAsB,KAAK,8EAA2B,QAAS,cAAa,EAAE,CAAC;EACrF,MAAM,cAAc,sBAAsB,IAAI,wBAAwB;EACtE,MAAM,QAAQ,KAAK,cAAc,KAAK;EACtC,MAAM,mBACJ,IACE,KAAK,OACL,GAAG,KAAK,IAAI,GAAG,WAAW,GAAG,QAAQ,eACrC;GACE,SAAS,KAAK;GACd,eAAe;GAChB,EACD,WACD;AACH,SAAO,IAAI,oBAAoB,YAAY,KAAK,mBAAmB;;;;;;;;;;;;;;;;;CAkBrE,MAAM,KAAK,MAST;;EACA,MAAM,QAAQA,QAAK,cAAc,KAAK;AAEtC,SAAOA,QAAK,gBAAgB,YAAY;AAKtC,UAAO,iBAJM,MAAM,IAAIA,QAAK,OAAO,GAAGA,QAAK,IAAI,eAAe,SAAS,EACrE,SAASA,QAAK,SACf,CAAC,CAE2B;IAC7B;;;;;;;;;;;;;;;;;CAkBJ,MAAM,OAAO,MASX;;EACA,MAAM,QAAQA,QAAK,cAAc,KAAK;AAEtC,MAAI;AACF,SAAM,KAAKA,QAAK,OAAO,GAAGA,QAAK,IAAI,UAAU,SAAS,EACpD,SAASA,QAAK,SACf,CAAC;AAEF,UAAO;IAAE,MAAM;IAAM,OAAO;IAAM;WAC3B,OAAO;AACd,OAAIA,QAAK,mBACP,OAAM;AAER,OAAI,eAAe,MAAM,IAAI,iBAAiB,qBAAqB;IACjE,MAAM,gBAAgB,MAAM;AAE5B,QAAI,CAAC,KAAK,IAAI,CAAC,uEAA
S,cAAe,OAAO,CAC5C,QAAO;KAAE,MAAM;KAAO;KAAO;;AAIjC,SAAM;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAsDV,aACE,MACA,SACiC;EACjC,MAAM,QAAQ,KAAK,cAAc,KAAK;EACtC,MAAME,eAAyB,EAAE;EAEjC,MAAM,wEAAqB,QAAS,YAChC,YAAY,QAAQ,aAAa,OAAO,KAAK,QAAQ,aACrD;AAEJ,MAAI,uBAAuB,GACzB,cAAa,KAAK,mBAAmB;EAIvC,MAAM,aADsB,0DAAO,QAAS,eAAc,cACjB,iBAAiB;EAC1D,MAAM,sBAAsB,KAAK,8EAA2B,QAAS,cAAa,EAAE,CAAC;AAErF,MAAI,wBAAwB,GAC1B,cAAa,KAAK,oBAAoB;EAGxC,IAAI,cAAc,aAAa,KAAK,IAAI;AACxC,MAAI,gBAAgB,GAClB,eAAc,IAAI;AAGpB,SAAO,EACL,MAAM,EAAE,WAAW,UAAU,GAAG,KAAK,IAAI,GAAG,WAAW,UAAU,QAAQ,cAAc,EAAE,EAC1F;;;;;;;;;;;;;;;;;;;;;;;;;CA0BH,MAAM,OAAO,OASX;;AACA,SAAOF,QAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,OACXA,QAAK,OACL,GAAGA,QAAK,IAAI,UAAUA,QAAK,YAC3B,EAAE,UAAU,OAAO,EACnB,EAAE,SAASA,QAAK,SAAS,CAC1B;IACD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA6HJ,MAAM,KACJ,MACA,SACA,YAUA;;AACA,SAAOA,QAAK,gBAAgB,YAAY;GACtC,MAAM,wDAAY,yBAA2B,gBAAS,QAAQ,QAAQ;AACtE,UAAO,MAAM,KACXA,QAAK,OACL,GAAGA,QAAK,IAAI,eAAeA,QAAK,YAChC,MACA,EAAE,SAASA,QAAK,SAAS,EACzB,WACD;IACD;;;;;;;;;CAUJ,MAAM,OACJ,SACA,YAUA;;AACA,SAAOA,QAAK,gBAAgB,YAAY;GACtC,MAAM,0BAAY;AAClB,UAAO,MAAM,KACXA,QAAK,OACL,GAAGA,QAAK,IAAI,kBAAkBA,QAAK,YACnC,MACA,EAAE,SAASA,QAAK,SAAS,EACzB,WACD;IACD;;CAGJ,AAAU,eAAe,UAA+B;AACtD,SAAO,KAAK,UAAU,SAAS;;CAGjC,SAAS,MAAc;AACrB,MAAI,OAAO,WAAW,YACpB,QAAO,OAAO,KAAK,KAAK,CAAC,SAAS,SAAS;AAE7C,SAAO,KAAK,KAAK;;CAGnB,AAAQ,cAAc,MAAc;AAClC,SAAO,GAAG,KAAK,SAAS,GAAG,KAAK,QAAQ,QAAQ,GAAG;;CAGrD,AAAQ,oBAAoB,MAAc;AACxC,SAAO,KAAK,QAAQ,YAAY,GAAG,CAAC,QAAQ,QAAQ,IAAI;;CAG1D,AAAQ,2BAA2B,WAA6B;EAC9D,MAAMG,SAAmB,EAAE;AAC3B,MAAI,UAAU,MACZ,QAAO,KAAK,SAAS,UAAU,QAAQ;AAGzC,MAAI,UAAU,OACZ,QAAO,KAAK,UAAU,UAAU,SAAS;AAG3C,MAAI,UAAU,OACZ,QAAO,KAAK,UAAU,UAAU,SAAS;AAG3C,MAAI,UAAU,OACZ,QAAO,KAAK,UAAU,UAAU,SAAS;AAG3C,MAAI,UAAU,QACZ,QAAO,KAAK,WAAW,UAAU,UAAU;AAG7C,SAAO,OAAO,KAAK,IAAI;;;;;;ACtqC3B,MAAa,UAAU;;;;ACLvB,MAAa,kBAAkB,EAC7B,iBAAiB,cAAc,WAChC;;;;ACID,IAAqB,mBAArB,cAA8C,cAA4B;CACxE,YACE,KACA,UAAqC,EAAE,EACvC,SACA,MACA;EACA,MAAM,UAAU,IAAI,IAAI,IAAI;AAI5B,kDAAI,KAAM,gBAER;OADuB,yBAAyB,KAAK,QAAQ,SAAS,IAChD,CAAC,QAAQ,SAAS,SAAS,oBAAoB,CACnE,SAAQ,WAAW,QAAQ,SAAS,QAAQ,aAAa,oBAAoB;;EAIjF,MAAM,WAAW,QAAQ,KAAK,QAAQ,OAAO,GAAG;EAChD,MAAM,iDAAoB,kBAAoB;AAE9C,QAAM,UAAU,cAAcC,SAAO,UAAU;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAmCjD,MAAM,YAAY,SAShB;;AACA,SAAOC,MAAK,gBAAgB,YAAY;GACtC,MAAM,cAAcA,MAAK,+BAA+B,QAAQ;AAChE,UAAO,MAAM,IAAIA,MAAK,OAAO,GAAGA,MAAK,IAAI,SAAS,eAAe,EAC/D,SAASA,MAAK,SACf,CAAC;IACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAoCJ,MAAM,UAAU,IASd;;AACA,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,IAAIA,OAAK,OAAO,GAAGA,OAAK,IAAI,UAAU,MAAM,EAAE,SAASA,OAAK,SAAS,CAAC;IACnF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAwCJ,MAAM,aACJ,IACA,UAKI,EACF,QAAQ,OACT,EAUD;;AACA,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,KACXA,OAAK,OACL,GAAGA,OAAK,IAAI,UACZ;IACE;IACA,MAAM;IACN,MAAM,QAAQ;IACd,QAAQ,QAAQ;IAChB,iBAAiB,QAAQ;IACzB,oBAAoB,QAAQ;IAC7B,EACD,EAAE,SAASA,OAAK,SAAS,CAC1B;IACD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAsCJ,MAAM,aACJ,IACA,SAcA;;AACA,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,IACXA,OAAK,OACL,GAAGA,OAAK,IAAI,UAAU,MACtB;IACE;IACA,MAAM;IACN,QAAQ,QAAQ;IAChB,iBAAiB,QAAQ;IACzB,oBAAoB,QAAQ;IAC7B,EACD,EAAE,SAASA,OAAK,SAAS,CAC1B;IACD;;;;;;;;;;;;;;;;;;;;;;;;;;CA2BJ,MAAM,YAAY,IAShB;;AACA,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,KAAKA,OAAK,OAAO,GAAGA,OAAK,IAAI,UAAU,GAAG,SAAS,EAAE,EAAE,EAAE,SAASA,OAAK,SAAS,CAAC;IAC9F;;;;;;;;;;;;;;;;;;;;;;;;;;;CA4BJ,MAAM,aAAa,IASjB;;AACA,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,OAAOA,OAAK,OAAO,GAAGA,OAAK,IAAI,UAAU,MAAM,EAAE,EAAE,EAAE,SAASA,OAAK,SAAS,CAAC;IAC1F;;CAGJ,AAAQ,+BAA+B,SAAqC;EAC1E,MA
AMC,SAAiC,EAAE;AACzC,MAAI,SAAS;AACX,OAAI,WAAW,QACb,QAAO,QAAQ,OAAO,QAAQ,MAAM;AAEtC,OAAI,YAAY,QACd,QAAO,SAAS,OAAO,QAAQ,OAAO;AAExC,OAAI,QAAQ,OACV,QAAO,SAAS,QAAQ;AAE1B,OAAI,QAAQ,WACV,QAAO,aAAa,QAAQ;AAE9B,OAAI,QAAQ,UACV,QAAO,YAAY,QAAQ;;AAG/B,SAAO,OAAO,KAAK,OAAO,CAAC,SAAS,IAAI,MAAM,IAAI,gBAAgB,OAAO,CAAC,UAAU,GAAG;;;;;;;;;;AC7V3F,IAAqB,yBAArB,cAAoD,cAA4B;;;;;;;;;;;;;;;;;;CAkB9E,YAAY,KAAa,UAAqC,EAAE,EAAE,SAAe;EAC/E,MAAM,WAAW,IAAI,QAAQ,OAAO,GAAG;EACvC,MAAM,iDAAoB,kBAAoB;AAC9C,QAAM,UAAU,cAAcC,SAAO,UAAU;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAqCjD,MAAM,aAAa,MASjB;;AACA,SAAOC,MAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,KAAKA,MAAK,OAAO,GAAGA,MAAK,IAAI,UAAU,EAAE,MAAM,EAAE,EAAE,SAASA,MAAK,SAAS,CAAC;IACxF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAiDJ,MAAM,YAAY,SAehB;;AACA,SAAOA,OAAK,gBAAgB,YAAY;GAEtC,MAAM,cAAc,IAAI,iBAAiB;AACzC,0DAAI,QAAS,WAAU,OAAW,aAAY,IAAI,SAAS,QAAQ,MAAM,UAAU,CAAC;AACpF,0DAAI,QAAS,YAAW,OAAW,aAAY,IAAI,UAAU,QAAQ,OAAO,UAAU,CAAC;AACvF,yDAAI,QAAS,WAAY,aAAY,IAAI,cAAc,QAAQ,WAAW;AAC1E,yDAAI,QAAS,UAAW,aAAY,IAAI,aAAa,QAAQ,UAAU;AACvE,yDAAI,QAAS,OAAQ,aAAY,IAAI,UAAU,QAAQ,OAAO;GAE9D,MAAM,cAAc,YAAY,UAAU;GAC1C,MAAM,MAAM,cAAc,GAAGA,OAAK,IAAI,UAAU,gBAAgB,GAAGA,OAAK,IAAI;AAE5E,UAAO,MAAM,IAAIA,OAAK,OAAO,KAAK,EAAE,SAASA,OAAK,SAAS,CAAC;IAC5D;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAkCJ,MAAM,aAAa,YASjB;;AACA,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,OACXA,OAAK,OACL,GAAGA,OAAK,IAAI,UAAU,cACtB,EAAE,EACF,EAAE,SAASA,OAAK,SAAS,CAC1B;IACD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA8HJ,KAAK,YAA+C;;AAElD,MAAI,CAAC,kBAAkB,WAAW,CAChC,OAAM,IAAI,aACR,qJAED;EAOH,MAAM,UAAU,IAAIC,8BAAmB;GACrC,SAAS,KAAK;GACd,aAAa;GACb,MAAM;IACJ,MAAM;IACN,YAAY,YAAYD,OAAK;IAC9B;GACD,OAAO,KAAK;GACb,CAAC;EAEF,MAAM,qBAAqB,KAAK;AAuBhC,SArBuB,IAAI,MAAM,SAAS,EACxC,IAAI,QAAQ,MAAgC;GAC1C,MAAM,QAAQ,OAAO;AACrB,OAAI,OAAO,UAAU,WACnB,QAAO;AAGT,UAAO,OAAO,GAAG,SAAoB;AACnC,QAAI;AAEF,YAAO;MAAE,MADI,MAAO,MAAmB,MAAM,QAAQ,KAAK;MAC3C,OAAO;MAAM;aACrB,OAAO;AACd,SAAI,mBACF,OAAM;AAER,YAAO;MAAE,MAAM;MAAa;MAAuB;;;KAI1D,CAAC;;;;;;;;;;;AClWN,IAAqB,iBAArB,cAA4C,cAA4B;;CAEtE,YAAY,KAAa,UAAqC,EAAE,EAAE,SAAe;EAC/E,MAAM,WAAW,IAAI,QAAQ,OAAO,GAAG;EACvC,MAAM,iDAAoB,wBAAiB,gBAAgB,sBAAuB;AAClF,QAAM,UAAU,cAAcE,SAAO,UAAU;;;CAIjD,MAAM,YAAY,SAA8D;;AAC9E,SAAOC,MAAK,gBAAgB,YAAY;AAItC,UAHa,MAAM,WAAW,KAAKA,MAAK,OAAO,GAAGA,MAAK,IAAI,eAAe,SAAS,EACjF,SAASA,MAAK,SACf,CAAC,IACa,EAAE;IACjB;;;CAIJ,MAAM,SACJ,kBACA,WAC8C;;AAC9C,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,WAAW,KACtBA,OAAK,OACL,GAAGA,OAAK,IAAI,YACZ;IAAE;IAAkB;IAAW,EAC/B,EAAE,SAASA,OAAK,SAAS,CAC1B;IACD;;;CAIJ,MAAM,YAAY,SAAwE;;AACxF,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,WAAW,KAAKA,OAAK,OAAO,GAAGA,OAAK,IAAI,eAAe,SAAS,EAC3E,SAASA,OAAK,SACf,CAAC;IACF;;;CAIJ,MAAM,YAAY,kBAA0B,WAAoD;;AAC9F,SAAOA,OAAK,gBAAgB,YAAY;AAOtC,UANa,MAAM,WAAW,KAC5BA,OAAK,OACL,GAAGA,OAAK,IAAI,eACZ;IAAE;IAAkB;IAAW,EAC/B,EAAE,SAASA,OAAK,SAAS,CAC1B,IACc,EAAE;IACjB;;;;;;;;;;;AClEN,IAAqB,gBAArB,cAA2C,cAA4B;;CAErE,YAAY,KAAa,UAAqC,EAAE,EAAE,SAAe;EAC/E,MAAM,WAAW,IAAI,QAAQ,OAAO,GAAG;EACvC,MAAM,iDAAoB,wBAAiB,gBAAgB,sBAAuB;AAClF,QAAM,UAAU,cAAcC,SAAO,UAAU;;;CAIjD,MAAM,WAAW,SAA6D;;AAE5E,MAAI,QAAQ,QAAQ,SAAS,KAAK,QAAQ,QAAQ,SAAS,IACzD,OAAM,IAAI,MAAM,oDAAoD;AAGtE,SAAOC,MAAK,gBAAgB,YAAY;AAItC,UAHa,MAAM,WAAW,KAAKA,MAAK,OAAO,GAAGA,MAAK,IAAI,cAAc,SAAS,EAChF,SAASA,MAAK,SACf,CAAC,IACa,EAAE;IACjB;;;CAIJ,MAAM,WAAW,SAAsE;;AACrF,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,WAAW,KAAKA,OAAK,OAAO,GAAGA,OAAK,IAAI,cAAc,SAAS,EAC1E,SAASA,OAAK,SACf,CAAC;IACF;;;CAIJ,MAAM,YAAY,SAAwE;;AAExF,MAAI,QAAQ,iBAAiB,QAAW;AACtC,OAAI,QAAQ,eAAe,KAAK,QAAQ,eAAe,GACrD
,OAAM,IAAI,MAAM,wCAAwC;AAE1D,OAAI,QAAQ,iBAAiB,QAC3B;QAAI,QAAQ,eAAe,KAAK,QAAQ,gBAAgB,QAAQ,aAC9D,OAAM,IAAI,MAAM,sCAAsC,QAAQ,eAAe,IAAI;;;AAKvF,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,WAAW,KAAKA,OAAK,OAAO,GAAGA,OAAK,IAAI,eAAe,SAAS,EAC3E,SAASA,OAAK,SACf,CAAC;IACF;;;CAIJ,MAAM,aAAa,SAA0E;;AAC3F,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,WAAW,KAAKA,OAAK,OAAO,GAAGA,OAAK,IAAI,gBAAgB,SAAS,EAC5E,SAASA,OAAK,SACf,CAAC;IACF;;;CAIJ,MAAM,cAAc,SAAgE;;AAElF,MAAI,QAAQ,KAAK,SAAS,KAAK,QAAQ,KAAK,SAAS,IACnD,OAAM,IAAI,MAAM,kDAAkD;AAGpE,SAAOA,OAAK,gBAAgB,YAAY;AAItC,UAHa,MAAM,WAAW,KAAKA,OAAK,OAAO,GAAGA,OAAK,IAAI,iBAAiB,SAAS,EACnF,SAASA,OAAK,SACf,CAAC,IACa,EAAE;IACjB;;;;;;;;;;;AC/EN,IAAqB,kBAArB,cAA6C,cAA4B;;CAEvE,YAAY,KAAa,UAAqC,EAAE,EAAE,SAAe;EAC/E,MAAM,WAAW,IAAI,QAAQ,OAAO,GAAG;EACvC,MAAM,iDAAoB,wBAAiB,gBAAgB,sBAAuB;AAClF,QAAM,UAAU,cAAcC,SAAO,UAAU;;;CAIjD,MAAM,aAAa,kBAA2D;;AAC5E,SAAOC,MAAK,gBAAgB,YAAY;AAOtC,UANa,MAAM,WAAW,KAC5BA,MAAK,OACL,GAAGA,MAAK,IAAI,sBACZ,EAAE,kBAAkB,EACpB,EAAE,SAASA,MAAK,SAAS,CAC1B,IACc,EAAE;IACjB;;;CAIJ,MAAM,UAAU,kBAAgF;;AAC9F,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,WAAW,KACtBA,OAAK,OACL,GAAGA,OAAK,IAAI,mBACZ,EAAE,kBAAkB,EACpB,EAAE,SAASA,OAAK,SAAS,CAC1B;IACD;;;CAIJ,MAAM,YACJ,UAAoC,EAAE,EACW;;AACjD,SAAOA,OAAK,gBAAgB,YAAY;AACtC,UAAO,MAAM,WAAW,KAAKA,OAAK,OAAO,GAAGA,OAAK,IAAI,qBAAqB,SAAS,EACjF,SAASA,OAAK,SACf,CAAC;IACF;;;CAIJ,MAAM,aAAa,kBAA2D;;AAC5E,SAAOA,OAAK,gBAAgB,YAAY;AAOtC,UANa,MAAM,WAAW,KAC5BA,OAAK,OACL,GAAGA,OAAK,IAAI,sBACZ,EAAE,kBAAkB,EACpB,EAAE,SAASA,OAAK,SAAS,CAC1B,IACc,EAAE;IACjB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACSN,IAAa,uBAAb,cAA0C,gBAAgB;;;;;;;;;;;;;;;;;;CAkBxD,YAAY,KAAa,UAAuC,EAAE,EAAE;AAClE,QAAM,KAAK,QAAQ,WAAW,EAAE,EAAE,QAAQ,MAAM;;;;;;;;;;;;;;;;;;;;CAqBlD,KAAK,kBAA6C;AAChD,SAAO,IAAI,kBAAkB,KAAK,KAAK,KAAK,SAAS,kBAAkB,KAAK,MAAM;;;;;;;;;;;;;;;;;;;;;;;CAwBpF,MAAM,aAAa,kBAA2D;yCACrE,MAAM;AAAb,kDAA0B;;;;;;;;;;;;;;;;;;;;;;;;CAyB5B,MAAM,UAAU,kBAAgF;sCACvF,MAAM;AAAb,gDAAuB;;;;;;;;;;;;;;;;;;;;;;;;;;CA2BzB,MAAM,YACJ,UAAoC,EAAE,EACW;wCAC1C,MAAM;AAAb,kDAAyB;;;;;;;;;;;;;;;;;;;;;;;CAwB3B,MAAM,aAAa,kBAA2D;yCACrE,MAAM;AAAb,mDAA0B;;;;;;;;;;;;AAa9B,IAAa,oBAAb,cAAuC,eAAe;;;;;;;;;;;;;;CAgBpD,YACE,KACA,SACA,kBACA,SACA;AACA,QAAM,KAAK,SAASC,QAAM;AAC1B,OAAK,mBAAmB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA8B1B,MAAe,YAAY,SAAuD;wCACzE,MAAM;AAAb,oFACK,gBACH,kBAAkBC,OAAK;;;;;;;;;;;;;;;;;;;;;CAuB3B,MAAe,YAAY,UAAwD,EAAE,EAAE;wCAC9E,MAAM;AAAb,oFACK,gBACH,kBAAkBA,OAAK;;;;;;;;;;;;;;;;;;;;;;CAwB3B,MAAe,SAAS,WAAmB;qCAClC,MAAM;AAAb,+CAAsBA,OAAK,kBAAkB;;;;;;;;;;;;;;;;;;;;;CAsB/C,MAAe,YAAY,WAAmB;wCACrC,MAAM;AAAb,kDAAyBA,OAAK,kBAAkB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAkClD,MAAM,WAAqC;AACzC,SAAO,IAAI,iBACT,KAAK,KACL,KAAK,SACL,KAAK,kBACL,WACA,KAAK,MACN;;;;;;;;;;;;AAaL,IAAa,mBAAb,cAAsC,cAAc;;;;;;;;;;;;;;;CAkBlD,YACE,KACA,SACA,kBACA,WACA,SACA;AACA,QAAM,KAAK,SAASD,QAAM;AAC1B,OAAK,mBAAmB;AACxB,OAAK,YAAY;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA8BnB,MAAe,WAAW,SAAoE;uCACrF,MAAM;AAAb,mFACK;GACH,kBAAkBC,OAAK;GACvB,WAAWA,OAAK;;;;;;;;;;;;;;;;;;;;;;;;;CA0BpB,MAAe,WAAW,SAAoE;uCACrF,MAAM;AAAb,oFACK;GACH,kBAAkBA,QAAK;GACvB,WAAWA,QAAK;;;;;;;;;;;;;;;;;;;;;;;;;CA0BpB,MAAe,YACb,UAAsE,EAAE,EACxE;wCACO,MAAM;AAAb,qFACK;GACH,kBAAkBA,QAAK;GACvB,WAAWA,QAAK;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA6BpB,MAAe,aACb,SACA;yCACO,MAAM;AAAb,sFACK;GACH,kBAAkBA,QAAK;GACvB,WAAWA,QAAK;;;;;;;;;;;;;;;;;;;;;;;;CAyBpB,MAAe,cACb,SACA;0CACO,MAAM;AAAb,uFACK;GACH,kBAAkBA,QAAK;GACvB,WAAWA,QAAK;;;;;;;AC1lBtB,IAAa,gBAAb,cAAmC,iBAAiB;;;;;;;;;;;;;;;CAelD,YACE,KACA,UAAqC,EAAE,EACvC,SACA,MACA;AACA,QAAM,KAAK,SAASC,SAAO,KAAK;;;;;;;;;;;;;CAclC,KAAK,IAA4B;AAC/B,SAAO,IAAI,eAAe,KAAK,KAAK,KAAK,SAAS,IAAI,KAAK
,MAAM;;;;;;;;;;;;;CAcnE,IAAI,UAAgC;AAClC,SAAO,IAAI,qBAAqB,KAAK,MAAM,WAAW;GACpD,SAAS,KAAK;GACd,OAAO,KAAK;GACb,CAAC;;;;;;;;;;;;;CAcJ,IAAI,YAAoC;AACtC,SAAO,IAAI,uBAAuB,KAAK,MAAM,YAAY,KAAK,SAAS,KAAK,MAAM"}
|
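For orientation, the `sourcesContent` embedded in the map above documents the new vector-index convenience methods (`putVectors`, `getVectors`, `listVectors`, `queryVectors`, `deleteVectors`) and the `StorageClient.vectors` accessor. The sketch below is assembled from the JSDoc examples visible in those embedded sources; the URL, API key, bucket name (`embeddings-prod`) and index name (`documents-openai`) are illustrative placeholders, and the vectors API is flagged as a public alpha in the source, so availability may vary by account type.

```typescript
// Minimal usage sketch based on the JSDoc examples embedded in the bundled sources.
// All identifiers below that are not part of the package API (URL, key, bucket and
// index names, the category filter) are assumptions for illustration only.
import { StorageClient } from '@supabase/storage-js'

const storage = new StorageClient('https://xyzcompany.supabase.co/storage/v1', {
  apikey: 'public-anon-key', // placeholder anon key
})

async function queryDocuments() {
  // Index-scoped client: bucket and index names are injected automatically
  // by the convenience methods shown in this diff.
  const index = storage.vectors.from('embeddings-prod').index('documents-openai')

  const { data, error } = await index.queryVectors({
    queryVector: { float32: [0.1, 0.2, 0.3] }, // query embedding (illustrative values)
    topK: 5,
    filter: { category: 'technical' },
    returnDistance: true,
    returnMetadata: true,
  })

  if (error) throw error
  return data // matches ordered by distance
}
```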