@fileverse/api 0.0.1 → 0.0.3
This diff shows the changes between publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
package/dist/cli/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../node_modules/tsup/assets/esm_shims.js","../../src/cli/constants.generated.ts","../../src/cli/constants.ts","../../src/config/index.ts","../../src/infra/logger.ts","../../src/infra/asyncHandler.ts","../../src/sdk/key-store.ts","../../src/sdk/auth-token-provider.ts","../../src/constants/chains.ts","../../src/constants/events.ts","../../src/constants/methods.ts","../../src/constants/index.ts","../../src/sdk/pimlico-utils.ts","../../src/sdk/smart-agent.ts","../../src/sdk/file-encryption.ts","../../src/sdk/file-utils.ts","../../src/sdk/file-manager.ts","../../src/domain/portal/publish.ts","../../src/domain/portal/saveApiKey.ts","../../src/domain/portal/removeApiKey.ts","../../src/domain/portal/index.ts","../../src/errors/rate-limit.ts","../../src/infra/worker/eventProcessor.ts","../../src/infra/worker/workerSignal.ts","../../src/infra/worker/worker.ts","../../src/infra/worker/index.ts","../../src/appWorker.ts","../../src/infra/reporter.ts","../../src/infra/index.ts","../../src/infra/database/connection.ts","../../src/domain/file/constants.ts","../../src/infra/database/query-builder.ts","../../src/infra/database/index.ts","../../src/infra/database/models/files.model.ts","../../src/infra/database/models/portals.model.ts","../../src/infra/database/models/apikeys.model.ts","../../src/infra/database/models/folders.model.ts","../../src/infra/database/models/events.model.ts","../../src/infra/database/models/index.ts","../../src/domain/portal/savePortal.ts","../../src/infra/database/migrations/index.ts","../../src/cli/index.ts","../../src/cli/fetch-api-key.ts","../../src/cli/scaffold-config.ts","../../src/cli/process-manager.ts","../../src/cli/prompts.ts","../../src/init/index.ts"],"sourcesContent":["// Shim globals in esm bundle\nimport path from 'node:path'\nimport { fileURLToPath } from 'node:url'\n\nconst getFilename = () => fileURLToPath(import.meta.url)\nconst getDirname = () => path.dirname(getFilename())\n\nexport const __dirname = /* @__PURE__ */ getDirname()\nexport const __filename = /* @__PURE__ */ getFilename()\n","export const STATIC_CONFIG = {\n API_URL: 'https://prod-apps-storage-5cdacc06ff79.herokuapp.com/',\n SERVER_DID: 'did:key:z6Mkroj9bxTin6Z5S9qwx2G2b87NPrCX7S85FhCpmBGPcDCz',\n PROXY_SERVER_DID: 'did:key:z6MkrZSmq8D6vQG87YbjUQatXeptaCCXWdTx8fYaWxWbRUHB',\n NETWORK_NAME: 'gnosis',\n DEFAULT_PORT: '8001',\n DEFAULT_RPC_URL: 'https://rpc.gnosischain.com',\n PIMLICO_PROXY_URL: 'https://pimlico-proxy-0a326da116f8.herokuapp.com/',\n SERVICE_NAME: 'fileverse-api',\n LOG_LEVEL: 'info',\n FRONTEND_URL: 'https://docs.fileverse.io'\n} as const;\n\nexport const BASE_CONFIG = STATIC_CONFIG;\n","export { STATIC_CONFIG, BASE_CONFIG } from \"./constants.generated.js\";\n","import dotenv from \"dotenv\";\nimport path from \"path\";\nimport fs from \"fs\";\nimport os from \"os\";\nimport { STATIC_CONFIG } from \"../cli/constants.js\";\n\nconst projectEnvPath = path.join(process.cwd(), \"config\", \".env\");\nconst userEnvPath = path.join(os.homedir(), \".fileverse\", \".env\");\n\nfunction getEnvPath(): string {\n if (fs.existsSync(projectEnvPath)) {\n return projectEnvPath;\n }\n return userEnvPath;\n}\n\nexport function loadConfig(override = true): void {\n const envPath = getEnvPath();\n dotenv.config({ path: envPath, override });\n}\n\nloadConfig(false);\n\nexport function getRuntimeConfig() {\n return {\n get API_KEY() {\n return process.env.API_KEY;\n },\n get RPC_URL() {\n return process.env.RPC_URL || STATIC_CONFIG.DEFAULT_RPC_URL;\n },\n get DB_PATH() {\n 
return process.env.DB_PATH;\n },\n get PORT() {\n return process.env.PORT || STATIC_CONFIG.DEFAULT_PORT;\n },\n get NODE_ENV() {\n return process.env.NODE_ENV || \"production\";\n },\n get FRONTEND_URL() {\n return process.env.FRONTEND_URL || STATIC_CONFIG.FRONTEND_URL;\n },\n };\n}\n\nexport function validateDbPath(): void {\n const dbPath = process.env.DB_PATH;\n if (!dbPath) {\n console.error(\"Error: DB_PATH environment variable is required\");\n console.error(\"Please set DB_PATH in your .env file (config/.env or ~/.fileverse/.env) or run the CLI first\");\n process.exit(1);\n }\n\n const dbDir = path.dirname(dbPath.trim());\n if (!fs.existsSync(dbDir)) {\n fs.mkdirSync(dbDir, { recursive: true });\n }\n}\n\nconst config: Record<string, string | undefined> = {\n ...STATIC_CONFIG,\n get SERVICE_NAME() {\n return STATIC_CONFIG.SERVICE_NAME;\n },\n get LOG_LEVEL() {\n return STATIC_CONFIG.LOG_LEVEL;\n },\n get NETWORK_NAME() {\n return STATIC_CONFIG.NETWORK_NAME;\n },\n get UPLOAD_SERVER_URL() {\n return STATIC_CONFIG.API_URL;\n },\n get UPLOAD_SERVER_DID() {\n return STATIC_CONFIG.SERVER_DID;\n },\n get API_KEY() {\n return process.env.API_KEY;\n },\n get RPC_URL() {\n return process.env.RPC_URL || STATIC_CONFIG.DEFAULT_RPC_URL;\n },\n get DB_PATH() {\n return process.env.DB_PATH;\n },\n get PORT() {\n return process.env.PORT || STATIC_CONFIG.DEFAULT_PORT;\n },\n get NODE_ENV() {\n return process.env.NODE_ENV || \"production\";\n },\n get IP() {\n return process.env.IP || \"0.0.0.0\";\n },\n get FRONTEND_URL() {\n return process.env.FRONTEND_URL || STATIC_CONFIG.FRONTEND_URL;\n },\n};\n\nexport { config };\n","import pino, { Logger as PinoLogger, Level } from \"pino\";\nimport { STATIC_CONFIG } from \"../cli/constants\";\nimport { config } from \"../config\";\n\nconst isProduction = config.NODE_ENV === \"production\";\n\nconst pinoInstance = pino({\n name: STATIC_CONFIG.SERVICE_NAME,\n level: STATIC_CONFIG.LOG_LEVEL,\n formatters: {\n bindings: (bindings) => ({ name: bindings.name }),\n level: (label) => ({ level: label }),\n },\n serializers: {\n err(err: Error | undefined) {\n if (!err) return err;\n if (isProduction) {\n return { type: err.name, message: err.message };\n }\n return {\n type: err.name,\n message: err.message,\n stack: err.stack,\n };\n },\n },\n transport:\n config.NODE_ENV !== \"production\"\n ? 
{\n target: \"pino-pretty\",\n options: {\n colorize: true,\n translateTime: \"SYS:standard\",\n ignore: \"pid,hostname\",\n errorProps: \"*\",\n errorLikeObjectKeys: [\"err\", \"error\"],\n },\n }\n : undefined,\n});\n\ntype LogFn = {\n (msg: string, ...args: unknown[]): void;\n (obj: object, msg?: string, ...args: unknown[]): void;\n};\n\nconst createLogMethod = (level: Level): LogFn => {\n return (...args: unknown[]) => {\n const [first, ...rest] = args;\n const log = pinoInstance[level].bind(pinoInstance) as (...a: unknown[]) => void;\n\n if (typeof first === \"object\" && first !== null && !(first instanceof Error)) {\n log(first, ...rest);\n return;\n }\n\n if (rest.length > 0) {\n const last = rest[rest.length - 1];\n if (last instanceof Error) {\n log({ err: last }, first, ...rest.slice(0, -1));\n return;\n }\n }\n\n if (first instanceof Error) {\n log({ err: first }, first.message);\n return;\n }\n\n log(first, ...rest);\n };\n};\n\ninterface Logger {\n trace: LogFn;\n debug: LogFn;\n info: LogFn;\n warn: LogFn;\n error: LogFn;\n fatal: LogFn;\n level: Level;\n child: PinoLogger[\"child\"];\n}\n\nexport const logger: Logger = {\n trace: createLogMethod(\"trace\"),\n debug: createLogMethod(\"debug\"),\n info: createLogMethod(\"info\"),\n warn: createLogMethod(\"warn\"),\n error: createLogMethod(\"error\"),\n fatal: createLogMethod(\"fatal\"),\n get level() {\n return pinoInstance.level as Level;\n },\n set level(lvl: Level) {\n pinoInstance.level = lvl;\n },\n child: pinoInstance.child.bind(pinoInstance),\n};\n","import { Request, Response, NextFunction } from \"express\";\n\nconst asyncHandler =\n (fn: (req: Request, res: Response, next: NextFunction) => Promise<void>) =>\n (req: Request, res: Response, next: NextFunction) =>\n Promise.resolve(fn(req, res, next)).catch(next);\n\nconst asyncHandlerArray = (resolvers: any) => {\n return resolvers.map(asyncHandler);\n};\n\nexport { asyncHandler, asyncHandlerArray };\n","import { Hex } from \"viem\";\nimport { eciesDecrypt, eciesEncrypt, generateECKeyPair } from \"@fileverse/crypto/ecies\";\nimport { AuthTokenProvider } from \"./auth-token-provider\";\n\nexport class KeyStore {\n private portalKeySeed: Uint8Array | undefined;\n private portalAddress: Hex | undefined;\n\n constructor(\n seed: Uint8Array,\n address: Hex,\n private readonly authTokenProvider: AuthTokenProvider,\n ) {\n this.portalKeySeed = seed;\n this.portalAddress = address;\n this.authTokenProvider = authTokenProvider;\n }\n\n getPortalAddress() {\n if (!this.portalAddress) {\n throw new Error(\"Portal address is not set\");\n }\n return this.portalAddress;\n }\n\n private getAppEncryptionKey() {\n if (!this.portalKeySeed) {\n throw new Error(\"Portal key seed is not set\");\n }\n\n const keyPair = generateECKeyPair(this.portalKeySeed);\n return keyPair.publicKey;\n }\n\n private getAppDecryptionKey() {\n if (!this.portalKeySeed) {\n throw new Error(\"Portal key seed is not set\");\n }\n\n const keyPair = generateECKeyPair(this.portalKeySeed);\n return keyPair.privateKey;\n }\n\n encryptData(data: Uint8Array) {\n return eciesEncrypt(this.getAppEncryptionKey(), data);\n }\n\n decryptData(data: string) {\n return eciesDecrypt(this.getAppDecryptionKey(), data);\n }\n\n getAuthToken(audienceDid: string) {\n return this.authTokenProvider.getAuthToken(audienceDid);\n }\n}\n","import * as ucans from \"@ucans/ucans\";\nimport type { Hex } from \"viem\";\n\nexport class AuthTokenProvider {\n private readonly DEFAULT_OPTIONS = {\n namespace: \"file\",\n segment: \"CREATE\",\n 
scheme: \"storage\",\n };\n private keyPair: ucans.EdKeypair;\n portalAddress: Hex;\n constructor(keyPair: ucans.EdKeypair, portalAddress: Hex) {\n this.keyPair = keyPair;\n this.portalAddress = portalAddress;\n }\n\n async getAuthToken(\n audienceDid: string,\n options: { namespace: string; segment: string; scheme: string } = this.DEFAULT_OPTIONS,\n ): Promise<string> {\n const ucan = await ucans.build({\n audience: audienceDid,\n issuer: this.keyPair,\n lifetimeInSeconds: 7 * 86400,\n capabilities: [\n {\n with: {\n scheme: options.scheme,\n hierPart: this.portalAddress.toLocaleLowerCase(),\n },\n can: { namespace: options.namespace, segments: [options.segment] },\n },\n ],\n });\n\n return ucans.encode(ucan);\n }\n}\n","export { sepolia, gnosis } from \"viem/chains\";\n","export const ADDED_FILE_EVENT = [\n {\n anonymous: false,\n inputs: [\n {\n indexed: true,\n internalType: \"uint256\",\n name: \"fileId\",\n type: \"uint256\",\n },\n {\n indexed: false,\n internalType: \"string\",\n name: \"appFileId\",\n type: \"string\",\n },\n {\n indexed: false,\n internalType: \"enum FileverseApp.FileType\",\n name: \"fileType\",\n type: \"uint8\",\n },\n {\n indexed: false,\n internalType: \"string\",\n name: \"metadataIPFSHash\",\n type: \"string\",\n },\n {\n indexed: false,\n internalType: \"string\",\n name: \"contentIPFSHash\",\n type: \"string\",\n },\n {\n indexed: false,\n internalType: \"string\",\n name: \"gateIPFSHash\",\n type: \"string\",\n },\n {\n indexed: false,\n internalType: \"uint256\",\n name: \"version\",\n type: \"uint256\",\n },\n {\n indexed: true,\n internalType: \"address\",\n name: \"by\",\n type: \"address\",\n },\n ],\n name: \"AddedFile\",\n type: \"event\",\n },\n] as const;\n\nexport const EDITED_FILE_EVENT = [\n {\n anonymous: false,\n inputs: [\n {\n indexed: true,\n internalType: \"uint256\",\n name: \"fileId\",\n type: \"uint256\",\n },\n {\n indexed: false,\n internalType: \"string\",\n name: \"appFileId\",\n type: \"string\",\n },\n {\n indexed: false,\n internalType: \"enum FileverseApp.FileType\",\n name: \"fileType\",\n type: \"uint8\",\n },\n {\n indexed: false,\n internalType: \"string\",\n name: \"metadataIPFSHash\",\n type: \"string\",\n },\n {\n indexed: false,\n internalType: \"string\",\n name: \"contentIPFSHash\",\n type: \"string\",\n },\n {\n indexed: false,\n internalType: \"string\",\n name: \"gateIPFSHash\",\n type: \"string\",\n },\n {\n indexed: false,\n internalType: \"uint256\",\n name: \"version\",\n type: \"uint256\",\n },\n {\n indexed: true,\n internalType: \"address\",\n name: \"by\",\n type: \"address\",\n },\n ],\n name: \"EditedFile\",\n type: \"event\",\n },\n] as const;\n\nexport const DELETED_FILE_EVENT = [\n {\n anonymous: false,\n inputs: [\n {\n indexed: true,\n internalType: \"uint256\",\n name: \"fileId\",\n type: \"uint256\",\n },\n {\n indexed: false,\n internalType: \"string\",\n name: \"appFileId\",\n type: \"string\",\n },\n {\n indexed: true,\n internalType: \"address\",\n name: \"by\",\n type: \"address\",\n },\n ],\n name: \"DeletedFile\",\n type: \"event\",\n },\n] as const;\n","export const ADD_FILE_METHOD = [\n {\n inputs: [\n {\n internalType: \"string\",\n name: \"_appFileId\",\n type: \"string\",\n },\n {\n internalType: \"enum FileverseApp.FileType\",\n name: \"fileType\",\n type: \"uint8\",\n },\n {\n internalType: \"string\",\n name: \"_metadataIPFSHash\",\n type: \"string\",\n },\n {\n internalType: \"string\",\n name: \"_contentIPFSHash\",\n type: \"string\",\n },\n {\n internalType: \"string\",\n 
name: \"_gateIPFSHash\",\n type: \"string\",\n },\n {\n internalType: \"uint256\",\n name: \"version\",\n type: \"uint256\",\n },\n ],\n name: \"addFile\",\n outputs: [],\n stateMutability: \"nonpayable\",\n type: \"function\",\n },\n] as const;\n\nexport const EDIT_FILE_METHOD = [\n {\n inputs: [\n {\n internalType: \"uint256\",\n name: \"fileId\",\n type: \"uint256\",\n },\n {\n internalType: \"string\",\n name: \"_appFileId\",\n type: \"string\",\n },\n {\n internalType: \"string\",\n name: \"_metadataIPFSHash\",\n type: \"string\",\n },\n {\n internalType: \"string\",\n name: \"_contentIPFSHash\",\n type: \"string\",\n },\n {\n internalType: \"string\",\n name: \"_gateIPFSHash\",\n type: \"string\",\n },\n {\n internalType: \"enum FileverseApp.FileType\",\n name: \"fileType\",\n type: \"uint8\",\n },\n {\n internalType: \"uint256\",\n name: \"version\",\n type: \"uint256\",\n },\n ],\n name: \"editFile\",\n outputs: [],\n stateMutability: \"nonpayable\",\n type: \"function\",\n },\n] as const;\n\nexport const DELETED_FILE_ABI = [\n {\n inputs: [\n {\n internalType: \"uint256\",\n name: \"fileId\",\n type: \"uint256\",\n },\n ],\n name: \"deleteFile\",\n outputs: [],\n stateMutability: \"nonpayable\",\n type: \"function\",\n },\n] as const;\n","import { STATIC_CONFIG } from \"../cli/constants\";\nimport { getRuntimeConfig } from \"../config\";\nimport { gnosis, sepolia } from \"./chains\";\n\nexport const NETWORK_NAME = STATIC_CONFIG.NETWORK_NAME;\nexport const UPLOAD_SERVER_URL = STATIC_CONFIG.API_URL;\n\nexport const getRpcUrl = () => getRuntimeConfig().RPC_URL;\nexport const getPimlicoUrl = () => `${STATIC_CONFIG.PIMLICO_PROXY_URL}api/${NETWORK_NAME}/rpc`;\n\nconst CHAIN_MAP = {\n gnosis: gnosis,\n sepolia: sepolia,\n} as const;\n\nexport const CHAIN = CHAIN_MAP[NETWORK_NAME as keyof typeof CHAIN_MAP];\nexport { DELETED_FILE_EVENT, EDITED_FILE_EVENT, ADDED_FILE_EVENT } from \"./events\";\nexport { DELETED_FILE_ABI, EDIT_FILE_METHOD, ADD_FILE_METHOD } from \"./methods\";\n","import { createPublicClient, http, hexToBigInt, toHex, toBytes, type PrivateKeyAccount, type Hex } from \"viem\";\n\nimport { createPimlicoClient } from \"permissionless/clients/pimlico\";\nimport { createSmartAccountClient } from \"permissionless\";\nimport { toSafeSmartAccount } from \"permissionless/accounts\";\nimport { entryPoint07Address } from \"viem/account-abstraction\";\nimport { CHAIN, getRpcUrl, getPimlicoUrl } from \"../constants\";\nimport { generatePrivateKey } from \"viem/accounts\";\n\nexport const getPublicClient = () =>\n createPublicClient({\n transport: http(getRpcUrl(), {\n retryCount: 0,\n }),\n chain: CHAIN,\n });\n\nexport const getPimlicoClient = (authToken: string, portalAddress: Hex, invokerAddress: Hex) =>\n createPimlicoClient({\n transport: http(getPimlicoUrl(), {\n retryCount: 0,\n fetchOptions: {\n headers: {\n Authorization: `Bearer ${authToken}`,\n contract: portalAddress,\n invoker: invokerAddress,\n },\n },\n }),\n entryPoint: {\n address: entryPoint07Address,\n version: \"0.7\",\n },\n });\n\nexport const signerToSmartAccount = async (signer: PrivateKeyAccount) =>\n await toSafeSmartAccount({\n client: getPublicClient(),\n owners: [signer],\n entryPoint: {\n address: entryPoint07Address,\n version: \"0.7\",\n },\n version: \"1.4.1\",\n });\n\nexport const getSmartAccountClient = async (signer: PrivateKeyAccount, authToken: string, portalAddress: Hex) => {\n const smartAccount = await signerToSmartAccount(signer);\n const pimlicoClient = getPimlicoClient(authToken, 
portalAddress, smartAccount.address);\n\n return createSmartAccountClient({\n account: smartAccount,\n chain: CHAIN,\n paymaster: pimlicoClient,\n bundlerTransport: http(getPimlicoUrl(), {\n fetchOptions: {\n headers: {\n Authorization: `Bearer ${authToken}`,\n contract: portalAddress,\n invoker: smartAccount.address,\n },\n },\n retryCount: 0,\n }),\n userOperation: {\n estimateFeesPerGas: async () => (await pimlicoClient.getUserOperationGasPrice()).fast,\n },\n });\n};\n\nexport const getNonce = () =>\n hexToBigInt(\n toHex(toBytes(generatePrivateKey()).slice(0, 24), {\n size: 32,\n }),\n );\n\nexport const waitForUserOpReceipt = async (\n hash: Hex,\n authToken: string,\n portalAddress: Hex,\n invokerAddress: Hex,\n timeout = 120000,\n) => {\n const pimlicoClient = getPimlicoClient(authToken, portalAddress, invokerAddress);\n return pimlicoClient.waitForUserOperationReceipt({\n hash,\n timeout,\n });\n};\n","import { Hex, toHex } from \"viem\";\nimport { privateKeyToAccount } from \"viem/accounts\";\nimport { getSmartAccountClient, getNonce, waitForUserOpReceipt } from \"./pimlico-utils\";\nimport { AuthTokenProvider } from \"./auth-token-provider\";\nimport { STATIC_CONFIG } from \"../cli/constants\";\nimport { createSmartAccountClient } from \"permissionless\";\nimport type { IExecuteUserOperationRequest } from \"../types\";\n\nexport type { IExecuteUserOperationRequest };\n\nexport class AgentClient {\n private smartAccountAgent: ReturnType<typeof createSmartAccountClient> | null = null;\n private readonly MAX_CALL_GAS_LIMIT = 500000;\n private readonly authOptions: {\n namespace: string;\n segment: string;\n scheme: string;\n } = { namespace: \"proxy\", segment: \"ACCESS\", scheme: \"pimlico\" };\n\n constructor(private readonly authTokenProvider: AuthTokenProvider) {\n this.authTokenProvider = authTokenProvider;\n }\n\n async initializeAgentClient(keyMaterial: Uint8Array) {\n const agentAccount = privateKeyToAccount(toHex(keyMaterial));\n const authToken = await this.authTokenProvider.getAuthToken(STATIC_CONFIG.PROXY_SERVER_DID, this.authOptions);\n const smartAccountClient = await getSmartAccountClient(\n agentAccount,\n authToken,\n this.authTokenProvider.portalAddress,\n );\n this.smartAccountAgent = smartAccountClient;\n }\n\n getSmartAccountAgent() {\n if (!this.smartAccountAgent) throw new Error(\"Agent client not initialized\");\n\n return this.smartAccountAgent;\n }\n\n getAgentAddress() {\n const smartAccountAgent = this.getSmartAccountAgent();\n if (!smartAccountAgent.account) throw new Error(\"Agent account not found\");\n return smartAccountAgent.account.address;\n }\n\n getAgentAccount() {\n const smartAccountAgent = this.getSmartAccountAgent();\n if (!smartAccountAgent.account) throw new Error(\"Agent account not found\");\n return smartAccountAgent.account;\n }\n\n destroyAgentClient() {\n this.smartAccountAgent = null;\n }\n\n async getCallData(request: IExecuteUserOperationRequest | IExecuteUserOperationRequest[]) {\n const agentAccount = this.getAgentAccount();\n if (Array.isArray(request)) {\n if (request.length === 0 || request.length > 10) throw new Error(\"Request length must be between 1 and 10\");\n\n const encodedCallData = request.map((req) => ({\n to: req.contractAddress,\n data: req.data,\n value: BigInt(0),\n }));\n\n return await agentAccount.encodeCalls(encodedCallData);\n }\n\n return await agentAccount.encodeCalls([\n {\n to: request.contractAddress,\n data: request.data,\n value: BigInt(0),\n },\n ]);\n }\n\n async sendUserOperation(\n request: 
IExecuteUserOperationRequest | IExecuteUserOperationRequest[],\n customGasLimit?: number,\n ) {\n try {\n const smartAccountAgent = this.getSmartAccountAgent();\n\n const callData = await this.getCallData(request);\n\n return await smartAccountAgent.sendUserOperation({\n callData,\n callGasLimit: BigInt(customGasLimit || this.MAX_CALL_GAS_LIMIT),\n nonce: getNonce(),\n });\n } catch (error) {\n throw error;\n }\n }\n\n async executeUserOperationRequest(\n request: IExecuteUserOperationRequest | IExecuteUserOperationRequest[],\n timeout: number,\n customGasLimit?: number,\n ) {\n const userOpHash = await this.sendUserOperation(request, customGasLimit);\n const { authToken, portalAddress, invokerAddress } = await this.getAuthParams();\n const receipt = await waitForUserOpReceipt(userOpHash, authToken, portalAddress, invokerAddress, timeout);\n if (!receipt.success) throw new Error(`Failed to execute user operation: ${receipt.reason}`);\n return receipt;\n }\n\n async getAuthParams(): Promise<{ authToken: string; portalAddress: Hex; invokerAddress: Hex }> {\n const authToken = await this.authTokenProvider.getAuthToken(STATIC_CONFIG.PROXY_SERVER_DID, this.authOptions);\n return {\n authToken,\n portalAddress: this.authTokenProvider.portalAddress,\n invokerAddress: this.getAgentAddress(),\n };\n }\n}\n","import { gcm } from \"@noble/ciphers/aes.js\";\nimport { generateRandomBytes } from \"@fileverse/crypto/utils\";\n\nconst KEY_LEN = 32;\nconst IV_LEN = 12;\nconst TAG_LEN = 16;\n\nconst b64ToBytes = (b64: string) => Uint8Array.from(Buffer.from(b64, \"base64\"));\nconst bytesToB64 = (b: Uint8Array) => Buffer.from(b).toString(\"base64\");\n\nimport type { DecryptionOptions } from \"../types\";\nexport type { DecryptionOptions };\n\nexport function gcmEncrypt(plaintext: Uint8Array) {\n const key = generateRandomBytes(KEY_LEN);\n const iv = generateRandomBytes(IV_LEN);\n if (key.length !== KEY_LEN) throw new Error(\"key must be 32 bytes\");\n if (iv.length !== IV_LEN) throw new Error(\"iv must be 12 bytes\");\n\n const out = gcm(key, iv).encrypt(plaintext);\n const ciphertext = out.subarray(0, out.length - TAG_LEN);\n const authTag = out.subarray(out.length - TAG_LEN);\n\n return {\n ciphertext,\n authTag: bytesToB64(authTag),\n key: bytesToB64(key),\n iv: bytesToB64(iv),\n };\n}\n\nexport function gcmDecrypt(ciphertext: Uint8Array, opts: DecryptionOptions) {\n const key = b64ToBytes(opts.key);\n const iv = b64ToBytes(opts.iv);\n const tag = b64ToBytes(opts.authTag);\n if (key.length !== KEY_LEN) throw new Error(\"key must be 32 bytes\");\n if (iv.length !== IV_LEN) throw new Error(\"iv must be 12 bytes\");\n if (tag.length !== TAG_LEN) throw new Error(\"authTag must be 16 bytes\");\n\n const combined = new Uint8Array(ciphertext.length + TAG_LEN);\n combined.set(ciphertext, 0);\n combined.set(tag, ciphertext.length);\n\n return gcm(key, iv).decrypt(combined);\n}\n","import { getArgon2idHash } from \"@fileverse/crypto/argon\";\nimport { bytesToBase64, generateRandomBytes } from \"@fileverse/crypto/utils\";\nimport { derivePBKDF2Key, encryptAesCBC } from \"@fileverse/crypto/kdf\";\nimport { secretBoxEncrypt } from \"@fileverse/crypto/nacl\";\nimport hkdf from \"futoin-hkdf\";\n\nimport tweetnacl from \"tweetnacl\";\nimport { fromUint8Array, toUint8Array } from \"js-base64\";\nimport { gcmEncrypt } from \"./file-encryption\";\nimport { toAESKey, aesEncrypt } from \"@fileverse/crypto/webcrypto\";\nimport axios from \"axios\";\nimport { ADD_FILE_METHOD, DELETED_FILE_ABI, EDIT_FILE_METHOD, 
UPLOAD_SERVER_URL } from \"../constants\";\nimport type { UploadFileAuthParams, FileMetadataParams, UploadFilesParams } from \"../types\";\nimport { encodeFunctionData, type Hex, parseEventLogs, type Abi } from \"viem\";\n\ninterface LinkKeyMaterialParams {\n ddocId: string;\n linkKey: string | undefined;\n linkKeyNonce: string | undefined;\n}\n\nconst deriveKeyFromAg2Hash = async (pass: string, salt: Uint8Array) => {\n const key = await getArgon2idHash(pass, salt);\n\n return hkdf(Buffer.from(key), tweetnacl.secretbox.keyLength, {\n info: Buffer.from(\"encryptionKey\"),\n });\n};\n\nconst decryptSecretKey = async (docId: string, nonce: string, encryptedSecretKey: string) => {\n const derivedKey = await deriveKeyFromAg2Hash(docId, toUint8Array(nonce));\n\n return tweetnacl.secretbox.open(toUint8Array(encryptedSecretKey), toUint8Array(nonce), derivedKey);\n};\n\nconst getExistingEncryptionMaterial = async (\n existingEncryptedSecretKey: string,\n existingNonce: string,\n docId: string,\n) => {\n const secretKey = await decryptSecretKey(docId, existingNonce, existingEncryptedSecretKey);\n return {\n encryptedSecretKey: existingEncryptedSecretKey,\n nonce: toUint8Array(existingNonce),\n secretKey,\n };\n};\n\nconst getNaclSecretKey = async (ddocId: string) => {\n const { secretKey } = tweetnacl.box.keyPair();\n const nonce = tweetnacl.randomBytes(tweetnacl.secretbox.nonceLength);\n\n const derivedKey = await deriveKeyFromAg2Hash(ddocId, nonce);\n\n const encryptedSecretKey = fromUint8Array(tweetnacl.secretbox(secretKey, nonce, derivedKey), true);\n\n return { nonce, encryptedSecretKey, secretKey };\n};\n\nexport const generateLinkKeyMaterial = async (params: LinkKeyMaterialParams) => {\n if (params.linkKeyNonce && params.linkKey) {\n const { encryptedSecretKey, nonce, secretKey } = await getExistingEncryptionMaterial(\n params.linkKey,\n params.linkKeyNonce,\n params.ddocId,\n );\n if (secretKey) return { encryptedSecretKey, nonce, secretKey };\n }\n const { secretKey, nonce, encryptedSecretKey } = await getNaclSecretKey(params.ddocId);\n\n return { secretKey, nonce, encryptedSecretKey };\n};\n\nexport const jsonToFile = (json: any, fileName: string) => {\n const blob = new Blob([JSON.stringify(json)], {\n type: \"application/json\",\n });\n\n const file = new File([blob], fileName, {\n type: \"application/json\",\n });\n\n return file;\n};\n\nconst appendAuthTagIvToBlob = async (blob: Blob, authTag: Uint8Array, iv: Uint8Array) => {\n const encryptedFileBytes = await blob.arrayBuffer();\n const encryptedBytes = new Uint8Array(encryptedFileBytes);\n const combinedLength = encryptedBytes.length + authTag.length + iv.length;\n const combinedArray = new Uint8Array(combinedLength);\n\n let offset = 0;\n combinedArray.set(encryptedBytes, offset);\n offset += encryptedBytes.length;\n\n combinedArray.set(authTag, offset);\n offset += authTag.length;\n\n combinedArray.set(iv, offset);\n\n return new Blob([combinedArray], { type: blob.type });\n};\n\nexport const encryptFile = async (file: File) => {\n const arrayBuffer = await file.arrayBuffer();\n\n const plaintext = new Uint8Array(arrayBuffer);\n\n const { ciphertext, authTag, key, iv } = gcmEncrypt(plaintext);\n\n const encryptedBlob = new Blob([ciphertext], { type: file.type });\n\n const encryptedBlobWithAuthTagIv = await appendAuthTagIvToBlob(\n encryptedBlob,\n toUint8Array(authTag),\n toUint8Array(iv),\n );\n\n return {\n encryptedFile: new File([encryptedBlobWithAuthTagIv], file.name),\n key,\n };\n};\n\nexport const 
getNonceAppendedCipherText = (nonce: Uint8Array, cipherText: Uint8Array) => {\n return fromUint8Array(nonce, true) + \"__n__\" + fromUint8Array(cipherText, true);\n};\n\nexport const jsonToBytes = (json: Record<string, any>) => new TextEncoder().encode(JSON.stringify(json));\n\nexport const buildLinklock = (key: Uint8Array, fileKey: Uint8Array, commentKey: Uint8Array) => {\n const ikm = generateRandomBytes();\n const kdfSalt = generateRandomBytes();\n const derivedEphermalKey = derivePBKDF2Key(ikm, kdfSalt);\n\n const { iv, cipherText } = encryptAesCBC(\n {\n key: derivedEphermalKey,\n message: fileKey,\n },\n \"base64\",\n );\n\n const { iv: commentIv, cipherText: commentCipherText } = encryptAesCBC(\n {\n key: derivedEphermalKey,\n message: commentKey,\n },\n \"base64\",\n );\n\n const encryptedIkm = secretBoxEncrypt(ikm, key);\n\n const lockedFileKey = iv + \"__n__\" + cipherText;\n\n const lockedChatKey = commentIv + \"__n__\" + commentCipherText;\n\n const keyMaterial = bytesToBase64(kdfSalt) + \"__n__\" + encryptedIkm;\n\n const fileKeyNonce = generateRandomBytes(24);\n const encryptedFileKey = tweetnacl.secretbox(jsonToBytes({ key: fromUint8Array(fileKey) }), fileKeyNonce, key);\n\n const chatKeyNonce = generateRandomBytes(24);\n const encryptedChatKey = tweetnacl.secretbox(commentKey, chatKeyNonce, key);\n\n return {\n lockedFileKey: getNonceAppendedCipherText(fileKeyNonce, encryptedFileKey),\n lockedChatKey: getNonceAppendedCipherText(chatKeyNonce, encryptedChatKey),\n lockedFileKey_v2: lockedFileKey,\n lockedChatKey_v2: lockedChatKey,\n keyMaterial,\n };\n};\n\nexport const encryptTitleWithFileKey = async (args: { title: string; key: string }) => {\n const key = await toAESKey(toUint8Array(args.key));\n if (!key) throw new Error(\"Key is undefined\");\n\n const titleBytes = new TextEncoder().encode(args.title);\n\n const encryptedTitle = await aesEncrypt(key, titleBytes, \"base64\");\n\n return encryptedTitle;\n};\n\ninterface UploadFileParams {\n file: File;\n ipfsType: string;\n appFileId: string;\n}\n\nexport type { UploadFileAuthParams };\n\nexport const uploadFileToIPFS = async (fileParams: UploadFileParams, authParams: UploadFileAuthParams) => {\n const { file, ipfsType, appFileId } = fileParams;\n const { token, invoker, contractAddress } = authParams;\n\n const body = new FormData();\n body.append(\"file\", file);\n body.append(\"ipfsType\", ipfsType);\n body.append(\"appFileId\", appFileId);\n\n body.append(\"sourceApp\", \"ddoc\");\n const uploadEndpoint = UPLOAD_SERVER_URL + \"upload\";\n const response = await axios.post(uploadEndpoint, body, {\n headers: {\n Authorization: `Bearer ${token}`,\n contract: contractAddress,\n invoker: invoker,\n chain: process.env.chainId,\n },\n });\n\n return response.data.ipfsHash;\n};\n\nconst getEditFileTrxCalldata = (args: {\n fileId: number;\n appFileId: string;\n metadataHash: string;\n contentHash: string;\n gateHash: string;\n}) => {\n return encodeFunctionData({\n abi: EDIT_FILE_METHOD,\n functionName: \"editFile\",\n args: [BigInt(args.fileId), args.appFileId, args.metadataHash, args.contentHash, args.gateHash, 2, BigInt(0)],\n });\n};\n\nconst getAddFileTrxCalldata = (args: {\n appFileId: string;\n metadataHash: string;\n contentHash: string;\n gateHash: string;\n}) => {\n return encodeFunctionData({\n abi: ADD_FILE_METHOD,\n functionName: \"addFile\",\n args: [args.appFileId, 2, args.metadataHash, args.contentHash, args.gateHash, BigInt(0)],\n });\n};\n\nexport const prepareCallData = (args: {\n metadataHash: string;\n 
contentHash: string;\n gateHash: string;\n appFileId: string;\n fileId?: number;\n}) => {\n if (args.fileId) {\n return getEditFileTrxCalldata({\n fileId: args.fileId,\n appFileId: args.appFileId,\n metadataHash: args.metadataHash,\n contentHash: args.contentHash,\n gateHash: args.gateHash,\n });\n }\n return getAddFileTrxCalldata(args);\n};\n\nexport const prepareDeleteFileCallData = (args: { onChainFileId: number }) => {\n return encodeFunctionData({\n abi: DELETED_FILE_ABI,\n functionName: \"deleteFile\",\n args: [BigInt(args.onChainFileId)],\n });\n};\n\nexport const createEncryptedContentFile = async (content: any) => {\n const contentFile = jsonToFile(\n { file: content, source: \"ddoc\" },\n `${fromUint8Array(generateRandomBytes(16))}-CONTENT`,\n );\n return encryptFile(contentFile);\n};\n\nexport type { FileMetadataParams };\n\nexport const buildFileMetadata = (params: FileMetadataParams) => ({\n title: params.encryptedTitle,\n size: params.encryptedFileSize,\n mimeType: \"application/json\",\n appLock: params.appLock,\n ownerLock: params.ownerLock,\n ddocId: params.ddocId,\n nonce: params.nonce,\n owner: params.owner,\n version: \"4\",\n sourceApp: \"fileverse-api\",\n});\n\nexport const parseFileEventLog = (logs: any[], eventName: string, abi: Abi): number => {\n const [parsedLog] = parseEventLogs({ abi, logs, eventName });\n\n if (!parsedLog) throw new Error(`${eventName} event not found`);\n\n const fileId = (parsedLog as any).args.fileId;\n\n if (fileId === undefined || fileId === null) throw new Error(\"FileId not found in event logs\");\n\n return Number(fileId);\n};\n\nexport type { UploadFilesParams };\n\nexport const uploadAllFilesToIPFS = async (params: UploadFilesParams, authParams: UploadFileAuthParams) => {\n const { metadata, encryptedFile, linkLock, ddocId } = params;\n\n const [metadataHash, contentHash, gateHash] = await Promise.all([\n uploadFileToIPFS(\n {\n file: jsonToFile(metadata, `${fromUint8Array(generateRandomBytes(16))}-METADATA`),\n ipfsType: \"METADATA\",\n appFileId: ddocId,\n },\n authParams,\n ),\n uploadFileToIPFS(\n {\n file: encryptedFile,\n ipfsType: \"CONTENT\",\n appFileId: ddocId,\n },\n authParams,\n ),\n uploadFileToIPFS(\n {\n file: jsonToFile(linkLock, `${fromUint8Array(generateRandomBytes(16))}-GATE`),\n ipfsType: \"GATE\",\n appFileId: ddocId,\n },\n authParams,\n ),\n ]);\n\n return { metadataHash, contentHash, gateHash };\n};\n","import { fromUint8Array, toUint8Array } from \"js-base64\";\nimport { KeyStore } from \"./key-store\";\nimport {\n buildLinklock,\n encryptTitleWithFileKey,\n generateLinkKeyMaterial,\n prepareCallData,\n createEncryptedContentFile,\n buildFileMetadata,\n parseFileEventLog,\n uploadAllFilesToIPFS,\n UploadFileAuthParams,\n prepareDeleteFileCallData,\n} from \"./file-utils\";\nimport { AgentClient } from \"./smart-agent\";\nimport { generateAESKey, exportAESKey } from \"@fileverse/crypto/webcrypto\";\nimport { STATIC_CONFIG } from \"../cli/constants\";\nimport { DELETED_FILE_EVENT, EDITED_FILE_EVENT } from \"../constants\";\nimport { markdownToYjs } from \"@fileverse/content-processor\";\nimport { logger } from \"../infra\";\n\nexport class FileManager {\n private keyStore: KeyStore;\n private agentClient: AgentClient;\n\n constructor(keyStore: KeyStore, agentClient: AgentClient) {\n this.keyStore = keyStore;\n this.agentClient = agentClient;\n }\n\n private createLocks(key: string, encryptedSecretKey: string, commentKey: Uint8Array) {\n const appLock = {\n lockedFileKey: 
this.keyStore.encryptData(toUint8Array(key)),\n lockedLinkKey: this.keyStore.encryptData(toUint8Array(encryptedSecretKey)),\n lockedChatKey: this.keyStore.encryptData(commentKey),\n };\n return { appLock, ownerLock: { ...appLock } };\n }\n\n private async getAuthParams(): Promise<UploadFileAuthParams> {\n return {\n token: await this.keyStore.getAuthToken(STATIC_CONFIG.SERVER_DID),\n contractAddress: this.keyStore.getPortalAddress(),\n invoker: this.agentClient.getAgentAddress(),\n };\n }\n\n private async executeFileOperation(callData: `0x${string}`) {\n return this.agentClient.executeUserOperationRequest(\n {\n contractAddress: this.keyStore.getPortalAddress(),\n data: callData,\n },\n 1000000,\n );\n }\n\n private async sendFileOperation(callData: `0x${string}`) {\n return this.agentClient.sendUserOperation(\n {\n contractAddress: this.keyStore.getPortalAddress(),\n data: callData,\n },\n 1000000,\n );\n }\n\n async getProxyAuthParams() {\n return this.agentClient.getAuthParams();\n }\n\n async submitAddFileTrx(file: any) {\n logger.debug(`Preparing to add file ${file.ddocId}`);\n const { encryptedSecretKey, nonce, secretKey } = await generateLinkKeyMaterial({\n ddocId: file.ddocId,\n linkKey: file.linkKey,\n linkKeyNonce: file.linkKeyNonce,\n });\n\n const yJSContent = markdownToYjs(file.content);\n const { encryptedFile, key } = await createEncryptedContentFile(yJSContent);\n logger.debug(`Generated encrypted content file for file ${file.ddocId}`);\n const commentKey = await exportAESKey(await generateAESKey(128));\n\n const { appLock, ownerLock } = this.createLocks(key, encryptedSecretKey, commentKey);\n const linkLock = buildLinklock(secretKey, toUint8Array(key), commentKey);\n\n const encryptedTitle = await encryptTitleWithFileKey({\n title: file.title || \"Untitled\",\n key,\n });\n const metadata = buildFileMetadata({\n encryptedTitle,\n encryptedFileSize: encryptedFile.size,\n appLock,\n ownerLock,\n ddocId: file.ddocId,\n nonce: fromUint8Array(nonce),\n owner: this.agentClient.getAgentAddress(),\n });\n\n const authParams = await this.getAuthParams();\n const { metadataHash, contentHash, gateHash } = await uploadAllFilesToIPFS(\n { metadata, encryptedFile, linkLock, ddocId: file.ddocId },\n authParams,\n );\n logger.debug(`Uploaded files to IPFS for file ${file.ddocId}`);\n\n const callData = prepareCallData({\n metadataHash,\n contentHash,\n gateHash,\n appFileId: file.ddocId,\n fileId: file.fileId,\n });\n logger.debug(`Prepared call data for file ${file.ddocId}`);\n\n const userOpHash = await this.sendFileOperation(callData);\n logger.debug(`Submitted user op for file ${file.ddocId}`);\n return {\n userOpHash,\n linkKey: encryptedSecretKey,\n linkKeyNonce: fromUint8Array(nonce),\n commentKey: fromUint8Array(commentKey),\n metadata,\n };\n }\n\n async updateFile(file: any) {\n logger.debug(`Updating file ${file.ddocId} with onChainFileId ${file.onChainFileId}`);\n const { encryptedSecretKey, nonce, secretKey } = await generateLinkKeyMaterial({\n ddocId: file.ddocId,\n linkKey: file.linkKey,\n linkKeyNonce: file.linkKeyNonce,\n });\n\n logger.debug(`Generating encrypted content file for file ${file.ddocId} with onChainFileId ${file.onChainFileId}`);\n\n const yjsContent = markdownToYjs(file.content);\n const { encryptedFile, key } = await createEncryptedContentFile(yjsContent);\n const commentKey = toUint8Array(file.commentKey);\n\n const { appLock, ownerLock } = this.createLocks(key, encryptedSecretKey, commentKey);\n const linkLock = buildLinklock(secretKey, 
toUint8Array(key), commentKey);\n\n const encryptedTitle = await encryptTitleWithFileKey({\n title: file.title || \"Untitled\",\n key,\n });\n const metadata = buildFileMetadata({\n encryptedTitle,\n encryptedFileSize: encryptedFile.size,\n appLock,\n ownerLock,\n ddocId: file.ddocId,\n nonce: fromUint8Array(nonce),\n owner: this.agentClient.getAgentAddress(),\n });\n\n const authParams = await this.getAuthParams();\n logger.debug(`Uploading files to IPFS for file ${file.ddocId} with onChainFileId ${file.onChainFileId}`);\n const { metadataHash, contentHash, gateHash } = await uploadAllFilesToIPFS(\n { metadata, encryptedFile, linkLock, ddocId: file.ddocId },\n authParams,\n );\n\n const callData = prepareCallData({\n metadataHash,\n contentHash,\n gateHash,\n appFileId: file.ddocId,\n fileId: file.onChainFileId,\n });\n logger.debug(`Executing file operation for file ${file.ddocId} with onChainFileId ${file.onChainFileId}`);\n\n const { logs } = await this.executeFileOperation(callData);\n const onChainFileId = parseFileEventLog(logs, \"EditedFile\", EDITED_FILE_EVENT);\n\n return { onChainFileId, metadata };\n }\n\n async deleteFile(file: any) {\n logger.debug(`Deleting file ${file.ddocId} with onChainFileId ${file.onChainFileId}`);\n const callData = prepareDeleteFileCallData({\n onChainFileId: file.onChainFileId,\n });\n logger.debug(`Prepared call data for deleting file ${file.ddocId} with onChainFileId ${file.onChainFileId}`);\n\n const { logs } = await this.executeFileOperation(callData);\n parseFileEventLog(logs, \"DeletedFile\", DELETED_FILE_EVENT);\n logger.debug(`Executed file operation for deleting file ${file.ddocId} with onChainFileId ${file.onChainFileId}`);\n return {\n fileId: file.id,\n onChainFileId: file.onChainFileId,\n metadata: file.metadata,\n };\n }\n}\n","import { FilesModel, PortalsModel } from \"../../infra/database/models\";\nimport { logger } from \"../../infra\";\nimport { KeyStore } from \"../../sdk/key-store\";\nimport { AuthTokenProvider } from \"../../sdk/auth-token-provider\";\nimport { fromUint8Array, toUint8Array } from \"js-base64\";\nimport { Hex, stringToBytes } from \"viem\";\nimport { deriveHKDFKey } from \"@fileverse/crypto/kdf\";\nimport { generateKeyPairFromSeed } from \"@stablelib/ed25519\";\nimport * as ucans from \"@ucans/ucans\";\nimport { AgentClient } from \"../../sdk/smart-agent\";\nimport { FileManager } from \"../../sdk/file-manager\";\nimport { getRuntimeConfig } from \"../../config\";\n\nimport type { PublishResult } from \"../../types\";\n\ninterface PublishContext {\n file: ReturnType<typeof FilesModel.findByIdIncludingDeleted>;\n portalDetails: NonNullable<ReturnType<typeof PortalsModel.findByPortalAddress>>;\n apiKey: string;\n}\n\nfunction getPortalData(fileId: string): PublishContext {\n const file = FilesModel.findByIdIncludingDeleted(fileId);\n if (!file) {\n throw new Error(`File with _id ${fileId} not found`);\n }\n\n const portalDetails = PortalsModel.findByPortalAddress(file.portalAddress);\n if (!portalDetails) {\n throw new Error(`Portal with address ${file.portalAddress} not found`);\n }\n\n const apiKey = getRuntimeConfig().API_KEY;\n if (!apiKey) {\n throw new Error(\"API key is not set\");\n }\n\n return { file, portalDetails, apiKey };\n}\n\nfunction deriveCollaboratorKeys(apiKeySeed: Uint8Array) {\n const salt = new Uint8Array([0]);\n\n const privateAccountKey = deriveHKDFKey(apiKeySeed, salt, stringToBytes(\"COLLABORATOR_PRIVATE_KEY\"));\n\n const ucanDerivedSecret = deriveHKDFKey(apiKeySeed, salt, 
stringToBytes(\"COLLABORATOR_UCAN_SECRET\"));\n\n const { secretKey: ucanSecret } = generateKeyPairFromSeed(ucanDerivedSecret);\n\n return { privateAccountKey, ucanSecret };\n}\n\nconst createFileManager = async (\n portalSeed: string,\n portalAddress: Hex,\n ucanSecret: Uint8Array,\n privateAccountKey: Uint8Array,\n): Promise<FileManager> => {\n const keyPair = ucans.EdKeypair.fromSecretKey(fromUint8Array(ucanSecret), {\n exportable: true,\n });\n\n const authTokenProvider = new AuthTokenProvider(keyPair, portalAddress);\n const keyStore = new KeyStore(toUint8Array(portalSeed), portalAddress, authTokenProvider);\n\n const agentClient = new AgentClient(authTokenProvider);\n await agentClient.initializeAgentClient(privateAccountKey);\n\n return new FileManager(keyStore, agentClient);\n};\n\nconst executeOperation = async (\n fileManager: FileManager,\n file: any,\n operation: \"update\" | \"delete\",\n): Promise<PublishResult> => {\n\n if (operation === \"update\") {\n const result = await fileManager.updateFile(file);\n return { success: true, ...result };\n }\n\n if (operation === \"delete\") {\n const result = await fileManager.deleteFile(file);\n return { success: true, ...result };\n }\n\n throw new Error(`Invalid operation: ${operation}`);\n};\n\nexport const handleExistingFileOp = async (fileId: string, operation: \"update\" | \"delete\"): Promise<PublishResult> => {\n try {\n const { file, portalDetails, apiKey } = getPortalData(fileId);\n\n const apiKeySeed = toUint8Array(apiKey);\n const { privateAccountKey, ucanSecret } = deriveCollaboratorKeys(apiKeySeed);\n\n const fileManager = await createFileManager(\n portalDetails.portalSeed,\n portalDetails.portalAddress as Hex,\n ucanSecret,\n privateAccountKey,\n );\n\n return executeOperation(fileManager, file, operation);\n } catch (error: any) {\n logger.error(`Failed to publish file ${fileId}:`, error);\n throw error;\n }\n};\n\nexport const handleNewFileOp = async (\n fileId: string,\n): Promise<{\n userOpHash: string;\n linkKey: string;\n linkKeyNonce: string;\n commentKey: string;\n metadata: Record<string, unknown>;\n}> => {\n const { file, portalDetails, apiKey } = getPortalData(fileId);\n const apiKeySeed = toUint8Array(apiKey);\n const { privateAccountKey, ucanSecret } = deriveCollaboratorKeys(apiKeySeed);\n const fileManager = await createFileManager(\n portalDetails.portalSeed,\n portalDetails.portalAddress as Hex,\n ucanSecret,\n privateAccountKey,\n );\n return fileManager.submitAddFileTrx(file);\n};\n\nexport const getProxyAuthParams = async (\n fileId: string,\n): Promise<{\n authToken: string;\n portalAddress: Hex;\n invokerAddress: Hex;\n}> => {\n const { portalDetails, apiKey } = getPortalData(fileId);\n const apiKeySeed = toUint8Array(apiKey);\n const { privateAccountKey, ucanSecret } = deriveCollaboratorKeys(apiKeySeed);\n const fileManager = await createFileManager(\n portalDetails.portalSeed,\n portalDetails.portalAddress as Hex,\n ucanSecret,\n privateAccountKey,\n );\n return fileManager.getProxyAuthParams();\n};\n","import { ApiKeysModel, PortalsModel } from \"../../infra/database/models\";\nimport type { AddApiKeyInput, ApiKey } from \"../../types\";\n\nexport function addApiKey(input: AddApiKeyInput): ApiKey {\n if (!input.apiKeySeed || !input.name || !input.collaboratorAddress || !input.portalAddress) {\n throw new Error(\"apiKeySeed, name, collaboratorAddress, and portalAddress are required\");\n }\n\n const portal = PortalsModel.findByPortalAddress(input.portalAddress);\n if (!portal) {\n throw new 
Error(`Portal with address ${input.portalAddress} does not exist`);\n }\n\n return ApiKeysModel.create(input);\n}\n","import { ApiKeysModel, type ApiKey } from \"../../infra/database/models\";\n\nexport function removeApiKey(collaboratorAddress: string): ApiKey {\n if (!collaboratorAddress) {\n throw new Error(\"collaboratorAddress is required\");\n }\n\n const apiKey = ApiKeysModel.findByCollaboratorAddress(collaboratorAddress);\n if (!apiKey) {\n throw new Error(\"API key not found\");\n }\n\n ApiKeysModel.delete(apiKey._id);\n return { ...apiKey, isDeleted: 1 };\n}\n","import { handleExistingFileOp, handleNewFileOp, getProxyAuthParams } from \"./publish\";\nimport { savePortal } from \"./savePortal\";\nimport { addApiKey } from \"./saveApiKey\";\nimport { removeApiKey } from \"./removeApiKey\";\n\nexport { handleExistingFileOp, handleNewFileOp, getProxyAuthParams, savePortal, addApiKey, removeApiKey };\n","import { HttpRequestError } from \"viem\";\n\nexport class RateLimitError extends Error {\n readonly retryAfterSeconds: number;\n\n constructor(retryAfterSeconds: number, message = \"Rate limit exceeded\") {\n super(message);\n this.name = \"RateLimitError\";\n this.retryAfterSeconds = retryAfterSeconds;\n }\n}\n\nconst MAX_RETRY_AFTER_SECONDS = 300;\nconst DEFAULT_RETRY_AFTER_SECONDS = 3600;\n\nfunction parseRetryAfterRaw(raw: string | null): number {\n if (!raw) return DEFAULT_RETRY_AFTER_SECONDS;\n const parsed = parseInt(raw, 10);\n if (!Number.isNaN(parsed) && parsed >= 0) return Math.min(parsed, MAX_RETRY_AFTER_SECONDS);\n const date = Date.parse(raw);\n if (!Number.isNaN(date)) {\n const seconds = Math.max(0, Math.ceil((date - Date.now()) / 1000));\n return Math.min(seconds, MAX_RETRY_AFTER_SECONDS);\n }\n return DEFAULT_RETRY_AFTER_SECONDS;\n}\n\nexport const parseRetryAfterSeconds = (response: Response): number =>\n parseRetryAfterRaw(response.headers.get(\"Retry-After\"));\n\nexport const parseRetryAfterFromHeaders = (headers?: Headers): number =>\n parseRetryAfterRaw(headers?.get(\"Retry-After\") ?? null);\n\nexport function normalizeRateLimitError(error: unknown): unknown {\n if (!(error instanceof HttpRequestError) || error.status !== 429) return error;\n const retryAfter = parseRetryAfterFromHeaders(error.headers);\n const message = \"Beta API rate limit reached. 
Try again in an hour please!\"\n return new RateLimitError(retryAfter, message);\n}\n","import { getRuntimeConfig } from \"../../config\";\nimport { handleNewFileOp, getProxyAuthParams, handleExistingFileOp } from \"../../domain/portal\";\nimport { FilesModel, EventsModel } from \"../database/models\";\nimport type { Event, ProcessResult, UpdateFilePayload } from \"../../types\";\nimport { logger } from \"../index\";\nimport { waitForUserOpReceipt } from \"../../sdk/pimlico-utils\";\nimport { parseFileEventLog } from \"../../sdk/file-utils\";\nimport { ADDED_FILE_EVENT } from \"../../constants\";\nimport { RateLimitError, normalizeRateLimitError } from \"../../errors/rate-limit\";\n\nexport type { ProcessResult };\n\nexport const processEvent = async (event: Event): Promise<ProcessResult> => {\n const { fileId, type } = event;\n\n try {\n switch (type) {\n case \"create\":\n await processCreateEvent(event);\n break;\n case \"update\":\n await processUpdateEvent(event);\n break;\n case \"delete\":\n await processDeleteEvent(event);\n break;\n default:\n throw new Error(`Unknown event type: ${type}`);\n }\n return { success: true };\n } catch (error) {\n const normalized = normalizeRateLimitError(error);\n if (normalized instanceof RateLimitError) throw normalized;\n const errorMsg = error instanceof Error ? error.message : String(error);\n logger.error(`Error processing ${type} event for file ${fileId}:`, errorMsg);\n return { success: false, error: errorMsg };\n }\n};\n\nconst onTransactionSuccess = (\n fileId: string,\n file: ReturnType<typeof FilesModel.findByIdIncludingDeleted>,\n onChainFileId: number,\n pending: { linkKey: string; linkKeyNonce: string; commentKey: string; metadata: Record<string, unknown> },\n): void => {\n const frontendUrl = getRuntimeConfig().FRONTEND_URL;\n const payload: UpdateFilePayload = {\n onchainVersion: file!.localVersion,\n onChainFileId,\n linkKey: pending.linkKey,\n linkKeyNonce: pending.linkKeyNonce,\n commentKey: pending.commentKey,\n metadata: pending.metadata,\n link: `${frontendUrl}/${file!.portalAddress}/${onChainFileId}#key=${pending.linkKey}`,\n };\n const updatedFile = FilesModel.update(fileId, payload, file!.portalAddress);\n if (updatedFile.localVersion === updatedFile.onchainVersion) {\n FilesModel.update(fileId, { syncStatus: \"synced\" }, file!.portalAddress);\n }\n};\n\nconst processCreateEvent = async (event: Event): Promise<void> => {\n const { fileId } = event;\n\n const file = FilesModel.findByIdIncludingDeleted(fileId);\n if (!file) {\n throw new Error(`File ${fileId} not found`);\n }\n\n if (file.isDeleted === 1) {\n logger.info(`File ${fileId} is deleted, skipping create event`);\n return;\n }\n\n const waitContext = await getProxyAuthParams(fileId);\n const timeout = 120000;\n\n if (event.userOpHash) {\n const receipt = await waitForUserOpReceipt(\n event.userOpHash as `0x${string}`,\n waitContext.authToken,\n waitContext.portalAddress,\n waitContext.invokerAddress,\n timeout,\n );\n if (!receipt.success) {\n EventsModel.clearEventPendingOp(event._id);\n throw new Error(`User operation failed: ${receipt.reason}`);\n }\n const onChainFileId = parseFileEventLog(receipt.logs, \"AddedFile\", ADDED_FILE_EVENT);\n const pending = JSON.parse(event.pendingPayload!) 
as {\n linkKey: string;\n linkKeyNonce: string;\n commentKey: string;\n metadata: Record<string, unknown>;\n };\n onTransactionSuccess(fileId, file, onChainFileId, pending);\n EventsModel.clearEventPendingOp(event._id);\n logger.info(`File ${file.ddocId} created and published successfully (resumed from pending op)`);\n return;\n }\n\n const result = await handleNewFileOp(fileId);\n EventsModel.setEventPendingOp(event._id, result.userOpHash, {\n linkKey: result.linkKey,\n linkKeyNonce: result.linkKeyNonce,\n commentKey: result.commentKey,\n metadata: result.metadata,\n });\n\n const receipt = await waitForUserOpReceipt(\n result.userOpHash as `0x${string}`,\n waitContext.authToken,\n waitContext.portalAddress,\n waitContext.invokerAddress,\n timeout,\n );\n if (!receipt.success) {\n EventsModel.clearEventPendingOp(event._id);\n throw new Error(`User operation failed: ${receipt.reason}`);\n }\n const onChainFileId = parseFileEventLog(receipt.logs, \"AddedFile\", ADDED_FILE_EVENT);\n onTransactionSuccess(fileId, file, onChainFileId, {\n linkKey: result.linkKey,\n linkKeyNonce: result.linkKeyNonce,\n commentKey: result.commentKey,\n metadata: result.metadata,\n });\n EventsModel.clearEventPendingOp(event._id);\n logger.info(`File ${file.ddocId} created and published successfully`);\n};\n\nconst processUpdateEvent = async (event: Event): Promise<void> => {\n const { fileId } = event;\n\n const file = FilesModel.findByIdExcludingDeleted(fileId);\n if (!file) {\n return;\n }\n\n if (file.localVersion <= file.onchainVersion) {\n return;\n }\n\n const result = await handleExistingFileOp(fileId, \"update\");\n if (!result.success) {\n throw new Error(`Publish failed for file ${fileId}`);\n }\n\n const payload: UpdateFilePayload = {\n onchainVersion: file.localVersion,\n metadata: result.metadata,\n };\n const updatedFile = FilesModel.update(fileId, payload, file.portalAddress);\n\n if (updatedFile.localVersion === updatedFile.onchainVersion) {\n FilesModel.update(fileId, { syncStatus: \"synced\" }, file.portalAddress);\n }\n logger.info(`File ${file.ddocId} updated and published successfully`);\n};\n\nconst processDeleteEvent = async (event: Event): Promise<void> => {\n const { fileId } = event;\n\n const file = FilesModel.findByIdIncludingDeleted(fileId);\n if (!file) {\n return;\n }\n\n if (file.isDeleted === 1 && file.syncStatus === \"synced\") {\n logger.info(`File ${fileId} deletion already synced, skipping`);\n return;\n }\n\n const payload: UpdateFilePayload = {\n syncStatus: \"synced\",\n isDeleted: 1,\n };\n\n if (file.onChainFileId !== null || file.onChainFileId !== undefined) {\n const result = await handleExistingFileOp(fileId, \"delete\");\n if (!result.success) {\n throw new Error(`Publish failed for file ${fileId}`);\n }\n\n payload.onchainVersion = file.localVersion;\n payload.metadata = result.metadata;\n payload.isDeleted = 1;\n }\n\n FilesModel.update(fileId, payload, file.portalAddress);\n\n logger.info(`File ${fileId} delete event processed (syncStatus set to synced)`);\n};\n","import { EventEmitter } from \"events\";\n\nclass WorkerSignal extends EventEmitter {}\n\nconst workerSignal = new WorkerSignal();\nworkerSignal.setMaxListeners(20);\n\nexport function notifyNewEvent(): void {\n workerSignal.emit(\"newEvent\");\n}\n\nexport function onNewEvent(callback: () => void): () => void {\n workerSignal.on(\"newEvent\", callback);\n return () => workerSignal.off(\"newEvent\", callback);\n}\n","import { logger } from \"../index\";\nimport { processEvent } from 
\"./eventProcessor\";\nimport { onNewEvent } from \"./workerSignal\";\nimport { EventsModel } from \"../database/models\";\nimport type { Event } from \"../database/models\";\nimport { RateLimitError } from \"../../errors/rate-limit\";\n\nconst DEFAULT_CONCURRENCY = 5;\nconst STALE_THRESHOLD_MS = 5 * 60 * 1000;\nconst SIGNAL_RETRY_DELAY_MS = 50;\nconst FALLBACK_POLL_MS = 30000;\nconst MAX_RETRIES = 10;\n\nexport class FileEventsWorker {\n private isRunning = false;\n private concurrency: number;\n private activeProcessors = new Map<string, Promise<void>>();\n private signalCleanup: (() => void) | null = null;\n private pendingSignal = false;\n private wakeResolver: (() => void) | null = null;\n\n constructor(concurrency: number = DEFAULT_CONCURRENCY) {\n this.concurrency = concurrency;\n }\n\n start(): void {\n if (this.isRunning) {\n logger.warn(\"Worker is already running\");\n return;\n }\n this.isRunning = true;\n\n const staleCount = this.recoverStaleEvents();\n if (staleCount > 0) {\n logger.info(`Recovered ${staleCount} stale event(s)`);\n }\n\n this.signalCleanup = onNewEvent(() => {\n this.pendingSignal = true;\n this.wakeUp();\n });\n\n logger.debug(`File events worker started (concurrency: ${this.concurrency})`);\n this.run();\n }\n\n private async run(): Promise<void> {\n while (this.isRunning) {\n const foundEvents = await this.fillSlots();\n logger.debug(`Found ${foundEvents ? \"events\" : \"no events\"} to process`);\n if (this.activeProcessors.size === 0) {\n if (this.pendingSignal && !foundEvents) {\n this.pendingSignal = false;\n await this.sleep(SIGNAL_RETRY_DELAY_MS);\n continue;\n }\n\n this.pendingSignal = false;\n await this.waitForSignalOrTimeout(FALLBACK_POLL_MS);\n } else {\n await Promise.race(this.activeProcessors.values());\n }\n }\n }\n\n private async fillSlots(): Promise<boolean> {\n let foundAny = false;\n\n while (this.activeProcessors.size < this.concurrency && this.isRunning) {\n const lockedFileIds = Array.from(this.activeProcessors.keys());\n const event = EventsModel.findNextEligible(lockedFileIds);\n\n if (!event) break;\n\n foundAny = true;\n EventsModel.markProcessing(event._id);\n const processor = this.processEventWrapper(event);\n this.activeProcessors.set(event.fileId, processor);\n }\n\n logger.debug(`Slots filled: ${this.activeProcessors.size}`);\n return foundAny;\n }\n\n private async processEventWrapper(event: Event): Promise<void> {\n try {\n const result = await processEvent(event);\n if (result.success) {\n EventsModel.markProcessed(event._id);\n } else {\n this.handleFailure(event, result.error);\n }\n } catch (err) {\n this.handleFailure(event, err);\n } finally {\n this.activeProcessors.delete(event.fileId);\n }\n }\n\n private handleFailure(event: Event, error: unknown): void {\n const errorMsg = error instanceof Error ? 
error.message : String(error);\n if (error instanceof RateLimitError) {\n const retryAfterMs = error.retryAfterSeconds * 1000;\n EventsModel.scheduleRetryAfter(event._id, errorMsg, retryAfterMs);\n logger.warn(`Event ${event._id} rate limited; retry after ${error.retryAfterSeconds}s`);\n return;\n }\n if (event.retryCount < MAX_RETRIES) {\n EventsModel.scheduleRetry(event._id, errorMsg);\n logger.warn(`Event ${event._id} failed (retry ${event.retryCount + 1}/${MAX_RETRIES}): ${errorMsg}`);\n } else {\n EventsModel.markFailed(event._id, errorMsg);\n logger.error(`Event ${event._id} permanently failed after ${MAX_RETRIES} retries: ${errorMsg}`);\n }\n }\n\n private recoverStaleEvents(): number {\n const staleThreshold = Date.now() - STALE_THRESHOLD_MS;\n return EventsModel.resetStaleEvents(staleThreshold);\n }\n\n private wakeUp(): void {\n if (this.wakeResolver) {\n this.wakeResolver();\n this.wakeResolver = null;\n }\n }\n\n private waitForSignalOrTimeout(ms: number): Promise<void> {\n return new Promise((resolve) => {\n const timeout = setTimeout(() => {\n this.wakeResolver = null;\n resolve();\n }, ms);\n\n this.wakeResolver = () => {\n clearTimeout(timeout);\n resolve();\n };\n });\n }\n\n private sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n }\n\n async close(): Promise<void> {\n if (!this.isRunning) {\n return;\n }\n logger.info(\"Closing worker gracefully...\");\n this.isRunning = false;\n\n if (this.signalCleanup) {\n this.signalCleanup();\n this.signalCleanup = null;\n }\n\n this.wakeUp();\n this.wakeResolver = null;\n\n if (this.activeProcessors.size > 0) {\n logger.info(`Waiting for ${this.activeProcessors.size} active processor(s) to complete...`);\n await Promise.all(this.activeProcessors.values());\n }\n\n logger.info(\"Worker closed\");\n }\n\n isActive(): boolean {\n return this.isRunning;\n }\n\n getActiveCount(): number {\n return this.activeProcessors.size;\n }\n}\n\nexport function createWorker(concurrency: number = DEFAULT_CONCURRENCY): FileEventsWorker {\n return new FileEventsWorker(concurrency);\n}\n","export { createWorker, FileEventsWorker } from \"./worker\";\nexport { notifyNewEvent } from \"./workerSignal\";\n","import { createWorker, type FileEventsWorker } from \"./infra/worker\";\n\nconst DEFAULT_CONCURRENCY = 5;\n\nlet worker: FileEventsWorker | null = null;\n\nexport function startWorker(concurrency: number = DEFAULT_CONCURRENCY): void {\n if (worker?.isActive()) {\n return;\n }\n worker = createWorker(concurrency);\n worker.start();\n}\n\nexport async function closeWorker(): Promise<void> {\n if (worker) {\n await worker.close();\n worker = null;\n }\n}\n\nexport function isWorkerActive(): boolean {\n return worker?.isActive() ?? false;\n}\n\nexport function getWorkerActiveCount(): number {\n return worker?.getActiveCount() ?? 
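// --- Illustrative sketch of how the appWorker singleton above is typically
// torn down (the signal wiring is an assumption, not code from this package).
// closeWorker() awaits all in-flight processors before resolving, so it must
// run before the shared SQLite handle is closed.
import { closeWorker, closeDatabase } from "./infra";

async function shutdownGracefully(): Promise<void> {
  await closeWorker();   // drains activeProcessors, then resolves
  await closeDatabase(); // safe now: no processor still holds a statement
  process.exit(0);
}
process.on("SIGTERM", () => void shutdownGracefully());
process.on("SIGINT", () => void shutdownGracefully());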
0;\n}\n","// Error reporting service\n// Example: Slack, Sentry, etc.\n\nclass Reporter {\n async reportError(message: string): Promise<void> {\n // Implement your error reporting logic\n console.error(\"Error reported:\", message);\n }\n}\n\nexport default new Reporter();\n","import { logger } from \"./logger\";\nimport { asyncHandler, asyncHandlerArray } from \"./asyncHandler\";\nimport { closeWorker } from \"../appWorker\";\nimport { closeDatabase } from \"./database\";\n\nimport reporter from \"./reporter\";\n\nexport { logger, asyncHandler, asyncHandlerArray, reporter, closeWorker, closeDatabase };\n","import Database from \"better-sqlite3\";\nimport { config } from \"../../config\";\nimport { logger } from \"../index\";\nimport path from \"path\";\nimport fs from \"fs\";\n\n/**\n * Database connection manager - Singleton pattern\n * Provides a shared SQLite database connection\n */\nclass DatabaseConnectionManager {\n private static instance: DatabaseConnectionManager;\n private db: Database.Database | null = null;\n\n private constructor() {}\n\n static getInstance(): DatabaseConnectionManager {\n if (!DatabaseConnectionManager.instance) {\n DatabaseConnectionManager.instance = new DatabaseConnectionManager();\n }\n return DatabaseConnectionManager.instance;\n }\n\n getConnection(): Database.Database {\n if (!this.db) {\n // DB_PATH is required, validated, and normalized in config/index.ts\n const dbPath = config.DB_PATH!;\n\n // Create database instance\n this.db = new Database(dbPath, {\n verbose: config.NODE_ENV === \"development\" ? (msg: unknown) => logger.debug(String(msg)) : undefined,\n });\n\n // Enable WAL mode for better concurrency\n this.db.pragma(\"journal_mode = WAL\");\n\n // Enable foreign keys\n this.db.pragma(\"foreign_keys = ON\");\n\n // Connection health check\n this.db.prepare(\"SELECT 1\").get();\n\n logger.info(`SQLite database connected: ${dbPath}`);\n }\n\n return this.db;\n }\n\n async close(): Promise<void> {\n if (this.db) {\n this.db.close();\n this.db = null;\n logger.info(\"Database connection closed\");\n }\n }\n\n isConnected(): boolean {\n return this.db !== null && this.db.open;\n }\n}\n\nexport const databaseConnectionManager = DatabaseConnectionManager.getInstance();\n","/**\n * Default limit for listing files/ddocs\n * Used by both API and CLI to ensure consistent behavior\n */\nexport const DEFAULT_LIST_LIMIT = 10;\n","import { databaseConnectionManager } from \"./connection\";\nimport type { QueryOptions } from \"../../types\";\nimport { DEFAULT_LIST_LIMIT } from \"../../domain/file/constants\";\n\nfunction getDb() {\n return databaseConnectionManager.getConnection();\n}\n\nexport class QueryBuilder {\n static select<T = any>(sql: string, params: any[] = []): T[] {\n const stmt = getDb().prepare(sql);\n return stmt.all(params) as T[];\n }\n\n static selectOne<T = any>(sql: string, params: any[] = []): T | undefined {\n const stmt = getDb().prepare(sql);\n return stmt.get(params) as T | undefined;\n }\n\n static execute(\n sql: string,\n params: any[] = [],\n ): {\n changes: number;\n lastInsertRowid: number | bigint;\n } {\n const stmt = getDb().prepare(sql);\n const result = stmt.run(params);\n return {\n changes: result.changes,\n lastInsertRowid: result.lastInsertRowid,\n };\n }\n\n static transaction<T>(callback: () => T): T {\n return getDb().transaction(callback)();\n }\n\n static paginate(sql: string, options: QueryOptions = {}): string {\n let query = sql;\n\n if (options.orderBy) {\n query += ` ORDER BY ${options.orderBy} 
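// --- Illustrative sketch: the connection manager above enables WAL, which
// lets the API process read while the worker process writes. A busy_timeout
// pragma (an assumption -- the manager above does not set one) would make a
// competing writer wait for the lock instead of throwing SQLITE_BUSY:
import Database from "better-sqlite3";

const db = new Database("/tmp/example.db"); // example path
db.pragma("journal_mode = WAL");
db.pragma("busy_timeout = 5000"); // wait up to 5s before surfacing SQLITE_BUSY
db.prepare("SELECT 1").get();     // same health check the manager performs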
${options.orderDirection || \"ASC\"}`;\n }\n\n const hasOffset = (options.offset ?? 0) > 0;\n const limit = options.limit ?? (hasOffset ? DEFAULT_LIST_LIMIT : undefined);\n\n if (limit) {\n query += ` LIMIT ${limit}`;\n }\n\n if (hasOffset) {\n query += ` OFFSET ${options.offset}`;\n }\n\n return query;\n }\n}\n","import { databaseConnectionManager } from \"./connection\";\nimport { QueryBuilder } from \"./query-builder\";\n\nfunction getDb() {\n return databaseConnectionManager.getConnection();\n}\n\nconst closeDatabase = async (): Promise<void> => {\n await databaseConnectionManager.close();\n};\n\nexport default getDb;\nexport { getDb, closeDatabase, QueryBuilder };\n","import { QueryBuilder } from \"../index\";\nimport { uuidv7 } from \"uuidv7\";\nimport type { File, FileListResponse, UpdateFilePayload } from \"../../../types\";\n\nexport type { File, FileListResponse };\n\nexport class FilesModel {\n private static readonly TABLE = \"files\";\n\n private static parseFile(fileRaw: any): File {\n let metadata: Record<string, unknown> = {};\n try {\n if (fileRaw.metadata) {\n metadata = typeof fileRaw.metadata === \"string\" ? JSON.parse(fileRaw.metadata) : fileRaw.metadata;\n }\n } catch (e) {\n // If parsing fails, use empty object\n metadata = {};\n }\n\n return {\n _id: fileRaw._id,\n ddocId: fileRaw.ddocId,\n title: fileRaw.title,\n content: fileRaw.content,\n localVersion: fileRaw.localVersion,\n onchainVersion: fileRaw.onchainVersion,\n syncStatus: fileRaw.syncStatus,\n isDeleted: fileRaw.isDeleted,\n onChainFileId: fileRaw.onChainFileId ?? null,\n portalAddress: fileRaw.portalAddress,\n metadata: metadata || {},\n createdAt: fileRaw.createdAt,\n updatedAt: fileRaw.updatedAt,\n linkKey: fileRaw.linkKey,\n linkKeyNonce: fileRaw.linkKeyNonce,\n commentKey: fileRaw.commentKey,\n link: fileRaw.link,\n };\n }\n\n static findAll(\n portalAddress: string,\n limit?: number,\n skip?: number,\n ): { files: File[]; total: number; hasNext: boolean } {\n const whereClause = \"isDeleted = 0 AND portalAddress = ?\";\n const params: any[] = [portalAddress];\n\n const countSql = `\n SELECT COUNT(*) as count \n FROM ${this.TABLE} \n WHERE ${whereClause}\n `;\n const totalResult = QueryBuilder.selectOne<{ count: number }>(countSql, params);\n const total = totalResult?.count || 0;\n const sql = `\n SELECT *\n FROM ${this.TABLE}\n WHERE ${whereClause}\n `;\n const completeSql = QueryBuilder.paginate(sql, {\n limit,\n offset: skip,\n orderBy: \"createdAt\",\n orderDirection: \"DESC\",\n });\n\n const filesRaw = QueryBuilder.select<any>(completeSql, params);\n const files = filesRaw.map(this.parseFile);\n const hasNext = skip !== undefined && limit !== undefined ? skip + limit < total : false;\n return { files, total, hasNext };\n }\n\n static findById(_id: string, portalAddress: string): File | undefined {\n const sql = `\n SELECT *\n FROM ${this.TABLE} \n WHERE _id = ? AND isDeleted = 0 AND portalAddress = ?\n `;\n const result = QueryBuilder.selectOne<any>(sql, [_id, portalAddress]);\n return result ? this.parseFile(result) : undefined;\n }\n\n static findByIdIncludingDeleted(_id: string): File | undefined {\n const sql = `\n SELECT *\n FROM ${this.TABLE} \n WHERE _id = ?\n `;\n const result = QueryBuilder.selectOne<any>(sql, [_id]);\n return result ? this.parseFile(result) : undefined;\n }\n\n static findByIdExcludingDeleted(_id: string): File | undefined {\n const sql = `\n SELECT *\n FROM ${this.TABLE} \n WHERE _id = ? 
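// --- Illustrative sketch: what QueryBuilder.paginate above emits for common
// option combinations (values are examples).
QueryBuilder.paginate("SELECT * FROM files", {
  orderBy: "createdAt",
  orderDirection: "DESC",
  limit: 10,
  offset: 20,
});
// => "SELECT * FROM files ORDER BY createdAt DESC LIMIT 10 OFFSET 20"

QueryBuilder.paginate("SELECT * FROM files", { offset: 20 });
// => "SELECT * FROM files LIMIT 10 OFFSET 20"
// A LIMIT (DEFAULT_LIST_LIMIT) is injected whenever only an offset is given,
// because SQLite does not accept OFFSET without LIMIT.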
AND isDeleted = 0\n `;\n const result = QueryBuilder.selectOne<any>(sql, [_id]);\n return result ? this.parseFile(result) : undefined;\n }\n\n static findByDDocId(ddocId: string, portalAddress: string): File | undefined {\n const sql = `\n SELECT *\n FROM ${this.TABLE} \n WHERE ddocId = ? AND isDeleted = 0 AND portalAddress = ?\n `;\n const result = QueryBuilder.selectOne<any>(sql, [ddocId, portalAddress]);\n return result ? this.parseFile(result) : undefined;\n }\n\n static searchByTitle(searchTerm: string, portalAddress: string, limit?: number, skip?: number): File[] {\n const sql = `\n SELECT *\n FROM ${this.TABLE} \n WHERE LOWER(title) LIKE LOWER(?) AND isDeleted = 0 AND portalAddress = ?\n `;\n const completeSql = QueryBuilder.paginate(sql, {\n limit,\n offset: skip,\n orderBy: \"createdAt\",\n orderDirection: \"DESC\",\n });\n const filesRaw = QueryBuilder.select<any>(completeSql, [`%${searchTerm}%`, portalAddress]);\n return filesRaw.map(this.parseFile);\n }\n\n static create(input: { title: string; content: string; ddocId: string; portalAddress: string }): File {\n const _id = uuidv7();\n const sql = `\n INSERT INTO ${this.TABLE} \n (_id, title, content, ddocId, portalAddress) \n VALUES (?, ?, ?, ?, ?)\n `;\n\n QueryBuilder.execute(sql, [_id, input.title, input.content, input.ddocId, input.portalAddress]);\n // NOTE: default values while file creation: localVersion = 1, onchainVersion = 0, syncStatus = 'pending'\n\n const created = this.findById(_id, input.portalAddress);\n if (!created) {\n throw new Error(\"Failed to create file\");\n }\n return created;\n }\n\n static update(_id: string, payload: UpdateFilePayload, portalAddress: string): File {\n const now = new Date().toISOString();\n\n const keys: string[] = [];\n const values: any[] = [];\n for (const [k, v] of Object.entries(payload)) {\n if (v !== undefined) {\n // Handle metadata specially - convert to JSON string\n if (k === \"metadata\" && typeof v === \"object\") {\n keys.push(`${k} = ?`);\n values.push(JSON.stringify(v));\n } else {\n keys.push(`${k} = ?`);\n values.push(v);\n }\n }\n }\n\n // Always add updatedAt\n keys.push(\"updatedAt = ?\");\n values.push(now, _id, portalAddress);\n\n const updateChain = keys.join(\", \");\n const sql = `UPDATE ${this.TABLE} SET ${updateChain} WHERE _id = ? 
AND portalAddress = ?`;\n\n QueryBuilder.execute(sql, values);\n\n const updated = this.findById(_id, portalAddress);\n if (!updated) {\n throw new Error(\"Failed to update file\");\n }\n return updated;\n }\n\n static softDelete(_id: string): File {\n const now = new Date().toISOString();\n const sql = `\n UPDATE ${this.TABLE} \n SET isDeleted = 1, syncStatus = 'pending', updatedAt = ?\n WHERE _id = ?\n `;\n\n QueryBuilder.execute(sql, [now, _id]);\n\n // Use findByIdIncludingDeleted since the file is now marked as deleted\n const deleted = this.findByIdIncludingDeleted(_id);\n if (!deleted) {\n throw new Error(\"Failed to delete file\");\n }\n return deleted;\n }\n}\n","import { QueryBuilder } from \"../index\";\nimport { uuidv7 } from \"uuidv7\";\nimport type { Portal } from \"../../../types\";\n\nexport type { Portal };\n\nexport class PortalsModel {\n private static readonly TABLE = \"portals\";\n\n static findByPortalAddress(portalAddress: string): Portal | undefined {\n const sql = `SELECT _id, portalAddress, portalSeed, ownerAddress, createdAt, updatedAt FROM ${this.TABLE} WHERE portalAddress = ?`;\n return QueryBuilder.selectOne<Portal>(sql, [portalAddress]);\n }\n\n static create(input: { portalAddress: string; portalSeed: string; ownerAddress: string }): Portal {\n const _id = uuidv7();\n const now = new Date().toISOString();\n const sql = `INSERT INTO ${this.TABLE} (_id, portalAddress, portalSeed, ownerAddress, createdAt, updatedAt) VALUES (?, ?, ?, ?, ?, ?)`;\n\n QueryBuilder.execute(sql, [_id, input.portalAddress, input.portalSeed, input.ownerAddress, now, now]);\n\n const created = this.findByPortalAddress(input.portalAddress);\n if (!created) {\n throw new Error(\"Failed to create portal\");\n }\n return created;\n }\n\n static update(\n portalAddress: string,\n input: {\n portalSeed?: string;\n ownerAddress?: string;\n },\n ): Portal {\n const now = new Date().toISOString();\n const keys: string[] = [];\n const values: any[] = [];\n\n for (const [k, v] of Object.entries(input)) {\n if (v !== undefined) {\n keys.push(`${k} = ?`);\n values.push(v);\n }\n }\n\n keys.push(\"updatedAt = ?\");\n values.push(now);\n\n const updateChain = keys.join(\", \");\n const sql = `UPDATE ${this.TABLE} SET ${updateChain} WHERE portalAddress = ?`;\n values.push(portalAddress);\n QueryBuilder.execute(sql, values);\n\n const updated = this.findByPortalAddress(portalAddress);\n if (!updated) {\n throw new Error(\"Failed to update portal\");\n }\n return updated;\n }\n\n static upsert(input: { portalAddress: string; portalSeed: string; ownerAddress: string }): Portal {\n const existing = this.findByPortalAddress(input.portalAddress);\n if (existing) {\n return this.update(input.portalAddress, {\n portalSeed: input.portalSeed,\n ownerAddress: input.ownerAddress,\n });\n }\n return this.create(input);\n }\n}\n","import { QueryBuilder } from \"../index\";\nimport { uuidv7 } from \"uuidv7\";\nimport type { ApiKey } from \"../../../types\";\n\nexport type { ApiKey };\n\nexport class ApiKeysModel {\n private static readonly TABLE = \"api_keys\";\n\n static create(input: {\n apiKeySeed: string;\n name: string;\n collaboratorAddress: string;\n portalAddress: string;\n }): ApiKey {\n const _id = uuidv7();\n const now = new Date().toISOString();\n const sql = `INSERT INTO ${this.TABLE} (_id, apiKeySeed, name, collaboratorAddress, portalAddress, createdAt) \n VALUES (?, ?, ?, ?, ?, ?)`;\n\n const result = QueryBuilder.execute(sql, [\n _id,\n input.apiKeySeed,\n input.name,\n input.collaboratorAddress,\n 
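// --- Illustrative sketch: the UPDATE that the dynamic key/value builder in
// FilesModel.update (and PortalsModel.update) above assembles for a typical
// payload. The _id and portal address are example values. metadata objects
// are JSON-stringified; updatedAt is always appended.
FilesModel.update(
  "0192f3a0-...", // example _id
  { syncStatus: "synced", metadata: { tag: "v2" } },
  "0xPortalAddress",
);
// Executes roughly:
//   UPDATE files SET syncStatus = ?, metadata = ?, updatedAt = ?
//   WHERE _id = ? AND portalAddress = ?
// with params ["synced", "{\"tag\":\"v2\"}", <ISO now>, <_id>, <portal>]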
input.portalAddress,\n now,\n ]);\n\n if (result.changes === 0) {\n throw new Error(\"Failed to create API key\");\n }\n\n const created = this.findById(_id);\n if (!created) {\n throw new Error(\"Failed to create API key\");\n }\n return created;\n }\n\n static findById(_id: string): ApiKey | undefined {\n const sql = `SELECT _id, apiKeySeed, name, collaboratorAddress, portalAddress, createdAt, isDeleted FROM ${this.TABLE} WHERE _id = ? AND isDeleted = 0`;\n return QueryBuilder.selectOne<ApiKey>(sql, [_id]);\n }\n\n static findByCollaboratorAddress(collaboratorAddress: string): ApiKey | undefined {\n const sql = `SELECT _id, apiKeySeed, name, collaboratorAddress, portalAddress, createdAt, isDeleted FROM ${this.TABLE} WHERE collaboratorAddress = ? AND isDeleted = 0 LIMIT 1`;\n return QueryBuilder.selectOne<ApiKey>(sql, [collaboratorAddress]);\n }\n\n static delete(_id: string): void {\n const sql = `UPDATE ${this.TABLE} SET isDeleted = 1 WHERE _id = ?`;\n QueryBuilder.execute(sql, [_id]);\n }\n\n static findByPortalAddress(portalAddress: string): ApiKey | undefined {\n const sql = `SELECT _id, apiKeySeed, name, collaboratorAddress, portalAddress, createdAt, isDeleted FROM ${this.TABLE} WHERE portalAddress = ? AND isDeleted = 0`;\n return QueryBuilder.selectOne<ApiKey>(sql, [portalAddress]);\n }\n\n static findByApiKey(apiKey: string): ApiKey | undefined {\n const sql = `SELECT _id, apiKeySeed, name, collaboratorAddress, portalAddress, createdAt, isDeleted FROM ${this.TABLE} WHERE apiKeySeed = ? AND isDeleted = 0`;\n return QueryBuilder.selectOne<ApiKey>(sql, [apiKey]);\n }\n}\n","import { QueryBuilder } from \"../index\";\nimport type { File, Folder, FolderWithDDocs, FolderListResponse } from \"../../../types\";\n\nexport type { Folder, FolderWithDDocs, FolderListResponse };\n\nexport class FoldersModel {\n private static readonly TABLE = \"folders\";\n\n /**\n * List all folders with pagination\n */\n static findAll(limit?: number, skip?: number): { folders: Folder[]; total: number; hasNext: boolean } {\n // Get total count\n const countSql = `SELECT COUNT(*) as count FROM ${this.TABLE} WHERE isDeleted = 0`;\n const totalResult = QueryBuilder.selectOne<{ count: number }>(countSql);\n const total = totalResult?.count || 0;\n\n // Get paginated results\n const sql = QueryBuilder.paginate(`SELECT * FROM ${this.TABLE} WHERE isDeleted = 0`, {\n limit,\n offset: skip,\n orderBy: \"created_at\",\n orderDirection: \"DESC\",\n });\n\n const folders = QueryBuilder.select<any>(sql).map((folderRaw) => ({\n ...folderRaw,\n isDeleted: Boolean(folderRaw.isDeleted),\n }));\n\n const hasNext = skip !== undefined && limit !== undefined ? skip + limit < total : false;\n\n return { folders, total, hasNext };\n }\n\n /**\n * Get a single folder by folderRef and folderId\n * Includes ddocs array (as per API spec)\n */\n static findByFolderRefAndId(folderRef: string, folderId: string): FolderWithDDocs | undefined {\n const sql = `SELECT * FROM ${this.TABLE} WHERE folderRef = ? AND folderId = ? 
AND isDeleted = 0`;\n const folderRaw = QueryBuilder.selectOne<any>(sql, [folderRef, folderId]);\n\n if (!folderRaw) {\n return undefined;\n }\n\n const parsedFolder: Folder = {\n ...folderRaw,\n isDeleted: Boolean(folderRaw.isDeleted),\n };\n\n // Get ddocs in this folder\n // Import at runtime to avoid circular dependency\n // Note: FolderRef functionality removed in simplified schema, returning empty array\n const ddocs: File[] = [];\n\n return {\n ...parsedFolder,\n ddocs,\n };\n }\n\n /**\n * Get folder by folderRef only\n */\n static findByFolderRef(folderRef: string): Folder | undefined {\n const sql = `SELECT * FROM ${this.TABLE} WHERE folderRef = ? AND isDeleted = 0 LIMIT 1`;\n const folderRaw = QueryBuilder.selectOne<any>(sql, [folderRef]);\n\n if (!folderRaw) {\n return undefined;\n }\n\n return {\n ...folderRaw,\n isDeleted: Boolean(folderRaw.isDeleted),\n };\n }\n\n /**\n * Search folders by folderName (case-insensitive substring match)\n */\n static searchByName(searchTerm: string, limit?: number, skip?: number): Folder[] {\n const sql = QueryBuilder.paginate(\n `SELECT * FROM ${this.TABLE} \n WHERE isDeleted = 0 AND LOWER(folderName) LIKE LOWER(?)`,\n {\n limit,\n offset: skip,\n orderBy: \"created_at\",\n orderDirection: \"DESC\",\n },\n );\n\n const foldersRaw = QueryBuilder.select<any>(sql, [`%${searchTerm}%`]);\n return foldersRaw.map((folderRaw) => ({\n ...folderRaw,\n isDeleted: Boolean(folderRaw.isDeleted),\n }));\n }\n\n /**\n * Create a new folder\n */\n static create(input: {\n _id?: string;\n onchainFileId: number;\n folderId: string;\n folderRef: string;\n folderName: string;\n portalAddress: string;\n metadataIPFSHash: string;\n contentIPFSHash: string;\n lastTransactionHash?: string;\n lastTransactionBlockNumber: number;\n lastTransactionBlockTimestamp: number;\n }): Folder {\n const _id = input._id || `folder_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;\n const now = new Date().toISOString();\n\n const sql = `INSERT INTO ${this.TABLE} (\n _id, onchainFileId, folderId, folderRef, folderName, portalAddress, metadataIPFSHash,\n contentIPFSHash, isDeleted, lastTransactionHash, lastTransactionBlockNumber, \n lastTransactionBlockTimestamp, created_at, updated_at\n ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`;\n\n QueryBuilder.execute(sql, [\n _id,\n input.onchainFileId,\n input.folderId,\n input.folderRef,\n input.folderName,\n input.portalAddress,\n input.metadataIPFSHash,\n input.contentIPFSHash,\n 0, // isDeleted\n input.lastTransactionHash || null,\n input.lastTransactionBlockNumber,\n input.lastTransactionBlockTimestamp,\n now,\n now,\n ]);\n\n // Fetch the created folder (without ddocs)\n const selectSql = `SELECT * FROM ${this.TABLE} WHERE folderRef = ? AND folderId = ? 
AND isDeleted = 0`;\n const folderRaw = QueryBuilder.selectOne<any>(selectSql, [input.folderRef, input.folderId]);\n\n if (!folderRaw) {\n throw new Error(\"Failed to create folder\");\n }\n\n return {\n ...folderRaw,\n isDeleted: Boolean(folderRaw.isDeleted),\n };\n }\n}\n","import { QueryBuilder } from \"../index\";\nimport { uuidv7 } from \"uuidv7\";\nimport { notifyNewEvent } from \"../../worker/workerSignal\";\nimport type { Event, EventType, EventStatus } from \"../../../types\";\n\nexport type { Event, EventType, EventStatus };\n\nconst RETRY_DELAYS_MS = [5000, 30000, 120000];\n\ninterface EventRow {\n _id: string;\n type: string;\n timestamp: number;\n fileId: string;\n portalAddress: string;\n status: string;\n retryCount: number;\n lastError: string | null;\n lockedAt: number | null;\n nextRetryAt: number | null;\n userOpHash?: string | null;\n pendingPayload?: string | null;\n}\n\nexport class EventsModel {\n private static readonly TABLE = \"events\";\n\n static create(input: { type: EventType; fileId: string; portalAddress: string }): Event {\n const _id = uuidv7();\n const timestamp = Date.now();\n const status: EventStatus = \"pending\";\n\n const sql = `\n INSERT INTO ${this.TABLE} \n (_id, type, timestamp, fileId, portalAddress, status, retryCount, lastError, lockedAt, nextRetryAt) \n VALUES (?, ?, ?, ?, ?, ?, 0, NULL, NULL, NULL)\n `;\n\n QueryBuilder.execute(sql, [_id, input.type, timestamp, input.fileId, input.portalAddress, status]);\n\n notifyNewEvent();\n\n return {\n _id,\n type: input.type,\n timestamp,\n fileId: input.fileId,\n portalAddress: input.portalAddress,\n status,\n retryCount: 0,\n lastError: null,\n lockedAt: null,\n nextRetryAt: null,\n };\n }\n\n static findById(_id: string): Event | undefined {\n const sql = `SELECT * FROM ${this.TABLE} WHERE _id = ?`;\n const row = QueryBuilder.selectOne<EventRow>(sql, [_id]);\n return row ? this.parseEvent(row) : undefined;\n }\n\n static findNextPending(): Event | undefined {\n const sql = `\n SELECT * FROM ${this.TABLE}\n WHERE status = 'pending'\n ORDER BY timestamp ASC\n LIMIT 1\n `;\n const row = QueryBuilder.selectOne<EventRow>(sql, []);\n return row ? this.parseEvent(row) : undefined;\n }\n\n static findNextEligible(lockedFileIds: string[]): Event | undefined {\n const now = Date.now();\n\n const exclusionClause =\n lockedFileIds.length > 0 ? `AND e1.fileId NOT IN (${lockedFileIds.map(() => \"?\").join(\", \")})` : \"\";\n\n const sql = `\n SELECT e1.* FROM ${this.TABLE} e1\n WHERE e1.status = 'pending'\n AND (e1.nextRetryAt IS NULL OR e1.nextRetryAt <= ?)\n ${exclusionClause}\n AND NOT EXISTS (\n SELECT 1 FROM ${this.TABLE} e2\n WHERE e2.fileId = e1.fileId\n AND e2.status = 'pending'\n AND e2.timestamp < e1.timestamp\n )\n ORDER BY e1.timestamp ASC\n LIMIT 1\n `;\n\n const params = [now, ...lockedFileIds];\n const row = QueryBuilder.selectOne<EventRow>(sql, params);\n return row ? 
this.parseEvent(row) : undefined;\n }\n\n static markProcessing(_id: string): void {\n const sql = `\n UPDATE ${this.TABLE}\n SET status = 'processing',\n lockedAt = ?\n WHERE _id = ?\n `;\n QueryBuilder.execute(sql, [Date.now(), _id]);\n }\n\n static markProcessed(_id: string): void {\n const sql = `\n UPDATE ${this.TABLE}\n SET status = 'processed',\n lockedAt = NULL\n WHERE _id = ?\n `;\n QueryBuilder.execute(sql, [_id]);\n }\n\n static scheduleRetry(_id: string, errorMsg: string): void {\n const event = this.findById(_id);\n if (!event) return;\n\n const delay = RETRY_DELAYS_MS[Math.min(event.retryCount, RETRY_DELAYS_MS.length - 1)];\n const nextRetryAt = Date.now() + delay;\n\n const sql = `\n UPDATE ${this.TABLE}\n SET status = 'pending',\n retryCount = retryCount + 1,\n lastError = ?,\n nextRetryAt = ?,\n lockedAt = NULL\n WHERE _id = ?\n `;\n QueryBuilder.execute(sql, [errorMsg, nextRetryAt, _id]);\n }\n\n static scheduleRetryAfter(_id: string, errorMsg: string, retryAfterMs: number): void {\n const nextRetryAt = Date.now() + retryAfterMs;\n const sql = `\n UPDATE ${this.TABLE}\n SET status = 'pending',\n lastError = ?,\n nextRetryAt = ?,\n lockedAt = NULL\n WHERE _id = ?\n `;\n QueryBuilder.execute(sql, [errorMsg, nextRetryAt, _id]);\n }\n\n static markFailed(_id: string, errorMsg: string): void {\n const sql = `\n UPDATE ${this.TABLE}\n SET status = 'failed',\n lastError = ?,\n lockedAt = NULL\n WHERE _id = ?\n `;\n QueryBuilder.execute(sql, [errorMsg, _id]);\n }\n\n static listFailed(portalAddress?: string): Event[] {\n const portalClause = portalAddress != null ? \"AND portalAddress = ?\" : \"\";\n const sql = `\n SELECT * FROM ${this.TABLE}\n WHERE status = 'failed'\n ${portalClause}\n ORDER BY timestamp ASC\n `;\n const params = portalAddress != null ? [portalAddress] : [];\n const rows = QueryBuilder.select<EventRow>(sql, params);\n return rows.map((row) => this.parseEvent(row));\n }\n\n static resetFailedToPending(_id: string, portalAddress?: string): boolean {\n const portalClause = portalAddress != null ? \"AND portalAddress = ?\" : \"\";\n const sql = `\n UPDATE ${this.TABLE}\n SET status = 'pending',\n retryCount = 0,\n lastError = NULL,\n nextRetryAt = NULL,\n lockedAt = NULL\n WHERE _id = ?\n AND status = 'failed'\n ${portalClause}\n `;\n const params = portalAddress != null ? [_id, portalAddress] : [_id];\n const result = QueryBuilder.execute(sql, params);\n if (result.changes > 0) {\n notifyNewEvent();\n }\n return result.changes > 0;\n }\n\n static resetAllFailedToPending(portalAddress?: string): number {\n const portalClause = portalAddress != null ? \"AND portalAddress = ?\" : \"\";\n const sql = `\n UPDATE ${this.TABLE}\n SET status = 'pending',\n retryCount = 0,\n lastError = NULL,\n nextRetryAt = NULL,\n lockedAt = NULL\n WHERE status = 'failed'\n ${portalClause}\n `;\n const params = portalAddress != null ? 
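// --- Illustrative sketch of the scheduling rules the SQL above encodes
// (rows are example data, not fixtures from this package):
//   - only the OLDEST pending event per fileId is eligible (the NOT EXISTS
//     guard), so create/update/delete for one file always replay in order;
//   - fileIds currently held by an active processor are excluded;
//   - events whose nextRetryAt lies in the future are skipped.
// Retries back off through RETRY_DELAYS_MS = [5s, 30s, 120s], repeating the
// last delay until the worker's MAX_RETRIES, after which markFailed parks
// the event for manual resetFailedToPending.
const next = EventsModel.findNextEligible(["file-currently-processing"]);
if (next) EventsModel.markProcessing(next._id);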
[portalAddress] : [];\n const result = QueryBuilder.execute(sql, params);\n if (result.changes > 0) {\n notifyNewEvent();\n }\n return result.changes;\n }\n\n static resetStaleEvents(staleThreshold: number): number {\n const sql = `\n UPDATE ${this.TABLE}\n SET status = 'pending',\n lockedAt = NULL,\n userOpHash = NULL,\n pendingPayload = NULL\n WHERE status = 'processing'\n AND lockedAt IS NOT NULL\n AND lockedAt < ?\n `;\n const result = QueryBuilder.execute(sql, [staleThreshold]);\n return result.changes;\n }\n\n static setEventPendingOp(_id: string, userOpHash: string, payload: Record<string, unknown>): void {\n const sql = `UPDATE ${this.TABLE} SET userOpHash = ?, pendingPayload = ? WHERE _id = ?`;\n QueryBuilder.execute(sql, [userOpHash, JSON.stringify(payload), _id]);\n }\n\n static clearEventPendingOp(_id: string): void {\n const sql = `UPDATE ${this.TABLE} SET userOpHash = NULL, pendingPayload = NULL WHERE _id = ?`;\n QueryBuilder.execute(sql, [_id]);\n }\n\n private static parseEvent(row: EventRow): Event {\n return {\n _id: row._id,\n type: row.type as EventType,\n timestamp: row.timestamp,\n fileId: row.fileId,\n portalAddress: row.portalAddress ?? \"\",\n status: row.status as EventStatus,\n retryCount: row.retryCount,\n lastError: row.lastError,\n lockedAt: row.lockedAt,\n nextRetryAt: row.nextRetryAt,\n userOpHash: row.userOpHash ?? null,\n pendingPayload: row.pendingPayload ?? null,\n };\n }\n}\n","import { type File, FilesModel, type FileListResponse } from \"./files.model\";\nimport { PortalsModel, type Portal } from \"./portals.model\";\nimport { ApiKeysModel, type ApiKey } from \"./apikeys.model\";\nimport { type Folder, type FolderWithDDocs, type FolderListResponse, FoldersModel } from \"./folders.model\";\nimport { EventsModel, type Event, type EventType, type EventStatus } from \"./events.model\";\n\nexport { FilesModel, PortalsModel, ApiKeysModel, FoldersModel, EventsModel };\nexport type {\n File,\n FileListResponse,\n Portal,\n ApiKey,\n Folder,\n FolderWithDDocs,\n FolderListResponse,\n Event,\n EventType,\n EventStatus,\n};\n","import { PortalsModel } from \"../../infra/database/models\";\nimport type { Portal, SavePortalInput } from \"../../types\";\n\nexport function savePortal(input: SavePortalInput): Portal {\n if (!input.portalAddress || !input.portalSeed || !input.ownerAddress) {\n throw new Error(\"portalAddress, portalSeed, and ownerAddress are required\");\n }\n\n return PortalsModel.upsert(input);\n}\n","import getDb from \"../index\";\nimport { logger } from \"../../\";\n\nconst STABLE_SCHEMA = `\nCREATE TABLE IF NOT EXISTS files (\n _id TEXT PRIMARY KEY,\n ddocId TEXT NOT NULL,\n title TEXT NOT NULL,\n content TEXT NOT NULL,\n localVersion INTEGER NOT NULL DEFAULT 1,\n onchainVersion INTEGER NOT NULL DEFAULT 0,\n syncStatus TEXT NOT NULL DEFAULT 'pending',\n createdAt DATETIME DEFAULT CURRENT_TIMESTAMP,\n updatedAt DATETIME DEFAULT CURRENT_TIMESTAMP,\n isDeleted INTEGER NOT NULL DEFAULT 0,\n portalAddress TEXT NOT NULL,\n metadata TEXT DEFAULT '{}',\n onChainFileId INTEGER,\n commentKey TEXT,\n linkKey TEXT,\n linkKeyNonce TEXT,\n link TEXT\n);\nCREATE INDEX IF NOT EXISTS idx_files_createdAt ON files(createdAt);\nCREATE INDEX IF NOT EXISTS idx_files_syncStatus ON files(syncStatus);\nCREATE INDEX IF NOT EXISTS idx_files_title ON files(title);\nCREATE INDEX IF NOT EXISTS idx_files_portalAddress ON files(portalAddress);\n\nCREATE TABLE IF NOT EXISTS portals (\n _id TEXT PRIMARY KEY,\n portalAddress TEXT NOT NULL UNIQUE,\n portalSeed TEXT NOT NULL 
UNIQUE,\n ownerAddress TEXT NOT NULL,\n createdAt DATETIME DEFAULT CURRENT_TIMESTAMP,\n updatedAt DATETIME DEFAULT CURRENT_TIMESTAMP\n);\n\nCREATE TABLE IF NOT EXISTS api_keys (\n _id TEXT PRIMARY KEY,\n apiKeySeed TEXT NOT NULL UNIQUE,\n name TEXT NOT NULL,\n collaboratorAddress TEXT NOT NULL UNIQUE,\n portalAddress TEXT NOT NULL,\n createdAt DATETIME DEFAULT CURRENT_TIMESTAMP,\n isDeleted INTEGER NOT NULL DEFAULT 0\n);\n\nCREATE TABLE IF NOT EXISTS events (\n _id TEXT PRIMARY KEY,\n type TEXT NOT NULL CHECK (type IN ('create', 'update', 'delete')),\n timestamp INTEGER NOT NULL,\n fileId TEXT NOT NULL,\n status TEXT NOT NULL DEFAULT 'pending' CHECK (status IN ('pending', 'processing', 'processed', 'failed')),\n retryCount INTEGER NOT NULL DEFAULT 0,\n lastError TEXT,\n lockedAt INTEGER,\n nextRetryAt INTEGER,\n userOpHash TEXT,\n pendingPayload TEXT,\n portalAddress TEXT\n);\nCREATE INDEX IF NOT EXISTS idx_events_pending_eligible ON events (status, nextRetryAt, timestamp) WHERE status = 'pending';\nCREATE INDEX IF NOT EXISTS idx_events_file_pending_ts ON events (fileId, status, timestamp) WHERE status = 'pending';\nCREATE INDEX IF NOT EXISTS idx_events_processing_locked ON events (status, lockedAt) WHERE status = 'processing';\nCREATE INDEX IF NOT EXISTS idx_events_failed_portal ON events (portalAddress, status) WHERE status = 'failed';\n\nCREATE TABLE IF NOT EXISTS folders (\n _id TEXT PRIMARY KEY,\n onchainFileId INTEGER NOT NULL,\n folderId TEXT NOT NULL,\n folderRef TEXT NOT NULL,\n folderName TEXT NOT NULL,\n portalAddress TEXT NOT NULL,\n metadataIPFSHash TEXT NOT NULL,\n contentIPFSHash TEXT NOT NULL,\n isDeleted INTEGER NOT NULL DEFAULT 0,\n lastTransactionHash TEXT,\n lastTransactionBlockNumber INTEGER NOT NULL,\n lastTransactionBlockTimestamp INTEGER NOT NULL,\n created_at DATETIME DEFAULT CURRENT_TIMESTAMP,\n updated_at DATETIME DEFAULT CURRENT_TIMESTAMP\n);\nCREATE INDEX IF NOT EXISTS idx_folders_folderRef_folderId ON folders(folderRef, folderId);\nCREATE INDEX IF NOT EXISTS idx_folders_folderRef ON folders(folderRef);\nCREATE INDEX IF NOT EXISTS idx_folders_created_at ON folders(created_at);\n`;\n\nexport function runMigrations(): void {\n const db = getDb();\n db.exec(STABLE_SCHEMA);\n logger.debug(\"Database schema ready\");\n}\n","#!/usr/bin/env node\nimport { Command } from \"commander\";\nimport { fetchApiKeyData } from \"./fetch-api-key.js\";\nimport type { AppKeyMaterial, KeyMaterial } from \"../types\";\nimport { scaffoldConfig } from \"./scaffold-config.js\";\nimport { startAll, setupShutdownHandlers, waitForProcesses } from \"./process-manager.js\";\nimport { promptForConfig, needsPrompting } from \"./prompts.js\";\nimport { loadConfig } from \"../config/index.js\";\nimport { decryptSavedData, initializeWithData } from \"../init/index.js\";\n\nconst program = new Command()\n .name(\"fileverse-api\")\n .description(\"Run the Fileverse API server\")\n .version(\"0.0.1\")\n .option(\"--apiKey <key>\", \"API key for authentication\")\n .option(\"--rpcUrl <url>\", \"RPC URL for blockchain connection\")\n .option(\"--port <port>\", \"Port to run the server on\", \"8001\")\n .option(\"--db <path>\", \"Database path\")\n .action(async (options) => {\n try {\n console.log(\"Fileverse API - Starting initialization...\\n\");\n\n if (needsPrompting(options)) {\n const prompted = await promptForConfig({\n apiKey: options.apiKey,\n rpcUrl: options.rpcUrl,\n });\n options.apiKey = prompted.apiKey;\n options.rpcUrl = prompted.rpcUrl;\n }\n\n const data = await 
fetchApiKeyData(options.apiKey);\n console.log(\"✓ API key data retrieved\\n\");\n\n const keyMaterial = await decryptSavedData<KeyMaterial>(options.apiKey, data.encryptedKeyMaterial);\n const appMaterial = await decryptSavedData<AppKeyMaterial>(options.apiKey, data.encryptedAppMaterial);\n console.log(\"Setting up configuration...\");\n const envPath = scaffoldConfig({\n dbPath: options.db,\n port: options.port,\n apiKey: options.apiKey,\n rpcUrl: options.rpcUrl,\n });\n loadConfig();\n console.log(`✓ Configuration saved to ${envPath}\\n`);\n\n const { runMigrations } = await import(\"../infra/database/migrations/index.js\");\n runMigrations();\n console.log(\"✓ Database migrations complete\");\n\n const result = initializeWithData({\n keyMaterial,\n appMaterial,\n id: data.id,\n });\n console.log(\"✓ Portal saved\");\n if (result.apiKeySaved) {\n console.log(\"✓ API key saved\");\n } else {\n console.log(\"✓ API key already exists\");\n }\n\n console.log(\"\\nStarting services...\");\n setupShutdownHandlers();\n startAll();\n\n console.log(`\n✓ Fileverse API is running!\n\n API Server: http://127.0.0.1:${options.port}\n Worker: Active\n\n MCP: Add this to your AI agent's MCP config (e.g. .claude/mcp.json):\n\n {\n \"mcpServers\": {\n \"fileverse-api\": {\n \"command\": \"fileverse-api-mcp\"\n }\n }\n }\n\n Config is auto-read from ~/.fileverse/.env — no env vars needed.\n\nPress Ctrl+C to stop.\n`);\n\n await waitForProcesses();\n } catch (error) {\n console.error(\"\\n❌ Error:\", error instanceof Error ? error.message : error);\n process.exit(1);\n }\n });\n\nprogram.parse();\n","import axios from \"axios\";\nimport { toUint8Array } from \"js-base64\";\nimport { sha256 } from \"viem\";\nimport { BASE_CONFIG } from \"./constants\";\nimport type { ApiKeyResponse } from \"../types\";\n\nexport const fetchApiKeyData = async (apiKey: string): Promise<ApiKeyResponse> => {\n try {\n const keyHash = sha256(toUint8Array(apiKey));\n const fullUrl = BASE_CONFIG.API_URL + \"api-access\" + `/${keyHash}`;\n const response = await axios.get<ApiKeyResponse>(fullUrl);\n\n const { encryptedKeyMaterial, encryptedAppMaterial, id } = response.data;\n\n return { encryptedKeyMaterial, encryptedAppMaterial, id };\n } catch (error) {\n if (axios.isAxiosError(error)) {\n if (error.response?.status === 401) {\n throw new Error(\"Invalid API key\");\n }\n if (error.response?.status === 404) {\n throw new Error(\"API key not found\");\n }\n if (error.code === \"ECONNREFUSED\") {\n throw new Error(`Cannot connect to server at ${BASE_CONFIG.API_URL}`);\n }\n throw new Error(`Server error: ${error.response?.data?.message || error.message}`);\n }\n throw error;\n }\n};\n","import fs from \"fs\";\nimport path from \"path\";\nimport os from \"os\";\nimport { STATIC_CONFIG } from \"./constants\";\nimport type { ConfigOptions } from \"../types\";\n\nexport function getFileverseDir(): string {\n return path.join(os.homedir(), \".fileverse\");\n}\n\nfunction getDefaultDbPath(): string {\n return path.join(getFileverseDir(), \"fileverse-api.db\");\n}\n\nexport function getEnvPath(): string {\n return path.join(getFileverseDir(), \".env\");\n}\n\nexport function scaffoldConfig(options: ConfigOptions = {}): string {\n const fileverseDir = getFileverseDir();\n const envPath = getEnvPath();\n\n if (!fs.existsSync(fileverseDir)) {\n fs.mkdirSync(fileverseDir, { recursive: true });\n }\n\n const dbPath = options.dbPath || getDefaultDbPath();\n const dbDir = path.dirname(dbPath);\n if (!fs.existsSync(dbDir)) {\n fs.mkdirSync(dbDir, 
{ recursive: true });\n }\n\n const envContent = `API_KEY=${options.apiKey}\nRPC_URL=${options.rpcUrl || STATIC_CONFIG.DEFAULT_RPC_URL}\nDB_PATH=${dbPath}\nPORT=${options.port || STATIC_CONFIG.DEFAULT_PORT}\n`;\n\n fs.writeFileSync(envPath, envContent, \"utf-8\");\n\n return envPath;\n}\n","import { spawn, ChildProcess } from \"child_process\";\nimport path from \"path\";\nimport { fileURLToPath } from \"url\";\nimport { existsSync } from \"fs\";\n\ninterface ManagedProcess {\n name: string;\n process: ChildProcess;\n}\n\nconst managedProcesses: ManagedProcess[] = [];\n\nfunction getDistDir(): string {\n const __dirname = path.dirname(fileURLToPath(import.meta.url));\n return path.resolve(__dirname, \"..\");\n}\n\nfunction isDevMode(): boolean {\n const distDir = getDistDir();\n return existsSync(path.join(distDir, \"index.ts\"));\n}\n\nfunction prefixOutput(name: string, data: Buffer): void {\n const lines = data.toString().split(\"\\n\").filter(Boolean);\n for (const line of lines) {\n console.log(`[${name}] ${line}`);\n }\n}\n\nfunction spawnProcess(name: string, executable: string, scriptPath: string, extraEnv?: Record<string, string>): ChildProcess {\n const child = spawn(executable, [scriptPath], {\n stdio: [\"ignore\", \"pipe\", \"pipe\"],\n env: { ...process.env, NODE_ENV: executable === \"tsx\" ? \"development\" : \"production\", ...extraEnv },\n detached: false,\n });\n\n child.stdout?.on(\"data\", (data: Buffer) => prefixOutput(name, data));\n child.stderr?.on(\"data\", (data: Buffer) => prefixOutput(name, data));\n\n child.on(\"error\", (error) => {\n console.error(`[${name}] Process error:`, error.message);\n });\n\n child.on(\"exit\", (code, signal) => {\n if (signal) {\n console.log(`[${name}] Process terminated by signal ${signal}`);\n } else if (code !== 0) {\n console.error(`[${name}] Process exited with code ${code}`);\n }\n });\n\n managedProcesses.push({ name, process: child });\n return child;\n}\n\nexport function startApiServer(): ChildProcess {\n const distDir = getDistDir();\n const dev = isDevMode();\n const executable = dev ? \"tsx\" : \"node\";\n const apiPath = path.join(distDir, dev ? \"index.ts\" : \"index.js\");\n return spawnProcess(\"API\", executable, apiPath, { IS_CLI: \"1\" });\n}\n\nexport function startWorker(): ChildProcess {\n const distDir = getDistDir();\n const dev = isDevMode();\n const executable = dev ? \"tsx\" : \"node\";\n const workerPath = path.join(distDir, dev ? 
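// --- Illustrative sketch: the ~/.fileverse/.env file scaffoldConfig above
// writes when called with only an apiKey (paths shown are examples):
//
//   API_KEY=<your key>
//   RPC_URL=https://rpc.gnosischain.com
//   DB_PATH=/home/<user>/.fileverse/fileverse-api.db
//   PORT=8001
//
// RPC_URL and PORT fall back to STATIC_CONFIG defaults and DB_PATH to the
// ~/.fileverse directory, so re-running the CLI with no flags reproduces a
// working config.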
\"worker.ts\" : \"worker.js\");\n return spawnProcess(\"WORKER\", executable, workerPath);\n}\n\nexport function startAll(): { api: ChildProcess; worker: ChildProcess } {\n const api = startApiServer();\n const worker = startWorker();\n return { api, worker };\n}\n\nexport function setupShutdownHandlers(): void {\n const shutdown = (signal: string) => {\n console.log(`\\nReceived ${signal}, shutting down...`);\n\n for (const { name, process: child } of managedProcesses) {\n if (child.pid && !child.killed) {\n console.log(`[${name}] Stopping...`);\n child.kill(\"SIGTERM\");\n }\n }\n\n setTimeout(() => {\n for (const { name, process: child } of managedProcesses) {\n if (child.pid && !child.killed) {\n console.log(`[${name}] Force killing...`);\n child.kill(\"SIGKILL\");\n }\n }\n process.exit(0);\n }, 5000);\n };\n\n process.on(\"SIGTERM\", () => shutdown(\"SIGTERM\"));\n process.on(\"SIGINT\", () => shutdown(\"SIGINT\"));\n}\n\nexport function waitForProcesses(): Promise<void> {\n return new Promise((resolve) => {\n const checkInterval = setInterval(() => {\n const allExited = managedProcesses.every(({ process: child }) => child.exitCode !== null || child.killed);\n if (allExited) {\n clearInterval(checkInterval);\n resolve();\n }\n }, 1000);\n });\n}\n","import prompts from \"prompts\";\nimport { STATIC_CONFIG } from \"./constants\";\nimport { getRuntimeConfig } from \"../config/index.js\";\nimport type { PromptedConfig } from \"../types\";\n\nexport const promptForConfig = async (\n existingOptions: {\n apiKey?: string;\n rpcUrl?: string;\n } = {},\n): Promise<PromptedConfig> => {\n const savedConfig = getRuntimeConfig();\n const questions: prompts.PromptObject[] = [];\n\n if (!existingOptions.apiKey) {\n questions.push({\n type: \"text\",\n name: \"apiKey\",\n message: \"Enter your API Key:\",\n validate: (value: string) => value.length > 0 || \"API Key is required\",\n initial: savedConfig.API_KEY || \"\",\n });\n }\n\n if (!existingOptions.rpcUrl) {\n questions.push({\n type: \"text\",\n name: \"rpcUrl\",\n message: \"Enter RPC URL (press Enter for default):\",\n initial: savedConfig.RPC_URL || STATIC_CONFIG.DEFAULT_RPC_URL,\n });\n }\n\n if (questions.length === 0) {\n return {\n apiKey: existingOptions.apiKey!,\n rpcUrl: existingOptions.rpcUrl || STATIC_CONFIG.DEFAULT_RPC_URL,\n };\n }\n\n const response = await prompts(questions, {\n onCancel: () => {\n console.log(\"\\nSetup cancelled.\");\n process.exit(1);\n },\n });\n\n return {\n apiKey: existingOptions.apiKey || response.apiKey,\n rpcUrl: existingOptions.rpcUrl || response.rpcUrl || STATIC_CONFIG.DEFAULT_RPC_URL,\n };\n};\n\nexport function needsPrompting(options: { apiKey?: string }): boolean {\n return !options.apiKey;\n}\n","import { fetchApiKeyData } from \"../cli/fetch-api-key.js\";\nimport { savePortal } from \"../domain/portal/savePortal.js\";\nimport { addApiKey } from \"../domain/portal/saveApiKey.js\";\nimport { ApiKeysModel } from \"../infra/database/models/apikeys.model.js\";\nimport { logger } from \"../infra/index.js\";\nimport type { ApiKeyMaterialResponse, InitResult, KeyMaterial, AppKeyMaterial } from \"../types\";\nimport { deriveHKDFKey } from \"@fileverse/crypto/hkdf\";\nimport { toUint8Array } from \"js-base64\";\nimport { stringToBytes } from \"viem\";\nimport { toAESKey, aesDecrypt } from \"@fileverse/crypto/webcrypto\";\n\nexport type { InitResult };\n\nconst SAVED_DATA_ENCRYPTION_KEY_INFO = \"SAVED_DATA_ENCRYPTION_KEY\";\n\nexport function initializeWithData(data: ApiKeyMaterialResponse): InitResult 
{\n const { keyMaterial, appMaterial } = data;\n\n savePortal({\n portalAddress: appMaterial.portalAddress,\n portalSeed: appMaterial.portalSeed,\n ownerAddress: appMaterial.ownerAddress,\n });\n\n const existingApiKey = ApiKeysModel.findByApiKey(keyMaterial.apiKeySeed);\n\n if (!existingApiKey) {\n addApiKey({\n apiKeySeed: keyMaterial.apiKeySeed,\n name: keyMaterial.name,\n collaboratorAddress: keyMaterial.collaboratorAddress,\n portalAddress: appMaterial.portalAddress,\n });\n return { portalSaved: true, apiKeySaved: true };\n }\n\n return { portalSaved: true, apiKeySaved: false };\n}\n\nexport const getAesKeyFromApiKey = async (apiKey: string) => {\n const rawSecret = deriveHKDFKey(\n toUint8Array(apiKey),\n new Uint8Array([0]),\n stringToBytes(SAVED_DATA_ENCRYPTION_KEY_INFO),\n );\n return await toAESKey(rawSecret);\n};\n\nconst bytesToJSON = (bytes: Uint8Array) => {\n return JSON.parse(new TextDecoder().decode(bytes));\n};\n\nexport const decryptSavedData = async <T>(apiKey: string, encryptedData: string): Promise<T> => {\n const aesKey = await getAesKeyFromApiKey(apiKey);\n const decryptedBytes = await aesDecrypt(aesKey, toUint8Array(encryptedData));\n\n const data = bytesToJSON(decryptedBytes) as T;\n return data;\n};\n\nexport const initializeFromApiKey = async (apiKey: string): Promise<void> => {\n logger.debug(\"Fetching API key data from server...\");\n const data = await fetchApiKeyData(apiKey);\n logger.debug(\"API key data retrieved\");\n\n const keyMaterial = await decryptSavedData<KeyMaterial>(apiKey, data.encryptedKeyMaterial);\n const appMaterial = await decryptSavedData<AppKeyMaterial>(apiKey, data.encryptedAppMaterial);\n const result = initializeWithData({ keyMaterial, appMaterial, id: data.id });\n\n logger.debug(\"Portal saved\");\n if (result.apiKeySaved) {\n logger.debug(\"API key saved\");\n } else {\n logger.debug(\"API key already exists\");\n 
}\n};\n"],"mappings":";;;;;;;;;;;;AACA,OAAO,UAAU;AACjB,SAAS,qBAAqB;AAF9B;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAa,eAaA;AAbb;AAAA;AAAA;AAAA;AAAO,IAAM,gBAAgB;AAAA,MAC3B,SAAS;AAAA,MACT,YAAY;AAAA,MACZ,kBAAkB;AAAA,MAClB,cAAc;AAAA,MACd,cAAc;AAAA,MACd,iBAAiB;AAAA,MACjB,mBAAmB;AAAA,MACnB,cAAc;AAAA,MACd,WAAW;AAAA,MACX,cAAc;AAAA,IAChB;AAEO,IAAM,cAAc;AAAA;AAAA;;;ACb3B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,OAAO,YAAY;AACnB,OAAOA,WAAU;AACjB,OAAOC,SAAQ;AACf,OAAOC,SAAQ;AAMf,SAASC,cAAqB;AAC5B,MAAIF,IAAG,WAAW,cAAc,GAAG;AACjC,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAEO,SAAS,WAAW,WAAW,MAAY;AAChD,QAAM,UAAUE,YAAW;AAC3B,SAAO,OAAO,EAAE,MAAM,SAAS,SAAS,CAAC;AAC3C;AAIO,SAAS,mBAAmB;AACjC,SAAO;AAAA,IACL,IAAI,UAAU;AACZ,aAAO,QAAQ,IAAI;AAAA,IACrB;AAAA,IACA,IAAI,UAAU;AACZ,aAAO,QAAQ,IAAI,WAAW,cAAc;AAAA,IAC9C;AAAA,IACA,IAAI,UAAU;AACZ,aAAO,QAAQ,IAAI;AAAA,IACrB;AAAA,IACA,IAAI,OAAO;AACT,aAAO,QAAQ,IAAI,QAAQ,cAAc;AAAA,IAC3C;AAAA,IACA,IAAI,WAAW;AACb,aAAO,QAAQ,IAAI,YAAY;AAAA,IACjC;AAAA,IACA,IAAI,eAAe;AACjB,aAAO,QAAQ,IAAI,gBAAgB,cAAc;AAAA,IACnD;AAAA,EACF;AACF;AA5CA,IAMM,gBACA,aAqDA;AA5DN;AAAA;AAAA;AAAA;AAIA;AAEA,IAAM,iBAAiBH,MAAK,KAAK,QAAQ,IAAI,GAAG,UAAU,MAAM;AAChE,IAAM,cAAcA,MAAK,KAAKE,IAAG,QAAQ,GAAG,cAAc,MAAM;AAchE,eAAW,KAAK;AAuChB,IAAM,SAA6C;AAAA,MACjD,GAAG;AAAA,MACH,IAAI,eAAe;AACjB,eAAO,cAAc;AAAA,MACvB;AAAA,MACA,IAAI,YAAY;AACd,eAAO,cAAc;AAAA,MACvB;AAAA,MACA,IAAI,eAAe;AACjB,eAAO,cAAc;AAAA,MACvB;AAAA,MACA,IAAI,oBAAoB;AACtB,eAAO,cAAc;AAAA,MACvB;AAAA,MACA,IAAI,oBAAoB;AACtB,eAAO,cAAc;AAAA,MACvB;AAAA,MACA,IAAI,UAAU;AACZ,eAAO,QAAQ,IAAI;AAAA,MACrB;AAAA,MACA,IAAI,UAAU;AACZ,eAAO,QAAQ,IAAI,WAAW,cAAc;AAAA,MAC9C;AAAA,MACA,IAAI,UAAU;AACZ,eAAO,QAAQ,IAAI;AAAA,MACrB;AAAA,MACA,IAAI,OAAO;AACT,eAAO,QAAQ,IAAI,QAAQ,cAAc;AAAA,MAC3C;AAAA,MACA,IAAI,WAAW;AACb,eAAO,QAAQ,IAAI,YAAY;AAAA,MACjC;AAAA,MACA,IAAI,KAAK;AACP,eAAO,QAAQ,IAAI,MAAM;AAAA,MAC3B;AAAA,MACA,IAAI,eAAe;AACjB,eAAO,QAAQ,IAAI,gBAAgB,cAAc;AAAA,MACnD;AAAA,IACF;AAAA;AAAA;;;AClGA,OAAO,UAA2C;AAAlD,IAIM,cAEA,cAwCA,iBAsCO;AApFb;AAAA;AAAA;AAAA;AACA;AACA;AAEA,IAAM,eAAe,OAAO,aAAa;AAEzC,IAAM,eAAe,KAAK;AAAA,MACxB,MAAM,cAAc;AAAA,MACpB,OAAO,cAAc;AAAA,MACrB,YAAY;AAAA,QACV,UAAU,CAAC,cAAc,EAAE,MAAM,SAAS,KAAK;AAAA,QAC/C,OAAO,CAAC,WAAW,EAAE,OAAO,MAAM;AAAA,MACpC;AAAA,MACA,aAAa;AAAA,QACX,IAAI,KAAwB;AAC1B,cAAI,CAAC,IAAK,QAAO;AACjB,cAAI,cAAc;AAChB,mBAAO,EAAE,MAAM,IAAI,MAAM,SAAS,IAAI,QAAQ;AAAA,UAChD;AACA,iBAAO;AAAA,YACL,MAAM,IAAI;AAAA,YACV,SAAS,IAAI;AAAA,YACb,OAAO,IAAI;AAAA,UACb;AAAA,QACF;AAAA,MACF;AAAA,MACA,WACE,OAAO,aAAa,eAChB;AAAA,QACE,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,UAAU;AAAA,UACV,eAAe;AAAA,UACf,QAAQ;AAAA,UACR,YAAY;AAAA,UACZ,qBAAqB,CAAC,OAAO,OAAO;AAAA,QACtC;AAAA,MACF,IACA;AAAA,IACR,CAAC;AAOD,IAAM,kBAAkB,CAAC,UAAwB;AAC/C,aAAO,IAAI,SAAoB;AAC7B,cAAM,CAAC,OAAO,GAAG,IAAI,IAAI;AACzB,cAAM,MAAM,aAAa,KAAK,EAAE,KAAK,YAAY;AAEjD,YAAI,OAAO,UAAU,YAAY,UAAU,QAAQ,EAAE,iBAAiB,QAAQ;AAC5E,cAAI,OAAO,GAAG,IAAI;AAClB;AAAA,QACF;AAEA,YAAI,KAAK,SAAS,GAAG;AACnB,gBAAM,OAAO,KAAK,KAAK,SAAS,CAAC;AACjC,cAAI,gBAAgB,OAAO;AACzB,gBAAI,EAAE,KAAK,KAAK,GAAG,OAAO,GAAG,KAAK,MAAM,GAAG,EAAE,CAAC;AAC9C;AAAA,UACF;AAAA,QACF;AAEA,YAAI,iBAAiB,OAAO;AAC1B,cAAI,EAAE,KAAK,MAAM,GAAG,MAAM,OAAO;AACjC;AAAA,QACF;AAEA,YAAI,OAAO,GAAG,IAAI;AAAA,MACpB;AAAA,IACF;AAaO,IAAM,SAAiB;AAAA,MAC5B,OAAO,gBAAgB,OAAO;AAAA,MAC9B,OAAO,gBAAgB,OAAO;AAAA,MAC9B,MAAM,gBAAgB,MAAM;AAAA,MAC5B,MAAM,gBAAgB,MAAM;AAAA,MAC5B,OAAO,gBAAgB,OAAO;AAAA,MAC9B,OAAO,gBAAgB,OAAO;AAAA,MAC9B,IAAI,QAAQ;AACV,eAAO,aAAa;AAAA,MACtB;AAAA,MACA,IAAI,MAAM,KAAY;AACpB,qBAAa,QAAQ;AAAA,MACvB;AAAA,MACA,OAAO,aAAa,MAAM,KAAK,YAAY;AAAA,IAC7C;AAAA;AAAA;;;AClGA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACCA,SAAS,cAAc,cAAc,yBAAyB;AAD9D;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,YA
AY,WAAW;AAAvB;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,SAAS,SAAS,cAAc;AAAhC;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAIa,cACA,mBAKP,WAKO;AAfb,IAAAE,kBAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AAcA;AACA;AAbO,IAAM,eAAe,cAAc;AACnC,IAAM,oBAAoB,cAAc;AAK/C,IAAM,YAAY;AAAA,MAChB;AAAA,MACA;AAAA,IACF;AAEO,IAAM,QAAQ,UAAU,YAAsC;AAAA;AAAA;;;ACfrE,SAAS,oBAAoB,MAAM,aAAa,OAAO,eAAiD;AAExG,SAAS,2BAA2B;AACpC,SAAS,gCAAgC;AACzC,SAAS,0BAA0B;AACnC,SAAS,2BAA2B;AAEpC,SAAS,0BAA0B;AAPnC;AAAA;AAAA;AAAA;AAMA,IAAAC;AAAA;AAAA;;;ACNA,SAAc,SAAAC,cAAa;AAC3B,SAAS,2BAA2B;AADpC;AAAA;AAAA;AAAA;AAEA;AAEA;AAAA;AAAA;;;ACHA,SAAS,2BAA2B;AADpC;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,SAAS,uBAAuB;AAChC,SAAS,eAAe,uBAAAC,4BAA2B;AACnD,SAAS,iBAAiB,qBAAqB;AAC/C,SAAS,wBAAwB;AACjC,OAAO,UAAU;AAEjB,OAAO,eAAe;AACtB,SAAS,gBAAgB,gBAAAC,qBAAoB;AAE7C,SAAS,UAAU,kBAAkB;AACrC,OAAOC,YAAW;AAGlB,SAAS,oBAA8B,sBAAgC;AAbvE;AAAA;AAAA;AAAA;AAQA;AAGA,IAAAC;AAAA;AAAA;;;ACXA,SAAS,kBAAAC,iBAAgB,gBAAAC,qBAAoB;AAe7C,SAAS,gBAAgB,oBAAoB;AAG7C,SAAS,qBAAqB;AAlB9B;AAAA;AAAA;AAAA;AAEA;AAcA;AACA,IAAAC;AAEA;AAAA;AAAA;;;ACfA,SAAS,kBAAAC,iBAAgB,gBAAAC,qBAAoB;AAC7C,SAAc,qBAAqB;AACnC,SAAS,qBAAqB;AAC9B,SAAS,+BAA+B;AACxC,YAAYC,YAAW;AARvB;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AAMA;AACA;AACA;AAAA;AAAA;;;ACRO,SAAS,UAAU,OAA+B;AACvD,MAAI,CAAC,MAAM,cAAc,CAAC,MAAM,QAAQ,CAAC,MAAM,uBAAuB,CAAC,MAAM,eAAe;AAC1F,UAAM,IAAI,MAAM,uEAAuE;AAAA,EACzF;AAEA,QAAM,SAAS,aAAa,oBAAoB,MAAM,aAAa;AACnE,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,uBAAuB,MAAM,aAAa,iBAAiB;AAAA,EAC7E;AAEA,SAAO,aAAa,OAAO,KAAK;AAClC;AAdA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AAAA;AAAA;;;ACHA,SAAS,wBAAwB;AAAjC;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AAEA;AACA;AACA;AACA,IAAAC;AACA;AAAA;AAAA;;;ACRA,SAAS,oBAAoB;AAA7B,IAEM,cAEA;AAJN;AAAA;AAAA;AAAA;AAEA,IAAM,eAAN,cAA2B,aAAa;AAAA,IAAC;AAEzC,IAAM,eAAe,IAAI,aAAa;AACtC,iBAAa,gBAAgB,EAAE;AAAA;AAAA;;;ACL/B,IAQM;AARN;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AAEA;AAGA,IAAM,qBAAqB,IAAI,KAAK;AAAA;AAAA;;;ACRpC,IAAAC,eAAA;AAAA;AAAA;AAAA;AAAA;AACA;AAAA;AAAA;;;ACDA;AAAA;AAAA;AAAA;AAAA,IAAAC;AAAA;AAAA;;;ACAA,IAGM,UAOC;AAVP;AAAA;AAAA;AAAA;AAGA,IAAM,WAAN,MAAe;AAAA,MACb,MAAM,YAAY,SAAgC;AAEhD,gBAAQ,MAAM,mBAAmB,OAAO;AAAA,MAC1C;AAAA,IACF;AAEA,IAAO,mBAAQ,IAAI,SAAS;AAAA;AAAA;;;ACV5B;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AAEA;AAAA;AAAA;;;ACLA,OAAO,cAAc;AAArB,IAUM,2BAmDO;AA7Db;AAAA;AAAA;AAAA;AACA;AACA;AAQA,IAAM,4BAAN,MAAM,2BAA0B;AAAA,MAC9B,OAAe;AAAA,MACP,KAA+B;AAAA,MAE/B,cAAc;AAAA,MAAC;AAAA,MAEvB,OAAO,cAAyC;AAC9C,YAAI,CAAC,2BAA0B,UAAU;AACvC,qCAA0B,WAAW,IAAI,2BAA0B;AAAA,QACrE;AACA,eAAO,2BAA0B;AAAA,MACnC;AAAA,MAEA,gBAAmC;AACjC,YAAI,CAAC,KAAK,IAAI;AAEZ,gBAAM,SAAS,OAAO;AAGtB,eAAK,KAAK,IAAI,SAAS,QAAQ;AAAA,YAC7B,SAAS,OAAO,aAAa,gBAAgB,CAAC,QAAiB,OAAO,MAAM,OAAO,GAAG,CAAC,IAAI;AAAA,UAC7F,CAAC;AAGD,eAAK,GAAG,OAAO,oBAAoB;AAGnC,eAAK,GAAG,OAAO,mBAAmB;AAGlC,eAAK,GAAG,QAAQ,UAAU,EAAE,IAAI;AAEhC,iBAAO,KAAK,8BAA8B,MAAM,EAAE;AAAA,QACpD;AAEA,eAAO,KAAK;AAAA,MACd;AAAA,MAEA,MAAM,QAAuB;AAC3B,YAAI,KAAK,IAAI;AACX,eAAK,GAAG,MAAM;AACd,eAAK,KAAK;AACV,iBAAO,KAAK,4BAA4B;AAAA,QAC1C;AAAA,MACF;AAAA,MAEA,cAAuB;AACrB,eAAO,KAAK,OAAO,QAAQ,KAAK,GAAG;AAAA,MACrC;AAAA,IACF;AAEO,IAAM,4BAA4B,0BAA0B,YAAY;AAAA;AAAA;;;AC7D/E,IAIa;AAJb,IAAAC,kBAAA;AAAA;AAAA;AAAA;AAIO,IAAM,qBAAqB;AAAA;AAAA;;;ACAlC,SAAS,QAAQ;AACf,SAAO,0BAA0B,cAAc;AACjD;AANA,IAQa;AARb;AAAA;AAAA;AAAA;AAAA;AAEA,IAAAC;AAMO,IAAM,eAAN,MAAmB;AAAA,MACxB,OAAO,OAAgB,KAAa,SAAgB,CAAC,GAAQ;AAC3D,cAAM,OAAO,MAAM,EAAE,QAAQ,GAAG;AAChC,eAAO,KAAK,IAAI,MAAM;AAAA,MACxB;AAAA,MAEA,OAAO,UAAmB,KAAa,SAAgB,CAAC,GAAkB;A
ACxE,cAAM,OAAO,MAAM,EAAE,QAAQ,GAAG;AAChC,eAAO,KAAK,IAAI,MAAM;AAAA,MACxB;AAAA,MAEA,OAAO,QACL,KACA,SAAgB,CAAC,GAIjB;AACA,cAAM,OAAO,MAAM,EAAE,QAAQ,GAAG;AAChC,cAAM,SAAS,KAAK,IAAI,MAAM;AAC9B,eAAO;AAAA,UACL,SAAS,OAAO;AAAA,UAChB,iBAAiB,OAAO;AAAA,QAC1B;AAAA,MACF;AAAA,MAEA,OAAO,YAAe,UAAsB;AAC1C,eAAO,MAAM,EAAE,YAAY,QAAQ,EAAE;AAAA,MACvC;AAAA,MAEA,OAAO,SAAS,KAAa,UAAwB,CAAC,GAAW;AAC/D,YAAI,QAAQ;AAEZ,YAAI,QAAQ,SAAS;AACnB,mBAAS,aAAa,QAAQ,OAAO,IAAI,QAAQ,kBAAkB,KAAK;AAAA,QAC1E;AAEA,cAAM,aAAa,QAAQ,UAAU,KAAK;AAC1C,cAAM,QAAQ,QAAQ,UAAU,YAAY,qBAAqB;AAEjE,YAAI,OAAO;AACT,mBAAS,UAAU,KAAK;AAAA,QAC1B;AAEA,YAAI,WAAW;AACb,mBAAS,WAAW,QAAQ,MAAM;AAAA,QACpC;AAEA,eAAO;AAAA,MACT;AAAA,IACF;AAAA;AAAA;;;ACvDA,SAASC,SAAQ;AACf,SAAO,0BAA0B,cAAc;AACjD;AALA,IAWO;AAXP;AAAA;AAAA;AAAA;AAAA;AACA;AAUA,IAAO,mBAAQA;AAAA;AAAA;;;ACVf,SAAS,cAAc;AADvB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACCA,SAAS,UAAAC,eAAc;AADvB,IAMa;AANb;AAAA;AAAA;AAAA;AAAA;AAMO,IAAM,eAAN,MAAmB;AAAA,MACxB,OAAwB,QAAQ;AAAA,MAEhC,OAAO,oBAAoB,eAA2C;AACpE,cAAM,MAAM,kFAAkF,KAAK,KAAK;AACxG,eAAO,aAAa,UAAkB,KAAK,CAAC,aAAa,CAAC;AAAA,MAC5D;AAAA,MAEA,OAAO,OAAO,OAAoF;AAChG,cAAM,MAAMA,QAAO;AACnB,cAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,cAAM,MAAM,eAAe,KAAK,KAAK;AAErC,qBAAa,QAAQ,KAAK,CAAC,KAAK,MAAM,eAAe,MAAM,YAAY,MAAM,cAAc,KAAK,GAAG,CAAC;AAEpG,cAAM,UAAU,KAAK,oBAAoB,MAAM,aAAa;AAC5D,YAAI,CAAC,SAAS;AACZ,gBAAM,IAAI,MAAM,yBAAyB;AAAA,QAC3C;AACA,eAAO;AAAA,MACT;AAAA,MAEA,OAAO,OACL,eACA,OAIQ;AACR,cAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,cAAM,OAAiB,CAAC;AACxB,cAAM,SAAgB,CAAC;AAEvB,mBAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,KAAK,GAAG;AAC1C,cAAI,MAAM,QAAW;AACnB,iBAAK,KAAK,GAAG,CAAC,MAAM;AACpB,mBAAO,KAAK,CAAC;AAAA,UACf;AAAA,QACF;AAEA,aAAK,KAAK,eAAe;AACzB,eAAO,KAAK,GAAG;AAEf,cAAM,cAAc,KAAK,KAAK,IAAI;AAClC,cAAM,MAAM,UAAU,KAAK,KAAK,QAAQ,WAAW;AACnD,eAAO,KAAK,aAAa;AACzB,qBAAa,QAAQ,KAAK,MAAM;AAEhC,cAAM,UAAU,KAAK,oBAAoB,aAAa;AACtD,YAAI,CAAC,SAAS;AACZ,gBAAM,IAAI,MAAM,yBAAyB;AAAA,QAC3C;AACA,eAAO;AAAA,MACT;AAAA,MAEA,OAAO,OAAO,OAAoF;AAChG,cAAM,WAAW,KAAK,oBAAoB,MAAM,aAAa;AAC7D,YAAI,UAAU;AACZ,iBAAO,KAAK,OAAO,MAAM,eAAe;AAAA,YACtC,YAAY,MAAM;AAAA,YAClB,cAAc,MAAM;AAAA,UACtB,CAAC;AAAA,QACH;AACA,eAAO,KAAK,OAAO,KAAK;AAAA,MAC1B;AAAA,IACF;AAAA;AAAA;;;ACtEA,SAAS,UAAAC,eAAc;AADvB,IAMa;AANb;AAAA;AAAA;AAAA;AAAA;AAMO,IAAM,eAAN,MAAmB;AAAA,MACxB,OAAwB,QAAQ;AAAA,MAEhC,OAAO,OAAO,OAKH;AACT,cAAM,MAAMA,QAAO;AACnB,cAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,cAAM,MAAM,eAAe,KAAK,KAAK;AAAA;AAGrC,cAAM,SAAS,aAAa,QAAQ,KAAK;AAAA,UACvC;AAAA,UACA,MAAM;AAAA,UACN,MAAM;AAAA,UACN,MAAM;AAAA,UACN,MAAM;AAAA,UACN;AAAA,QACF,CAAC;AAED,YAAI,OAAO,YAAY,GAAG;AACxB,gBAAM,IAAI,MAAM,0BAA0B;AAAA,QAC5C;AAEA,cAAM,UAAU,KAAK,SAAS,GAAG;AACjC,YAAI,CAAC,SAAS;AACZ,gBAAM,IAAI,MAAM,0BAA0B;AAAA,QAC5C;AACA,eAAO;AAAA,MACT;AAAA,MAEA,OAAO,SAAS,KAAiC;AAC/C,cAAM,MAAM,+FAA+F,KAAK,KAAK;AACrH,eAAO,aAAa,UAAkB,KAAK,CAAC,GAAG,CAAC;AAAA,MAClD;AAAA,MAEA,OAAO,0BAA0B,qBAAiD;AAChF,cAAM,MAAM,+FAA+F,KAAK,KAAK;AACrH,eAAO,aAAa,UAAkB,KAAK,CAAC,mBAAmB,CAAC;AAAA,MAClE;AAAA,MAEA,OAAO,OAAO,KAAmB;AAC/B,cAAM,MAAM,UAAU,KAAK,KAAK;AAChC,qBAAa,QAAQ,KAAK,CAAC,GAAG,CAAC;AAAA,MACjC;AAAA,MAEA,OAAO,oBAAoB,eAA2C;AACpE,cAAM,MAAM,+FAA+F,KAAK,KAAK;AACrH,eAAO,aAAa,UAAkB,KAAK,CAAC,aAAa,CAAC;AAAA,MAC5D;AAAA,MAEA,OAAO,aAAa,QAAoC;AACtD,cAAM,MAAM,+FAA+F,KAAK,KAAK;AACrH,eAAO,aAAa,UAAkB,KAAK,CAAC,MAAM,CAAC;AAAA,MACrD;AAAA,IACF;AAAA;AAAA;;;AChEA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACCA,SAAS,UAAAC,eAAc;AADvB;AAAA;AAAA;AAAA;AAAA;AAEA;AAAA;AAAA;;;ACFA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AAAA;AAAA;;;ACDO,SAAS,WAAW,OAAgC;AACzD,MAAI,CAAC,MAAM,iBAAiB,CAAC,MAAM,cAAc,CAAC,MAAM,cAAc;AACpE,UAAM,IAAI,MAAM,0DAA0D;AAAA,EAC5E;AAEA,SAAO,aAAa,OAAO,KAAK;AAClC;AATA;AA
AA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAuFO,SAAS,gBAAsB;AACpC,QAAM,KAAK,iBAAM;AACjB,KAAG,KAAK,aAAa;AACrB,SAAO,MAAM,uBAAuB;AACtC;AA3FA,IAGM;AAHN;AAAA;AAAA;AAAA;AAAA;AACA;AAEA,IAAM,gBAAgB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACHtB;AACA,SAAS,eAAe;;;ACDxB;AAGA;AAHA,OAAO,WAAW;AAClB,SAAS,oBAAoB;AAC7B,SAAS,cAAc;AAIhB,IAAM,kBAAkB,OAAO,WAA4C;AAChF,MAAI;AACF,UAAM,UAAU,OAAO,aAAa,MAAM,CAAC;AAC3C,UAAM,UAAU,YAAY,UAAU,cAAmB,OAAO;AAChE,UAAM,WAAW,MAAM,MAAM,IAAoB,OAAO;AAExD,UAAM,EAAE,sBAAsB,sBAAsB,GAAG,IAAI,SAAS;AAEpE,WAAO,EAAE,sBAAsB,sBAAsB,GAAG;AAAA,EAC1D,SAAS,OAAO;AACd,QAAI,MAAM,aAAa,KAAK,GAAG;AAC7B,UAAI,MAAM,UAAU,WAAW,KAAK;AAClC,cAAM,IAAI,MAAM,iBAAiB;AAAA,MACnC;AACA,UAAI,MAAM,UAAU,WAAW,KAAK;AAClC,cAAM,IAAI,MAAM,mBAAmB;AAAA,MACrC;AACA,UAAI,MAAM,SAAS,gBAAgB;AACjC,cAAM,IAAI,MAAM,+BAA+B,YAAY,OAAO,EAAE;AAAA,MACtE;AACA,YAAM,IAAI,MAAM,iBAAiB,MAAM,UAAU,MAAM,WAAW,MAAM,OAAO,EAAE;AAAA,IACnF;AACA,UAAM;AAAA,EACR;AACF;;;AC9BA;AAGA;AAHA,OAAO,QAAQ;AACf,OAAOC,WAAU;AACjB,OAAO,QAAQ;AAIR,SAAS,kBAA0B;AACxC,SAAOA,MAAK,KAAK,GAAG,QAAQ,GAAG,YAAY;AAC7C;AAEA,SAAS,mBAA2B;AAClC,SAAOA,MAAK,KAAK,gBAAgB,GAAG,kBAAkB;AACxD;AAEO,SAAS,aAAqB;AACnC,SAAOA,MAAK,KAAK,gBAAgB,GAAG,MAAM;AAC5C;AAEO,SAAS,eAAe,UAAyB,CAAC,GAAW;AAClE,QAAM,eAAe,gBAAgB;AACrC,QAAM,UAAU,WAAW;AAE3B,MAAI,CAAC,GAAG,WAAW,YAAY,GAAG;AAChC,OAAG,UAAU,cAAc,EAAE,WAAW,KAAK,CAAC;AAAA,EAChD;AAEA,QAAM,SAAS,QAAQ,UAAU,iBAAiB;AAClD,QAAM,QAAQA,MAAK,QAAQ,MAAM;AACjC,MAAI,CAAC,GAAG,WAAW,KAAK,GAAG;AACzB,OAAG,UAAU,OAAO,EAAE,WAAW,KAAK,CAAC;AAAA,EACzC;AAEA,QAAM,aAAa,WAAW,QAAQ,MAAM;AAAA,UACpC,QAAQ,UAAU,cAAc,eAAe;AAAA,UAC/C,MAAM;AAAA,OACT,QAAQ,QAAQ,cAAc,YAAY;AAAA;AAG/C,KAAG,cAAc,SAAS,YAAY,OAAO;AAE7C,SAAO;AACT;;;ACzCA;AAAA,SAAS,aAA2B;AACpC,OAAOC,WAAU;AACjB,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,kBAAkB;AAO3B,IAAM,mBAAqC,CAAC;AAE5C,SAAS,aAAqB;AAC5B,QAAMC,aAAYF,MAAK,QAAQC,eAAc,YAAY,GAAG,CAAC;AAC7D,SAAOD,MAAK,QAAQE,YAAW,IAAI;AACrC;AAEA,SAAS,YAAqB;AAC5B,QAAM,UAAU,WAAW;AAC3B,SAAO,WAAWF,MAAK,KAAK,SAAS,UAAU,CAAC;AAClD;AAEA,SAAS,aAAa,MAAc,MAAoB;AACtD,QAAM,QAAQ,KAAK,SAAS,EAAE,MAAM,IAAI,EAAE,OAAO,OAAO;AACxD,aAAW,QAAQ,OAAO;AACxB,YAAQ,IAAI,IAAI,IAAI,KAAK,IAAI,EAAE;AAAA,EACjC;AACF;AAEA,SAAS,aAAa,MAAc,YAAoB,YAAoB,UAAiD;AAC3H,QAAM,QAAQ,MAAM,YAAY,CAAC,UAAU,GAAG;AAAA,IAC5C,OAAO,CAAC,UAAU,QAAQ,MAAM;AAAA,IAChC,KAAK,EAAE,GAAG,QAAQ,KAAK,UAAU,eAAe,QAAQ,gBAAgB,cAAc,GAAG,SAAS;AAAA,IAClG,UAAU;AAAA,EACZ,CAAC;AAED,QAAM,QAAQ,GAAG,QAAQ,CAAC,SAAiB,aAAa,MAAM,IAAI,CAAC;AACnE,QAAM,QAAQ,GAAG,QAAQ,CAAC,SAAiB,aAAa,MAAM,IAAI,CAAC;AAEnE,QAAM,GAAG,SAAS,CAAC,UAAU;AAC3B,YAAQ,MAAM,IAAI,IAAI,oBAAoB,MAAM,OAAO;AAAA,EACzD,CAAC;AAED,QAAM,GAAG,QAAQ,CAAC,MAAM,WAAW;AACjC,QAAI,QAAQ;AACV,cAAQ,IAAI,IAAI,IAAI,kCAAkC,MAAM,EAAE;AAAA,IAChE,WAAW,SAAS,GAAG;AACrB,cAAQ,MAAM,IAAI,IAAI,8BAA8B,IAAI,EAAE;AAAA,IAC5D;AAAA,EACF,CAAC;AAED,mBAAiB,KAAK,EAAE,MAAM,SAAS,MAAM,CAAC;AAC9C,SAAO;AACT;AAEO,SAAS,iBAA+B;AAC7C,QAAM,UAAU,WAAW;AAC3B,QAAM,MAAM,UAAU;AACtB,QAAM,aAAa,MAAM,QAAQ;AACjC,QAAM,UAAUA,MAAK,KAAK,SAAS,MAAM,aAAa,UAAU;AAChE,SAAO,aAAa,OAAO,YAAY,SAAS,EAAE,QAAQ,IAAI,CAAC;AACjE;AAEO,SAAS,cAA4B;AAC1C,QAAM,UAAU,WAAW;AAC3B,QAAM,MAAM,UAAU;AACtB,QAAM,aAAa,MAAM,QAAQ;AACjC,QAAM,aAAaA,MAAK,KAAK,SAAS,MAAM,cAAc,WAAW;AACrE,SAAO,aAAa,UAAU,YAAY,UAAU;AACtD;AAEO,SAAS,WAAwD;AACtE,QAAM,MAAM,eAAe;AAC3B,QAAM,
SAAS,YAAY;AAC3B,SAAO,EAAE,KAAK,OAAO;AACvB;AAEO,SAAS,wBAA8B;AAC5C,QAAM,WAAW,CAAC,WAAmB;AACnC,YAAQ,IAAI;AAAA,WAAc,MAAM,oBAAoB;AAEpD,eAAW,EAAE,MAAM,SAAS,MAAM,KAAK,kBAAkB;AACvD,UAAI,MAAM,OAAO,CAAC,MAAM,QAAQ;AAC9B,gBAAQ,IAAI,IAAI,IAAI,eAAe;AACnC,cAAM,KAAK,SAAS;AAAA,MACtB;AAAA,IACF;AAEA,eAAW,MAAM;AACf,iBAAW,EAAE,MAAM,SAAS,MAAM,KAAK,kBAAkB;AACvD,YAAI,MAAM,OAAO,CAAC,MAAM,QAAQ;AAC9B,kBAAQ,IAAI,IAAI,IAAI,oBAAoB;AACxC,gBAAM,KAAK,SAAS;AAAA,QACtB;AAAA,MACF;AACA,cAAQ,KAAK,CAAC;AAAA,IAChB,GAAG,GAAI;AAAA,EACT;AAEA,UAAQ,GAAG,WAAW,MAAM,SAAS,SAAS,CAAC;AAC/C,UAAQ,GAAG,UAAU,MAAM,SAAS,QAAQ,CAAC;AAC/C;AAEO,SAAS,mBAAkC;AAChD,SAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,UAAM,gBAAgB,YAAY,MAAM;AACtC,YAAM,YAAY,iBAAiB,MAAM,CAAC,EAAE,SAAS,MAAM,MAAM,MAAM,aAAa,QAAQ,MAAM,MAAM;AACxG,UAAI,WAAW;AACb,sBAAc,aAAa;AAC3B,gBAAQ;AAAA,MACV;AAAA,IACF,GAAG,GAAI;AAAA,EACT,CAAC;AACH;;;ACjHA;AACA;AACA;AAFA,OAAO,aAAa;AAKb,IAAM,kBAAkB,OAC7B,kBAGI,CAAC,MACuB;AAC5B,QAAM,cAAc,iBAAiB;AACrC,QAAM,YAAoC,CAAC;AAE3C,MAAI,CAAC,gBAAgB,QAAQ;AAC3B,cAAU,KAAK;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,UAAU,CAAC,UAAkB,MAAM,SAAS,KAAK;AAAA,MACjD,SAAS,YAAY,WAAW;AAAA,IAClC,CAAC;AAAA,EACH;AAEA,MAAI,CAAC,gBAAgB,QAAQ;AAC3B,cAAU,KAAK;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,SAAS,YAAY,WAAW,cAAc;AAAA,IAChD,CAAC;AAAA,EACH;AAEA,MAAI,UAAU,WAAW,GAAG;AAC1B,WAAO;AAAA,MACL,QAAQ,gBAAgB;AAAA,MACxB,QAAQ,gBAAgB,UAAU,cAAc;AAAA,IAClD;AAAA,EACF;AAEA,QAAM,WAAW,MAAM,QAAQ,WAAW;AAAA,IACxC,UAAU,MAAM;AACd,cAAQ,IAAI,oBAAoB;AAChC,cAAQ,KAAK,CAAC;AAAA,IAChB;AAAA,EACF,CAAC;AAED,SAAO;AAAA,IACL,QAAQ,gBAAgB,UAAU,SAAS;AAAA,IAC3C,QAAQ,gBAAgB,UAAU,SAAS,UAAU,cAAc;AAAA,EACrE;AACF;AAEO,SAAS,eAAe,SAAuC;AACpE,SAAO,CAAC,QAAQ;AAClB;;;AJhDA;;;AKPA;AACA;AACA;AACA;AACA;AAEA,SAAS,iBAAAG,sBAAqB;AAC9B,SAAS,gBAAAC,qBAAoB;AAC7B,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,YAAAC,WAAU,kBAAkB;AAIrC,IAAM,iCAAiC;AAEhC,SAAS,mBAAmB,MAA0C;AAC3E,QAAM,EAAE,aAAa,YAAY,IAAI;AAErC,aAAW;AAAA,IACT,eAAe,YAAY;AAAA,IAC3B,YAAY,YAAY;AAAA,IACxB,cAAc,YAAY;AAAA,EAC5B,CAAC;AAED,QAAM,iBAAiB,aAAa,aAAa,YAAY,UAAU;AAEvE,MAAI,CAAC,gBAAgB;AACnB,cAAU;AAAA,MACR,YAAY,YAAY;AAAA,MACxB,MAAM,YAAY;AAAA,MAClB,qBAAqB,YAAY;AAAA,MACjC,eAAe,YAAY;AAAA,IAC7B,CAAC;AACD,WAAO,EAAE,aAAa,MAAM,aAAa,KAAK;AAAA,EAChD;AAEA,SAAO,EAAE,aAAa,MAAM,aAAa,MAAM;AACjD;AAEO,IAAM,sBAAsB,OAAO,WAAmB;AAC3D,QAAM,YAAYH;AAAA,IAChBC,cAAa,MAAM;AAAA,IACnB,IAAI,WAAW,CAAC,CAAC,CAAC;AAAA,IAClBC,eAAc,8BAA8B;AAAA,EAC9C;AACA,SAAO,MAAMC,UAAS,SAAS;AACjC;AAEA,IAAM,eAAe,CAAC,UAAsB;AAC1C,SAAO,KAAK,MAAM,IAAI,YAAY,EAAE,OAAO,KAAK,CAAC;AACnD;AAEO,IAAM,mBAAmB,OAAU,QAAgB,kBAAsC;AAC9F,QAAM,SAAS,MAAM,oBAAoB,MAAM;AAC/C,QAAM,iBAAiB,MAAM,WAAW,QAAQF,cAAa,aAAa,CAAC;AAE3E,QAAM,OAAO,aAAa,cAAc;AACxC,SAAO;AACT;;;ALhDA,IAAM,UAAU,IAAI,QAAQ,EACzB,KAAK,eAAe,EACpB,YAAY,8BAA8B,EAC1C,QAAQ,OAAO,EACf,OAAO,kBAAkB,4BAA4B,EACrD,OAAO,kBAAkB,mCAAmC,EAC5D,OAAO,iBAAiB,6BAA6B,MAAM,EAC3D,OAAO,eAAe,eAAe,EACrC,OAAO,OAAO,YAAY;AACzB,MAAI;AACF,YAAQ,IAAI,8CAA8C;AAE1D,QAAI,eAAe,OAAO,GAAG;AAC3B,YAAM,WAAW,MAAM,gBAAgB;AAAA,QACrC,QAAQ,QAAQ;AAAA,QAChB,QAAQ,QAAQ;AAAA,MAClB,CAAC;AACD,cAAQ,SAAS,SAAS;AAC1B,cAAQ,SAAS,SAAS;AAAA,IAC5B;AAEA,UAAM,OAAO,MAAM,gBAAgB,QAAQ,MAAM;AACjD,YAAQ,IAAI,iCAA4B;AAExC,UAAM,cAAc,MAAM,iBAA8B,QAAQ,QAAQ,KAAK,oBAAoB;AACjG,UAAM,cAAc,MAAM,iBAAiC,QAAQ,QAAQ,KAAK,oBAAoB;AACpG,YAAQ,IAAI,6BAA6B;AACzC,UAAM,UAAU,eAAe;AAAA,MAC7B,QAAQ,QAAQ;AAAA,MAChB,MAAM,QAAQ;AAAA,MACd,QAAQ,QAAQ;AAAA,MAChB,QAAQ,QAAQ;AAAA,IAClB,CAAC;AACD,eAAW;AACX,YAAQ,IAAI,iCAA4B,OAAO;AAAA,CAAI;AAEnD,UAAM,EAAE,eAAAG,eAAc,IAAI,MAAM;AAChC,IAAAA,eAAc;AACd,YAAQ,IAAI,qCAAgC;AAE5C,UAAM,SAAS,mBAAmB;AAAA,MAChC;AAAA,MACA;AAAA,MACA,IAAI,KAAK;AAAA,IACX,CAAC;AACD,YAAQ,IAAI,qBAAgB;AA
C5B,QAAI,OAAO,aAAa;AACtB,cAAQ,IAAI,sBAAiB;AAAA,IAC/B,OAAO;AACL,cAAQ,IAAI,+BAA0B;AAAA,IACxC;AAEA,YAAQ,IAAI,wBAAwB;AACpC,0BAAsB;AACtB,aAAS;AAET,YAAQ,IAAI;AAAA;AAAA;AAAA,iCAGe,QAAQ,IAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,CAgB5C;AAEK,UAAM,iBAAiB;AAAA,EACzB,SAAS,OAAO;AACd,YAAQ,MAAM,mBAAc,iBAAiB,QAAQ,MAAM,UAAU,KAAK;AAC1E,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF,CAAC;AAEH,QAAQ,MAAM;","names":["path","fs","os","getEnvPath","init_constants","init_constants","toHex","generateRandomBytes","toUint8Array","axios","init_constants","fromUint8Array","toUint8Array","init_constants","fromUint8Array","toUint8Array","ucans","init_constants","init_worker","init_worker","init_constants","init_constants","getDb","uuidv7","uuidv7","uuidv7","path","path","fileURLToPath","__dirname","deriveHKDFKey","toUint8Array","stringToBytes","toAESKey","runMigrations"]}
1
+
{"version":3,"sources":["../../node_modules/tsup/assets/esm_shims.js","../../src/cli/constants.generated.ts","../../src/cli/constants.ts","../../src/config/index.ts","../../src/infra/logger.ts","../../src/infra/asyncHandler.ts","../../src/sdk/key-store.ts","../../src/sdk/auth-token-provider.ts","../../src/constants/chains.ts","../../src/constants/events.ts","../../src/constants/methods.ts","../../src/constants/index.ts","../../src/sdk/pimlico-utils.ts","../../src/sdk/smart-agent.ts","../../src/sdk/file-encryption.ts","../../src/sdk/file-utils.ts","../../src/sdk/file-manager.ts","../../src/domain/portal/publish.ts","../../src/domain/portal/saveApiKey.ts","../../src/domain/portal/removeApiKey.ts","../../src/domain/portal/index.ts","../../src/errors/rate-limit.ts","../../src/infra/worker/eventProcessor.ts","../../src/infra/worker/workerSignal.ts","../../src/infra/worker/worker.ts","../../src/infra/worker/index.ts","../../src/appWorker.ts","../../src/infra/reporter.ts","../../src/infra/index.ts","../../src/infra/database/connection.ts","../../src/domain/file/constants.ts","../../src/infra/database/query-builder.ts","../../src/infra/database/index.ts","../../src/infra/database/models/files.model.ts","../../src/infra/database/models/portals.model.ts","../../src/infra/database/models/apikeys.model.ts","../../src/infra/database/models/folders.model.ts","../../src/infra/database/models/events.model.ts","../../src/infra/database/models/index.ts","../../src/domain/portal/savePortal.ts","../../src/infra/database/migrations/index.ts","../../src/cli/index.ts","../../src/cli/fetch-api-key.ts","../../src/cli/scaffold-config.ts","../../src/cli/process-manager.ts","../../src/cli/prompts.ts","../../src/init/index.ts"],"sourcesContent":["// Shim globals in esm bundle\nimport path from 'node:path'\nimport { fileURLToPath } from 'node:url'\n\nconst getFilename = () => fileURLToPath(import.meta.url)\nconst getDirname = () => path.dirname(getFilename())\n\nexport const __dirname = /* @__PURE__ */ getDirname()\nexport const __filename = /* @__PURE__ */ getFilename()\n","export const STATIC_CONFIG = {\n API_URL: 'https://prod-apps-storage-5cdacc06ff79.herokuapp.com/',\n SERVER_DID: 'did:key:z6Mkroj9bxTin6Z5S9qwx2G2b87NPrCX7S85FhCpmBGPcDCz',\n PROXY_SERVER_DID: 'did:key:z6MkrZSmq8D6vQG87YbjUQatXeptaCCXWdTx8fYaWxWbRUHB',\n NETWORK_NAME: 'gnosis',\n DEFAULT_PORT: '8001',\n DEFAULT_RPC_URL: 'https://rpc.gnosischain.com',\n PIMLICO_PROXY_URL: 'https://pimlico-proxy-0a326da116f8.herokuapp.com/',\n SERVICE_NAME: 'fileverse-api',\n LOG_LEVEL: 'info',\n FRONTEND_URL: 'https://docs.fileverse.io'\n} as const;\n\nexport const BASE_CONFIG = STATIC_CONFIG;\n","export { STATIC_CONFIG, BASE_CONFIG } from \"./constants.generated.js\";\n","import dotenv from \"dotenv\";\nimport path from \"path\";\nimport fs from \"fs\";\nimport os from \"os\";\nimport { STATIC_CONFIG } from \"../cli/constants.js\";\n\nconst projectEnvPath = path.join(process.cwd(), \"config\", \".env\");\nconst userEnvPath = path.join(os.homedir(), \".fileverse\", \".env\");\n\nfunction getEnvPath(): string {\n if (fs.existsSync(projectEnvPath)) {\n return projectEnvPath;\n }\n return userEnvPath;\n}\n\nexport function loadConfig(override = true): void {\n const envPath = getEnvPath();\n dotenv.config({ path: envPath, override });\n}\n\nloadConfig(false);\n\nexport function getRuntimeConfig() {\n return {\n get API_KEY() {\n return process.env.API_KEY;\n },\n get RPC_URL() {\n return process.env.RPC_URL || STATIC_CONFIG.DEFAULT_RPC_URL;\n },\n get DB_PATH() {\n 
return process.env.DB_PATH;\n },\n get PORT() {\n return process.env.PORT || STATIC_CONFIG.DEFAULT_PORT;\n },\n get NODE_ENV() {\n return process.env.NODE_ENV || \"production\";\n },\n get FRONTEND_URL() {\n return process.env.FRONTEND_URL || STATIC_CONFIG.FRONTEND_URL;\n },\n };\n}\n\nexport function validateDbPath(): void {\n const dbPath = process.env.DB_PATH;\n if (!dbPath) {\n console.error(\"Error: DB_PATH environment variable is required\");\n console.error(\"Please set DB_PATH in your .env file (config/.env or ~/.fileverse/.env) or run the CLI first\");\n process.exit(1);\n }\n\n const dbDir = path.dirname(dbPath.trim());\n if (!fs.existsSync(dbDir)) {\n fs.mkdirSync(dbDir, { recursive: true });\n }\n}\n\nconst config: Record<string, string | undefined> = {\n ...STATIC_CONFIG,\n get SERVICE_NAME() {\n return STATIC_CONFIG.SERVICE_NAME;\n },\n get LOG_LEVEL() {\n return STATIC_CONFIG.LOG_LEVEL;\n },\n get NETWORK_NAME() {\n return STATIC_CONFIG.NETWORK_NAME;\n },\n get UPLOAD_SERVER_URL() {\n return STATIC_CONFIG.API_URL;\n },\n get UPLOAD_SERVER_DID() {\n return STATIC_CONFIG.SERVER_DID;\n },\n get API_KEY() {\n return process.env.API_KEY;\n },\n get RPC_URL() {\n return process.env.RPC_URL || STATIC_CONFIG.DEFAULT_RPC_URL;\n },\n get DB_PATH() {\n return process.env.DB_PATH;\n },\n get PORT() {\n return process.env.PORT || STATIC_CONFIG.DEFAULT_PORT;\n },\n get NODE_ENV() {\n return process.env.NODE_ENV || \"production\";\n },\n get IP() {\n return process.env.IP || \"0.0.0.0\";\n },\n get FRONTEND_URL() {\n return process.env.FRONTEND_URL || STATIC_CONFIG.FRONTEND_URL;\n },\n};\n\nexport { config };\n","import pino, { Logger as PinoLogger, Level } from \"pino\";\nimport { STATIC_CONFIG } from \"../cli/constants\";\nimport { config } from \"../config\";\n\nconst isProduction = config.NODE_ENV === \"production\";\n\nconst pinoInstance = pino({\n name: STATIC_CONFIG.SERVICE_NAME,\n level: STATIC_CONFIG.LOG_LEVEL,\n formatters: {\n bindings: (bindings) => ({ name: bindings.name }),\n level: (label) => ({ level: label }),\n },\n serializers: {\n err(err: Error | undefined) {\n if (!err) return err;\n if (isProduction) {\n return { type: err.name, message: err.message };\n }\n return {\n type: err.name,\n message: err.message,\n stack: err.stack,\n };\n },\n },\n transport:\n config.NODE_ENV !== \"production\"\n ? 
{\n target: \"pino-pretty\",\n options: {\n colorize: true,\n translateTime: \"SYS:standard\",\n ignore: \"pid,hostname\",\n errorProps: \"*\",\n errorLikeObjectKeys: [\"err\", \"error\"],\n },\n }\n : undefined,\n});\n\ntype LogFn = {\n (msg: string, ...args: unknown[]): void;\n (obj: object, msg?: string, ...args: unknown[]): void;\n};\n\nconst createLogMethod = (level: Level): LogFn => {\n return (...args: unknown[]) => {\n const [first, ...rest] = args;\n const log = pinoInstance[level].bind(pinoInstance) as (...a: unknown[]) => void;\n\n if (typeof first === \"object\" && first !== null && !(first instanceof Error)) {\n log(first, ...rest);\n return;\n }\n\n if (rest.length > 0) {\n const last = rest[rest.length - 1];\n if (last instanceof Error) {\n log({ err: last }, first, ...rest.slice(0, -1));\n return;\n }\n }\n\n if (first instanceof Error) {\n log({ err: first }, first.message);\n return;\n }\n\n log(first, ...rest);\n };\n};\n\ninterface Logger {\n trace: LogFn;\n debug: LogFn;\n info: LogFn;\n warn: LogFn;\n error: LogFn;\n fatal: LogFn;\n level: Level;\n child: PinoLogger[\"child\"];\n}\n\nexport const logger: Logger = {\n trace: createLogMethod(\"trace\"),\n debug: createLogMethod(\"debug\"),\n info: createLogMethod(\"info\"),\n warn: createLogMethod(\"warn\"),\n error: createLogMethod(\"error\"),\n fatal: createLogMethod(\"fatal\"),\n get level() {\n return pinoInstance.level as Level;\n },\n set level(lvl: Level) {\n pinoInstance.level = lvl;\n },\n child: pinoInstance.child.bind(pinoInstance),\n};\n","import { Request, Response, NextFunction } from \"express\";\n\nconst asyncHandler =\n (fn: (req: Request, res: Response, next: NextFunction) => Promise<void>) =>\n (req: Request, res: Response, next: NextFunction) =>\n Promise.resolve(fn(req, res, next)).catch(next);\n\nconst asyncHandlerArray = (resolvers: any) => {\n return resolvers.map(asyncHandler);\n};\n\nexport { asyncHandler, asyncHandlerArray };\n","import { Hex } from \"viem\";\nimport { eciesDecrypt, eciesEncrypt, generateECKeyPair } from \"@fileverse/crypto/ecies\";\nimport { AuthTokenProvider } from \"./auth-token-provider\";\n\nexport class KeyStore {\n private portalKeySeed: Uint8Array | undefined;\n private portalAddress: Hex | undefined;\n\n constructor(\n seed: Uint8Array,\n address: Hex,\n private readonly authTokenProvider: AuthTokenProvider,\n ) {\n this.portalKeySeed = seed;\n this.portalAddress = address;\n this.authTokenProvider = authTokenProvider;\n }\n\n getPortalAddress() {\n if (!this.portalAddress) {\n throw new Error(\"Portal address is not set\");\n }\n return this.portalAddress;\n }\n\n private getAppEncryptionKey() {\n if (!this.portalKeySeed) {\n throw new Error(\"Portal key seed is not set\");\n }\n\n const keyPair = generateECKeyPair(this.portalKeySeed);\n return keyPair.publicKey;\n }\n\n private getAppDecryptionKey() {\n if (!this.portalKeySeed) {\n throw new Error(\"Portal key seed is not set\");\n }\n\n const keyPair = generateECKeyPair(this.portalKeySeed);\n return keyPair.privateKey;\n }\n\n encryptData(data: Uint8Array) {\n return eciesEncrypt(this.getAppEncryptionKey(), data);\n }\n\n decryptData(data: string) {\n return eciesDecrypt(this.getAppDecryptionKey(), data);\n }\n\n getAuthToken(audienceDid: string) {\n return this.authTokenProvider.getAuthToken(audienceDid);\n }\n}\n","import * as ucans from \"@ucans/ucans\";\nimport type { Hex } from \"viem\";\n\nexport class AuthTokenProvider {\n private readonly DEFAULT_OPTIONS = {\n namespace: \"file\",\n segment: \"CREATE\",\n 
scheme: \"storage\",\n };\n private keyPair: ucans.EdKeypair;\n portalAddress: Hex;\n constructor(keyPair: ucans.EdKeypair, portalAddress: Hex) {\n this.keyPair = keyPair;\n this.portalAddress = portalAddress;\n }\n\n async getAuthToken(\n audienceDid: string,\n options: { namespace: string; segment: string; scheme: string } = this.DEFAULT_OPTIONS,\n ): Promise<string> {\n const ucan = await ucans.build({\n audience: audienceDid,\n issuer: this.keyPair,\n lifetimeInSeconds: 7 * 86400,\n capabilities: [\n {\n with: {\n scheme: options.scheme,\n hierPart: this.portalAddress.toLocaleLowerCase(),\n },\n can: { namespace: options.namespace, segments: [options.segment] },\n },\n ],\n });\n\n return ucans.encode(ucan);\n }\n}\n","export { sepolia, gnosis } from \"viem/chains\";\n","export const ADDED_FILE_EVENT = [\n {\n anonymous: false,\n inputs: [\n {\n indexed: true,\n internalType: \"uint256\",\n name: \"fileId\",\n type: \"uint256\",\n },\n {\n indexed: false,\n internalType: \"string\",\n name: \"appFileId\",\n type: \"string\",\n },\n {\n indexed: false,\n internalType: \"enum FileverseApp.FileType\",\n name: \"fileType\",\n type: \"uint8\",\n },\n {\n indexed: false,\n internalType: \"string\",\n name: \"metadataIPFSHash\",\n type: \"string\",\n },\n {\n indexed: false,\n internalType: \"string\",\n name: \"contentIPFSHash\",\n type: \"string\",\n },\n {\n indexed: false,\n internalType: \"string\",\n name: \"gateIPFSHash\",\n type: \"string\",\n },\n {\n indexed: false,\n internalType: \"uint256\",\n name: \"version\",\n type: \"uint256\",\n },\n {\n indexed: true,\n internalType: \"address\",\n name: \"by\",\n type: \"address\",\n },\n ],\n name: \"AddedFile\",\n type: \"event\",\n },\n] as const;\n\nexport const EDITED_FILE_EVENT = [\n {\n anonymous: false,\n inputs: [\n {\n indexed: true,\n internalType: \"uint256\",\n name: \"fileId\",\n type: \"uint256\",\n },\n {\n indexed: false,\n internalType: \"string\",\n name: \"appFileId\",\n type: \"string\",\n },\n {\n indexed: false,\n internalType: \"enum FileverseApp.FileType\",\n name: \"fileType\",\n type: \"uint8\",\n },\n {\n indexed: false,\n internalType: \"string\",\n name: \"metadataIPFSHash\",\n type: \"string\",\n },\n {\n indexed: false,\n internalType: \"string\",\n name: \"contentIPFSHash\",\n type: \"string\",\n },\n {\n indexed: false,\n internalType: \"string\",\n name: \"gateIPFSHash\",\n type: \"string\",\n },\n {\n indexed: false,\n internalType: \"uint256\",\n name: \"version\",\n type: \"uint256\",\n },\n {\n indexed: true,\n internalType: \"address\",\n name: \"by\",\n type: \"address\",\n },\n ],\n name: \"EditedFile\",\n type: \"event\",\n },\n] as const;\n\nexport const DELETED_FILE_EVENT = [\n {\n anonymous: false,\n inputs: [\n {\n indexed: true,\n internalType: \"uint256\",\n name: \"fileId\",\n type: \"uint256\",\n },\n {\n indexed: false,\n internalType: \"string\",\n name: \"appFileId\",\n type: \"string\",\n },\n {\n indexed: true,\n internalType: \"address\",\n name: \"by\",\n type: \"address\",\n },\n ],\n name: \"DeletedFile\",\n type: \"event\",\n },\n] as const;\n","export const ADD_FILE_METHOD = [\n {\n inputs: [\n {\n internalType: \"string\",\n name: \"_appFileId\",\n type: \"string\",\n },\n {\n internalType: \"enum FileverseApp.FileType\",\n name: \"fileType\",\n type: \"uint8\",\n },\n {\n internalType: \"string\",\n name: \"_metadataIPFSHash\",\n type: \"string\",\n },\n {\n internalType: \"string\",\n name: \"_contentIPFSHash\",\n type: \"string\",\n },\n {\n internalType: \"string\",\n 
name: \"_gateIPFSHash\",\n type: \"string\",\n },\n {\n internalType: \"uint256\",\n name: \"version\",\n type: \"uint256\",\n },\n ],\n name: \"addFile\",\n outputs: [],\n stateMutability: \"nonpayable\",\n type: \"function\",\n },\n] as const;\n\nexport const EDIT_FILE_METHOD = [\n {\n inputs: [\n {\n internalType: \"uint256\",\n name: \"fileId\",\n type: \"uint256\",\n },\n {\n internalType: \"string\",\n name: \"_appFileId\",\n type: \"string\",\n },\n {\n internalType: \"string\",\n name: \"_metadataIPFSHash\",\n type: \"string\",\n },\n {\n internalType: \"string\",\n name: \"_contentIPFSHash\",\n type: \"string\",\n },\n {\n internalType: \"string\",\n name: \"_gateIPFSHash\",\n type: \"string\",\n },\n {\n internalType: \"enum FileverseApp.FileType\",\n name: \"fileType\",\n type: \"uint8\",\n },\n {\n internalType: \"uint256\",\n name: \"version\",\n type: \"uint256\",\n },\n ],\n name: \"editFile\",\n outputs: [],\n stateMutability: \"nonpayable\",\n type: \"function\",\n },\n] as const;\n\nexport const DELETED_FILE_ABI = [\n {\n inputs: [\n {\n internalType: \"uint256\",\n name: \"fileId\",\n type: \"uint256\",\n },\n ],\n name: \"deleteFile\",\n outputs: [],\n stateMutability: \"nonpayable\",\n type: \"function\",\n },\n] as const;\n","import { STATIC_CONFIG } from \"../cli/constants\";\nimport { getRuntimeConfig } from \"../config\";\nimport { gnosis, sepolia } from \"./chains\";\n\nexport const NETWORK_NAME = STATIC_CONFIG.NETWORK_NAME;\nexport const UPLOAD_SERVER_URL = STATIC_CONFIG.API_URL;\n\nexport const getRpcUrl = () => getRuntimeConfig().RPC_URL;\nexport const getPimlicoUrl = () => `${STATIC_CONFIG.PIMLICO_PROXY_URL}api/${NETWORK_NAME}/rpc`;\n\nconst CHAIN_MAP = {\n gnosis: gnosis,\n sepolia: sepolia,\n} as const;\n\nexport const CHAIN = CHAIN_MAP[NETWORK_NAME as keyof typeof CHAIN_MAP];\nexport { DELETED_FILE_EVENT, EDITED_FILE_EVENT, ADDED_FILE_EVENT } from \"./events\";\nexport { DELETED_FILE_ABI, EDIT_FILE_METHOD, ADD_FILE_METHOD } from \"./methods\";\n","import { createPublicClient, http, hexToBigInt, toHex, toBytes, type PrivateKeyAccount, type Hex } from \"viem\";\n\nimport { createPimlicoClient } from \"permissionless/clients/pimlico\";\nimport { createSmartAccountClient } from \"permissionless\";\nimport { toSafeSmartAccount } from \"permissionless/accounts\";\nimport { entryPoint07Address } from \"viem/account-abstraction\";\nimport { CHAIN, getRpcUrl, getPimlicoUrl } from \"../constants\";\nimport { generatePrivateKey } from \"viem/accounts\";\n\nexport const getPublicClient = () =>\n createPublicClient({\n transport: http(getRpcUrl(), {\n retryCount: 0,\n }),\n chain: CHAIN,\n });\n\nexport const getPimlicoClient = (authToken: string, portalAddress: Hex, invokerAddress: Hex) =>\n createPimlicoClient({\n transport: http(getPimlicoUrl(), {\n retryCount: 0,\n fetchOptions: {\n headers: {\n Authorization: `Bearer ${authToken}`,\n contract: portalAddress,\n invoker: invokerAddress,\n },\n },\n }),\n entryPoint: {\n address: entryPoint07Address,\n version: \"0.7\",\n },\n });\n\nexport const signerToSmartAccount = async (signer: PrivateKeyAccount) =>\n await toSafeSmartAccount({\n client: getPublicClient(),\n owners: [signer],\n entryPoint: {\n address: entryPoint07Address,\n version: \"0.7\",\n },\n version: \"1.4.1\",\n });\n\nexport const getSmartAccountClient = async (signer: PrivateKeyAccount, authToken: string, portalAddress: Hex) => {\n const smartAccount = await signerToSmartAccount(signer);\n const pimlicoClient = getPimlicoClient(authToken, 
portalAddress, smartAccount.address);\n\n return createSmartAccountClient({\n account: smartAccount,\n chain: CHAIN,\n paymaster: pimlicoClient,\n bundlerTransport: http(getPimlicoUrl(), {\n fetchOptions: {\n headers: {\n Authorization: `Bearer ${authToken}`,\n contract: portalAddress,\n invoker: smartAccount.address,\n },\n },\n retryCount: 0,\n }),\n userOperation: {\n estimateFeesPerGas: async () => (await pimlicoClient.getUserOperationGasPrice()).fast,\n },\n });\n};\n\nexport const getNonce = () =>\n hexToBigInt(\n toHex(toBytes(generatePrivateKey()).slice(0, 24), {\n size: 32,\n }),\n );\n\nexport const waitForUserOpReceipt = async (\n hash: Hex,\n authToken: string,\n portalAddress: Hex,\n invokerAddress: Hex,\n timeout = 120000,\n) => {\n const pimlicoClient = getPimlicoClient(authToken, portalAddress, invokerAddress);\n return pimlicoClient.waitForUserOperationReceipt({\n hash,\n timeout,\n });\n};\n","import { Hex, toHex } from \"viem\";\nimport { privateKeyToAccount } from \"viem/accounts\";\nimport { getSmartAccountClient, getNonce, waitForUserOpReceipt } from \"./pimlico-utils\";\nimport { AuthTokenProvider } from \"./auth-token-provider\";\nimport { STATIC_CONFIG } from \"../cli/constants\";\nimport { createSmartAccountClient } from \"permissionless\";\nimport type { IExecuteUserOperationRequest } from \"../types\";\n\nexport type { IExecuteUserOperationRequest };\n\nexport class AgentClient {\n private smartAccountAgent: ReturnType<typeof createSmartAccountClient> | null = null;\n private readonly MAX_CALL_GAS_LIMIT = 500000;\n private readonly authOptions: {\n namespace: string;\n segment: string;\n scheme: string;\n } = { namespace: \"proxy\", segment: \"ACCESS\", scheme: \"pimlico\" };\n\n constructor(private readonly authTokenProvider: AuthTokenProvider) {\n this.authTokenProvider = authTokenProvider;\n }\n\n async initializeAgentClient(keyMaterial: Uint8Array) {\n const agentAccount = privateKeyToAccount(toHex(keyMaterial));\n const authToken = await this.authTokenProvider.getAuthToken(STATIC_CONFIG.PROXY_SERVER_DID, this.authOptions);\n const smartAccountClient = await getSmartAccountClient(\n agentAccount,\n authToken,\n this.authTokenProvider.portalAddress,\n );\n this.smartAccountAgent = smartAccountClient;\n }\n\n getSmartAccountAgent() {\n if (!this.smartAccountAgent) throw new Error(\"Agent client not initialized\");\n\n return this.smartAccountAgent;\n }\n\n getAgentAddress() {\n const smartAccountAgent = this.getSmartAccountAgent();\n if (!smartAccountAgent.account) throw new Error(\"Agent account not found\");\n return smartAccountAgent.account.address;\n }\n\n getAgentAccount() {\n const smartAccountAgent = this.getSmartAccountAgent();\n if (!smartAccountAgent.account) throw new Error(\"Agent account not found\");\n return smartAccountAgent.account;\n }\n\n destroyAgentClient() {\n this.smartAccountAgent = null;\n }\n\n async getCallData(request: IExecuteUserOperationRequest | IExecuteUserOperationRequest[]) {\n const agentAccount = this.getAgentAccount();\n if (Array.isArray(request)) {\n if (request.length === 0 || request.length > 10) throw new Error(\"Request length must be between 1 and 10\");\n\n const encodedCallData = request.map((req) => ({\n to: req.contractAddress,\n data: req.data,\n value: BigInt(0),\n }));\n\n return await agentAccount.encodeCalls(encodedCallData);\n }\n\n return await agentAccount.encodeCalls([\n {\n to: request.contractAddress,\n data: request.data,\n value: BigInt(0),\n },\n ]);\n }\n\n async sendUserOperation(\n request: 
IExecuteUserOperationRequest | IExecuteUserOperationRequest[],\n customGasLimit?: number,\n ) {\n try {\n const smartAccountAgent = this.getSmartAccountAgent();\n\n const callData = await this.getCallData(request);\n\n return await smartAccountAgent.sendUserOperation({\n callData,\n callGasLimit: BigInt(customGasLimit || this.MAX_CALL_GAS_LIMIT),\n nonce: getNonce(),\n });\n } catch (error) {\n throw error;\n }\n }\n\n async executeUserOperationRequest(\n request: IExecuteUserOperationRequest | IExecuteUserOperationRequest[],\n timeout: number,\n customGasLimit?: number,\n ) {\n const userOpHash = await this.sendUserOperation(request, customGasLimit);\n const { authToken, portalAddress, invokerAddress } = await this.getAuthParams();\n const receipt = await waitForUserOpReceipt(userOpHash, authToken, portalAddress, invokerAddress, timeout);\n if (!receipt.success) throw new Error(`Failed to execute user operation: ${receipt.reason}`);\n return receipt;\n }\n\n async getAuthParams(): Promise<{ authToken: string; portalAddress: Hex; invokerAddress: Hex }> {\n const authToken = await this.authTokenProvider.getAuthToken(STATIC_CONFIG.PROXY_SERVER_DID, this.authOptions);\n return {\n authToken,\n portalAddress: this.authTokenProvider.portalAddress,\n invokerAddress: this.getAgentAddress(),\n };\n }\n}\n","import { gcm } from \"@noble/ciphers/aes.js\";\nimport { generateRandomBytes } from \"@fileverse/crypto/utils\";\n\nconst KEY_LEN = 32;\nconst IV_LEN = 12;\nconst TAG_LEN = 16;\n\nconst b64ToBytes = (b64: string) => Uint8Array.from(Buffer.from(b64, \"base64\"));\nconst bytesToB64 = (b: Uint8Array) => Buffer.from(b).toString(\"base64\");\n\nimport type { DecryptionOptions } from \"../types\";\nexport type { DecryptionOptions };\n\nexport function gcmEncrypt(plaintext: Uint8Array) {\n const key = generateRandomBytes(KEY_LEN);\n const iv = generateRandomBytes(IV_LEN);\n if (key.length !== KEY_LEN) throw new Error(\"key must be 32 bytes\");\n if (iv.length !== IV_LEN) throw new Error(\"iv must be 12 bytes\");\n\n const out = gcm(key, iv).encrypt(plaintext);\n const ciphertext = out.subarray(0, out.length - TAG_LEN);\n const authTag = out.subarray(out.length - TAG_LEN);\n\n return {\n ciphertext,\n authTag: bytesToB64(authTag),\n key: bytesToB64(key),\n iv: bytesToB64(iv),\n };\n}\n\nexport function gcmDecrypt(ciphertext: Uint8Array, opts: DecryptionOptions) {\n const key = b64ToBytes(opts.key);\n const iv = b64ToBytes(opts.iv);\n const tag = b64ToBytes(opts.authTag);\n if (key.length !== KEY_LEN) throw new Error(\"key must be 32 bytes\");\n if (iv.length !== IV_LEN) throw new Error(\"iv must be 12 bytes\");\n if (tag.length !== TAG_LEN) throw new Error(\"authTag must be 16 bytes\");\n\n const combined = new Uint8Array(ciphertext.length + TAG_LEN);\n combined.set(ciphertext, 0);\n combined.set(tag, ciphertext.length);\n\n return gcm(key, iv).decrypt(combined);\n}\n","import { getArgon2idHash } from \"@fileverse/crypto/argon\";\nimport { bytesToBase64, generateRandomBytes } from \"@fileverse/crypto/utils\";\nimport { derivePBKDF2Key, encryptAesCBC } from \"@fileverse/crypto/kdf\";\nimport { secretBoxEncrypt } from \"@fileverse/crypto/nacl\";\nimport hkdf from \"futoin-hkdf\";\n\nimport tweetnacl from \"tweetnacl\";\nimport { fromUint8Array, toUint8Array } from \"js-base64\";\nimport { gcmEncrypt } from \"./file-encryption\";\nimport { toAESKey, aesEncrypt } from \"@fileverse/crypto/webcrypto\";\nimport axios from \"axios\";\nimport { ADD_FILE_METHOD, DELETED_FILE_ABI, EDIT_FILE_METHOD, 
UPLOAD_SERVER_URL } from \"../constants\";\nimport type { UploadFileAuthParams, FileMetadataParams, UploadFilesParams } from \"../types\";\nimport { encodeFunctionData, type Hex, parseEventLogs, type Abi } from \"viem\";\n\ninterface LinkKeyMaterialParams {\n ddocId: string;\n linkKey: string | undefined;\n linkKeyNonce: string | undefined;\n}\n\nconst deriveKeyFromAg2Hash = async (pass: string, salt: Uint8Array) => {\n const key = await getArgon2idHash(pass, salt);\n\n return hkdf(Buffer.from(key), tweetnacl.secretbox.keyLength, {\n info: Buffer.from(\"encryptionKey\"),\n });\n};\n\nconst decryptSecretKey = async (docId: string, nonce: string, encryptedSecretKey: string) => {\n const derivedKey = await deriveKeyFromAg2Hash(docId, toUint8Array(nonce));\n\n return tweetnacl.secretbox.open(toUint8Array(encryptedSecretKey), toUint8Array(nonce), derivedKey);\n};\n\nconst getExistingEncryptionMaterial = async (\n existingEncryptedSecretKey: string,\n existingNonce: string,\n docId: string,\n) => {\n const secretKey = await decryptSecretKey(docId, existingNonce, existingEncryptedSecretKey);\n return {\n encryptedSecretKey: existingEncryptedSecretKey,\n nonce: toUint8Array(existingNonce),\n secretKey,\n };\n};\n\nconst getNaclSecretKey = async (ddocId: string) => {\n const { secretKey } = tweetnacl.box.keyPair();\n const nonce = tweetnacl.randomBytes(tweetnacl.secretbox.nonceLength);\n\n const derivedKey = await deriveKeyFromAg2Hash(ddocId, nonce);\n\n const encryptedSecretKey = fromUint8Array(tweetnacl.secretbox(secretKey, nonce, derivedKey), true);\n\n return { nonce, encryptedSecretKey, secretKey };\n};\n\nexport const generateLinkKeyMaterial = async (params: LinkKeyMaterialParams) => {\n if (params.linkKeyNonce && params.linkKey) {\n const { encryptedSecretKey, nonce, secretKey } = await getExistingEncryptionMaterial(\n params.linkKey,\n params.linkKeyNonce,\n params.ddocId,\n );\n if (secretKey) return { encryptedSecretKey, nonce, secretKey };\n }\n const { secretKey, nonce, encryptedSecretKey } = await getNaclSecretKey(params.ddocId);\n\n return { secretKey, nonce, encryptedSecretKey };\n};\n\nexport const jsonToFile = (json: any, fileName: string) => {\n const blob = new Blob([JSON.stringify(json)], {\n type: \"application/json\",\n });\n\n const file = new File([blob], fileName, {\n type: \"application/json\",\n });\n\n return file;\n};\n\nconst appendAuthTagIvToBlob = async (blob: Blob, authTag: Uint8Array, iv: Uint8Array) => {\n const encryptedFileBytes = await blob.arrayBuffer();\n const encryptedBytes = new Uint8Array(encryptedFileBytes);\n const combinedLength = encryptedBytes.length + authTag.length + iv.length;\n const combinedArray = new Uint8Array(combinedLength);\n\n let offset = 0;\n combinedArray.set(encryptedBytes, offset);\n offset += encryptedBytes.length;\n\n combinedArray.set(authTag, offset);\n offset += authTag.length;\n\n combinedArray.set(iv, offset);\n\n return new Blob([combinedArray], { type: blob.type });\n};\n\nexport const encryptFile = async (file: File) => {\n const arrayBuffer = await file.arrayBuffer();\n\n const plaintext = new Uint8Array(arrayBuffer);\n\n const { ciphertext, authTag, key, iv } = gcmEncrypt(plaintext);\n\n const encryptedBlob = new Blob([ciphertext], { type: file.type });\n\n const encryptedBlobWithAuthTagIv = await appendAuthTagIvToBlob(\n encryptedBlob,\n toUint8Array(authTag),\n toUint8Array(iv),\n );\n\n return {\n encryptedFile: new File([encryptedBlobWithAuthTagIv], file.name),\n key,\n };\n};\n\nexport const 
getNonceAppendedCipherText = (nonce: Uint8Array, cipherText: Uint8Array) => {\n return fromUint8Array(nonce, true) + \"__n__\" + fromUint8Array(cipherText, true);\n};\n\nexport const jsonToBytes = (json: Record<string, any>) => new TextEncoder().encode(JSON.stringify(json));\n\nexport const buildLinklock = (key: Uint8Array, fileKey: Uint8Array, commentKey: Uint8Array) => {\n const ikm = generateRandomBytes();\n const kdfSalt = generateRandomBytes();\n const derivedEphermalKey = derivePBKDF2Key(ikm, kdfSalt);\n\n const { iv, cipherText } = encryptAesCBC(\n {\n key: derivedEphermalKey,\n message: fileKey,\n },\n \"base64\",\n );\n\n const { iv: commentIv, cipherText: commentCipherText } = encryptAesCBC(\n {\n key: derivedEphermalKey,\n message: commentKey,\n },\n \"base64\",\n );\n\n const encryptedIkm = secretBoxEncrypt(ikm, key);\n\n const lockedFileKey = iv + \"__n__\" + cipherText;\n\n const lockedChatKey = commentIv + \"__n__\" + commentCipherText;\n\n const keyMaterial = bytesToBase64(kdfSalt) + \"__n__\" + encryptedIkm;\n\n const fileKeyNonce = generateRandomBytes(24);\n const encryptedFileKey = tweetnacl.secretbox(jsonToBytes({ key: fromUint8Array(fileKey) }), fileKeyNonce, key);\n\n const chatKeyNonce = generateRandomBytes(24);\n const encryptedChatKey = tweetnacl.secretbox(commentKey, chatKeyNonce, key);\n\n return {\n lockedFileKey: getNonceAppendedCipherText(fileKeyNonce, encryptedFileKey),\n lockedChatKey: getNonceAppendedCipherText(chatKeyNonce, encryptedChatKey),\n lockedFileKey_v2: lockedFileKey,\n lockedChatKey_v2: lockedChatKey,\n keyMaterial,\n };\n};\n\nexport const encryptTitleWithFileKey = async (args: { title: string; key: string }) => {\n const key = await toAESKey(toUint8Array(args.key));\n if (!key) throw new Error(\"Key is undefined\");\n\n const titleBytes = new TextEncoder().encode(args.title);\n\n const encryptedTitle = await aesEncrypt(key, titleBytes, \"base64\");\n\n return encryptedTitle;\n};\n\ninterface UploadFileParams {\n file: File;\n ipfsType: string;\n appFileId: string;\n}\n\nexport type { UploadFileAuthParams };\n\nexport const uploadFileToIPFS = async (fileParams: UploadFileParams, authParams: UploadFileAuthParams) => {\n const { file, ipfsType, appFileId } = fileParams;\n const { token, invoker, contractAddress } = authParams;\n\n const body = new FormData();\n body.append(\"file\", file);\n body.append(\"ipfsType\", ipfsType);\n body.append(\"appFileId\", appFileId);\n\n body.append(\"sourceApp\", \"ddoc\");\n const uploadEndpoint = UPLOAD_SERVER_URL + \"upload\";\n const response = await axios.post(uploadEndpoint, body, {\n headers: {\n Authorization: `Bearer ${token}`,\n contract: contractAddress,\n invoker: invoker,\n chain: process.env.chainId,\n },\n });\n\n return response.data.ipfsHash;\n};\n\nconst getEditFileTrxCalldata = (args: {\n fileId: number;\n appFileId: string;\n metadataHash: string;\n contentHash: string;\n gateHash: string;\n}) => {\n return encodeFunctionData({\n abi: EDIT_FILE_METHOD,\n functionName: \"editFile\",\n args: [BigInt(args.fileId), args.appFileId, args.metadataHash, args.contentHash, args.gateHash, 2, BigInt(0)],\n });\n};\n\nconst getAddFileTrxCalldata = (args: {\n appFileId: string;\n metadataHash: string;\n contentHash: string;\n gateHash: string;\n}) => {\n return encodeFunctionData({\n abi: ADD_FILE_METHOD,\n functionName: \"addFile\",\n args: [args.appFileId, 2, args.metadataHash, args.contentHash, args.gateHash, BigInt(0)],\n });\n};\n\nexport const prepareCallData = (args: {\n metadataHash: string;\n 
contentHash: string;\n gateHash: string;\n appFileId: string;\n fileId?: number;\n}) => {\n if (args.fileId) {\n return getEditFileTrxCalldata({\n fileId: args.fileId,\n appFileId: args.appFileId,\n metadataHash: args.metadataHash,\n contentHash: args.contentHash,\n gateHash: args.gateHash,\n });\n }\n return getAddFileTrxCalldata(args);\n};\n\nexport const prepareDeleteFileCallData = (args: { onChainFileId: number }) => {\n return encodeFunctionData({\n abi: DELETED_FILE_ABI,\n functionName: \"deleteFile\",\n args: [BigInt(args.onChainFileId)],\n });\n};\n\nexport const createEncryptedContentFile = async (content: any) => {\n const contentFile = jsonToFile(\n { file: content, source: \"ddoc\" },\n `${fromUint8Array(generateRandomBytes(16))}-CONTENT`,\n );\n return encryptFile(contentFile);\n};\n\nexport type { FileMetadataParams };\n\nexport const buildFileMetadata = (params: FileMetadataParams) => ({\n title: params.encryptedTitle,\n size: params.encryptedFileSize,\n mimeType: \"application/json\",\n appLock: params.appLock,\n ownerLock: params.ownerLock,\n ddocId: params.ddocId,\n nonce: params.nonce,\n owner: params.owner,\n version: \"4\",\n sourceApp: \"fileverse-api\",\n});\n\nexport const parseFileEventLog = (logs: any[], eventName: string, abi: Abi): number => {\n const [parsedLog] = parseEventLogs({ abi, logs, eventName });\n\n if (!parsedLog) throw new Error(`${eventName} event not found`);\n\n const fileId = (parsedLog as any).args.fileId;\n\n if (fileId === undefined || fileId === null) throw new Error(\"FileId not found in event logs\");\n\n return Number(fileId);\n};\n\nexport type { UploadFilesParams };\n\nexport const uploadAllFilesToIPFS = async (params: UploadFilesParams, authParams: UploadFileAuthParams) => {\n const { metadata, encryptedFile, linkLock, ddocId } = params;\n\n const [metadataHash, contentHash, gateHash] = await Promise.all([\n uploadFileToIPFS(\n {\n file: jsonToFile(metadata, `${fromUint8Array(generateRandomBytes(16))}-METADATA`),\n ipfsType: \"METADATA\",\n appFileId: ddocId,\n },\n authParams,\n ),\n uploadFileToIPFS(\n {\n file: encryptedFile,\n ipfsType: \"CONTENT\",\n appFileId: ddocId,\n },\n authParams,\n ),\n uploadFileToIPFS(\n {\n file: jsonToFile(linkLock, `${fromUint8Array(generateRandomBytes(16))}-GATE`),\n ipfsType: \"GATE\",\n appFileId: ddocId,\n },\n authParams,\n ),\n ]);\n\n return { metadataHash, contentHash, gateHash };\n};\n","import { fromUint8Array, toUint8Array } from \"js-base64\";\nimport { KeyStore } from \"./key-store\";\nimport {\n buildLinklock,\n encryptTitleWithFileKey,\n generateLinkKeyMaterial,\n prepareCallData,\n createEncryptedContentFile,\n buildFileMetadata,\n parseFileEventLog,\n uploadAllFilesToIPFS,\n UploadFileAuthParams,\n prepareDeleteFileCallData,\n} from \"./file-utils\";\nimport { AgentClient } from \"./smart-agent\";\nimport { generateAESKey, exportAESKey } from \"@fileverse/crypto/webcrypto\";\nimport { STATIC_CONFIG } from \"../cli/constants\";\nimport { DELETED_FILE_EVENT, EDITED_FILE_EVENT } from \"../constants\";\nimport { markdownToYjs } from \"@fileverse/content-processor\";\nimport { logger } from \"../infra\";\n\nexport class FileManager {\n private keyStore: KeyStore;\n private agentClient: AgentClient;\n\n constructor(keyStore: KeyStore, agentClient: AgentClient) {\n this.keyStore = keyStore;\n this.agentClient = agentClient;\n }\n\n private createLocks(key: string, encryptedSecretKey: string, commentKey: Uint8Array) {\n const appLock = {\n lockedFileKey: 
this.keyStore.encryptData(toUint8Array(key)),\n lockedLinkKey: this.keyStore.encryptData(toUint8Array(encryptedSecretKey)),\n lockedChatKey: this.keyStore.encryptData(commentKey),\n };\n return { appLock, ownerLock: { ...appLock } };\n }\n\n private async getAuthParams(): Promise<UploadFileAuthParams> {\n return {\n token: await this.keyStore.getAuthToken(STATIC_CONFIG.SERVER_DID),\n contractAddress: this.keyStore.getPortalAddress(),\n invoker: this.agentClient.getAgentAddress(),\n };\n }\n\n private async executeFileOperation(callData: `0x${string}`) {\n return this.agentClient.executeUserOperationRequest(\n {\n contractAddress: this.keyStore.getPortalAddress(),\n data: callData,\n },\n 1000000,\n );\n }\n\n private async sendFileOperation(callData: `0x${string}`) {\n return this.agentClient.sendUserOperation(\n {\n contractAddress: this.keyStore.getPortalAddress(),\n data: callData,\n },\n 1000000,\n );\n }\n\n async getProxyAuthParams() {\n return this.agentClient.getAuthParams();\n }\n\n async submitAddFileTrx(file: any) {\n logger.debug(`Preparing to add file ${file.ddocId}`);\n const { encryptedSecretKey, nonce, secretKey } = await generateLinkKeyMaterial({\n ddocId: file.ddocId,\n linkKey: file.linkKey,\n linkKeyNonce: file.linkKeyNonce,\n });\n\n const yJSContent = markdownToYjs(file.content);\n const { encryptedFile, key } = await createEncryptedContentFile(yJSContent);\n logger.debug(`Generated encrypted content file for file ${file.ddocId}`);\n const commentKey = await exportAESKey(await generateAESKey(128));\n\n const { appLock, ownerLock } = this.createLocks(key, encryptedSecretKey, commentKey);\n const linkLock = buildLinklock(secretKey, toUint8Array(key), commentKey);\n\n const encryptedTitle = await encryptTitleWithFileKey({\n title: file.title || \"Untitled\",\n key,\n });\n const metadata = buildFileMetadata({\n encryptedTitle,\n encryptedFileSize: encryptedFile.size,\n appLock,\n ownerLock,\n ddocId: file.ddocId,\n nonce: fromUint8Array(nonce),\n owner: this.agentClient.getAgentAddress(),\n });\n\n const authParams = await this.getAuthParams();\n const { metadataHash, contentHash, gateHash } = await uploadAllFilesToIPFS(\n { metadata, encryptedFile, linkLock, ddocId: file.ddocId },\n authParams,\n );\n logger.debug(`Uploaded files to IPFS for file ${file.ddocId}`);\n\n const callData = prepareCallData({\n metadataHash,\n contentHash,\n gateHash,\n appFileId: file.ddocId,\n fileId: file.fileId,\n });\n logger.debug(`Prepared call data for file ${file.ddocId}`);\n\n const userOpHash = await this.sendFileOperation(callData);\n logger.debug(`Submitted user op for file ${file.ddocId}`);\n return {\n userOpHash,\n linkKey: encryptedSecretKey,\n linkKeyNonce: fromUint8Array(nonce),\n commentKey: fromUint8Array(commentKey),\n metadata,\n };\n }\n\n async updateFile(file: any) {\n logger.debug(`Updating file ${file.ddocId} with onChainFileId ${file.onChainFileId}`);\n const { encryptedSecretKey, nonce, secretKey } = await generateLinkKeyMaterial({\n ddocId: file.ddocId,\n linkKey: file.linkKey,\n linkKeyNonce: file.linkKeyNonce,\n });\n\n logger.debug(`Generating encrypted content file for file ${file.ddocId} with onChainFileId ${file.onChainFileId}`);\n\n const yjsContent = markdownToYjs(file.content);\n const { encryptedFile, key } = await createEncryptedContentFile(yjsContent);\n const commentKey = toUint8Array(file.commentKey);\n\n const { appLock, ownerLock } = this.createLocks(key, encryptedSecretKey, commentKey);\n const linkLock = buildLinklock(secretKey, 
toUint8Array(key), commentKey);\n\n const encryptedTitle = await encryptTitleWithFileKey({\n title: file.title || \"Untitled\",\n key,\n });\n const metadata = buildFileMetadata({\n encryptedTitle,\n encryptedFileSize: encryptedFile.size,\n appLock,\n ownerLock,\n ddocId: file.ddocId,\n nonce: fromUint8Array(nonce),\n owner: this.agentClient.getAgentAddress(),\n });\n\n const authParams = await this.getAuthParams();\n logger.debug(`Uploading files to IPFS for file ${file.ddocId} with onChainFileId ${file.onChainFileId}`);\n const { metadataHash, contentHash, gateHash } = await uploadAllFilesToIPFS(\n { metadata, encryptedFile, linkLock, ddocId: file.ddocId },\n authParams,\n );\n\n const callData = prepareCallData({\n metadataHash,\n contentHash,\n gateHash,\n appFileId: file.ddocId,\n fileId: file.onChainFileId,\n });\n logger.debug(`Executing file operation for file ${file.ddocId} with onChainFileId ${file.onChainFileId}`);\n\n const { logs } = await this.executeFileOperation(callData);\n const onChainFileId = parseFileEventLog(logs, \"EditedFile\", EDITED_FILE_EVENT);\n\n return { onChainFileId, metadata };\n }\n\n async deleteFile(file: any) {\n logger.debug(`Deleting file ${file.ddocId} with onChainFileId ${file.onChainFileId}`);\n const callData = prepareDeleteFileCallData({\n onChainFileId: file.onChainFileId,\n });\n logger.debug(`Prepared call data for deleting file ${file.ddocId} with onChainFileId ${file.onChainFileId}`);\n\n const { logs } = await this.executeFileOperation(callData);\n parseFileEventLog(logs, \"DeletedFile\", DELETED_FILE_EVENT);\n logger.debug(`Executed file operation for deleting file ${file.ddocId} with onChainFileId ${file.onChainFileId}`);\n return {\n fileId: file.id,\n onChainFileId: file.onChainFileId,\n metadata: file.metadata,\n };\n }\n}\n","import { FilesModel, PortalsModel } from \"../../infra/database/models\";\nimport { logger } from \"../../infra\";\nimport { KeyStore } from \"../../sdk/key-store\";\nimport { AuthTokenProvider } from \"../../sdk/auth-token-provider\";\nimport { fromUint8Array, toUint8Array } from \"js-base64\";\nimport { Hex, stringToBytes } from \"viem\";\nimport { deriveHKDFKey } from \"@fileverse/crypto/kdf\";\nimport { generateKeyPairFromSeed } from \"@stablelib/ed25519\";\nimport * as ucans from \"@ucans/ucans\";\nimport { AgentClient } from \"../../sdk/smart-agent\";\nimport { FileManager } from \"../../sdk/file-manager\";\nimport { getRuntimeConfig } from \"../../config\";\n\nimport type { PublishResult } from \"../../types\";\n\ninterface PublishContext {\n file: ReturnType<typeof FilesModel.findByIdIncludingDeleted>;\n portalDetails: NonNullable<ReturnType<typeof PortalsModel.findByPortalAddress>>;\n apiKey: string;\n}\n\nfunction getPortalData(fileId: string): PublishContext {\n const file = FilesModel.findByIdIncludingDeleted(fileId);\n if (!file) {\n throw new Error(`File with _id ${fileId} not found`);\n }\n\n const portalDetails = PortalsModel.findByPortalAddress(file.portalAddress);\n if (!portalDetails) {\n throw new Error(`Portal with address ${file.portalAddress} not found`);\n }\n\n const apiKey = getRuntimeConfig().API_KEY;\n if (!apiKey) {\n throw new Error(\"API key is not set\");\n }\n\n return { file, portalDetails, apiKey };\n}\n\nfunction deriveCollaboratorKeys(apiKeySeed: Uint8Array) {\n const salt = new Uint8Array([0]);\n\n const privateAccountKey = deriveHKDFKey(apiKeySeed, salt, stringToBytes(\"COLLABORATOR_PRIVATE_KEY\"));\n\n const ucanDerivedSecret = deriveHKDFKey(apiKeySeed, salt, 
stringToBytes(\"COLLABORATOR_UCAN_SECRET\"));\n\n const { secretKey: ucanSecret } = generateKeyPairFromSeed(ucanDerivedSecret);\n\n return { privateAccountKey, ucanSecret };\n}\n\nconst createFileManager = async (\n portalSeed: string,\n portalAddress: Hex,\n ucanSecret: Uint8Array,\n privateAccountKey: Uint8Array,\n): Promise<FileManager> => {\n const keyPair = ucans.EdKeypair.fromSecretKey(fromUint8Array(ucanSecret), {\n exportable: true,\n });\n\n const authTokenProvider = new AuthTokenProvider(keyPair, portalAddress);\n const keyStore = new KeyStore(toUint8Array(portalSeed), portalAddress, authTokenProvider);\n\n const agentClient = new AgentClient(authTokenProvider);\n await agentClient.initializeAgentClient(privateAccountKey);\n\n return new FileManager(keyStore, agentClient);\n};\n\nconst executeOperation = async (\n fileManager: FileManager,\n file: any,\n operation: \"update\" | \"delete\",\n): Promise<PublishResult> => {\n\n if (operation === \"update\") {\n const result = await fileManager.updateFile(file);\n return { success: true, ...result };\n }\n\n if (operation === \"delete\") {\n const result = await fileManager.deleteFile(file);\n return { success: true, ...result };\n }\n\n throw new Error(`Invalid operation: ${operation}`);\n};\n\nexport const handleExistingFileOp = async (fileId: string, operation: \"update\" | \"delete\"): Promise<PublishResult> => {\n try {\n const { file, portalDetails, apiKey } = getPortalData(fileId);\n\n const apiKeySeed = toUint8Array(apiKey);\n const { privateAccountKey, ucanSecret } = deriveCollaboratorKeys(apiKeySeed);\n\n const fileManager = await createFileManager(\n portalDetails.portalSeed,\n portalDetails.portalAddress as Hex,\n ucanSecret,\n privateAccountKey,\n );\n\n return executeOperation(fileManager, file, operation);\n } catch (error: any) {\n logger.error(`Failed to publish file ${fileId}:`, error);\n throw error;\n }\n};\n\nexport const handleNewFileOp = async (\n fileId: string,\n): Promise<{\n userOpHash: string;\n linkKey: string;\n linkKeyNonce: string;\n commentKey: string;\n metadata: Record<string, unknown>;\n}> => {\n const { file, portalDetails, apiKey } = getPortalData(fileId);\n const apiKeySeed = toUint8Array(apiKey);\n const { privateAccountKey, ucanSecret } = deriveCollaboratorKeys(apiKeySeed);\n const fileManager = await createFileManager(\n portalDetails.portalSeed,\n portalDetails.portalAddress as Hex,\n ucanSecret,\n privateAccountKey,\n );\n return fileManager.submitAddFileTrx(file);\n};\n\nexport const getProxyAuthParams = async (\n fileId: string,\n): Promise<{\n authToken: string;\n portalAddress: Hex;\n invokerAddress: Hex;\n}> => {\n const { portalDetails, apiKey } = getPortalData(fileId);\n const apiKeySeed = toUint8Array(apiKey);\n const { privateAccountKey, ucanSecret } = deriveCollaboratorKeys(apiKeySeed);\n const fileManager = await createFileManager(\n portalDetails.portalSeed,\n portalDetails.portalAddress as Hex,\n ucanSecret,\n privateAccountKey,\n );\n return fileManager.getProxyAuthParams();\n};\n","import { ApiKeysModel, PortalsModel } from \"../../infra/database/models\";\nimport type { AddApiKeyInput, ApiKey } from \"../../types\";\n\nexport function addApiKey(input: AddApiKeyInput): ApiKey {\n if (!input.apiKeySeed || !input.name || !input.collaboratorAddress || !input.portalAddress) {\n throw new Error(\"apiKeySeed, name, collaboratorAddress, and portalAddress are required\");\n }\n\n const portal = PortalsModel.findByPortalAddress(input.portalAddress);\n if (!portal) {\n throw new 
Error(`Portal with address ${input.portalAddress} does not exist`);\n }\n\n return ApiKeysModel.create(input);\n}\n","import { ApiKeysModel, type ApiKey } from \"../../infra/database/models\";\n\nexport function removeApiKey(collaboratorAddress: string): ApiKey {\n if (!collaboratorAddress) {\n throw new Error(\"collaboratorAddress is required\");\n }\n\n const apiKey = ApiKeysModel.findByCollaboratorAddress(collaboratorAddress);\n if (!apiKey) {\n throw new Error(\"API key not found\");\n }\n\n ApiKeysModel.delete(apiKey._id);\n return { ...apiKey, isDeleted: 1 };\n}\n","import { handleExistingFileOp, handleNewFileOp, getProxyAuthParams } from \"./publish\";\nimport { savePortal } from \"./savePortal\";\nimport { addApiKey } from \"./saveApiKey\";\nimport { removeApiKey } from \"./removeApiKey\";\n\nexport { handleExistingFileOp, handleNewFileOp, getProxyAuthParams, savePortal, addApiKey, removeApiKey };\n","import { HttpRequestError } from \"viem\";\n\nexport class RateLimitError extends Error {\n readonly retryAfterSeconds: number;\n\n constructor(retryAfterSeconds: number, message = \"Rate limit exceeded\") {\n super(message);\n this.name = \"RateLimitError\";\n this.retryAfterSeconds = retryAfterSeconds;\n }\n}\n\nconst MAX_RETRY_AFTER_SECONDS = 300;\nconst DEFAULT_RETRY_AFTER_SECONDS = 3600;\n\nfunction parseRetryAfterRaw(raw: string | null): number {\n if (!raw) return DEFAULT_RETRY_AFTER_SECONDS;\n const parsed = parseInt(raw, 10);\n if (!Number.isNaN(parsed) && parsed >= 0) return Math.min(parsed, MAX_RETRY_AFTER_SECONDS);\n const date = Date.parse(raw);\n if (!Number.isNaN(date)) {\n const seconds = Math.max(0, Math.ceil((date - Date.now()) / 1000));\n return Math.min(seconds, MAX_RETRY_AFTER_SECONDS);\n }\n return DEFAULT_RETRY_AFTER_SECONDS;\n}\n\nexport const parseRetryAfterSeconds = (response: Response): number =>\n parseRetryAfterRaw(response.headers.get(\"Retry-After\"));\n\nexport const parseRetryAfterFromHeaders = (headers?: Headers): number =>\n parseRetryAfterRaw(headers?.get(\"Retry-After\") ?? null);\n\nexport function normalizeRateLimitError(error: unknown): unknown {\n if (!(error instanceof HttpRequestError) || error.status !== 429) return error;\n const retryAfter = parseRetryAfterFromHeaders(error.headers);\n const message = \"Beta API rate limit reached. 
Try again in an hour please!\"\n return new RateLimitError(retryAfter, message);\n}\n","import { getRuntimeConfig } from \"../../config\";\nimport { handleNewFileOp, getProxyAuthParams, handleExistingFileOp } from \"../../domain/portal\";\nimport { FilesModel, EventsModel } from \"../database/models\";\nimport type { Event, ProcessResult, UpdateFilePayload } from \"../../types\";\nimport { logger } from \"../index\";\nimport { waitForUserOpReceipt } from \"../../sdk/pimlico-utils\";\nimport { parseFileEventLog } from \"../../sdk/file-utils\";\nimport { ADDED_FILE_EVENT } from \"../../constants\";\nimport { RateLimitError, normalizeRateLimitError } from \"../../errors/rate-limit\";\n\nexport type { ProcessResult };\n\nexport const processEvent = async (event: Event): Promise<ProcessResult> => {\n const { fileId, type } = event;\n\n try {\n switch (type) {\n case \"create\":\n await processCreateEvent(event);\n break;\n case \"update\":\n await processUpdateEvent(event);\n break;\n case \"delete\":\n await processDeleteEvent(event);\n break;\n default:\n throw new Error(`Unknown event type: ${type}`);\n }\n return { success: true };\n } catch (error) {\n const normalized = normalizeRateLimitError(error);\n if (normalized instanceof RateLimitError) throw normalized;\n const errorMsg = error instanceof Error ? error.message : String(error);\n logger.error(`Error processing ${type} event for file ${fileId}:`, errorMsg);\n return { success: false, error: errorMsg };\n }\n};\n\nconst onTransactionSuccess = (\n fileId: string,\n file: ReturnType<typeof FilesModel.findByIdIncludingDeleted>,\n onChainFileId: number,\n pending: { linkKey: string; linkKeyNonce: string; commentKey: string; metadata: Record<string, unknown> },\n): void => {\n const frontendUrl = getRuntimeConfig().FRONTEND_URL;\n const payload: UpdateFilePayload = {\n onchainVersion: file!.localVersion,\n onChainFileId,\n linkKey: pending.linkKey,\n linkKeyNonce: pending.linkKeyNonce,\n commentKey: pending.commentKey,\n metadata: pending.metadata,\n link: `${frontendUrl}/${file!.portalAddress}/${onChainFileId}#key=${pending.linkKey}`,\n };\n const updatedFile = FilesModel.update(fileId, payload, file!.portalAddress);\n if (updatedFile.localVersion === updatedFile.onchainVersion) {\n FilesModel.update(fileId, { syncStatus: \"synced\" }, file!.portalAddress);\n }\n};\n\nconst processCreateEvent = async (event: Event): Promise<void> => {\n const { fileId } = event;\n\n const file = FilesModel.findByIdIncludingDeleted(fileId);\n if (!file) {\n throw new Error(`File ${fileId} not found`);\n }\n\n if (file.isDeleted === 1) {\n logger.info(`File ${fileId} is deleted, skipping create event`);\n return;\n }\n\n const waitContext = await getProxyAuthParams(fileId);\n const timeout = 120000;\n\n if (event.userOpHash) {\n const receipt = await waitForUserOpReceipt(\n event.userOpHash as `0x${string}`,\n waitContext.authToken,\n waitContext.portalAddress,\n waitContext.invokerAddress,\n timeout,\n );\n if (!receipt.success) {\n EventsModel.clearEventPendingOp(event._id);\n throw new Error(`User operation failed: ${receipt.reason}`);\n }\n const onChainFileId = parseFileEventLog(receipt.logs, \"AddedFile\", ADDED_FILE_EVENT);\n const pending = JSON.parse(event.pendingPayload!) 
as {\n linkKey: string;\n linkKeyNonce: string;\n commentKey: string;\n metadata: Record<string, unknown>;\n };\n onTransactionSuccess(fileId, file, onChainFileId, pending);\n EventsModel.clearEventPendingOp(event._id);\n logger.info(`File ${file.ddocId} created and published successfully (resumed from pending op)`);\n return;\n }\n\n const result = await handleNewFileOp(fileId);\n EventsModel.setEventPendingOp(event._id, result.userOpHash, {\n linkKey: result.linkKey,\n linkKeyNonce: result.linkKeyNonce,\n commentKey: result.commentKey,\n metadata: result.metadata,\n });\n\n const receipt = await waitForUserOpReceipt(\n result.userOpHash as `0x${string}`,\n waitContext.authToken,\n waitContext.portalAddress,\n waitContext.invokerAddress,\n timeout,\n );\n if (!receipt.success) {\n EventsModel.clearEventPendingOp(event._id);\n throw new Error(`User operation failed: ${receipt.reason}`);\n }\n const onChainFileId = parseFileEventLog(receipt.logs, \"AddedFile\", ADDED_FILE_EVENT);\n onTransactionSuccess(fileId, file, onChainFileId, {\n linkKey: result.linkKey,\n linkKeyNonce: result.linkKeyNonce,\n commentKey: result.commentKey,\n metadata: result.metadata,\n });\n EventsModel.clearEventPendingOp(event._id);\n logger.info(`File ${file.ddocId} created and published successfully`);\n};\n\nconst processUpdateEvent = async (event: Event): Promise<void> => {\n const { fileId } = event;\n\n const file = FilesModel.findByIdExcludingDeleted(fileId);\n if (!file) {\n return;\n }\n\n if (file.localVersion <= file.onchainVersion) {\n return;\n }\n\n const result = await handleExistingFileOp(fileId, \"update\");\n if (!result.success) {\n throw new Error(`Publish failed for file ${fileId}`);\n }\n\n const payload: UpdateFilePayload = {\n onchainVersion: file.localVersion,\n metadata: result.metadata,\n };\n const updatedFile = FilesModel.update(fileId, payload, file.portalAddress);\n\n if (updatedFile.localVersion === updatedFile.onchainVersion) {\n FilesModel.update(fileId, { syncStatus: \"synced\" }, file.portalAddress);\n }\n logger.info(`File ${file.ddocId} updated and published successfully`);\n};\n\nconst processDeleteEvent = async (event: Event): Promise<void> => {\n const { fileId } = event;\n\n const file = FilesModel.findByIdIncludingDeleted(fileId);\n if (!file) {\n return;\n }\n\n if (file.isDeleted === 1 && file.syncStatus === \"synced\") {\n logger.info(`File ${fileId} deletion already synced, skipping`);\n return;\n }\n\n const payload: UpdateFilePayload = {\n syncStatus: \"synced\",\n isDeleted: 1,\n };\n\n if (file.onChainFileId !== null || file.onChainFileId !== undefined) {\n const result = await handleExistingFileOp(fileId, \"delete\");\n if (!result.success) {\n throw new Error(`Publish failed for file ${fileId}`);\n }\n\n payload.onchainVersion = file.localVersion;\n payload.metadata = result.metadata;\n payload.isDeleted = 1;\n }\n\n FilesModel.update(fileId, payload, file.portalAddress);\n\n logger.info(`File ${fileId} delete event processed (syncStatus set to synced)`);\n};\n","import { EventEmitter } from \"events\";\n\nclass WorkerSignal extends EventEmitter {}\n\nconst workerSignal = new WorkerSignal();\nworkerSignal.setMaxListeners(20);\n\nexport function notifyNewEvent(): void {\n workerSignal.emit(\"newEvent\");\n}\n\nexport function onNewEvent(callback: () => void): () => void {\n workerSignal.on(\"newEvent\", callback);\n return () => workerSignal.off(\"newEvent\", callback);\n}\n","import { logger } from \"../index\";\nimport { processEvent } from 
\"./eventProcessor\";\nimport { onNewEvent } from \"./workerSignal\";\nimport { EventsModel } from \"../database/models\";\nimport type { Event } from \"../database/models\";\nimport { RateLimitError } from \"../../errors/rate-limit\";\n\nconst DEFAULT_CONCURRENCY = 5;\nconst STALE_THRESHOLD_MS = 5 * 60 * 1000;\nconst SIGNAL_RETRY_DELAY_MS = 50;\nconst FALLBACK_POLL_MS = 30000;\nconst MAX_RETRIES = 10;\n\nexport class FileEventsWorker {\n private isRunning = false;\n private concurrency: number;\n private activeProcessors = new Map<string, Promise<void>>();\n private signalCleanup: (() => void) | null = null;\n private pendingSignal = false;\n private wakeResolver: (() => void) | null = null;\n\n constructor(concurrency: number = DEFAULT_CONCURRENCY) {\n this.concurrency = concurrency;\n }\n\n start(): void {\n if (this.isRunning) {\n logger.warn(\"Worker is already running\");\n return;\n }\n this.isRunning = true;\n\n const staleCount = this.recoverStaleEvents();\n if (staleCount > 0) {\n logger.info(`Recovered ${staleCount} stale event(s)`);\n }\n\n this.signalCleanup = onNewEvent(() => {\n this.pendingSignal = true;\n this.wakeUp();\n });\n\n logger.debug(`File events worker started (concurrency: ${this.concurrency})`);\n this.run();\n }\n\n private async run(): Promise<void> {\n while (this.isRunning) {\n const foundEvents = await this.fillSlots();\n logger.debug(`Found ${foundEvents ? \"events\" : \"no events\"} to process`);\n if (this.activeProcessors.size === 0) {\n if (this.pendingSignal && !foundEvents) {\n this.pendingSignal = false;\n await this.sleep(SIGNAL_RETRY_DELAY_MS);\n continue;\n }\n\n this.pendingSignal = false;\n await this.waitForSignalOrTimeout(FALLBACK_POLL_MS);\n } else {\n await Promise.race(this.activeProcessors.values());\n }\n }\n }\n\n private async fillSlots(): Promise<boolean> {\n let foundAny = false;\n\n while (this.activeProcessors.size < this.concurrency && this.isRunning) {\n const lockedFileIds = Array.from(this.activeProcessors.keys());\n const event = EventsModel.findNextEligible(lockedFileIds);\n\n if (!event) break;\n\n foundAny = true;\n EventsModel.markProcessing(event._id);\n const processor = this.processEventWrapper(event);\n this.activeProcessors.set(event.fileId, processor);\n }\n\n logger.debug(`Slots filled: ${this.activeProcessors.size}`);\n return foundAny;\n }\n\n private async processEventWrapper(event: Event): Promise<void> {\n try {\n const result = await processEvent(event);\n if (result.success) {\n EventsModel.markProcessed(event._id);\n } else {\n this.handleFailure(event, result.error);\n }\n } catch (err) {\n this.handleFailure(event, err);\n } finally {\n this.activeProcessors.delete(event.fileId);\n }\n }\n\n private handleFailure(event: Event, error: unknown): void {\n const errorMsg = error instanceof Error ? 
error.message : String(error);\n if (error instanceof RateLimitError) {\n const retryAfterMs = error.retryAfterSeconds * 1000;\n EventsModel.scheduleRetryAfter(event._id, errorMsg, retryAfterMs);\n logger.warn(`Event ${event._id} rate limited; retry after ${error.retryAfterSeconds}s`);\n return;\n }\n if (event.retryCount < MAX_RETRIES) {\n EventsModel.scheduleRetry(event._id, errorMsg);\n logger.warn(`Event ${event._id} failed (retry ${event.retryCount + 1}/${MAX_RETRIES}): ${errorMsg}`);\n } else {\n EventsModel.markFailed(event._id, errorMsg);\n logger.error(`Event ${event._id} permanently failed after ${MAX_RETRIES} retries: ${errorMsg}`);\n }\n }\n\n private recoverStaleEvents(): number {\n const staleThreshold = Date.now() - STALE_THRESHOLD_MS;\n return EventsModel.resetStaleEvents(staleThreshold);\n }\n\n private wakeUp(): void {\n if (this.wakeResolver) {\n this.wakeResolver();\n this.wakeResolver = null;\n }\n }\n\n private waitForSignalOrTimeout(ms: number): Promise<void> {\n return new Promise((resolve) => {\n const timeout = setTimeout(() => {\n this.wakeResolver = null;\n resolve();\n }, ms);\n\n this.wakeResolver = () => {\n clearTimeout(timeout);\n resolve();\n };\n });\n }\n\n private sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n }\n\n async close(): Promise<void> {\n if (!this.isRunning) {\n return;\n }\n logger.info(\"Closing worker gracefully...\");\n this.isRunning = false;\n\n if (this.signalCleanup) {\n this.signalCleanup();\n this.signalCleanup = null;\n }\n\n this.wakeUp();\n this.wakeResolver = null;\n\n if (this.activeProcessors.size > 0) {\n logger.info(`Waiting for ${this.activeProcessors.size} active processor(s) to complete...`);\n await Promise.all(this.activeProcessors.values());\n }\n\n logger.info(\"Worker closed\");\n }\n\n isActive(): boolean {\n return this.isRunning;\n }\n\n getActiveCount(): number {\n return this.activeProcessors.size;\n }\n}\n\nexport function createWorker(concurrency: number = DEFAULT_CONCURRENCY): FileEventsWorker {\n return new FileEventsWorker(concurrency);\n}\n","export { createWorker, FileEventsWorker } from \"./worker\";\nexport { notifyNewEvent } from \"./workerSignal\";\n","import { createWorker, type FileEventsWorker } from \"./infra/worker\";\n\nconst DEFAULT_CONCURRENCY = 5;\n\nlet worker: FileEventsWorker | null = null;\n\nexport function startWorker(concurrency: number = DEFAULT_CONCURRENCY): void {\n if (worker?.isActive()) {\n return;\n }\n worker = createWorker(concurrency);\n worker.start();\n}\n\nexport async function closeWorker(): Promise<void> {\n if (worker) {\n await worker.close();\n worker = null;\n }\n}\n\nexport function isWorkerActive(): boolean {\n return worker?.isActive() ?? false;\n}\n\nexport function getWorkerActiveCount(): number {\n return worker?.getActiveCount() ?? 
0;\n}\n","// Error reporting service\n// Example: Slack, Sentry, etc.\n\nclass Reporter {\n async reportError(message: string): Promise<void> {\n // Implement your error reporting logic\n console.error(\"Error reported:\", message);\n }\n}\n\nexport default new Reporter();\n","import { logger } from \"./logger\";\nimport { asyncHandler, asyncHandlerArray } from \"./asyncHandler\";\nimport { closeWorker } from \"../appWorker\";\nimport { closeDatabase } from \"./database\";\n\nimport reporter from \"./reporter\";\n\nexport { logger, asyncHandler, asyncHandlerArray, reporter, closeWorker, closeDatabase };\n","import Database from \"better-sqlite3\";\nimport { config } from \"../../config\";\nimport { logger } from \"../index\";\nimport path from \"path\";\nimport fs from \"fs\";\n\n/**\n * Database connection manager - Singleton pattern\n * Provides a shared SQLite database connection\n */\nclass DatabaseConnectionManager {\n private static instance: DatabaseConnectionManager;\n private db: Database.Database | null = null;\n\n private constructor() {}\n\n static getInstance(): DatabaseConnectionManager {\n if (!DatabaseConnectionManager.instance) {\n DatabaseConnectionManager.instance = new DatabaseConnectionManager();\n }\n return DatabaseConnectionManager.instance;\n }\n\n getConnection(): Database.Database {\n if (!this.db) {\n // DB_PATH is required, validated, and normalized in config/index.ts\n const dbPath = config.DB_PATH!;\n\n // Create database instance\n this.db = new Database(dbPath, {\n verbose: config.NODE_ENV === \"development\" ? (msg: unknown) => logger.debug(String(msg)) : undefined,\n });\n\n // Enable WAL mode for better concurrency\n this.db.pragma(\"journal_mode = WAL\");\n\n // Enable foreign keys\n this.db.pragma(\"foreign_keys = ON\");\n\n // Connection health check\n this.db.prepare(\"SELECT 1\").get();\n\n logger.info(`SQLite database connected: ${dbPath}`);\n }\n\n return this.db;\n }\n\n async close(): Promise<void> {\n if (this.db) {\n this.db.close();\n this.db = null;\n logger.info(\"Database connection closed\");\n }\n }\n\n isConnected(): boolean {\n return this.db !== null && this.db.open;\n }\n}\n\nexport const databaseConnectionManager = DatabaseConnectionManager.getInstance();\n","/**\n * Default limit for listing files/ddocs\n * Used by both API and CLI to ensure consistent behavior\n */\nexport const DEFAULT_LIST_LIMIT = 10;\n","import { databaseConnectionManager } from \"./connection\";\nimport type { QueryOptions } from \"../../types\";\nimport { DEFAULT_LIST_LIMIT } from \"../../domain/file/constants\";\n\nfunction getDb() {\n return databaseConnectionManager.getConnection();\n}\n\nexport class QueryBuilder {\n static select<T = any>(sql: string, params: any[] = []): T[] {\n const stmt = getDb().prepare(sql);\n return stmt.all(params) as T[];\n }\n\n static selectOne<T = any>(sql: string, params: any[] = []): T | undefined {\n const stmt = getDb().prepare(sql);\n return stmt.get(params) as T | undefined;\n }\n\n static execute(\n sql: string,\n params: any[] = [],\n ): {\n changes: number;\n lastInsertRowid: number | bigint;\n } {\n const stmt = getDb().prepare(sql);\n const result = stmt.run(params);\n return {\n changes: result.changes,\n lastInsertRowid: result.lastInsertRowid,\n };\n }\n\n static transaction<T>(callback: () => T): T {\n return getDb().transaction(callback)();\n }\n\n static paginate(sql: string, options: QueryOptions = {}): string {\n let query = sql;\n\n if (options.orderBy) {\n query += ` ORDER BY ${options.orderBy} 
${options.orderDirection || \"ASC\"}`;\n }\n\n const hasOffset = (options.offset ?? 0) > 0;\n const limit = options.limit ?? (hasOffset ? DEFAULT_LIST_LIMIT : undefined);\n\n if (limit) {\n query += ` LIMIT ${limit}`;\n }\n\n if (hasOffset) {\n query += ` OFFSET ${options.offset}`;\n }\n\n return query;\n }\n}\n","import { databaseConnectionManager } from \"./connection\";\nimport { QueryBuilder } from \"./query-builder\";\n\nfunction getDb() {\n return databaseConnectionManager.getConnection();\n}\n\nconst closeDatabase = async (): Promise<void> => {\n await databaseConnectionManager.close();\n};\n\nexport default getDb;\nexport { getDb, closeDatabase, QueryBuilder };\n","import { QueryBuilder } from \"../index\";\nimport { uuidv7 } from \"uuidv7\";\nimport type { File, FileListResponse, UpdateFilePayload } from \"../../../types\";\n\nexport type { File, FileListResponse };\n\nexport class FilesModel {\n private static readonly TABLE = \"files\";\n\n private static parseFile(fileRaw: any): File {\n let metadata: Record<string, unknown> = {};\n try {\n if (fileRaw.metadata) {\n metadata = typeof fileRaw.metadata === \"string\" ? JSON.parse(fileRaw.metadata) : fileRaw.metadata;\n }\n } catch (e) {\n // If parsing fails, use empty object\n metadata = {};\n }\n\n return {\n _id: fileRaw._id,\n ddocId: fileRaw.ddocId,\n title: fileRaw.title,\n content: fileRaw.content,\n localVersion: fileRaw.localVersion,\n onchainVersion: fileRaw.onchainVersion,\n syncStatus: fileRaw.syncStatus,\n isDeleted: fileRaw.isDeleted,\n onChainFileId: fileRaw.onChainFileId ?? null,\n portalAddress: fileRaw.portalAddress,\n metadata: metadata || {},\n createdAt: fileRaw.createdAt,\n updatedAt: fileRaw.updatedAt,\n linkKey: fileRaw.linkKey,\n linkKeyNonce: fileRaw.linkKeyNonce,\n commentKey: fileRaw.commentKey,\n link: fileRaw.link,\n };\n }\n\n static findAll(\n portalAddress: string,\n limit?: number,\n skip?: number,\n ): { files: File[]; total: number; hasNext: boolean } {\n const whereClause = \"isDeleted = 0 AND portalAddress = ?\";\n const params: any[] = [portalAddress];\n\n const countSql = `\n SELECT COUNT(*) as count \n FROM ${this.TABLE} \n WHERE ${whereClause}\n `;\n const totalResult = QueryBuilder.selectOne<{ count: number }>(countSql, params);\n const total = totalResult?.count || 0;\n const sql = `\n SELECT *\n FROM ${this.TABLE}\n WHERE ${whereClause}\n `;\n const completeSql = QueryBuilder.paginate(sql, {\n limit,\n offset: skip,\n orderBy: \"createdAt\",\n orderDirection: \"DESC\",\n });\n\n const filesRaw = QueryBuilder.select<any>(completeSql, params);\n const files = filesRaw.map(this.parseFile);\n const hasNext = skip !== undefined && limit !== undefined ? skip + limit < total : false;\n return { files, total, hasNext };\n }\n\n static findById(_id: string, portalAddress: string): File | undefined {\n const sql = `\n SELECT *\n FROM ${this.TABLE} \n WHERE _id = ? AND isDeleted = 0 AND portalAddress = ?\n `;\n const result = QueryBuilder.selectOne<any>(sql, [_id, portalAddress]);\n return result ? this.parseFile(result) : undefined;\n }\n\n static findByIdIncludingDeleted(_id: string): File | undefined {\n const sql = `\n SELECT *\n FROM ${this.TABLE} \n WHERE _id = ?\n `;\n const result = QueryBuilder.selectOne<any>(sql, [_id]);\n return result ? this.parseFile(result) : undefined;\n }\n\n static findByIdExcludingDeleted(_id: string): File | undefined {\n const sql = `\n SELECT *\n FROM ${this.TABLE} \n WHERE _id = ? 
AND isDeleted = 0\n `;\n const result = QueryBuilder.selectOne<any>(sql, [_id]);\n return result ? this.parseFile(result) : undefined;\n }\n\n static findByDDocId(ddocId: string, portalAddress: string): File | undefined {\n const sql = `\n SELECT *\n FROM ${this.TABLE} \n WHERE ddocId = ? AND isDeleted = 0 AND portalAddress = ?\n `;\n const result = QueryBuilder.selectOne<any>(sql, [ddocId, portalAddress]);\n return result ? this.parseFile(result) : undefined;\n }\n\n static searchByTitle(searchTerm: string, portalAddress: string, limit?: number, skip?: number): File[] {\n const sql = `\n SELECT *\n FROM ${this.TABLE} \n WHERE LOWER(title) LIKE LOWER(?) AND isDeleted = 0 AND portalAddress = ?\n `;\n const completeSql = QueryBuilder.paginate(sql, {\n limit,\n offset: skip,\n orderBy: \"createdAt\",\n orderDirection: \"DESC\",\n });\n const filesRaw = QueryBuilder.select<any>(completeSql, [`%${searchTerm}%`, portalAddress]);\n return filesRaw.map(this.parseFile);\n }\n\n static create(input: { title: string; content: string; ddocId: string; portalAddress: string }): File {\n const _id = uuidv7();\n const sql = `\n INSERT INTO ${this.TABLE} \n (_id, title, content, ddocId, portalAddress) \n VALUES (?, ?, ?, ?, ?)\n `;\n\n QueryBuilder.execute(sql, [_id, input.title, input.content, input.ddocId, input.portalAddress]);\n // NOTE: default values while file creation: localVersion = 1, onchainVersion = 0, syncStatus = 'pending'\n\n const created = this.findById(_id, input.portalAddress);\n if (!created) {\n throw new Error(\"Failed to create file\");\n }\n return created;\n }\n\n static update(_id: string, payload: UpdateFilePayload, portalAddress: string): File {\n const now = new Date().toISOString();\n\n const keys: string[] = [];\n const values: any[] = [];\n for (const [k, v] of Object.entries(payload)) {\n if (v !== undefined) {\n // Handle metadata specially - convert to JSON string\n if (k === \"metadata\" && typeof v === \"object\") {\n keys.push(`${k} = ?`);\n values.push(JSON.stringify(v));\n } else {\n keys.push(`${k} = ?`);\n values.push(v);\n }\n }\n }\n\n // Always add updatedAt\n keys.push(\"updatedAt = ?\");\n values.push(now, _id, portalAddress);\n\n const updateChain = keys.join(\", \");\n const sql = `UPDATE ${this.TABLE} SET ${updateChain} WHERE _id = ? 
AND portalAddress = ?`;\n\n QueryBuilder.execute(sql, values);\n\n const updated = this.findById(_id, portalAddress);\n if (!updated) {\n throw new Error(\"Failed to update file\");\n }\n return updated;\n }\n\n static softDelete(_id: string): File {\n const now = new Date().toISOString();\n const sql = `\n UPDATE ${this.TABLE} \n SET isDeleted = 1, syncStatus = 'pending', updatedAt = ?\n WHERE _id = ?\n `;\n\n QueryBuilder.execute(sql, [now, _id]);\n\n // Use findByIdIncludingDeleted since the file is now marked as deleted\n const deleted = this.findByIdIncludingDeleted(_id);\n if (!deleted) {\n throw new Error(\"Failed to delete file\");\n }\n return deleted;\n }\n}\n","import { QueryBuilder } from \"../index\";\nimport { uuidv7 } from \"uuidv7\";\nimport type { Portal } from \"../../../types\";\n\nexport type { Portal };\n\nexport class PortalsModel {\n private static readonly TABLE = \"portals\";\n\n static findByPortalAddress(portalAddress: string): Portal | undefined {\n const sql = `SELECT _id, portalAddress, portalSeed, ownerAddress, createdAt, updatedAt FROM ${this.TABLE} WHERE portalAddress = ?`;\n return QueryBuilder.selectOne<Portal>(sql, [portalAddress]);\n }\n\n static create(input: { portalAddress: string; portalSeed: string; ownerAddress: string }): Portal {\n const _id = uuidv7();\n const now = new Date().toISOString();\n const sql = `INSERT INTO ${this.TABLE} (_id, portalAddress, portalSeed, ownerAddress, createdAt, updatedAt) VALUES (?, ?, ?, ?, ?, ?)`;\n\n QueryBuilder.execute(sql, [_id, input.portalAddress, input.portalSeed, input.ownerAddress, now, now]);\n\n const created = this.findByPortalAddress(input.portalAddress);\n if (!created) {\n throw new Error(\"Failed to create portal\");\n }\n return created;\n }\n\n static update(\n portalAddress: string,\n input: {\n portalSeed?: string;\n ownerAddress?: string;\n },\n ): Portal {\n const now = new Date().toISOString();\n const keys: string[] = [];\n const values: any[] = [];\n\n for (const [k, v] of Object.entries(input)) {\n if (v !== undefined) {\n keys.push(`${k} = ?`);\n values.push(v);\n }\n }\n\n keys.push(\"updatedAt = ?\");\n values.push(now);\n\n const updateChain = keys.join(\", \");\n const sql = `UPDATE ${this.TABLE} SET ${updateChain} WHERE portalAddress = ?`;\n values.push(portalAddress);\n QueryBuilder.execute(sql, values);\n\n const updated = this.findByPortalAddress(portalAddress);\n if (!updated) {\n throw new Error(\"Failed to update portal\");\n }\n return updated;\n }\n\n static upsert(input: { portalAddress: string; portalSeed: string; ownerAddress: string }): Portal {\n const existing = this.findByPortalAddress(input.portalAddress);\n if (existing) {\n return this.update(input.portalAddress, {\n portalSeed: input.portalSeed,\n ownerAddress: input.ownerAddress,\n });\n }\n return this.create(input);\n }\n}\n","import { QueryBuilder } from \"../index\";\nimport { uuidv7 } from \"uuidv7\";\nimport type { ApiKey } from \"../../../types\";\n\nexport type { ApiKey };\n\nexport class ApiKeysModel {\n private static readonly TABLE = \"api_keys\";\n\n static create(input: {\n apiKeySeed: string;\n name: string;\n collaboratorAddress: string;\n portalAddress: string;\n }): ApiKey {\n const _id = uuidv7();\n const now = new Date().toISOString();\n const sql = `INSERT INTO ${this.TABLE} (_id, apiKeySeed, name, collaboratorAddress, portalAddress, createdAt) \n VALUES (?, ?, ?, ?, ?, ?)`;\n\n const result = QueryBuilder.execute(sql, [\n _id,\n input.apiKeySeed,\n input.name,\n input.collaboratorAddress,\n 
input.portalAddress,\n now,\n ]);\n\n if (result.changes === 0) {\n throw new Error(\"Failed to create API key\");\n }\n\n const created = this.findById(_id);\n if (!created) {\n throw new Error(\"Failed to create API key\");\n }\n return created;\n }\n\n static findById(_id: string): ApiKey | undefined {\n const sql = `SELECT _id, apiKeySeed, name, collaboratorAddress, portalAddress, createdAt, isDeleted FROM ${this.TABLE} WHERE _id = ? AND isDeleted = 0`;\n return QueryBuilder.selectOne<ApiKey>(sql, [_id]);\n }\n\n static findByCollaboratorAddress(collaboratorAddress: string): ApiKey | undefined {\n const sql = `SELECT _id, apiKeySeed, name, collaboratorAddress, portalAddress, createdAt, isDeleted FROM ${this.TABLE} WHERE collaboratorAddress = ? AND isDeleted = 0 LIMIT 1`;\n return QueryBuilder.selectOne<ApiKey>(sql, [collaboratorAddress]);\n }\n\n static delete(_id: string): void {\n const sql = `UPDATE ${this.TABLE} SET isDeleted = 1 WHERE _id = ?`;\n QueryBuilder.execute(sql, [_id]);\n }\n\n static findByPortalAddress(portalAddress: string): ApiKey | undefined {\n const sql = `SELECT _id, apiKeySeed, name, collaboratorAddress, portalAddress, createdAt, isDeleted FROM ${this.TABLE} WHERE portalAddress = ? AND isDeleted = 0`;\n return QueryBuilder.selectOne<ApiKey>(sql, [portalAddress]);\n }\n\n static findByApiKey(apiKey: string): ApiKey | undefined {\n const sql = `SELECT _id, apiKeySeed, name, collaboratorAddress, portalAddress, createdAt, isDeleted FROM ${this.TABLE} WHERE apiKeySeed = ? AND isDeleted = 0`;\n return QueryBuilder.selectOne<ApiKey>(sql, [apiKey]);\n }\n}\n","import { QueryBuilder } from \"../index\";\nimport type { File, Folder, FolderWithDDocs, FolderListResponse } from \"../../../types\";\n\nexport type { Folder, FolderWithDDocs, FolderListResponse };\n\nexport class FoldersModel {\n private static readonly TABLE = \"folders\";\n\n /**\n * List all folders with pagination\n */\n static findAll(limit?: number, skip?: number): { folders: Folder[]; total: number; hasNext: boolean } {\n // Get total count\n const countSql = `SELECT COUNT(*) as count FROM ${this.TABLE} WHERE isDeleted = 0`;\n const totalResult = QueryBuilder.selectOne<{ count: number }>(countSql);\n const total = totalResult?.count || 0;\n\n // Get paginated results\n const sql = QueryBuilder.paginate(`SELECT * FROM ${this.TABLE} WHERE isDeleted = 0`, {\n limit,\n offset: skip,\n orderBy: \"created_at\",\n orderDirection: \"DESC\",\n });\n\n const folders = QueryBuilder.select<any>(sql).map((folderRaw) => ({\n ...folderRaw,\n isDeleted: Boolean(folderRaw.isDeleted),\n }));\n\n const hasNext = skip !== undefined && limit !== undefined ? skip + limit < total : false;\n\n return { folders, total, hasNext };\n }\n\n /**\n * Get a single folder by folderRef and folderId\n * Includes ddocs array (as per API spec)\n */\n static findByFolderRefAndId(folderRef: string, folderId: string): FolderWithDDocs | undefined {\n const sql = `SELECT * FROM ${this.TABLE} WHERE folderRef = ? AND folderId = ? 
AND isDeleted = 0`;\n const folderRaw = QueryBuilder.selectOne<any>(sql, [folderRef, folderId]);\n\n if (!folderRaw) {\n return undefined;\n }\n\n const parsedFolder: Folder = {\n ...folderRaw,\n isDeleted: Boolean(folderRaw.isDeleted),\n };\n\n // Get ddocs in this folder\n // Import at runtime to avoid circular dependency\n // Note: FolderRef functionality removed in simplified schema, returning empty array\n const ddocs: File[] = [];\n\n return {\n ...parsedFolder,\n ddocs,\n };\n }\n\n /**\n * Get folder by folderRef only\n */\n static findByFolderRef(folderRef: string): Folder | undefined {\n const sql = `SELECT * FROM ${this.TABLE} WHERE folderRef = ? AND isDeleted = 0 LIMIT 1`;\n const folderRaw = QueryBuilder.selectOne<any>(sql, [folderRef]);\n\n if (!folderRaw) {\n return undefined;\n }\n\n return {\n ...folderRaw,\n isDeleted: Boolean(folderRaw.isDeleted),\n };\n }\n\n /**\n * Search folders by folderName (case-insensitive substring match)\n */\n static searchByName(searchTerm: string, limit?: number, skip?: number): Folder[] {\n const sql = QueryBuilder.paginate(\n `SELECT * FROM ${this.TABLE} \n WHERE isDeleted = 0 AND LOWER(folderName) LIKE LOWER(?)`,\n {\n limit,\n offset: skip,\n orderBy: \"created_at\",\n orderDirection: \"DESC\",\n },\n );\n\n const foldersRaw = QueryBuilder.select<any>(sql, [`%${searchTerm}%`]);\n return foldersRaw.map((folderRaw) => ({\n ...folderRaw,\n isDeleted: Boolean(folderRaw.isDeleted),\n }));\n }\n\n /**\n * Create a new folder\n */\n static create(input: {\n _id?: string;\n onchainFileId: number;\n folderId: string;\n folderRef: string;\n folderName: string;\n portalAddress: string;\n metadataIPFSHash: string;\n contentIPFSHash: string;\n lastTransactionHash?: string;\n lastTransactionBlockNumber: number;\n lastTransactionBlockTimestamp: number;\n }): Folder {\n const _id = input._id || `folder_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;\n const now = new Date().toISOString();\n\n const sql = `INSERT INTO ${this.TABLE} (\n _id, onchainFileId, folderId, folderRef, folderName, portalAddress, metadataIPFSHash,\n contentIPFSHash, isDeleted, lastTransactionHash, lastTransactionBlockNumber, \n lastTransactionBlockTimestamp, created_at, updated_at\n ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`;\n\n QueryBuilder.execute(sql, [\n _id,\n input.onchainFileId,\n input.folderId,\n input.folderRef,\n input.folderName,\n input.portalAddress,\n input.metadataIPFSHash,\n input.contentIPFSHash,\n 0, // isDeleted\n input.lastTransactionHash || null,\n input.lastTransactionBlockNumber,\n input.lastTransactionBlockTimestamp,\n now,\n now,\n ]);\n\n // Fetch the created folder (without ddocs)\n const selectSql = `SELECT * FROM ${this.TABLE} WHERE folderRef = ? AND folderId = ? 
AND isDeleted = 0`;\n const folderRaw = QueryBuilder.selectOne<any>(selectSql, [input.folderRef, input.folderId]);\n\n if (!folderRaw) {\n throw new Error(\"Failed to create folder\");\n }\n\n return {\n ...folderRaw,\n isDeleted: Boolean(folderRaw.isDeleted),\n };\n }\n}\n","import { QueryBuilder } from \"../index\";\nimport { uuidv7 } from \"uuidv7\";\nimport { notifyNewEvent } from \"../../worker/workerSignal\";\nimport type { Event, EventType, EventStatus } from \"../../../types\";\n\nexport type { Event, EventType, EventStatus };\n\nconst RETRY_DELAYS_MS = [5000, 30000, 120000];\n\ninterface EventRow {\n _id: string;\n type: string;\n timestamp: number;\n fileId: string;\n portalAddress: string;\n status: string;\n retryCount: number;\n lastError: string | null;\n lockedAt: number | null;\n nextRetryAt: number | null;\n userOpHash?: string | null;\n pendingPayload?: string | null;\n}\n\nexport class EventsModel {\n private static readonly TABLE = \"events\";\n\n static create(input: { type: EventType; fileId: string; portalAddress: string }): Event {\n const _id = uuidv7();\n const timestamp = Date.now();\n const status: EventStatus = \"pending\";\n\n const sql = `\n INSERT INTO ${this.TABLE} \n (_id, type, timestamp, fileId, portalAddress, status, retryCount, lastError, lockedAt, nextRetryAt) \n VALUES (?, ?, ?, ?, ?, ?, 0, NULL, NULL, NULL)\n `;\n\n QueryBuilder.execute(sql, [_id, input.type, timestamp, input.fileId, input.portalAddress, status]);\n\n notifyNewEvent();\n\n return {\n _id,\n type: input.type,\n timestamp,\n fileId: input.fileId,\n portalAddress: input.portalAddress,\n status,\n retryCount: 0,\n lastError: null,\n lockedAt: null,\n nextRetryAt: null,\n };\n }\n\n static findById(_id: string): Event | undefined {\n const sql = `SELECT * FROM ${this.TABLE} WHERE _id = ?`;\n const row = QueryBuilder.selectOne<EventRow>(sql, [_id]);\n return row ? this.parseEvent(row) : undefined;\n }\n\n static findNextPending(): Event | undefined {\n const sql = `\n SELECT * FROM ${this.TABLE}\n WHERE status = 'pending'\n ORDER BY timestamp ASC\n LIMIT 1\n `;\n const row = QueryBuilder.selectOne<EventRow>(sql, []);\n return row ? this.parseEvent(row) : undefined;\n }\n\n static findNextEligible(lockedFileIds: string[]): Event | undefined {\n const now = Date.now();\n\n const exclusionClause =\n lockedFileIds.length > 0 ? `AND e1.fileId NOT IN (${lockedFileIds.map(() => \"?\").join(\", \")})` : \"\";\n\n const sql = `\n SELECT e1.* FROM ${this.TABLE} e1\n WHERE e1.status = 'pending'\n AND (e1.nextRetryAt IS NULL OR e1.nextRetryAt <= ?)\n ${exclusionClause}\n AND NOT EXISTS (\n SELECT 1 FROM ${this.TABLE} e2\n WHERE e2.fileId = e1.fileId\n AND e2.status = 'pending'\n AND e2.timestamp < e1.timestamp\n )\n ORDER BY e1.timestamp ASC\n LIMIT 1\n `;\n\n const params = [now, ...lockedFileIds];\n const row = QueryBuilder.selectOne<EventRow>(sql, params);\n return row ? 
this.parseEvent(row) : undefined;\n }\n\n static markProcessing(_id: string): void {\n const sql = `\n UPDATE ${this.TABLE}\n SET status = 'processing',\n lockedAt = ?\n WHERE _id = ?\n `;\n QueryBuilder.execute(sql, [Date.now(), _id]);\n }\n\n static markProcessed(_id: string): void {\n const sql = `\n UPDATE ${this.TABLE}\n SET status = 'processed',\n lockedAt = NULL\n WHERE _id = ?\n `;\n QueryBuilder.execute(sql, [_id]);\n }\n\n static scheduleRetry(_id: string, errorMsg: string): void {\n const event = this.findById(_id);\n if (!event) return;\n\n const delay = RETRY_DELAYS_MS[Math.min(event.retryCount, RETRY_DELAYS_MS.length - 1)];\n const nextRetryAt = Date.now() + delay;\n\n const sql = `\n UPDATE ${this.TABLE}\n SET status = 'pending',\n retryCount = retryCount + 1,\n lastError = ?,\n nextRetryAt = ?,\n lockedAt = NULL\n WHERE _id = ?\n `;\n QueryBuilder.execute(sql, [errorMsg, nextRetryAt, _id]);\n }\n\n static scheduleRetryAfter(_id: string, errorMsg: string, retryAfterMs: number): void {\n const nextRetryAt = Date.now() + retryAfterMs;\n const sql = `\n UPDATE ${this.TABLE}\n SET status = 'pending',\n lastError = ?,\n nextRetryAt = ?,\n lockedAt = NULL\n WHERE _id = ?\n `;\n QueryBuilder.execute(sql, [errorMsg, nextRetryAt, _id]);\n }\n\n static markFailed(_id: string, errorMsg: string): void {\n const sql = `\n UPDATE ${this.TABLE}\n SET status = 'failed',\n lastError = ?,\n lockedAt = NULL\n WHERE _id = ?\n `;\n QueryBuilder.execute(sql, [errorMsg, _id]);\n }\n\n static listFailed(portalAddress?: string): Event[] {\n const portalClause = portalAddress != null ? \"AND portalAddress = ?\" : \"\";\n const sql = `\n SELECT * FROM ${this.TABLE}\n WHERE status = 'failed'\n ${portalClause}\n ORDER BY timestamp ASC\n `;\n const params = portalAddress != null ? [portalAddress] : [];\n const rows = QueryBuilder.select<EventRow>(sql, params);\n return rows.map((row) => this.parseEvent(row));\n }\n\n static resetFailedToPending(_id: string, portalAddress?: string): boolean {\n const portalClause = portalAddress != null ? \"AND portalAddress = ?\" : \"\";\n const sql = `\n UPDATE ${this.TABLE}\n SET status = 'pending',\n retryCount = 0,\n lastError = NULL,\n nextRetryAt = NULL,\n lockedAt = NULL\n WHERE _id = ?\n AND status = 'failed'\n ${portalClause}\n `;\n const params = portalAddress != null ? [_id, portalAddress] : [_id];\n const result = QueryBuilder.execute(sql, params);\n if (result.changes > 0) {\n notifyNewEvent();\n }\n return result.changes > 0;\n }\n\n static resetAllFailedToPending(portalAddress?: string): number {\n const portalClause = portalAddress != null ? \"AND portalAddress = ?\" : \"\";\n const sql = `\n UPDATE ${this.TABLE}\n SET status = 'pending',\n retryCount = 0,\n lastError = NULL,\n nextRetryAt = NULL,\n lockedAt = NULL\n WHERE status = 'failed'\n ${portalClause}\n `;\n const params = portalAddress != null ? 
[portalAddress] : [];\n const result = QueryBuilder.execute(sql, params);\n if (result.changes > 0) {\n notifyNewEvent();\n }\n return result.changes;\n }\n\n static resetStaleEvents(staleThreshold: number): number {\n const sql = `\n UPDATE ${this.TABLE}\n SET status = 'pending',\n lockedAt = NULL,\n userOpHash = NULL,\n pendingPayload = NULL\n WHERE status = 'processing'\n AND lockedAt IS NOT NULL\n AND lockedAt < ?\n `;\n const result = QueryBuilder.execute(sql, [staleThreshold]);\n return result.changes;\n }\n\n static setEventPendingOp(_id: string, userOpHash: string, payload: Record<string, unknown>): void {\n const sql = `UPDATE ${this.TABLE} SET userOpHash = ?, pendingPayload = ? WHERE _id = ?`;\n QueryBuilder.execute(sql, [userOpHash, JSON.stringify(payload), _id]);\n }\n\n static clearEventPendingOp(_id: string): void {\n const sql = `UPDATE ${this.TABLE} SET userOpHash = NULL, pendingPayload = NULL WHERE _id = ?`;\n QueryBuilder.execute(sql, [_id]);\n }\n\n private static parseEvent(row: EventRow): Event {\n return {\n _id: row._id,\n type: row.type as EventType,\n timestamp: row.timestamp,\n fileId: row.fileId,\n portalAddress: row.portalAddress ?? \"\",\n status: row.status as EventStatus,\n retryCount: row.retryCount,\n lastError: row.lastError,\n lockedAt: row.lockedAt,\n nextRetryAt: row.nextRetryAt,\n userOpHash: row.userOpHash ?? null,\n pendingPayload: row.pendingPayload ?? null,\n };\n }\n}\n","import { type File, FilesModel, type FileListResponse } from \"./files.model\";\nimport { PortalsModel, type Portal } from \"./portals.model\";\nimport { ApiKeysModel, type ApiKey } from \"./apikeys.model\";\nimport { type Folder, type FolderWithDDocs, type FolderListResponse, FoldersModel } from \"./folders.model\";\nimport { EventsModel, type Event, type EventType, type EventStatus } from \"./events.model\";\n\nexport { FilesModel, PortalsModel, ApiKeysModel, FoldersModel, EventsModel };\nexport type {\n File,\n FileListResponse,\n Portal,\n ApiKey,\n Folder,\n FolderWithDDocs,\n FolderListResponse,\n Event,\n EventType,\n EventStatus,\n};\n","import { PortalsModel } from \"../../infra/database/models\";\nimport type { Portal, SavePortalInput } from \"../../types\";\n\nexport function savePortal(input: SavePortalInput): Portal {\n if (!input.portalAddress || !input.portalSeed || !input.ownerAddress) {\n throw new Error(\"portalAddress, portalSeed, and ownerAddress are required\");\n }\n\n return PortalsModel.upsert(input);\n}\n","import getDb from \"../index\";\nimport { logger } from \"../../\";\n\nconst STABLE_SCHEMA = `\nCREATE TABLE IF NOT EXISTS files (\n _id TEXT PRIMARY KEY,\n ddocId TEXT NOT NULL,\n title TEXT NOT NULL,\n content TEXT NOT NULL,\n localVersion INTEGER NOT NULL DEFAULT 1,\n onchainVersion INTEGER NOT NULL DEFAULT 0,\n syncStatus TEXT NOT NULL DEFAULT 'pending',\n createdAt DATETIME DEFAULT CURRENT_TIMESTAMP,\n updatedAt DATETIME DEFAULT CURRENT_TIMESTAMP,\n isDeleted INTEGER NOT NULL DEFAULT 0,\n portalAddress TEXT NOT NULL,\n metadata TEXT DEFAULT '{}',\n onChainFileId INTEGER,\n commentKey TEXT,\n linkKey TEXT,\n linkKeyNonce TEXT,\n link TEXT\n);\nCREATE INDEX IF NOT EXISTS idx_files_createdAt ON files(createdAt);\nCREATE INDEX IF NOT EXISTS idx_files_syncStatus ON files(syncStatus);\nCREATE INDEX IF NOT EXISTS idx_files_title ON files(title);\nCREATE INDEX IF NOT EXISTS idx_files_portalAddress ON files(portalAddress);\n\nCREATE TABLE IF NOT EXISTS portals (\n _id TEXT PRIMARY KEY,\n portalAddress TEXT NOT NULL UNIQUE,\n portalSeed TEXT NOT NULL 
UNIQUE,\n ownerAddress TEXT NOT NULL,\n createdAt DATETIME DEFAULT CURRENT_TIMESTAMP,\n updatedAt DATETIME DEFAULT CURRENT_TIMESTAMP\n);\n\nCREATE TABLE IF NOT EXISTS api_keys (\n _id TEXT PRIMARY KEY,\n apiKeySeed TEXT NOT NULL UNIQUE,\n name TEXT NOT NULL,\n collaboratorAddress TEXT NOT NULL UNIQUE,\n portalAddress TEXT NOT NULL,\n createdAt DATETIME DEFAULT CURRENT_TIMESTAMP,\n isDeleted INTEGER NOT NULL DEFAULT 0\n);\n\nCREATE TABLE IF NOT EXISTS events (\n _id TEXT PRIMARY KEY,\n type TEXT NOT NULL CHECK (type IN ('create', 'update', 'delete')),\n timestamp INTEGER NOT NULL,\n fileId TEXT NOT NULL,\n status TEXT NOT NULL DEFAULT 'pending' CHECK (status IN ('pending', 'processing', 'processed', 'failed')),\n retryCount INTEGER NOT NULL DEFAULT 0,\n lastError TEXT,\n lockedAt INTEGER,\n nextRetryAt INTEGER,\n userOpHash TEXT,\n pendingPayload TEXT,\n portalAddress TEXT\n);\nCREATE INDEX IF NOT EXISTS idx_events_pending_eligible ON events (status, nextRetryAt, timestamp) WHERE status = 'pending';\nCREATE INDEX IF NOT EXISTS idx_events_file_pending_ts ON events (fileId, status, timestamp) WHERE status = 'pending';\nCREATE INDEX IF NOT EXISTS idx_events_processing_locked ON events (status, lockedAt) WHERE status = 'processing';\nCREATE INDEX IF NOT EXISTS idx_events_failed_portal ON events (portalAddress, status) WHERE status = 'failed';\n\nCREATE TABLE IF NOT EXISTS folders (\n _id TEXT PRIMARY KEY,\n onchainFileId INTEGER NOT NULL,\n folderId TEXT NOT NULL,\n folderRef TEXT NOT NULL,\n folderName TEXT NOT NULL,\n portalAddress TEXT NOT NULL,\n metadataIPFSHash TEXT NOT NULL,\n contentIPFSHash TEXT NOT NULL,\n isDeleted INTEGER NOT NULL DEFAULT 0,\n lastTransactionHash TEXT,\n lastTransactionBlockNumber INTEGER NOT NULL,\n lastTransactionBlockTimestamp INTEGER NOT NULL,\n created_at DATETIME DEFAULT CURRENT_TIMESTAMP,\n updated_at DATETIME DEFAULT CURRENT_TIMESTAMP\n);\nCREATE INDEX IF NOT EXISTS idx_folders_folderRef_folderId ON folders(folderRef, folderId);\nCREATE INDEX IF NOT EXISTS idx_folders_folderRef ON folders(folderRef);\nCREATE INDEX IF NOT EXISTS idx_folders_created_at ON folders(created_at);\n`;\n\nexport function runMigrations(): void {\n const db = getDb();\n db.exec(STABLE_SCHEMA);\n logger.debug(\"Database schema ready\");\n}\n","#!/usr/bin/env node\nimport { Command } from \"commander\";\nimport { fetchApiKeyData } from \"./fetch-api-key.js\";\nimport type { AppKeyMaterial, KeyMaterial } from \"../types\";\nimport { scaffoldConfig } from \"./scaffold-config.js\";\nimport { startAll, setupShutdownHandlers, waitForProcesses } from \"./process-manager.js\";\nimport { promptForConfig, needsPrompting } from \"./prompts.js\";\nimport { loadConfig } from \"../config/index.js\";\nimport { decryptSavedData, initializeWithData } from \"../init/index.js\";\n\nconst program = new Command()\n .name(\"fileverse-api\")\n .description(\"Run the Fileverse API server\")\n .version(\"0.0.3\")\n .option(\"--apiKey <key>\", \"API key for authentication\")\n .option(\"--rpcUrl <url>\", \"RPC URL for blockchain connection\")\n .option(\"--port <port>\", \"Port to run the server on\", \"8001\")\n .option(\"--db <path>\", \"Database path\")\n .action(async (options) => {\n try {\n console.log(\"Fileverse API - Starting initialization...\\n\");\n\n if (needsPrompting(options)) {\n const prompted = await promptForConfig({\n apiKey: options.apiKey,\n rpcUrl: options.rpcUrl,\n });\n options.apiKey = prompted.apiKey;\n options.rpcUrl = prompted.rpcUrl;\n }\n\n const data = await 
fetchApiKeyData(options.apiKey);\n console.log(\"✓ API key data retrieved\\n\");\n\n const keyMaterial = await decryptSavedData<KeyMaterial>(options.apiKey, data.encryptedKeyMaterial);\n const appMaterial = await decryptSavedData<AppKeyMaterial>(options.apiKey, data.encryptedAppMaterial);\n console.log(\"Setting up configuration...\");\n const envPath = scaffoldConfig({\n dbPath: options.db,\n port: options.port,\n apiKey: options.apiKey,\n rpcUrl: options.rpcUrl,\n });\n loadConfig();\n console.log(`✓ Configuration saved to ${envPath}\\n`);\n\n const { runMigrations } = await import(\"../infra/database/migrations/index.js\");\n runMigrations();\n console.log(\"✓ Database migrations complete\");\n\n const result = initializeWithData({\n keyMaterial,\n appMaterial,\n id: data.id,\n });\n console.log(\"✓ Portal saved\");\n if (result.apiKeySaved) {\n console.log(\"✓ API key saved\");\n } else {\n console.log(\"✓ API key already exists\");\n }\n\n console.log(\"\\nStarting services...\");\n setupShutdownHandlers();\n startAll();\n\n console.log(`\n✓ Fileverse API is running!\n\n API Server: http://127.0.0.1:${options.port}\n Worker: Active\n\n MCP: Add this to your AI agent's MCP config (e.g. .claude/mcp.json):\n\n {\n \"mcpServers\": {\n \"fileverse-api\": {\n \"command\": \"fileverse-api-mcp\"\n }\n }\n }\n\n Config is auto-read from ~/.fileverse/.env — no env vars needed.\n\nPress Ctrl+C to stop.\n`);\n\n await waitForProcesses();\n } catch (error) {\n console.error(\"\\n❌ Error:\", error instanceof Error ? error.message : error);\n process.exit(1);\n }\n });\n\nprogram.parse();\n","import axios from \"axios\";\nimport { toUint8Array } from \"js-base64\";\nimport { sha256 } from \"viem\";\nimport { BASE_CONFIG } from \"./constants\";\nimport type { ApiKeyResponse } from \"../types\";\n\nexport const fetchApiKeyData = async (apiKey: string): Promise<ApiKeyResponse> => {\n try {\n const keyHash = sha256(toUint8Array(apiKey));\n const fullUrl = BASE_CONFIG.API_URL + \"api-access\" + `/${keyHash}`;\n const response = await axios.get<ApiKeyResponse>(fullUrl);\n\n const { encryptedKeyMaterial, encryptedAppMaterial, id } = response.data;\n\n return { encryptedKeyMaterial, encryptedAppMaterial, id };\n } catch (error) {\n if (axios.isAxiosError(error)) {\n if (error.response?.status === 401) {\n throw new Error(\"Invalid API key\");\n }\n if (error.response?.status === 404) {\n throw new Error(\"API key not found\");\n }\n if (error.code === \"ECONNREFUSED\") {\n throw new Error(`Cannot connect to server at ${BASE_CONFIG.API_URL}`);\n }\n throw new Error(`Server error: ${error.response?.data?.message || error.message}`);\n }\n throw error;\n }\n};\n","import fs from \"fs\";\nimport path from \"path\";\nimport os from \"os\";\nimport { STATIC_CONFIG } from \"./constants\";\nimport type { ConfigOptions } from \"../types\";\n\nexport function getFileverseDir(): string {\n return path.join(os.homedir(), \".fileverse\");\n}\n\nfunction getDefaultDbPath(): string {\n return path.join(getFileverseDir(), \"fileverse-api.db\");\n}\n\nexport function getEnvPath(): string {\n return path.join(getFileverseDir(), \".env\");\n}\n\nexport function scaffoldConfig(options: ConfigOptions = {}): string {\n const fileverseDir = getFileverseDir();\n const envPath = getEnvPath();\n\n if (!fs.existsSync(fileverseDir)) {\n fs.mkdirSync(fileverseDir, { recursive: true });\n }\n\n const dbPath = options.dbPath || getDefaultDbPath();\n const dbDir = path.dirname(dbPath);\n if (!fs.existsSync(dbDir)) {\n fs.mkdirSync(dbDir, 
{ recursive: true });\n }\n\n const envContent = `API_KEY=${options.apiKey}\nRPC_URL=${options.rpcUrl || STATIC_CONFIG.DEFAULT_RPC_URL}\nDB_PATH=${dbPath}\nPORT=${options.port || STATIC_CONFIG.DEFAULT_PORT}\n`;\n\n fs.writeFileSync(envPath, envContent, \"utf-8\");\n\n return envPath;\n}\n","import { spawn, ChildProcess } from \"child_process\";\nimport path from \"path\";\nimport { fileURLToPath } from \"url\";\nimport { existsSync } from \"fs\";\n\ninterface ManagedProcess {\n name: string;\n process: ChildProcess;\n}\n\nconst managedProcesses: ManagedProcess[] = [];\n\nfunction getDistDir(): string {\n const __dirname = path.dirname(fileURLToPath(import.meta.url));\n return path.resolve(__dirname, \"..\");\n}\n\nfunction isDevMode(): boolean {\n const distDir = getDistDir();\n return existsSync(path.join(distDir, \"index.ts\"));\n}\n\nfunction prefixOutput(name: string, data: Buffer): void {\n const lines = data.toString().split(\"\\n\").filter(Boolean);\n for (const line of lines) {\n console.log(`[${name}] ${line}`);\n }\n}\n\nfunction spawnProcess(name: string, executable: string, scriptPath: string, extraEnv?: Record<string, string>): ChildProcess {\n const child = spawn(executable, [scriptPath], {\n stdio: [\"ignore\", \"pipe\", \"pipe\"],\n env: { ...process.env, NODE_ENV: executable === \"tsx\" ? \"development\" : \"production\", ...extraEnv },\n detached: false,\n });\n\n child.stdout?.on(\"data\", (data: Buffer) => prefixOutput(name, data));\n child.stderr?.on(\"data\", (data: Buffer) => prefixOutput(name, data));\n\n child.on(\"error\", (error) => {\n console.error(`[${name}] Process error:`, error.message);\n });\n\n child.on(\"exit\", (code, signal) => {\n if (signal) {\n console.log(`[${name}] Process terminated by signal ${signal}`);\n } else if (code !== 0) {\n console.error(`[${name}] Process exited with code ${code}`);\n }\n });\n\n managedProcesses.push({ name, process: child });\n return child;\n}\n\nexport function startApiServer(): ChildProcess {\n const distDir = getDistDir();\n const dev = isDevMode();\n const executable = dev ? \"tsx\" : \"node\";\n const apiPath = path.join(distDir, dev ? \"index.ts\" : \"index.js\");\n return spawnProcess(\"API\", executable, apiPath, { IS_CLI: \"1\" });\n}\n\nexport function startWorker(): ChildProcess {\n const distDir = getDistDir();\n const dev = isDevMode();\n const executable = dev ? \"tsx\" : \"node\";\n const workerPath = path.join(distDir, dev ? 
\"worker.ts\" : \"worker.js\");\n return spawnProcess(\"WORKER\", executable, workerPath);\n}\n\nexport function startAll(): { api: ChildProcess; worker: ChildProcess } {\n const api = startApiServer();\n const worker = startWorker();\n return { api, worker };\n}\n\nexport function setupShutdownHandlers(): void {\n const shutdown = (signal: string) => {\n console.log(`\\nReceived ${signal}, shutting down...`);\n\n for (const { name, process: child } of managedProcesses) {\n if (child.pid && !child.killed) {\n console.log(`[${name}] Stopping...`);\n child.kill(\"SIGTERM\");\n }\n }\n\n setTimeout(() => {\n for (const { name, process: child } of managedProcesses) {\n if (child.pid && !child.killed) {\n console.log(`[${name}] Force killing...`);\n child.kill(\"SIGKILL\");\n }\n }\n process.exit(0);\n }, 5000);\n };\n\n process.on(\"SIGTERM\", () => shutdown(\"SIGTERM\"));\n process.on(\"SIGINT\", () => shutdown(\"SIGINT\"));\n}\n\nexport function waitForProcesses(): Promise<void> {\n return new Promise((resolve) => {\n const checkInterval = setInterval(() => {\n const allExited = managedProcesses.every(({ process: child }) => child.exitCode !== null || child.killed);\n if (allExited) {\n clearInterval(checkInterval);\n resolve();\n }\n }, 1000);\n });\n}\n","import prompts from \"prompts\";\nimport { STATIC_CONFIG } from \"./constants\";\nimport { getRuntimeConfig } from \"../config/index.js\";\nimport type { PromptedConfig } from \"../types\";\n\nexport const promptForConfig = async (\n existingOptions: {\n apiKey?: string;\n rpcUrl?: string;\n } = {},\n): Promise<PromptedConfig> => {\n const savedConfig = getRuntimeConfig();\n const questions: prompts.PromptObject[] = [];\n\n if (!existingOptions.apiKey) {\n questions.push({\n type: \"text\",\n name: \"apiKey\",\n message: \"Enter your API Key:\",\n validate: (value: string) => value.length > 0 || \"API Key is required\",\n initial: savedConfig.API_KEY || \"\",\n });\n }\n\n if (!existingOptions.rpcUrl) {\n questions.push({\n type: \"text\",\n name: \"rpcUrl\",\n message: \"Enter RPC URL (press Enter for default):\",\n initial: savedConfig.RPC_URL || STATIC_CONFIG.DEFAULT_RPC_URL,\n });\n }\n\n if (questions.length === 0) {\n return {\n apiKey: existingOptions.apiKey!,\n rpcUrl: existingOptions.rpcUrl || STATIC_CONFIG.DEFAULT_RPC_URL,\n };\n }\n\n const response = await prompts(questions, {\n onCancel: () => {\n console.log(\"\\nSetup cancelled.\");\n process.exit(1);\n },\n });\n\n return {\n apiKey: existingOptions.apiKey || response.apiKey,\n rpcUrl: existingOptions.rpcUrl || response.rpcUrl || STATIC_CONFIG.DEFAULT_RPC_URL,\n };\n};\n\nexport function needsPrompting(options: { apiKey?: string }): boolean {\n return !options.apiKey;\n}\n","import { fetchApiKeyData } from \"../cli/fetch-api-key.js\";\nimport { savePortal } from \"../domain/portal/savePortal.js\";\nimport { addApiKey } from \"../domain/portal/saveApiKey.js\";\nimport { ApiKeysModel } from \"../infra/database/models/apikeys.model.js\";\nimport { logger } from \"../infra/index.js\";\nimport type { ApiKeyMaterialResponse, InitResult, KeyMaterial, AppKeyMaterial } from \"../types\";\nimport { deriveHKDFKey } from \"@fileverse/crypto/hkdf\";\nimport { toUint8Array } from \"js-base64\";\nimport { stringToBytes } from \"viem\";\nimport { toAESKey, aesDecrypt } from \"@fileverse/crypto/webcrypto\";\n\nexport type { InitResult };\n\nconst SAVED_DATA_ENCRYPTION_KEY_INFO = \"SAVED_DATA_ENCRYPTION_KEY\";\n\nexport function initializeWithData(data: ApiKeyMaterialResponse): InitResult 
{\n const { keyMaterial, appMaterial } = data;\n\n savePortal({\n portalAddress: appMaterial.portalAddress,\n portalSeed: appMaterial.portalSeed,\n ownerAddress: appMaterial.ownerAddress,\n });\n\n const existingApiKey = ApiKeysModel.findByApiKey(keyMaterial.apiKeySeed);\n\n if (!existingApiKey) {\n addApiKey({\n apiKeySeed: keyMaterial.apiKeySeed,\n name: keyMaterial.name,\n collaboratorAddress: keyMaterial.collaboratorAddress,\n portalAddress: appMaterial.portalAddress,\n });\n return { portalSaved: true, apiKeySaved: true };\n }\n\n return { portalSaved: true, apiKeySaved: false };\n}\n\nexport const getAesKeyFromApiKey = async (apiKey: string) => {\n const rawSecret = deriveHKDFKey(\n toUint8Array(apiKey),\n new Uint8Array([0]),\n stringToBytes(SAVED_DATA_ENCRYPTION_KEY_INFO),\n );\n return await toAESKey(rawSecret);\n};\n\nconst bytestToJSON = (bytes: Uint8Array) => {\n return JSON.parse(new TextDecoder().decode(bytes));\n};\n\nexport const decryptSavedData = async <T>(apiKey: string, encryptedData: string): Promise<T> => {\n const aesKey = await getAesKeyFromApiKey(apiKey);\n const decryptedBytes = await aesDecrypt(aesKey, toUint8Array(encryptedData));\n\n const data = bytestToJSON(decryptedBytes) as T;\n return data;\n};\n\nexport const initializeFromApiKey = async (apiKey: string): Promise<void> => {\n logger.debug(\"Fetching API key data from server...\");\n const data = await fetchApiKeyData(apiKey);\n logger.debug(\"API key data retrieved\");\n\n const keyMaterial = await decryptSavedData<KeyMaterial>(apiKey, data.encryptedKeyMaterial);\n const appMaterial = await decryptSavedData<AppKeyMaterial>(apiKey, data.encryptedAppMaterial);\n const result = initializeWithData({ keyMaterial, appMaterial, id: data.id });\n\n logger.debug(\"Portal saved\");\n if (result.apiKeySaved) {\n logger.debug(\"API key saved\");\n } else {\n logger.debug(\"API key already exists\");\n 
}\n};\n"],"mappings":";;;;;;;;;;;;AACA,OAAO,UAAU;AACjB,SAAS,qBAAqB;AAF9B;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAa,eAaA;AAbb;AAAA;AAAA;AAAA;AAAO,IAAM,gBAAgB;AAAA,MAC3B,SAAS;AAAA,MACT,YAAY;AAAA,MACZ,kBAAkB;AAAA,MAClB,cAAc;AAAA,MACd,cAAc;AAAA,MACd,iBAAiB;AAAA,MACjB,mBAAmB;AAAA,MACnB,cAAc;AAAA,MACd,WAAW;AAAA,MACX,cAAc;AAAA,IAChB;AAEO,IAAM,cAAc;AAAA;AAAA;;;ACb3B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,OAAO,YAAY;AACnB,OAAOA,WAAU;AACjB,OAAOC,SAAQ;AACf,OAAOC,SAAQ;AAMf,SAASC,cAAqB;AAC5B,MAAIF,IAAG,WAAW,cAAc,GAAG;AACjC,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAEO,SAAS,WAAW,WAAW,MAAY;AAChD,QAAM,UAAUE,YAAW;AAC3B,SAAO,OAAO,EAAE,MAAM,SAAS,SAAS,CAAC;AAC3C;AAIO,SAAS,mBAAmB;AACjC,SAAO;AAAA,IACL,IAAI,UAAU;AACZ,aAAO,QAAQ,IAAI;AAAA,IACrB;AAAA,IACA,IAAI,UAAU;AACZ,aAAO,QAAQ,IAAI,WAAW,cAAc;AAAA,IAC9C;AAAA,IACA,IAAI,UAAU;AACZ,aAAO,QAAQ,IAAI;AAAA,IACrB;AAAA,IACA,IAAI,OAAO;AACT,aAAO,QAAQ,IAAI,QAAQ,cAAc;AAAA,IAC3C;AAAA,IACA,IAAI,WAAW;AACb,aAAO,QAAQ,IAAI,YAAY;AAAA,IACjC;AAAA,IACA,IAAI,eAAe;AACjB,aAAO,QAAQ,IAAI,gBAAgB,cAAc;AAAA,IACnD;AAAA,EACF;AACF;AA5CA,IAMM,gBACA,aAqDA;AA5DN;AAAA;AAAA;AAAA;AAIA;AAEA,IAAM,iBAAiBH,MAAK,KAAK,QAAQ,IAAI,GAAG,UAAU,MAAM;AAChE,IAAM,cAAcA,MAAK,KAAKE,IAAG,QAAQ,GAAG,cAAc,MAAM;AAchE,eAAW,KAAK;AAuChB,IAAM,SAA6C;AAAA,MACjD,GAAG;AAAA,MACH,IAAI,eAAe;AACjB,eAAO,cAAc;AAAA,MACvB;AAAA,MACA,IAAI,YAAY;AACd,eAAO,cAAc;AAAA,MACvB;AAAA,MACA,IAAI,eAAe;AACjB,eAAO,cAAc;AAAA,MACvB;AAAA,MACA,IAAI,oBAAoB;AACtB,eAAO,cAAc;AAAA,MACvB;AAAA,MACA,IAAI,oBAAoB;AACtB,eAAO,cAAc;AAAA,MACvB;AAAA,MACA,IAAI,UAAU;AACZ,eAAO,QAAQ,IAAI;AAAA,MACrB;AAAA,MACA,IAAI,UAAU;AACZ,eAAO,QAAQ,IAAI,WAAW,cAAc;AAAA,MAC9C;AAAA,MACA,IAAI,UAAU;AACZ,eAAO,QAAQ,IAAI;AAAA,MACrB;AAAA,MACA,IAAI,OAAO;AACT,eAAO,QAAQ,IAAI,QAAQ,cAAc;AAAA,MAC3C;AAAA,MACA,IAAI,WAAW;AACb,eAAO,QAAQ,IAAI,YAAY;AAAA,MACjC;AAAA,MACA,IAAI,KAAK;AACP,eAAO,QAAQ,IAAI,MAAM;AAAA,MAC3B;AAAA,MACA,IAAI,eAAe;AACjB,eAAO,QAAQ,IAAI,gBAAgB,cAAc;AAAA,MACnD;AAAA,IACF;AAAA;AAAA;;;AClGA,OAAO,UAA2C;AAAlD,IAIM,cAEA,cAwCA,iBAsCO;AApFb;AAAA;AAAA;AAAA;AACA;AACA;AAEA,IAAM,eAAe,OAAO,aAAa;AAEzC,IAAM,eAAe,KAAK;AAAA,MACxB,MAAM,cAAc;AAAA,MACpB,OAAO,cAAc;AAAA,MACrB,YAAY;AAAA,QACV,UAAU,CAAC,cAAc,EAAE,MAAM,SAAS,KAAK;AAAA,QAC/C,OAAO,CAAC,WAAW,EAAE,OAAO,MAAM;AAAA,MACpC;AAAA,MACA,aAAa;AAAA,QACX,IAAI,KAAwB;AAC1B,cAAI,CAAC,IAAK,QAAO;AACjB,cAAI,cAAc;AAChB,mBAAO,EAAE,MAAM,IAAI,MAAM,SAAS,IAAI,QAAQ;AAAA,UAChD;AACA,iBAAO;AAAA,YACL,MAAM,IAAI;AAAA,YACV,SAAS,IAAI;AAAA,YACb,OAAO,IAAI;AAAA,UACb;AAAA,QACF;AAAA,MACF;AAAA,MACA,WACE,OAAO,aAAa,eAChB;AAAA,QACE,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,UAAU;AAAA,UACV,eAAe;AAAA,UACf,QAAQ;AAAA,UACR,YAAY;AAAA,UACZ,qBAAqB,CAAC,OAAO,OAAO;AAAA,QACtC;AAAA,MACF,IACA;AAAA,IACR,CAAC;AAOD,IAAM,kBAAkB,CAAC,UAAwB;AAC/C,aAAO,IAAI,SAAoB;AAC7B,cAAM,CAAC,OAAO,GAAG,IAAI,IAAI;AACzB,cAAM,MAAM,aAAa,KAAK,EAAE,KAAK,YAAY;AAEjD,YAAI,OAAO,UAAU,YAAY,UAAU,QAAQ,EAAE,iBAAiB,QAAQ;AAC5E,cAAI,OAAO,GAAG,IAAI;AAClB;AAAA,QACF;AAEA,YAAI,KAAK,SAAS,GAAG;AACnB,gBAAM,OAAO,KAAK,KAAK,SAAS,CAAC;AACjC,cAAI,gBAAgB,OAAO;AACzB,gBAAI,EAAE,KAAK,KAAK,GAAG,OAAO,GAAG,KAAK,MAAM,GAAG,EAAE,CAAC;AAC9C;AAAA,UACF;AAAA,QACF;AAEA,YAAI,iBAAiB,OAAO;AAC1B,cAAI,EAAE,KAAK,MAAM,GAAG,MAAM,OAAO;AACjC;AAAA,QACF;AAEA,YAAI,OAAO,GAAG,IAAI;AAAA,MACpB;AAAA,IACF;AAaO,IAAM,SAAiB;AAAA,MAC5B,OAAO,gBAAgB,OAAO;AAAA,MAC9B,OAAO,gBAAgB,OAAO;AAAA,MAC9B,MAAM,gBAAgB,MAAM;AAAA,MAC5B,MAAM,gBAAgB,MAAM;AAAA,MAC5B,OAAO,gBAAgB,OAAO;AAAA,MAC9B,OAAO,gBAAgB,OAAO;AAAA,MAC9B,IAAI,QAAQ;AACV,eAAO,aAAa;AAAA,MACtB;AAAA,MACA,IAAI,MAAM,KAAY;AACpB,qBAAa,QAAQ;AAAA,MACvB;AAAA,MACA,OAAO,aAAa,MAAM,KAAK,YAAY;AAAA,IAC7C;AAAA;AAAA;;;AClGA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACCA,SAAS,cAAc,cAAc,yBAAyB;AAD9D;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,YA
AY,WAAW;AAAvB;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,SAAS,SAAS,cAAc;AAAhC;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAIa,cACA,mBAKP,WAKO;AAfb,IAAAE,kBAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AAcA;AACA;AAbO,IAAM,eAAe,cAAc;AACnC,IAAM,oBAAoB,cAAc;AAK/C,IAAM,YAAY;AAAA,MAChB;AAAA,MACA;AAAA,IACF;AAEO,IAAM,QAAQ,UAAU,YAAsC;AAAA;AAAA;;;ACfrE,SAAS,oBAAoB,MAAM,aAAa,OAAO,eAAiD;AAExG,SAAS,2BAA2B;AACpC,SAAS,gCAAgC;AACzC,SAAS,0BAA0B;AACnC,SAAS,2BAA2B;AAEpC,SAAS,0BAA0B;AAPnC;AAAA;AAAA;AAAA;AAMA,IAAAC;AAAA;AAAA;;;ACNA,SAAc,SAAAC,cAAa;AAC3B,SAAS,2BAA2B;AADpC;AAAA;AAAA;AAAA;AAEA;AAEA;AAAA;AAAA;;;ACHA,SAAS,2BAA2B;AADpC;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,SAAS,uBAAuB;AAChC,SAAS,eAAe,uBAAAC,4BAA2B;AACnD,SAAS,iBAAiB,qBAAqB;AAC/C,SAAS,wBAAwB;AACjC,OAAO,UAAU;AAEjB,OAAO,eAAe;AACtB,SAAS,gBAAgB,gBAAAC,qBAAoB;AAE7C,SAAS,UAAU,kBAAkB;AACrC,OAAOC,YAAW;AAGlB,SAAS,oBAA8B,sBAAgC;AAbvE;AAAA;AAAA;AAAA;AAQA;AAGA,IAAAC;AAAA;AAAA;;;ACXA,SAAS,kBAAAC,iBAAgB,gBAAAC,qBAAoB;AAe7C,SAAS,gBAAgB,oBAAoB;AAG7C,SAAS,qBAAqB;AAlB9B;AAAA;AAAA;AAAA;AAEA;AAcA;AACA,IAAAC;AAEA;AAAA;AAAA;;;ACfA,SAAS,kBAAAC,iBAAgB,gBAAAC,qBAAoB;AAC7C,SAAc,qBAAqB;AACnC,SAAS,qBAAqB;AAC9B,SAAS,+BAA+B;AACxC,YAAYC,YAAW;AARvB;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AAMA;AACA;AACA;AAAA;AAAA;;;ACRO,SAAS,UAAU,OAA+B;AACvD,MAAI,CAAC,MAAM,cAAc,CAAC,MAAM,QAAQ,CAAC,MAAM,uBAAuB,CAAC,MAAM,eAAe;AAC1F,UAAM,IAAI,MAAM,uEAAuE;AAAA,EACzF;AAEA,QAAM,SAAS,aAAa,oBAAoB,MAAM,aAAa;AACnE,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,uBAAuB,MAAM,aAAa,iBAAiB;AAAA,EAC7E;AAEA,SAAO,aAAa,OAAO,KAAK;AAClC;AAdA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AAAA;AAAA;;;ACHA,SAAS,wBAAwB;AAAjC;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AAEA;AACA;AACA;AACA,IAAAC;AACA;AAAA;AAAA;;;ACRA,SAAS,oBAAoB;AAA7B,IAEM,cAEA;AAJN;AAAA;AAAA;AAAA;AAEA,IAAM,eAAN,cAA2B,aAAa;AAAA,IAAC;AAEzC,IAAM,eAAe,IAAI,aAAa;AACtC,iBAAa,gBAAgB,EAAE;AAAA;AAAA;;;ACL/B,IAQM;AARN;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AAEA;AAGA,IAAM,qBAAqB,IAAI,KAAK;AAAA;AAAA;;;ACRpC,IAAAC,eAAA;AAAA;AAAA;AAAA;AAAA;AACA;AAAA;AAAA;;;ACDA;AAAA;AAAA;AAAA;AAAA,IAAAC;AAAA;AAAA;;;ACAA,IAGM,UAOC;AAVP;AAAA;AAAA;AAAA;AAGA,IAAM,WAAN,MAAe;AAAA,MACb,MAAM,YAAY,SAAgC;AAEhD,gBAAQ,MAAM,mBAAmB,OAAO;AAAA,MAC1C;AAAA,IACF;AAEA,IAAO,mBAAQ,IAAI,SAAS;AAAA;AAAA;;;ACV5B;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AAEA;AAAA;AAAA;;;ACLA,OAAO,cAAc;AAArB,IAUM,2BAmDO;AA7Db;AAAA;AAAA;AAAA;AACA;AACA;AAQA,IAAM,4BAAN,MAAM,2BAA0B;AAAA,MAC9B,OAAe;AAAA,MACP,KAA+B;AAAA,MAE/B,cAAc;AAAA,MAAC;AAAA,MAEvB,OAAO,cAAyC;AAC9C,YAAI,CAAC,2BAA0B,UAAU;AACvC,qCAA0B,WAAW,IAAI,2BAA0B;AAAA,QACrE;AACA,eAAO,2BAA0B;AAAA,MACnC;AAAA,MAEA,gBAAmC;AACjC,YAAI,CAAC,KAAK,IAAI;AAEZ,gBAAM,SAAS,OAAO;AAGtB,eAAK,KAAK,IAAI,SAAS,QAAQ;AAAA,YAC7B,SAAS,OAAO,aAAa,gBAAgB,CAAC,QAAiB,OAAO,MAAM,OAAO,GAAG,CAAC,IAAI;AAAA,UAC7F,CAAC;AAGD,eAAK,GAAG,OAAO,oBAAoB;AAGnC,eAAK,GAAG,OAAO,mBAAmB;AAGlC,eAAK,GAAG,QAAQ,UAAU,EAAE,IAAI;AAEhC,iBAAO,KAAK,8BAA8B,MAAM,EAAE;AAAA,QACpD;AAEA,eAAO,KAAK;AAAA,MACd;AAAA,MAEA,MAAM,QAAuB;AAC3B,YAAI,KAAK,IAAI;AACX,eAAK,GAAG,MAAM;AACd,eAAK,KAAK;AACV,iBAAO,KAAK,4BAA4B;AAAA,QAC1C;AAAA,MACF;AAAA,MAEA,cAAuB;AACrB,eAAO,KAAK,OAAO,QAAQ,KAAK,GAAG;AAAA,MACrC;AAAA,IACF;AAEO,IAAM,4BAA4B,0BAA0B,YAAY;AAAA;AAAA;;;AC7D/E,IAIa;AAJb,IAAAC,kBAAA;AAAA;AAAA;AAAA;AAIO,IAAM,qBAAqB;AAAA;AAAA;;;ACAlC,SAAS,QAAQ;AACf,SAAO,0BAA0B,cAAc;AACjD;AANA,IAQa;AARb;AAAA;AAAA;AAAA;AAAA;AAEA,IAAAC;AAMO,IAAM,eAAN,MAAmB;AAAA,MACxB,OAAO,OAAgB,KAAa,SAAgB,CAAC,GAAQ;AAC3D,cAAM,OAAO,MAAM,EAAE,QAAQ,GAAG;AAChC,eAAO,KAAK,IAAI,MAAM;AAAA,MACxB;AAAA,MAEA,OAAO,UAAmB,KAAa,SAAgB,CAAC,GAAkB;A
ACxE,cAAM,OAAO,MAAM,EAAE,QAAQ,GAAG;AAChC,eAAO,KAAK,IAAI,MAAM;AAAA,MACxB;AAAA,MAEA,OAAO,QACL,KACA,SAAgB,CAAC,GAIjB;AACA,cAAM,OAAO,MAAM,EAAE,QAAQ,GAAG;AAChC,cAAM,SAAS,KAAK,IAAI,MAAM;AAC9B,eAAO;AAAA,UACL,SAAS,OAAO;AAAA,UAChB,iBAAiB,OAAO;AAAA,QAC1B;AAAA,MACF;AAAA,MAEA,OAAO,YAAe,UAAsB;AAC1C,eAAO,MAAM,EAAE,YAAY,QAAQ,EAAE;AAAA,MACvC;AAAA,MAEA,OAAO,SAAS,KAAa,UAAwB,CAAC,GAAW;AAC/D,YAAI,QAAQ;AAEZ,YAAI,QAAQ,SAAS;AACnB,mBAAS,aAAa,QAAQ,OAAO,IAAI,QAAQ,kBAAkB,KAAK;AAAA,QAC1E;AAEA,cAAM,aAAa,QAAQ,UAAU,KAAK;AAC1C,cAAM,QAAQ,QAAQ,UAAU,YAAY,qBAAqB;AAEjE,YAAI,OAAO;AACT,mBAAS,UAAU,KAAK;AAAA,QAC1B;AAEA,YAAI,WAAW;AACb,mBAAS,WAAW,QAAQ,MAAM;AAAA,QACpC;AAEA,eAAO;AAAA,MACT;AAAA,IACF;AAAA;AAAA;;;ACvDA,SAASC,SAAQ;AACf,SAAO,0BAA0B,cAAc;AACjD;AALA,IAWO;AAXP;AAAA;AAAA;AAAA;AAAA;AACA;AAUA,IAAO,mBAAQA;AAAA;AAAA;;;ACVf,SAAS,cAAc;AADvB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACCA,SAAS,UAAAC,eAAc;AADvB,IAMa;AANb;AAAA;AAAA;AAAA;AAAA;AAMO,IAAM,eAAN,MAAmB;AAAA,MACxB,OAAwB,QAAQ;AAAA,MAEhC,OAAO,oBAAoB,eAA2C;AACpE,cAAM,MAAM,kFAAkF,KAAK,KAAK;AACxG,eAAO,aAAa,UAAkB,KAAK,CAAC,aAAa,CAAC;AAAA,MAC5D;AAAA,MAEA,OAAO,OAAO,OAAoF;AAChG,cAAM,MAAMA,QAAO;AACnB,cAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,cAAM,MAAM,eAAe,KAAK,KAAK;AAErC,qBAAa,QAAQ,KAAK,CAAC,KAAK,MAAM,eAAe,MAAM,YAAY,MAAM,cAAc,KAAK,GAAG,CAAC;AAEpG,cAAM,UAAU,KAAK,oBAAoB,MAAM,aAAa;AAC5D,YAAI,CAAC,SAAS;AACZ,gBAAM,IAAI,MAAM,yBAAyB;AAAA,QAC3C;AACA,eAAO;AAAA,MACT;AAAA,MAEA,OAAO,OACL,eACA,OAIQ;AACR,cAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,cAAM,OAAiB,CAAC;AACxB,cAAM,SAAgB,CAAC;AAEvB,mBAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,KAAK,GAAG;AAC1C,cAAI,MAAM,QAAW;AACnB,iBAAK,KAAK,GAAG,CAAC,MAAM;AACpB,mBAAO,KAAK,CAAC;AAAA,UACf;AAAA,QACF;AAEA,aAAK,KAAK,eAAe;AACzB,eAAO,KAAK,GAAG;AAEf,cAAM,cAAc,KAAK,KAAK,IAAI;AAClC,cAAM,MAAM,UAAU,KAAK,KAAK,QAAQ,WAAW;AACnD,eAAO,KAAK,aAAa;AACzB,qBAAa,QAAQ,KAAK,MAAM;AAEhC,cAAM,UAAU,KAAK,oBAAoB,aAAa;AACtD,YAAI,CAAC,SAAS;AACZ,gBAAM,IAAI,MAAM,yBAAyB;AAAA,QAC3C;AACA,eAAO;AAAA,MACT;AAAA,MAEA,OAAO,OAAO,OAAoF;AAChG,cAAM,WAAW,KAAK,oBAAoB,MAAM,aAAa;AAC7D,YAAI,UAAU;AACZ,iBAAO,KAAK,OAAO,MAAM,eAAe;AAAA,YACtC,YAAY,MAAM;AAAA,YAClB,cAAc,MAAM;AAAA,UACtB,CAAC;AAAA,QACH;AACA,eAAO,KAAK,OAAO,KAAK;AAAA,MAC1B;AAAA,IACF;AAAA;AAAA;;;ACtEA,SAAS,UAAAC,eAAc;AADvB,IAMa;AANb;AAAA;AAAA;AAAA;AAAA;AAMO,IAAM,eAAN,MAAmB;AAAA,MACxB,OAAwB,QAAQ;AAAA,MAEhC,OAAO,OAAO,OAKH;AACT,cAAM,MAAMA,QAAO;AACnB,cAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,cAAM,MAAM,eAAe,KAAK,KAAK;AAAA;AAGrC,cAAM,SAAS,aAAa,QAAQ,KAAK;AAAA,UACvC;AAAA,UACA,MAAM;AAAA,UACN,MAAM;AAAA,UACN,MAAM;AAAA,UACN,MAAM;AAAA,UACN;AAAA,QACF,CAAC;AAED,YAAI,OAAO,YAAY,GAAG;AACxB,gBAAM,IAAI,MAAM,0BAA0B;AAAA,QAC5C;AAEA,cAAM,UAAU,KAAK,SAAS,GAAG;AACjC,YAAI,CAAC,SAAS;AACZ,gBAAM,IAAI,MAAM,0BAA0B;AAAA,QAC5C;AACA,eAAO;AAAA,MACT;AAAA,MAEA,OAAO,SAAS,KAAiC;AAC/C,cAAM,MAAM,+FAA+F,KAAK,KAAK;AACrH,eAAO,aAAa,UAAkB,KAAK,CAAC,GAAG,CAAC;AAAA,MAClD;AAAA,MAEA,OAAO,0BAA0B,qBAAiD;AAChF,cAAM,MAAM,+FAA+F,KAAK,KAAK;AACrH,eAAO,aAAa,UAAkB,KAAK,CAAC,mBAAmB,CAAC;AAAA,MAClE;AAAA,MAEA,OAAO,OAAO,KAAmB;AAC/B,cAAM,MAAM,UAAU,KAAK,KAAK;AAChC,qBAAa,QAAQ,KAAK,CAAC,GAAG,CAAC;AAAA,MACjC;AAAA,MAEA,OAAO,oBAAoB,eAA2C;AACpE,cAAM,MAAM,+FAA+F,KAAK,KAAK;AACrH,eAAO,aAAa,UAAkB,KAAK,CAAC,aAAa,CAAC;AAAA,MAC5D;AAAA,MAEA,OAAO,aAAa,QAAoC;AACtD,cAAM,MAAM,+FAA+F,KAAK,KAAK;AACrH,eAAO,aAAa,UAAkB,KAAK,CAAC,MAAM,CAAC;AAAA,MACrD;AAAA,IACF;AAAA;AAAA;;;AChEA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACCA,SAAS,UAAAC,eAAc;AADvB;AAAA;AAAA;AAAA;AAAA;AAEA;AAAA;AAAA;;;ACFA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AAAA;AAAA;;;ACDO,SAAS,WAAW,OAAgC;AACzD,MAAI,CAAC,MAAM,iBAAiB,CAAC,MAAM,cAAc,CAAC,MAAM,cAAc;AACpE,UAAM,IAAI,MAAM,0DAA0D;AAAA,EAC5E;AAEA,SAAO,aAAa,OAAO,KAAK;AAClC;AATA;AA
AA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAuFO,SAAS,gBAAsB;AACpC,QAAM,KAAK,iBAAM;AACjB,KAAG,KAAK,aAAa;AACrB,SAAO,MAAM,uBAAuB;AACtC;AA3FA,IAGM;AAHN;AAAA;AAAA;AAAA;AAAA;AACA;AAEA,IAAM,gBAAgB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACHtB;AACA,SAAS,eAAe;;;ACDxB;AAGA;AAHA,OAAO,WAAW;AAClB,SAAS,oBAAoB;AAC7B,SAAS,cAAc;AAIhB,IAAM,kBAAkB,OAAO,WAA4C;AAChF,MAAI;AACF,UAAM,UAAU,OAAO,aAAa,MAAM,CAAC;AAC3C,UAAM,UAAU,YAAY,UAAU,cAAmB,OAAO;AAChE,UAAM,WAAW,MAAM,MAAM,IAAoB,OAAO;AAExD,UAAM,EAAE,sBAAsB,sBAAsB,GAAG,IAAI,SAAS;AAEpE,WAAO,EAAE,sBAAsB,sBAAsB,GAAG;AAAA,EAC1D,SAAS,OAAO;AACd,QAAI,MAAM,aAAa,KAAK,GAAG;AAC7B,UAAI,MAAM,UAAU,WAAW,KAAK;AAClC,cAAM,IAAI,MAAM,iBAAiB;AAAA,MACnC;AACA,UAAI,MAAM,UAAU,WAAW,KAAK;AAClC,cAAM,IAAI,MAAM,mBAAmB;AAAA,MACrC;AACA,UAAI,MAAM,SAAS,gBAAgB;AACjC,cAAM,IAAI,MAAM,+BAA+B,YAAY,OAAO,EAAE;AAAA,MACtE;AACA,YAAM,IAAI,MAAM,iBAAiB,MAAM,UAAU,MAAM,WAAW,MAAM,OAAO,EAAE;AAAA,IACnF;AACA,UAAM;AAAA,EACR;AACF;;;AC9BA;AAGA;AAHA,OAAO,QAAQ;AACf,OAAOC,WAAU;AACjB,OAAO,QAAQ;AAIR,SAAS,kBAA0B;AACxC,SAAOA,MAAK,KAAK,GAAG,QAAQ,GAAG,YAAY;AAC7C;AAEA,SAAS,mBAA2B;AAClC,SAAOA,MAAK,KAAK,gBAAgB,GAAG,kBAAkB;AACxD;AAEO,SAAS,aAAqB;AACnC,SAAOA,MAAK,KAAK,gBAAgB,GAAG,MAAM;AAC5C;AAEO,SAAS,eAAe,UAAyB,CAAC,GAAW;AAClE,QAAM,eAAe,gBAAgB;AACrC,QAAM,UAAU,WAAW;AAE3B,MAAI,CAAC,GAAG,WAAW,YAAY,GAAG;AAChC,OAAG,UAAU,cAAc,EAAE,WAAW,KAAK,CAAC;AAAA,EAChD;AAEA,QAAM,SAAS,QAAQ,UAAU,iBAAiB;AAClD,QAAM,QAAQA,MAAK,QAAQ,MAAM;AACjC,MAAI,CAAC,GAAG,WAAW,KAAK,GAAG;AACzB,OAAG,UAAU,OAAO,EAAE,WAAW,KAAK,CAAC;AAAA,EACzC;AAEA,QAAM,aAAa,WAAW,QAAQ,MAAM;AAAA,UACpC,QAAQ,UAAU,cAAc,eAAe;AAAA,UAC/C,MAAM;AAAA,OACT,QAAQ,QAAQ,cAAc,YAAY;AAAA;AAG/C,KAAG,cAAc,SAAS,YAAY,OAAO;AAE7C,SAAO;AACT;;;ACzCA;AAAA,SAAS,aAA2B;AACpC,OAAOC,WAAU;AACjB,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,kBAAkB;AAO3B,IAAM,mBAAqC,CAAC;AAE5C,SAAS,aAAqB;AAC5B,QAAMC,aAAYF,MAAK,QAAQC,eAAc,YAAY,GAAG,CAAC;AAC7D,SAAOD,MAAK,QAAQE,YAAW,IAAI;AACrC;AAEA,SAAS,YAAqB;AAC5B,QAAM,UAAU,WAAW;AAC3B,SAAO,WAAWF,MAAK,KAAK,SAAS,UAAU,CAAC;AAClD;AAEA,SAAS,aAAa,MAAc,MAAoB;AACtD,QAAM,QAAQ,KAAK,SAAS,EAAE,MAAM,IAAI,EAAE,OAAO,OAAO;AACxD,aAAW,QAAQ,OAAO;AACxB,YAAQ,IAAI,IAAI,IAAI,KAAK,IAAI,EAAE;AAAA,EACjC;AACF;AAEA,SAAS,aAAa,MAAc,YAAoB,YAAoB,UAAiD;AAC3H,QAAM,QAAQ,MAAM,YAAY,CAAC,UAAU,GAAG;AAAA,IAC5C,OAAO,CAAC,UAAU,QAAQ,MAAM;AAAA,IAChC,KAAK,EAAE,GAAG,QAAQ,KAAK,UAAU,eAAe,QAAQ,gBAAgB,cAAc,GAAG,SAAS;AAAA,IAClG,UAAU;AAAA,EACZ,CAAC;AAED,QAAM,QAAQ,GAAG,QAAQ,CAAC,SAAiB,aAAa,MAAM,IAAI,CAAC;AACnE,QAAM,QAAQ,GAAG,QAAQ,CAAC,SAAiB,aAAa,MAAM,IAAI,CAAC;AAEnE,QAAM,GAAG,SAAS,CAAC,UAAU;AAC3B,YAAQ,MAAM,IAAI,IAAI,oBAAoB,MAAM,OAAO;AAAA,EACzD,CAAC;AAED,QAAM,GAAG,QAAQ,CAAC,MAAM,WAAW;AACjC,QAAI,QAAQ;AACV,cAAQ,IAAI,IAAI,IAAI,kCAAkC,MAAM,EAAE;AAAA,IAChE,WAAW,SAAS,GAAG;AACrB,cAAQ,MAAM,IAAI,IAAI,8BAA8B,IAAI,EAAE;AAAA,IAC5D;AAAA,EACF,CAAC;AAED,mBAAiB,KAAK,EAAE,MAAM,SAAS,MAAM,CAAC;AAC9C,SAAO;AACT;AAEO,SAAS,iBAA+B;AAC7C,QAAM,UAAU,WAAW;AAC3B,QAAM,MAAM,UAAU;AACtB,QAAM,aAAa,MAAM,QAAQ;AACjC,QAAM,UAAUA,MAAK,KAAK,SAAS,MAAM,aAAa,UAAU;AAChE,SAAO,aAAa,OAAO,YAAY,SAAS,EAAE,QAAQ,IAAI,CAAC;AACjE;AAEO,SAAS,cAA4B;AAC1C,QAAM,UAAU,WAAW;AAC3B,QAAM,MAAM,UAAU;AACtB,QAAM,aAAa,MAAM,QAAQ;AACjC,QAAM,aAAaA,MAAK,KAAK,SAAS,MAAM,cAAc,WAAW;AACrE,SAAO,aAAa,UAAU,YAAY,UAAU;AACtD;AAEO,SAAS,WAAwD;AACtE,QAAM,MAAM,eAAe;AAC3B,QAAM,
SAAS,YAAY;AAC3B,SAAO,EAAE,KAAK,OAAO;AACvB;AAEO,SAAS,wBAA8B;AAC5C,QAAM,WAAW,CAAC,WAAmB;AACnC,YAAQ,IAAI;AAAA,WAAc,MAAM,oBAAoB;AAEpD,eAAW,EAAE,MAAM,SAAS,MAAM,KAAK,kBAAkB;AACvD,UAAI,MAAM,OAAO,CAAC,MAAM,QAAQ;AAC9B,gBAAQ,IAAI,IAAI,IAAI,eAAe;AACnC,cAAM,KAAK,SAAS;AAAA,MACtB;AAAA,IACF;AAEA,eAAW,MAAM;AACf,iBAAW,EAAE,MAAM,SAAS,MAAM,KAAK,kBAAkB;AACvD,YAAI,MAAM,OAAO,CAAC,MAAM,QAAQ;AAC9B,kBAAQ,IAAI,IAAI,IAAI,oBAAoB;AACxC,gBAAM,KAAK,SAAS;AAAA,QACtB;AAAA,MACF;AACA,cAAQ,KAAK,CAAC;AAAA,IAChB,GAAG,GAAI;AAAA,EACT;AAEA,UAAQ,GAAG,WAAW,MAAM,SAAS,SAAS,CAAC;AAC/C,UAAQ,GAAG,UAAU,MAAM,SAAS,QAAQ,CAAC;AAC/C;AAEO,SAAS,mBAAkC;AAChD,SAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,UAAM,gBAAgB,YAAY,MAAM;AACtC,YAAM,YAAY,iBAAiB,MAAM,CAAC,EAAE,SAAS,MAAM,MAAM,MAAM,aAAa,QAAQ,MAAM,MAAM;AACxG,UAAI,WAAW;AACb,sBAAc,aAAa;AAC3B,gBAAQ;AAAA,MACV;AAAA,IACF,GAAG,GAAI;AAAA,EACT,CAAC;AACH;;;ACjHA;AACA;AACA;AAFA,OAAO,aAAa;AAKb,IAAM,kBAAkB,OAC7B,kBAGI,CAAC,MACuB;AAC5B,QAAM,cAAc,iBAAiB;AACrC,QAAM,YAAoC,CAAC;AAE3C,MAAI,CAAC,gBAAgB,QAAQ;AAC3B,cAAU,KAAK;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,UAAU,CAAC,UAAkB,MAAM,SAAS,KAAK;AAAA,MACjD,SAAS,YAAY,WAAW;AAAA,IAClC,CAAC;AAAA,EACH;AAEA,MAAI,CAAC,gBAAgB,QAAQ;AAC3B,cAAU,KAAK;AAAA,MACb,MAAM;AAAA,MACN,MAAM;AAAA,MACN,SAAS;AAAA,MACT,SAAS,YAAY,WAAW,cAAc;AAAA,IAChD,CAAC;AAAA,EACH;AAEA,MAAI,UAAU,WAAW,GAAG;AAC1B,WAAO;AAAA,MACL,QAAQ,gBAAgB;AAAA,MACxB,QAAQ,gBAAgB,UAAU,cAAc;AAAA,IAClD;AAAA,EACF;AAEA,QAAM,WAAW,MAAM,QAAQ,WAAW;AAAA,IACxC,UAAU,MAAM;AACd,cAAQ,IAAI,oBAAoB;AAChC,cAAQ,KAAK,CAAC;AAAA,IAChB;AAAA,EACF,CAAC;AAED,SAAO;AAAA,IACL,QAAQ,gBAAgB,UAAU,SAAS;AAAA,IAC3C,QAAQ,gBAAgB,UAAU,SAAS,UAAU,cAAc;AAAA,EACrE;AACF;AAEO,SAAS,eAAe,SAAuC;AACpE,SAAO,CAAC,QAAQ;AAClB;;;AJhDA;;;AKPA;AACA;AACA;AACA;AACA;AAEA,SAAS,iBAAAG,sBAAqB;AAC9B,SAAS,gBAAAC,qBAAoB;AAC7B,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,YAAAC,WAAU,kBAAkB;AAIrC,IAAM,iCAAiC;AAEhC,SAAS,mBAAmB,MAA0C;AAC3E,QAAM,EAAE,aAAa,YAAY,IAAI;AAErC,aAAW;AAAA,IACT,eAAe,YAAY;AAAA,IAC3B,YAAY,YAAY;AAAA,IACxB,cAAc,YAAY;AAAA,EAC5B,CAAC;AAED,QAAM,iBAAiB,aAAa,aAAa,YAAY,UAAU;AAEvE,MAAI,CAAC,gBAAgB;AACnB,cAAU;AAAA,MACR,YAAY,YAAY;AAAA,MACxB,MAAM,YAAY;AAAA,MAClB,qBAAqB,YAAY;AAAA,MACjC,eAAe,YAAY;AAAA,IAC7B,CAAC;AACD,WAAO,EAAE,aAAa,MAAM,aAAa,KAAK;AAAA,EAChD;AAEA,SAAO,EAAE,aAAa,MAAM,aAAa,MAAM;AACjD;AAEO,IAAM,sBAAsB,OAAO,WAAmB;AAC3D,QAAM,YAAYH;AAAA,IAChBC,cAAa,MAAM;AAAA,IACnB,IAAI,WAAW,CAAC,CAAC,CAAC;AAAA,IAClBC,eAAc,8BAA8B;AAAA,EAC9C;AACA,SAAO,MAAMC,UAAS,SAAS;AACjC;AAEA,IAAM,eAAe,CAAC,UAAsB;AAC1C,SAAO,KAAK,MAAM,IAAI,YAAY,EAAE,OAAO,KAAK,CAAC;AACnD;AAEO,IAAM,mBAAmB,OAAU,QAAgB,kBAAsC;AAC9F,QAAM,SAAS,MAAM,oBAAoB,MAAM;AAC/C,QAAM,iBAAiB,MAAM,WAAW,QAAQF,cAAa,aAAa,CAAC;AAE3E,QAAM,OAAO,aAAa,cAAc;AACxC,SAAO;AACT;;;ALhDA,IAAM,UAAU,IAAI,QAAQ,EACzB,KAAK,eAAe,EACpB,YAAY,8BAA8B,EAC1C,QAAQ,OAAO,EACf,OAAO,kBAAkB,4BAA4B,EACrD,OAAO,kBAAkB,mCAAmC,EAC5D,OAAO,iBAAiB,6BAA6B,MAAM,EAC3D,OAAO,eAAe,eAAe,EACrC,OAAO,OAAO,YAAY;AACzB,MAAI;AACF,YAAQ,IAAI,8CAA8C;AAE1D,QAAI,eAAe,OAAO,GAAG;AAC3B,YAAM,WAAW,MAAM,gBAAgB;AAAA,QACrC,QAAQ,QAAQ;AAAA,QAChB,QAAQ,QAAQ;AAAA,MAClB,CAAC;AACD,cAAQ,SAAS,SAAS;AAC1B,cAAQ,SAAS,SAAS;AAAA,IAC5B;AAEA,UAAM,OAAO,MAAM,gBAAgB,QAAQ,MAAM;AACjD,YAAQ,IAAI,iCAA4B;AAExC,UAAM,cAAc,MAAM,iBAA8B,QAAQ,QAAQ,KAAK,oBAAoB;AACjG,UAAM,cAAc,MAAM,iBAAiC,QAAQ,QAAQ,KAAK,oBAAoB;AACpG,YAAQ,IAAI,6BAA6B;AACzC,UAAM,UAAU,eAAe;AAAA,MAC7B,QAAQ,QAAQ;AAAA,MAChB,MAAM,QAAQ;AAAA,MACd,QAAQ,QAAQ;AAAA,MAChB,QAAQ,QAAQ;AAAA,IAClB,CAAC;AACD,eAAW;AACX,YAAQ,IAAI,iCAA4B,OAAO;AAAA,CAAI;AAEnD,UAAM,EAAE,eAAAG,eAAc,IAAI,MAAM;AAChC,IAAAA,eAAc;AACd,YAAQ,IAAI,qCAAgC;AAE5C,UAAM,SAAS,mBAAmB;AAAA,MAChC;AAAA,MACA;AAAA,MACA,IAAI,KAAK;AAAA,IACX,CAAC;AACD,YAAQ,IAAI,qBAAgB;AA
C5B,QAAI,OAAO,aAAa;AACtB,cAAQ,IAAI,sBAAiB;AAAA,IAC/B,OAAO;AACL,cAAQ,IAAI,+BAA0B;AAAA,IACxC;AAEA,YAAQ,IAAI,wBAAwB;AACpC,0BAAsB;AACtB,aAAS;AAET,YAAQ,IAAI;AAAA;AAAA;AAAA,iCAGe,QAAQ,IAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,CAgB5C;AAEK,UAAM,iBAAiB;AAAA,EACzB,SAAS,OAAO;AACd,YAAQ,MAAM,mBAAc,iBAAiB,QAAQ,MAAM,UAAU,KAAK;AAC1E,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF,CAAC;AAEH,QAAQ,MAAM;","names":["path","fs","os","getEnvPath","init_constants","init_constants","toHex","generateRandomBytes","toUint8Array","axios","init_constants","fromUint8Array","toUint8Array","init_constants","fromUint8Array","toUint8Array","ucans","init_constants","init_worker","init_worker","init_constants","init_constants","getDb","uuidv7","uuidv7","uuidv7","path","path","fileURLToPath","__dirname","deriveHKDFKey","toUint8Array","stringToBytes","toAESKey","runMigrations"]}