@salesforce/storefront-next-dev 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.txt +181 -0
- package/README.md +302 -0
- package/dist/cartridge-services/index.d.ts +60 -0
- package/dist/cartridge-services/index.d.ts.map +1 -0
- package/dist/cartridge-services/index.js +954 -0
- package/dist/cartridge-services/index.js.map +1 -0
- package/dist/cli.js +3373 -0
- package/dist/configs/react-router.config.d.ts +13 -0
- package/dist/configs/react-router.config.d.ts.map +1 -0
- package/dist/configs/react-router.config.js +36 -0
- package/dist/configs/react-router.config.js.map +1 -0
- package/dist/extensibility/templates/install-instructions.mdc.hbs +192 -0
- package/dist/extensibility/templates/uninstall-instructions.mdc.hbs +137 -0
- package/dist/index.d.ts +327 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +2606 -0
- package/dist/index.js.map +1 -0
- package/dist/mrt/sfnext-server-chunk-DUt5XHAg.mjs +1 -0
- package/dist/mrt/sfnext-server-jiti-DjnmHo-6.mjs +10 -0
- package/dist/mrt/sfnext-server-jiti-DjnmHo-6.mjs.map +1 -0
- package/dist/mrt/ssr.d.ts +19 -0
- package/dist/mrt/ssr.d.ts.map +1 -0
- package/dist/mrt/ssr.mjs +246 -0
- package/dist/mrt/ssr.mjs.map +1 -0
- package/dist/mrt/streamingHandler.d.ts +11 -0
- package/dist/mrt/streamingHandler.d.ts.map +1 -0
- package/dist/mrt/streamingHandler.mjs +255 -0
- package/dist/mrt/streamingHandler.mjs.map +1 -0
- package/dist/react-router/Scripts.d.ts +36 -0
- package/dist/react-router/Scripts.d.ts.map +1 -0
- package/dist/react-router/Scripts.js +68 -0
- package/dist/react-router/Scripts.js.map +1 -0
- package/package.json +157 -0
package/dist/cartridge-services/index.js.map
@@ -0,0 +1 @@
+
{"version":3,"file":"index.js","names":["path","fetchOptions: RequestInit","headers: Record<string, string>","isCliAvailable: boolean | null","result: Array<{ id: string; path: string; file: string; index?: boolean }>","fullPath: string","VALID_ATTRIBUTE_TYPES: readonly AttributeType[]","TYPE_MAPPING: Record<string, string>","result: Record<string, unknown>","result: unknown[]","attributes: Record<string, unknown>[]","attribute: Record<string, unknown>","regionDefinitions: Record<string, unknown>[]","regionDefinition: Record<string, unknown>","components: unknown[]","pageTypes: unknown[]","aspects: unknown[]","cartridgeData: Record<string, unknown>","files: string[]","extname","allComponents: unknown[]","allPageTypes: unknown[]","allAspects: unknown[]"],"sources":["../../src/cartridge-services/types.ts","../../src/cartridge-services/sfcc-client.ts","../../src/cartridge-services/validation.ts","../../src/cartridge-services/deploy-cartridge.ts","../../src/cartridge-services/react-router-config.ts","../../src/cartridge-services/generate-cartridge.ts"],"sourcesContent":["/**\n * Copyright 2026 Salesforce, Inc.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * TypeScript types for Salesforce Commerce Cloud cartridge deployment\n */\nexport interface HttpRequestOptions {\n auth: { basic: string };\n uri: string;\n method: string;\n headers?: Record<string, string>;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n body?: any; // BodyInit | ReadStream - using any for flexibility with Node.js streams\n form?: Record<string, unknown>;\n // WebDAV-specific options\n baseUrl?: string;\n // Node.js fetch streaming support\n duplex?: string;\n}\n\nexport interface HttpResponse {\n statusCode: number;\n statusMessage: string;\n headers: Record<string, string>;\n}\n\nexport interface DeployResult {\n version: string;\n}\n\nexport const WEBDAV_BASE = '/on/demandware.servlet/webdav/Sites';\nexport const CARTRIDGES_PATH = 'Cartridges';\n\n// HTTP Methods\nexport const HTTP_METHODS = {\n PUT: 'PUT',\n POST: 'POST',\n DELETE: 'DELETE',\n} as const;\n\n// Content Types\nexport const CONTENT_TYPES = {\n APPLICATION_ZIP: 'application/zip',\n APPLICATION_FORM_URLENCODED: 'application/x-www-form-urlencoded',\n APPLICATION_JSON: 'application/json',\n} as const;\n\n// WebDAV Operations\nexport const WEBDAV_OPERATIONS = {\n UNZIP: 'UNZIP',\n TARGET_CARTRIDGES: 'cartridges',\n} as const;\n","/**\n * Copyright 2026 Salesforce, Inc.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under 
the License.\n */\n/**\n * SFCC API client utilities for Commerce Cloud requests\n * Handles SSL, authentication, and network requests for WebDAV and OCAPI\n */\nimport { type HttpRequestOptions, type HttpResponse, CONTENT_TYPES, WEBDAV_BASE } from './types';\n\n/**\n * Create HTTP request options for WebDAV operations (file upload/download)\n *\n * @param instance - The Commerce Cloud instance hostname\n * @param path - The WebDAV path (e.g., '/cartridges')\n * @param basicAuth - Base64 encoded basic authentication credentials (required)\n * @param method - HTTP method (PUT, DELETE, UNZIP, etc.)\n * @param formData - Optional form data for the request\n * @returns Configured HTTP request options for WebDAV operations\n */\nexport function getWebdavOptions(\n instance: string,\n path: string,\n basicAuth: string,\n method: string,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n formData?: Record<string, any>\n): HttpRequestOptions {\n const endpoint = `${WEBDAV_BASE}/${path}`;\n\n const opts: HttpRequestOptions = {\n baseUrl: `https://${instance}`,\n uri: endpoint,\n auth: { basic: basicAuth },\n method,\n ...(formData && { form: formData }),\n };\n return opts;\n}\n\n/**\n * Check if an HTTP response indicates an authentication error and throw if so\n *\n * @param response - The HTTP response to check\n * @throws Error with authentication message if status code is 401\n */\nexport function checkAuthenticationError(response: HttpResponse): void {\n if (response.statusCode === 401) {\n throw new Error('Authentication failed. Please login again.');\n }\n}\n\n/**\n * Execute an HTTP request using the native fetch API with default SSL validation\n *\n * This function handles general HTTP requests and does not automatically set Content-Type headers.\n * Callers must set the appropriate Content-Type header in opts.headers based on their body type\n *\n * @param opts - HTTP request configuration including URL, method, headers, and body\n * @returns Promise resolving to an object containing the HTTP response and parsed body\n * @throws Error if the HTTP request fails or cannot be completed\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport async function makeRequest(opts: HttpRequestOptions): Promise<{ response: HttpResponse; body: any }> {\n const url = opts.uri;\n\n const fetchOptions: RequestInit = {\n ...opts,\n headers: {\n Authorization: `Basic ${opts.auth.basic}`,\n ...opts.headers,\n },\n };\n\n // Add form data if specified\n if (opts.form) {\n const formData = new URLSearchParams();\n Object.entries(opts.form).forEach(([key, value]) => {\n formData.append(key, String(value));\n });\n fetchOptions.body = formData;\n fetchOptions.headers = {\n ...fetchOptions.headers,\n 'Content-Type': CONTENT_TYPES.APPLICATION_FORM_URLENCODED,\n };\n }\n\n try {\n const response = await fetch(url, fetchOptions);\n\n const body = response.headers.get('content-type')?.includes(CONTENT_TYPES.APPLICATION_JSON)\n ? await response.json()\n : await response.text();\n\n // Convert Headers to plain object\n const headers: Record<string, string> = {};\n response.headers.forEach((value, key) => {\n headers[key] = value;\n });\n\n return {\n response: {\n statusCode: response.status,\n statusMessage: response.statusText,\n headers,\n },\n body,\n };\n } catch (error) {\n throw new Error(`HTTP request failed: ${error instanceof Error ? 
error.message : String(error)}`);\n }\n}\n","/**\n * Copyright 2026 Salesforce, Inc.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Input validation utilities for cartridge services\n * Validates parameters before calling core business logic functions\n */\nimport { extname } from 'path';\n\n/**\n * Validation error class for cartridge service parameter validation\n */\nexport class ValidationError extends Error {\n constructor(message: string) {\n super(message);\n this.name = 'ValidationError';\n }\n}\n\n/**\n * Validate Commerce Cloud instance hostname\n *\n * @param instance - The instance hostname to validate\n * @throws ValidationError if instance is invalid\n */\nexport function validateInstance(instance: string): void {\n if (!instance || typeof instance !== 'string') {\n throw new ValidationError('Instance parameter is required and must be a string');\n }\n\n if (instance.trim().length === 0) {\n throw new ValidationError('Instance parameter cannot be empty');\n }\n\n // Basic format validation for instance\n if (!instance.includes('.')) {\n throw new ValidationError('Parameter instance must be a valid domain name');\n }\n}\n\n/**\n * Validate cartridge file (must be a ZIP file)\n *\n * @param cartridgePath - The cartridge file path to validate\n * @throws ValidationError if cartridge is invalid\n */\nexport function validateCartridgePath(cartridgePath: string): void {\n if (!cartridgePath || typeof cartridgePath !== 'string') {\n throw new ValidationError('cartridge parameter is required and must be a string');\n }\n\n if (cartridgePath.trim().length === 0) {\n throw new ValidationError('cartridge parameter cannot be empty');\n }\n\n // Only allow directories (no file extension)\n const ext = extname(cartridgePath).toLowerCase();\n if (ext !== '') {\n throw new ValidationError(`cartridge must be a directory, got: ${ext}`);\n }\n}\n\n/**\n * Validate Basic Auth credentials\n *\n * @param basicAuth - The base64 encoded basic auth credentials to validate\n * @throws ValidationError if credentials are invalid\n */\nexport function validateBasicAuth(basicAuth: string): void {\n if (!basicAuth || typeof basicAuth !== 'string') {\n throw new ValidationError('Basic auth credentials parameter is required and must be a string');\n }\n\n if (basicAuth.trim().length === 0) {\n throw new ValidationError('Basic auth credentials parameter cannot be empty');\n }\n\n // Basic validation for base64 encoded credentials\n if (basicAuth.length < 10) {\n throw new ValidationError('Basic auth credentials appear to be too short to be valid');\n }\n}\n\n/**\n * Validate code version name\n *\n * @param version - The code version name to validate\n * @throws ValidationError if version is invalid\n */\nexport function validateVersion(version: string): void {\n if (!version || typeof version !== 'string') {\n throw new ValidationError('Version parameter is required and must be a string');\n }\n\n if (version.trim().length === 0) {\n throw new ValidationError('Version parameter 
cannot be empty');\n }\n\n // Basic version name validation (alphanumeric, hyphens, underscores, dots)\n const versionRegex = /^[a-zA-Z0-9._-]+$/;\n if (!versionRegex.test(version)) {\n throw new ValidationError(\n 'Version parameter contains invalid characters. Only alphanumeric, dots, hyphens, and underscores are allowed'\n );\n }\n}\n\n/**\n * Validate WebDAV path\n *\n * @param webdavPath - The WebDAV path to validate\n * @throws ValidationError if path is invalid\n */\nexport function validateWebdavPath(webdavPath: string): void {\n if (!webdavPath || typeof webdavPath !== 'string') {\n throw new ValidationError('WebDAV path parameter is required and must be a string');\n }\n\n if (!webdavPath.startsWith('/')) {\n throw new ValidationError('WebDAV path must start with a forward slash');\n }\n}\n\n/**\n * Validate all parameters for deployCode function\n *\n * @param instance - Commerce Cloud instance hostname\n * @param codeVersionName - Target code version name\n * @param cartridgeDirectoryPath - Path to the source directory\n * @param basicAuth - Base64 encoded basic auth credentials\n * @param cartridgeWebDevPath - WebDAV path for cartridge deployment\n * @throws ValidationError if any parameter is invalid\n */\nexport function validateDeployCodeParams(\n instance: string,\n codeVersionName: string,\n cartridgeDirectoryPath: string,\n basicAuth: string,\n cartridgeWebDevPath: string\n): void {\n validateInstance(instance);\n validateVersion(codeVersionName);\n validateCartridgePath(cartridgeDirectoryPath);\n validateBasicAuth(basicAuth);\n validateWebdavPath(cartridgeWebDevPath);\n}\n","/**\n * Copyright 2026 Salesforce, Inc.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Core cartridge business logic\n * Contains the actual implementation without validation\n */\nimport fs from 'fs';\nimport path from 'path';\nimport archiver from 'archiver';\nimport {\n type DeployResult,\n type HttpResponse,\n WEBDAV_BASE,\n CARTRIDGES_PATH,\n HTTP_METHODS,\n CONTENT_TYPES,\n WEBDAV_OPERATIONS,\n} from './types';\nimport { getWebdavOptions, checkAuthenticationError, makeRequest } from './sfcc-client';\nimport { validateDeployCodeParams } from './validation';\n\n/**\n * Extract the filename (including extension) from a file path\n *\n * @param filePath - The full path to the file\n * @returns The filename portion of the path (e.g., 'archive.zip' from '/path/to/archive.zip')\n */\nfunction getFilename(filePath: string): string {\n return path.basename(filePath);\n}\n\n/**\n * Create a ZIP cartridge from a directory\n *\n * @param sourceDir - The directory to zip\n * @param outputPath - The output ZIP file path (can be same as sourceDir)\n * @returns Promise resolving when the ZIP file is created\n */\nasync function zipCartridge(sourceDir: string, outputPath: string): Promise<void> {\n const archive = archiver('zip', { zlib: { level: 9 } });\n const output = fs.createWriteStream(outputPath);\n\n archive.pipe(output);\n archive.directory(sourceDir, false);\n await 
archive.finalize();\n}\n\n/**\n * Build the WebDAV endpoint URL for a file\n *\n * @param instance - The Commerce Cloud instance hostname\n * @param path - The WebDAV path (e.g., 'Cartridges/local_metadata')\n * @param file - The local file path (filename will be extracted)\n * @returns The complete WebDAV endpoint URL\n */\nfunction buildWebdavEndpoint(instance: string, webdavPath: string, file: string): string {\n const filename = getFilename(file);\n return `https://${instance}${WEBDAV_BASE}/${webdavPath}/${filename}`;\n}\n\n/**\n * Unzip an uploaded archive file on Commerce Cloud via WebDAV\n *\n * @param instance - The Commerce Cloud instance hostname\n * @param path - The WebDAV path where the file was uploaded\n * @param file - The local file path (used to determine the remote filename)\n * @param basicAuth - Base64 encoded basic authentication credentials\n * @returns Promise resolving to HTTP response and body from the unzip operation\n */\nasync function unzip(\n instance: string,\n webdavPath: string,\n file: string,\n basicAuth: string\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n): Promise<{ response: HttpResponse; body: any }> {\n const endpoint = buildWebdavEndpoint(instance, webdavPath, file);\n const opts = getWebdavOptions(instance, webdavPath, basicAuth, HTTP_METHODS.POST, {\n method: WEBDAV_OPERATIONS.UNZIP,\n target: WEBDAV_OPERATIONS.TARGET_CARTRIDGES,\n });\n opts.uri = endpoint;\n const result = await makeRequest(opts);\n checkAuthenticationError(result.response);\n return result;\n}\n\n/**\n * Delete a file from Commerce Cloud via WebDAV\n *\n * @param instance - The Commerce Cloud instance hostname\n * @param path - The WebDAV path where the file is located\n * @param file - The local file path (used to determine the remote filename)\n * @param basicAuth - Base64 encoded basic authentication credentials\n * @returns Promise resolving to HTTP response and body from the delete operation\n */\nasync function deleteFile(\n instance: string,\n webdavPath: string,\n file: string,\n basicAuth: string\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n): Promise<{ response: HttpResponse; body: any }> {\n const endpoint = buildWebdavEndpoint(instance, webdavPath, file);\n const opts = getWebdavOptions(instance, webdavPath, basicAuth, HTTP_METHODS.DELETE);\n opts.uri = endpoint;\n const result = await makeRequest(opts);\n checkAuthenticationError(result.response);\n return result;\n}\n\n/**\n * Upload a file to a specific cartridge version on Commerce Cloud via WebDAV (internal function)\n *\n * @param instance - The Commerce Cloud instance hostname\n * @param codeVersionName - The target code version name\n * @param filePath - The local file path to upload\n * @param basicAuth - Base64 encoded basic authentication credentials\n * @returns Promise resolving to HTTP response and body from the upload operation\n */\nasync function postFile(\n instance: string,\n codeVersionName: string,\n filePath: string,\n basicAuth: string\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n): Promise<{ response: HttpResponse; body: any }> {\n const targetPath = `${CARTRIDGES_PATH}/${codeVersionName}`;\n\n try {\n const endpoint = buildWebdavEndpoint(instance, targetPath, filePath);\n const opts = getWebdavOptions(instance, targetPath, basicAuth, HTTP_METHODS.PUT);\n opts.uri = endpoint;\n\n // Stream the ZIP file for upload - uses fs.createReadStream for memory efficiency\n // This allows uploading large cartridges without loading them 
entirely into memory\n opts.body = fs.createReadStream(filePath);\n\n // Add duplex: 'half' - required by Node.js fetch for streaming bodies\n opts.duplex = 'half';\n opts.headers = {\n ...opts.headers,\n 'Content-Type': CONTENT_TYPES.APPLICATION_ZIP,\n };\n\n const result = await makeRequest(opts);\n checkAuthenticationError(result.response);\n\n if (![200, 201, 204].includes(result.response.statusCode)) {\n throw new Error(\n `Post file \"${filePath}\" failed: ${result.response.statusCode} (${result.response.statusMessage})`\n );\n }\n\n return result;\n } catch (error) {\n throw new Error(`Post file \"${filePath}\" failed: ${error instanceof Error ? error.message : String(error)}`);\n }\n}\n\n/**\n * Deploy code to Commerce Cloud by uploading, unzipping, and cleaning up\n *\n * This function performs a complete code deployment workflow:\n * 1. Uploads the archive file via WebDAV to the specified cartridge version\n * 2. Unzips the archive on the server\n * 3. Deletes the uploaded archive file\n * 4. Returns the deployed version name\n *\n * @param instance - The Commerce Cloud instance hostname\n * @param codeVersionName - The target code version name\n * @param sourceDir - The local directory containing the source files to deploy\n * @param basicAuth - Base64 encoded basic authentication credentials\n * @returns Promise resolving to deployment result with the version name\n * @throws Error if any step of the deployment process fails\n */\nasync function deployCode(\n instance: string,\n codeVersionName: string,\n sourceDir: string,\n basicAuth: string\n): Promise<DeployResult> {\n const cartridgePath = `/${CARTRIDGES_PATH}/${codeVersionName}/cartridges`;\n\n validateDeployCodeParams(instance, codeVersionName, sourceDir, basicAuth, cartridgePath);\n\n // Create a temporary ZIP file in the same directory as sourceDir\n const tempZipPath = path.join(path.dirname(sourceDir), `metadata-${Date.now()}.zip`);\n\n try {\n // Step 0: Create ZIP cartridge from source directory\n await zipCartridge(sourceDir, tempZipPath);\n const file = path.basename(tempZipPath);\n\n // Step 1: Upload metadata cartridge\n // Note: postFile already validates status codes [200, 201, 204] and throws on failure\n await postFile(instance, codeVersionName, tempZipPath, basicAuth);\n\n // Step 2: Unzip file to cartridges subdirectory\n const unzipResult = await unzip(instance, `${CARTRIDGES_PATH}/${codeVersionName}`, file, basicAuth);\n if (![200, 201, 202].includes(unzipResult.response.statusCode)) {\n throw new Error(\n `Deploy code ${file} failed (unzip step): ${unzipResult.response.statusCode} (${unzipResult.response.statusMessage})`\n );\n }\n\n // Step 3: Delete ZIP file\n const deleteResult = await deleteFile(instance, `${CARTRIDGES_PATH}/${codeVersionName}`, file, basicAuth);\n if (![200, 204].includes(deleteResult.response.statusCode)) {\n throw new Error(\n `Delete ZIP file ${file} after deployment failed (deleteFile step): ${deleteResult.response.statusCode} (${deleteResult.response.statusMessage})`\n );\n }\n\n // Generate version name\n const version = getFilename(file).replace('.zip', '');\n return { version };\n } catch (error) {\n if (error instanceof Error) {\n throw error;\n }\n throw new Error(`Deploy code ${sourceDir} failed: ${String(error)}`);\n } finally {\n // Clean up temporary ZIP file\n if (fs.existsSync(tempZipPath)) {\n fs.unlinkSync(tempZipPath);\n }\n }\n}\n\nexport { deployCode };\n","/**\n * Copyright 2026 Salesforce, Inc.\n *\n * Licensed under the Apache License, Version 2.0 (the 
\"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { join } from 'node:path';\nimport { existsSync, readFileSync, unlinkSync } from 'node:fs';\nimport { execSync } from 'node:child_process';\nimport { tmpdir } from 'node:os';\nimport { randomUUID } from 'node:crypto';\nimport { npmRunPathEnv } from 'npm-run-path';\nimport type { RouteConfigEntry } from '@react-router/dev/routes';\n\nlet isCliAvailable: boolean | null = null;\n\nfunction checkReactRouterCli(projectDirectory: string): boolean {\n if (isCliAvailable !== null) {\n return isCliAvailable;\n }\n\n try {\n execSync('react-router --version', {\n cwd: projectDirectory,\n env: npmRunPathEnv(),\n stdio: 'pipe',\n });\n isCliAvailable = true;\n } catch {\n isCliAvailable = false;\n }\n return isCliAvailable;\n}\n\n/**\n * Get the fully resolved routes from React Router by invoking its CLI.\n * This ensures we get the exact same route resolution as React Router uses internally,\n * including all presets, file-system routes, and custom route configurations.\n * @param projectDirectory - The project root directory\n * @returns Array of resolved route config entries\n * @example\n * const routes = getReactRouterRoutes('/path/to/project');\n * // Returns the same structure as `react-router routes --json`\n */\nfunction getReactRouterRoutes(projectDirectory: string): RouteConfigEntry[] {\n if (!checkReactRouterCli(projectDirectory)) {\n throw new Error(\n 'React Router CLI is not available. 
Please make sure @react-router/dev is installed and accessible.'\n );\n }\n\n // Use a temp file to avoid Node.js buffer limits (8KB default)\n const tempFile = join(tmpdir(), `react-router-routes-${randomUUID()}.json`);\n\n try {\n // Redirect output to temp file to avoid buffer truncation\n execSync(`react-router routes --json > \"${tempFile}\"`, {\n cwd: projectDirectory,\n env: npmRunPathEnv(),\n encoding: 'utf-8',\n stdio: ['pipe', 'pipe', 'pipe'],\n });\n const output = readFileSync(tempFile, 'utf-8');\n return JSON.parse(output) as RouteConfigEntry[];\n } catch (error) {\n throw new Error(`Failed to get routes from React Router CLI: ${(error as Error).message}`);\n } finally {\n // Clean up temp file\n try {\n if (existsSync(tempFile)) {\n unlinkSync(tempFile);\n }\n } catch {\n // Ignore cleanup errors\n }\n }\n}\n\n/**\n * Convert a file path to its corresponding route path using React Router's CLI.\n * This ensures we get the exact same route resolution as React Router uses internally.\n * @param filePath - Absolute path to the route file\n * @param projectRoot - The project root directory\n * @returns The route path (e.g., '/cart', '/product/:productId')\n * @example\n * const route = filePathToRoute('/path/to/project/src/routes/_app.cart.tsx', '/path/to/project');\n * // Returns: '/cart'\n */\nexport function filePathToRoute(filePath: string, projectRoot: string): string {\n // Normalize paths to POSIX-style\n const filePathPosix = filePath.replace(/\\\\/g, '/');\n\n // Get all routes from React Router CLI\n const routes = getReactRouterRoutes(projectRoot);\n const flatRoutes = flattenRoutes(routes);\n\n // Find the route that matches this file\n for (const route of flatRoutes) {\n // Normalize the route file path for comparison\n const routeFilePosix = route.file.replace(/\\\\/g, '/');\n\n // Check if the file path ends with the route file (handles relative vs. absolute paths)\n if (filePathPosix.endsWith(routeFilePosix) || filePathPosix.endsWith(`/${routeFilePosix}`)) {\n return route.path;\n }\n\n // Also check without leading ./\n const routeFileNormalized = routeFilePosix.replace(/^\\.\\//, '');\n if (filePathPosix.endsWith(routeFileNormalized) || filePathPosix.endsWith(`/${routeFileNormalized}`)) {\n return route.path;\n }\n }\n\n // Fallback: if no match found, return a warning path\n console.warn(`Warning: Could not find route for file: ${filePath}`);\n return '/unknown';\n}\n\n/**\n * Flatten a nested route tree into a flat array with computed paths.\n * Each route will have its full path computed from parent paths.\n * @param routes - The nested route config entries\n * @param parentPath - The parent path prefix (used internally for recursion)\n * @returns Flat array of routes with their full paths\n */\nfunction flattenRoutes(\n routes: RouteConfigEntry[],\n parentPath = ''\n): Array<{ id: string; path: string; file: string; index?: boolean }> {\n const result: Array<{ id: string; path: string; file: string; index?: boolean }> = [];\n\n for (const route of routes) {\n // Compute the full path\n let fullPath: string;\n if (route.index) {\n fullPath = parentPath || '/';\n } else if (route.path) {\n // Handle paths that already start with / (absolute paths from extensions)\n const pathSegment = route.path.startsWith('/') ? route.path : `/${route.path}`;\n fullPath = parentPath ? 
`${parentPath}${pathSegment}`.replace(/\\/+/g, '/') : pathSegment;\n } else {\n // Layout route without path - use parent path\n fullPath = parentPath || '/';\n }\n\n // Add this route if it has an id\n if (route.id) {\n result.push({\n id: route.id,\n path: fullPath,\n file: route.file,\n index: route.index,\n });\n }\n\n // Recursively process children\n if (route.children && route.children.length > 0) {\n const childPath = route.path ? fullPath : parentPath;\n result.push(...flattenRoutes(route.children, childPath));\n }\n }\n\n return result;\n}\n","#!/usr/bin/env node\n/**\n * Copyright 2026 Salesforce, Inc.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/* eslint-disable no-console */\nimport { readdir, readFile, writeFile, mkdir, access, rm } from 'node:fs/promises';\nimport { join, extname, resolve, basename } from 'node:path';\nimport { execSync } from 'node:child_process';\nimport { Project, Node, type SourceFile, type PropertyDeclaration, type Decorator } from 'ts-morph';\nimport { filePathToRoute } from './react-router-config.js';\n\n// Re-export `filePathToRoute`\nexport { filePathToRoute };\n\nconst SKIP_DIRECTORIES = ['build', 'dist', 'node_modules', '.git', '.next', 'coverage'];\n\nconst DEFAULT_COMPONENT_GROUP = 'odyssey_base';\nconst ARCH_TYPE_HEADLESS = 'headless';\n\ntype AttributeType =\n | 'string'\n | 'text'\n | 'markup'\n | 'integer'\n | 'boolean'\n | 'product'\n | 'category'\n | 'file'\n | 'page'\n | 'image'\n | 'url'\n | 'enum'\n | 'custom'\n | 'cms_record';\n\nconst VALID_ATTRIBUTE_TYPES: readonly AttributeType[] = [\n 'string',\n 'text',\n 'markup',\n 'integer',\n 'boolean',\n 'product',\n 'category',\n 'file',\n 'page',\n 'image',\n 'url',\n 'enum',\n 'custom',\n 'cms_record',\n] as const;\n\n// Type mapping for TypeScript types to B2C Commerce attribute types\n// Based on official schema: https://salesforcecommercecloud.github.io/b2c-dev-doc/docs/current/content/attributedefinition.json\nconst TYPE_MAPPING: Record<string, string> = {\n String: 'string',\n string: 'string',\n Number: 'integer',\n number: 'integer',\n Boolean: 'boolean',\n boolean: 'boolean',\n Date: 'string', // B2C Commerce doesn't have a native date type, use string\n URL: 'url',\n CMSRecord: 'cms_record',\n};\n\n// Resolve attribute type in order: decorator type -> ts-morph type inference -> fallback to string\nfunction resolveAttributeType(decoratorType?: string, tsMorphType?: string, fieldName?: string): string {\n // 1) If the type is set on the decorator, use that (with validation)\n if (decoratorType) {\n if (!VALID_ATTRIBUTE_TYPES.includes(decoratorType as AttributeType)) {\n console.error(\n `Error: Invalid attribute type '${decoratorType}' for field '${fieldName || 'unknown'}'. 
Valid types are: ${VALID_ATTRIBUTE_TYPES.join(', ')}`\n );\n process.exit(1);\n }\n return decoratorType;\n }\n\n // 2) Use the type from ts-morph type inference\n if (tsMorphType && TYPE_MAPPING[tsMorphType]) {\n return TYPE_MAPPING[tsMorphType];\n }\n\n // 3) Fall back to string\n return 'string';\n}\n\n// Convert field name to human-readable name\nfunction toHumanReadableName(fieldName: string): string {\n return fieldName\n .replace(/([A-Z])/g, ' $1') // Add space before capital letters\n .replace(/^./, (str) => str.toUpperCase()) // Capitalize first letter\n .trim();\n}\n\n// Convert name to camelCase filename (handles spaces and hyphens, preserves existing camelCase)\nfunction toCamelCaseFileName(name: string): string {\n // If the name is already camelCase (no spaces or hyphens), return as-is\n if (!/[\\s-]/.test(name)) {\n return name;\n }\n\n return name\n .split(/[\\s-]+/) // Split by whitespace and hyphens\n .map((word, index) => {\n if (index === 0) {\n return word.toLowerCase(); // First word is all lowercase\n }\n return word.charAt(0).toUpperCase() + word.slice(1).toLowerCase(); // Subsequent words are capitalized\n })\n .join(''); // Join without spaces or hyphens\n}\n\nfunction getTypeFromTsMorph(property: PropertyDeclaration, _sourceFile: SourceFile): string {\n try {\n const typeNode = property.getTypeNode();\n if (typeNode) {\n const typeText = typeNode.getText();\n // Extract the base type name from complex types\n const baseType = typeText.split('|')[0].split('&')[0].trim();\n return baseType;\n }\n } catch {\n // If type extraction fails, return string\n }\n\n return 'string';\n}\n\n// Helper function to parse any TypeScript expression into a JavaScript value\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction parseExpression(expression: any): unknown {\n if (Node.isStringLiteral(expression)) {\n return expression.getLiteralValue();\n } else if (Node.isNumericLiteral(expression)) {\n return expression.getLiteralValue();\n } else if (Node.isTrueLiteral(expression)) {\n return true;\n } else if (Node.isFalseLiteral(expression)) {\n return false;\n } else if (Node.isObjectLiteralExpression(expression)) {\n return parseNestedObject(expression);\n } else if (Node.isArrayLiteralExpression(expression)) {\n return parseArrayLiteral(expression);\n } else {\n return expression.getText();\n }\n}\n\n// Helper function to parse deeply nested object literals\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction parseNestedObject(objectLiteral: any): Record<string, unknown> {\n const result: Record<string, unknown> = {};\n\n try {\n const properties = objectLiteral.getProperties();\n\n for (const property of properties) {\n if (Node.isPropertyAssignment(property)) {\n const name = property.getName();\n const initializer = property.getInitializer();\n\n if (initializer) {\n result[name] = parseExpression(initializer);\n }\n }\n }\n } catch (error) {\n console.warn(`Warning: Could not parse nested object: ${(error as Error).message}`);\n return result; // Return the result even if there was an error\n }\n\n return result;\n}\n\n// Helper function to parse array literals\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction parseArrayLiteral(arrayLiteral: any): unknown[] {\n const result: unknown[] = [];\n\n try {\n const elements = arrayLiteral.getElements();\n\n for (const element of elements) {\n result.push(parseExpression(element));\n }\n } catch (error) {\n console.warn(`Warning: Could not parse array literal: 
${(error as Error).message}`);\n }\n\n return result;\n}\n\n// Parse decorator arguments using ts-morph\nfunction parseDecoratorArgs(decorator: Decorator): Record<string, unknown> {\n const result: Record<string, unknown> = {};\n\n try {\n const args = decorator.getArguments();\n\n if (args.length === 0) {\n return result;\n }\n\n // Handle the first argument\n const firstArg = args[0];\n\n if (Node.isObjectLiteralExpression(firstArg)) {\n // First argument is an object literal - parse all its properties\n const properties = firstArg.getProperties();\n\n for (const property of properties) {\n if (Node.isPropertyAssignment(property)) {\n const name = property.getName();\n const initializer = property.getInitializer();\n\n if (initializer) {\n result[name] = parseExpression(initializer);\n }\n }\n }\n } else if (Node.isStringLiteral(firstArg)) {\n // First argument is a string literal - use it as the id\n result.id = parseExpression(firstArg);\n\n // Check if there's a second argument (options object)\n if (args.length > 1) {\n const secondArg = args[1];\n if (Node.isObjectLiteralExpression(secondArg)) {\n const properties = secondArg.getProperties();\n\n for (const property of properties) {\n if (Node.isPropertyAssignment(property)) {\n const name = property.getName();\n const initializer = property.getInitializer();\n\n if (initializer) {\n result[name] = parseExpression(initializer);\n }\n }\n }\n }\n }\n }\n\n return result;\n } catch (error) {\n console.warn(`Warning: Could not parse decorator arguments: ${(error as Error).message}`);\n return result;\n }\n}\n\nfunction extractAttributesFromSource(sourceFile: SourceFile, className: string): Record<string, unknown>[] {\n const attributes: Record<string, unknown>[] = [];\n\n try {\n // Find the class declaration\n const classDeclaration = sourceFile.getClass(className);\n if (!classDeclaration) {\n return attributes;\n }\n\n // Get all properties in the class\n const properties = classDeclaration.getProperties();\n\n for (const property of properties) {\n // Check if the property has an @AttributeDefinition decorator\n const attributeDecorator = property.getDecorator('AttributeDefinition');\n if (!attributeDecorator) {\n continue;\n }\n\n const fieldName = property.getName();\n const config = parseDecoratorArgs(attributeDecorator);\n\n const isRequired = !property.hasQuestionToken();\n\n const inferredType = (config.type as string) || getTypeFromTsMorph(property, sourceFile);\n\n const attribute: Record<string, unknown> = {\n id: config.id || fieldName,\n name: config.name || toHumanReadableName(fieldName),\n type: resolveAttributeType(config.type as string, inferredType, fieldName),\n required: config.required !== undefined ? 
config.required : isRequired,\n description: config.description || `Field: ${fieldName}`,\n };\n\n if (config.values) {\n attribute.values = config.values;\n }\n\n if (config.defaultValue !== undefined) {\n attribute.default_value = config.defaultValue;\n }\n\n attributes.push(attribute);\n }\n } catch (error) {\n console.warn(`Warning: Could not extract attributes from class ${className}: ${(error as Error).message}`);\n }\n\n return attributes;\n}\n\nfunction extractRegionDefinitionsFromSource(sourceFile: SourceFile, className: string): Record<string, unknown>[] {\n const regionDefinitions: Record<string, unknown>[] = [];\n\n try {\n // Find the class declaration\n const classDeclaration = sourceFile.getClass(className);\n if (!classDeclaration) {\n return regionDefinitions;\n }\n\n // Check for class-level @RegionDefinition decorator\n const classRegionDecorator = classDeclaration.getDecorator('RegionDefinition');\n if (classRegionDecorator) {\n const args = classRegionDecorator.getArguments();\n if (args.length > 0) {\n const firstArg = args[0];\n\n // Handle array literal argument (most common case)\n if (Node.isArrayLiteralExpression(firstArg)) {\n const elements = firstArg.getElements();\n for (const element of elements) {\n if (Node.isObjectLiteralExpression(element)) {\n const regionConfig = parseDecoratorArgs({\n getArguments: () => [element],\n } as unknown as Decorator);\n\n const regionDefinition: Record<string, unknown> = {\n id: regionConfig.id || 'region',\n name: regionConfig.name || 'Region',\n };\n\n // Add optional properties if they exist in the decorator\n if (regionConfig.componentTypes) {\n regionDefinition.component_types = regionConfig.componentTypes;\n }\n\n if (Array.isArray(regionConfig.componentTypeInclusions)) {\n regionDefinition.component_type_inclusions = regionConfig.componentTypeInclusions.map(\n (incl) => ({\n type_id: incl,\n })\n );\n }\n\n if (Array.isArray(regionConfig.componentTypeExclusions)) {\n regionDefinition.component_type_exclusions = regionConfig.componentTypeExclusions.map(\n (excl) => ({\n type_id: excl,\n })\n );\n }\n\n if (regionConfig.maxComponents !== undefined) {\n regionDefinition.max_components = regionConfig.maxComponents;\n }\n\n if (regionConfig.minComponents !== undefined) {\n regionDefinition.min_components = regionConfig.minComponents;\n }\n\n if (regionConfig.allowMultiple !== undefined) {\n regionDefinition.allow_multiple = regionConfig.allowMultiple;\n }\n\n if (regionConfig.defaultComponentConstructors) {\n regionDefinition.default_component_constructors =\n regionConfig.defaultComponentConstructors;\n }\n\n regionDefinitions.push(regionDefinition);\n }\n }\n }\n }\n }\n } catch (error) {\n console.warn(\n `Warning: Could not extract region definitions from class ${className}: ${(error as Error).message}`\n );\n }\n\n return regionDefinitions;\n}\n\nasync function processComponentFile(filePath: string, _projectRoot: string): Promise<unknown[]> {\n try {\n const content = await readFile(filePath, 'utf-8');\n const components: unknown[] = [];\n\n // Check if file contains @Component decorator\n if (!content.includes('@Component')) {\n return components;\n }\n\n // Convert file path to module path (currently unused but may be needed in future)\n // const relativePath = relative(join(projectRoot, 'src'), filePath);\n // const modulePath = relativePath.replace(/\\.tsx?$/, '').replace(/\\\\/g, '/');\n\n try {\n // Create a ts-morph project and add the source file\n const project = new Project({\n useInMemoryFileSystem: true,\n 
skipAddingFilesFromTsConfig: true,\n });\n\n const sourceFile = project.createSourceFile(filePath, content);\n\n const classes = sourceFile.getClasses();\n\n for (const classDeclaration of classes) {\n const componentDecorator = classDeclaration.getDecorator('Component');\n if (!componentDecorator) {\n continue;\n }\n\n const className = classDeclaration.getName();\n if (!className) {\n continue;\n }\n\n const componentConfig = parseDecoratorArgs(componentDecorator);\n\n const attributes = extractAttributesFromSource(sourceFile, className);\n const regionDefinitions = extractRegionDefinitionsFromSource(sourceFile, className);\n\n const componentMetadata = {\n typeId: componentConfig.id || className.toLowerCase(),\n name: componentConfig.name || toHumanReadableName(className),\n group: componentConfig.group || DEFAULT_COMPONENT_GROUP,\n description: componentConfig.description || `Custom component: ${className}`,\n regionDefinitions,\n attributes,\n };\n\n components.push(componentMetadata);\n }\n } catch (error) {\n console.warn(`Warning: Could not process file ${filePath}:`, (error as Error).message);\n }\n\n return components;\n } catch (error) {\n console.warn(`Warning: Could not read file ${filePath}:`, (error as Error).message);\n return [];\n }\n}\n\nasync function processPageTypeFile(filePath: string, projectRoot: string): Promise<unknown[]> {\n try {\n const content = await readFile(filePath, 'utf-8');\n const pageTypes: unknown[] = [];\n\n // Check if file contains @PageType decorator\n if (!content.includes('@PageType')) {\n return pageTypes;\n }\n\n try {\n // Create a ts-morph project and add the source file\n const project = new Project({\n useInMemoryFileSystem: true,\n skipAddingFilesFromTsConfig: true,\n });\n\n const sourceFile = project.createSourceFile(filePath, content);\n\n const classes = sourceFile.getClasses();\n\n for (const classDeclaration of classes) {\n const pageTypeDecorator = classDeclaration.getDecorator('PageType');\n if (!pageTypeDecorator) {\n continue;\n }\n\n const className = classDeclaration.getName();\n if (!className) {\n continue;\n }\n\n const pageTypeConfig = parseDecoratorArgs(pageTypeDecorator);\n\n const attributes = extractAttributesFromSource(sourceFile, className);\n const regionDefinitions = extractRegionDefinitionsFromSource(sourceFile, className);\n const route = filePathToRoute(filePath, projectRoot);\n\n const pageTypeMetadata = {\n typeId: pageTypeConfig.id || className.toLowerCase(),\n name: pageTypeConfig.name || toHumanReadableName(className),\n description: pageTypeConfig.description || `Custom page type: ${className}`,\n regionDefinitions,\n supportedAspectTypes: pageTypeConfig.supportedAspectTypes || [],\n attributes,\n route,\n };\n\n pageTypes.push(pageTypeMetadata);\n }\n } catch (error) {\n console.warn(`Warning: Could not process file ${filePath}:`, (error as Error).message);\n }\n\n return pageTypes;\n } catch (error) {\n console.warn(`Warning: Could not read file ${filePath}:`, (error as Error).message);\n return [];\n }\n}\n\nasync function processAspectFile(filePath: string, _projectRoot: string): Promise<unknown[]> {\n try {\n const content = await readFile(filePath, 'utf-8');\n const aspects: unknown[] = [];\n\n // Check if file is a JSON aspect file\n if (!filePath.endsWith('.json') || !content.trim().startsWith('{')) {\n return aspects;\n }\n\n // Check if file is in the aspects directory\n if (!filePath.includes('/aspects/') && !filePath.includes('\\\\aspects\\\\')) {\n return aspects;\n }\n\n try {\n // Parse the 
JSON content\n const aspectData = JSON.parse(content);\n\n // Extract filename without extension as the aspect ID\n const fileName = basename(filePath, '.json');\n\n // Validate that it looks like an aspect file\n if (!aspectData.name || !aspectData.attribute_definitions) {\n return aspects;\n }\n\n const aspectMetadata = {\n id: fileName,\n name: aspectData.name,\n description: aspectData.description || `Aspect type: ${aspectData.name}`,\n attributeDefinitions: aspectData.attribute_definitions || [],\n supportedObjectTypes: aspectData.supported_object_types || [],\n };\n\n aspects.push(aspectMetadata);\n } catch (parseError) {\n console.warn(`Warning: Could not parse JSON in file ${filePath}:`, (parseError as Error).message);\n }\n\n return aspects;\n } catch (error) {\n console.warn(`Warning: Could not read file ${filePath}:`, (error as Error).message);\n return [];\n }\n}\n\nasync function generateComponentCartridge(\n component: Record<string, unknown>,\n outputDir: string,\n dryRun = false\n): Promise<void> {\n const fileName = toCamelCaseFileName(component.typeId as string);\n const groupDir = join(outputDir, component.group as string);\n const outputPath = join(groupDir, `${fileName}.json`);\n\n if (!dryRun) {\n // Ensure the group directory exists\n try {\n await mkdir(groupDir, { recursive: true });\n } catch {\n // Directory might already exist, which is fine\n }\n\n const attributeDefinitionGroups = [\n {\n id: component.typeId,\n name: component.name,\n description: component.description,\n attribute_definitions: component.attributes,\n },\n ];\n\n const cartridgeData = {\n name: component.name,\n description: component.description,\n group: component.group,\n arch_type: ARCH_TYPE_HEADLESS,\n region_definitions: component.regionDefinitions || [],\n attribute_definition_groups: attributeDefinitionGroups,\n };\n\n await writeFile(outputPath, JSON.stringify(cartridgeData, null, 2));\n }\n\n const prefix = dryRun ? ' - [DRY RUN]' : ' -';\n console.log(\n `${prefix} ${String(component.typeId)}: ${String(component.name)} (${String((component.attributes as unknown[]).length)} attributes) → ${fileName}.json`\n );\n}\n\nasync function generatePageTypeCartridge(\n pageType: Record<string, unknown>,\n outputDir: string,\n dryRun = false\n): Promise<void> {\n const fileName = toCamelCaseFileName(pageType.name as string);\n const outputPath = join(outputDir, `${fileName}.json`);\n\n if (!dryRun) {\n const cartridgeData: Record<string, unknown> = {\n name: pageType.name,\n description: pageType.description,\n arch_type: ARCH_TYPE_HEADLESS,\n region_definitions: pageType.regionDefinitions || [],\n };\n\n // Add attribute_definition_groups if there are attributes\n if (pageType.attributes && (pageType.attributes as unknown[]).length > 0) {\n const attributeDefinitionGroups = [\n {\n id: pageType.typeId || fileName,\n name: pageType.name,\n description: pageType.description,\n attribute_definitions: pageType.attributes,\n },\n ];\n cartridgeData.attribute_definition_groups = attributeDefinitionGroups;\n }\n\n // Add supported_aspect_types if specified\n if (pageType.supportedAspectTypes) {\n cartridgeData.supported_aspect_types = pageType.supportedAspectTypes;\n }\n\n if (pageType.route) {\n cartridgeData.route = pageType.route;\n }\n\n await writeFile(outputPath, JSON.stringify(cartridgeData, null, 2));\n }\n\n const prefix = dryRun ? 
' - [DRY RUN]' : ' -';\n console.log(\n `${prefix} ${String(pageType.name)}: ${String(pageType.description)} (${String((pageType.attributes as unknown[]).length)} attributes) → ${fileName}.json`\n );\n}\n\nasync function generateAspectCartridge(\n aspect: Record<string, unknown>,\n outputDir: string,\n dryRun = false\n): Promise<void> {\n const fileName = toCamelCaseFileName(aspect.id as string);\n const outputPath = join(outputDir, `${fileName}.json`);\n\n if (!dryRun) {\n const cartridgeData: Record<string, unknown> = {\n name: aspect.name,\n description: aspect.description,\n arch_type: ARCH_TYPE_HEADLESS,\n attribute_definitions: aspect.attributeDefinitions || [],\n };\n\n // Add supported_object_types if specified\n if (aspect.supportedObjectTypes) {\n cartridgeData.supported_object_types = aspect.supportedObjectTypes;\n }\n\n await writeFile(outputPath, JSON.stringify(cartridgeData, null, 2));\n }\n\n const prefix = dryRun ? ' - [DRY RUN]' : ' -';\n console.log(\n `${prefix} ${String(aspect.name)}: ${String(aspect.description)} (${String((aspect.attributeDefinitions as unknown[]).length)} attributes) → ${fileName}.json`\n );\n}\n\n/**\n * Options for generateMetadata function\n */\nexport interface GenerateMetadataOptions {\n /**\n * Optional array of specific file paths to process.\n * If provided, only these files will be processed and existing cartridge files will NOT be deleted.\n * If omitted, the entire src/ directory will be scanned and all existing cartridge files will be deleted first.\n */\n filePaths?: string[];\n\n /**\n * Whether to run ESLint with --fix on generated JSON files to format them according to project settings.\n * Defaults to true.\n */\n lintFix?: boolean;\n\n /**\n * If true, scans files and reports what would be generated without actually writing any files or deleting directories.\n * Defaults to false.\n */\n dryRun?: boolean;\n}\n\n/**\n * Result returned by generateMetadata function\n */\nexport interface GenerateMetadataResult {\n componentsGenerated: number;\n pageTypesGenerated: number;\n aspectsGenerated: number;\n totalFiles: number;\n}\n\n/**\n * Runs ESLint with --fix on the specified directory to format JSON files.\n * This ensures generated JSON files match the project's Prettier/ESLint configuration.\n */\nfunction lintGeneratedFiles(metadataDir: string, projectRoot: string): void {\n try {\n console.log('🔧 Running ESLint --fix on generated JSON files...');\n\n // Run ESLint from the project root directory so it picks up the correct config\n // Use --no-error-on-unmatched-pattern to handle cases where no JSON files exist yet\n const command = `npx eslint \"${metadataDir}/**/*.json\" --fix --no-error-on-unmatched-pattern`;\n\n execSync(command, {\n cwd: projectRoot,\n stdio: 'pipe', // Suppress output unless there's an error\n encoding: 'utf-8',\n });\n\n console.log('✅ JSON files formatted successfully');\n } catch (error) {\n // ESLint returns non-zero exit code even when --fix resolves all issues\n // We only warn if there are actual unfixable issues\n const execError = error as { status?: number; stderr?: string; stdout?: string };\n\n // Exit code 1 usually means there were linting issues (some may have been fixed)\n // Exit code 2 means configuration error or other fatal error\n if (execError.status === 2) {\n const errMsg = execError.stderr || execError.stdout || 'Unknown error';\n console.warn(`⚠️ Warning: Could not run ESLint --fix: ${errMsg}`);\n } else if (execError.stderr && execError.stderr.includes('error')) {\n console.warn(`⚠️ 
Warning: Some linting issues could not be auto-fixed. Run ESLint manually to review.`);\n } else {\n // Exit code 1 with no errors in stderr usually means all issues were fixed\n console.log('✅ JSON files formatted successfully');\n }\n }\n}\n\n// Main function\nexport async function generateMetadata(\n projectDirectory: string,\n metadataDirectory: string,\n options?: GenerateMetadataOptions\n): Promise<GenerateMetadataResult> {\n try {\n const filePaths = options?.filePaths;\n const isIncrementalMode = filePaths && filePaths.length > 0;\n const dryRun = options?.dryRun || false;\n\n if (dryRun) {\n console.log('🔍 [DRY RUN] Scanning for decorated components and page types...');\n } else if (isIncrementalMode) {\n console.log(`🔍 Generating metadata for ${filePaths.length} specified file(s)...`);\n } else {\n console.log('🔍 Generating metadata for decorated components and page types...');\n }\n\n const projectRoot = resolve(projectDirectory);\n const srcDir = join(projectRoot, 'src');\n const metadataDir = resolve(metadataDirectory);\n const componentsOutputDir = join(metadataDir, 'components');\n const pagesOutputDir = join(metadataDir, 'pages');\n const aspectsOutputDir = join(metadataDir, 'aspects');\n\n // Skip directory operations in dry run mode\n if (!dryRun) {\n // Only delete existing directories in full scan mode (not incremental)\n if (!isIncrementalMode) {\n console.log('🗑️ Cleaning existing output directories...');\n for (const outputDir of [componentsOutputDir, pagesOutputDir, aspectsOutputDir]) {\n try {\n await rm(outputDir, { recursive: true, force: true });\n console.log(` - Deleted: ${outputDir}`);\n } catch {\n // Directory might not exist, which is fine\n console.log(` - Directory not found (skipping): ${outputDir}`);\n }\n }\n } else {\n console.log('📝 Incremental mode: existing cartridge files will be preserved/overwritten');\n }\n\n // Create output directories if they don't exist\n console.log('📁 Creating output directories...');\n for (const outputDir of [componentsOutputDir, pagesOutputDir, aspectsOutputDir]) {\n try {\n await mkdir(outputDir, { recursive: true });\n } catch (error) {\n try {\n await access(outputDir);\n // Directory exists, that's fine\n } catch {\n console.error(\n `❌ Error: Failed to create output directory ${outputDir}: ${(error as Error).message}`\n );\n process.exit(1);\n }\n }\n }\n } else if (isIncrementalMode) {\n console.log(`📝 [DRY RUN] Would process ${filePaths.length} specific file(s)`);\n } else {\n console.log('📝 [DRY RUN] Would clean and regenerate all metadata files');\n }\n\n let files: string[] = [];\n\n if (isIncrementalMode && filePaths) {\n // Use the specified file paths (resolve them relative to project root)\n files = filePaths.map((fp) => resolve(projectRoot, fp));\n console.log(`📂 Processing ${files.length} specified file(s)...`);\n } else {\n // Full scan mode: scan entire src directory\n const scanDirectory = async (dir: string): Promise<void> => {\n const entries = await readdir(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = join(dir, entry.name);\n\n if (entry.isDirectory()) {\n if (!SKIP_DIRECTORIES.includes(entry.name)) {\n await scanDirectory(fullPath);\n }\n } else if (\n entry.isFile() &&\n (extname(entry.name) === '.ts' ||\n extname(entry.name) === '.tsx' ||\n extname(entry.name) === '.json')\n ) {\n files.push(fullPath);\n }\n }\n };\n\n await scanDirectory(srcDir);\n }\n\n // Process each file for both components and page types\n const allComponents: unknown[] = [];\n 
const allPageTypes: unknown[] = [];\n const allAspects: unknown[] = [];\n\n for (const file of files) {\n const components = await processComponentFile(file, projectRoot);\n allComponents.push(...components);\n\n const pageTypes = await processPageTypeFile(file, projectRoot);\n allPageTypes.push(...pageTypes);\n\n const aspects = await processAspectFile(file, projectRoot);\n allAspects.push(...aspects);\n }\n\n if (allComponents.length === 0 && allPageTypes.length === 0 && allAspects.length === 0) {\n console.log('⚠️ No decorated components, page types, or aspect files found.');\n return {\n componentsGenerated: 0,\n pageTypesGenerated: 0,\n aspectsGenerated: 0,\n totalFiles: 0,\n };\n }\n\n // Generate component cartridge files\n if (allComponents.length > 0) {\n console.log(`✅ Found ${allComponents.length} decorated component(s):`);\n for (const component of allComponents) {\n await generateComponentCartridge(component as Record<string, unknown>, componentsOutputDir, dryRun);\n }\n if (dryRun) {\n console.log(\n `📄 [DRY RUN] Would generate ${allComponents.length} component metadata file(s) in: ${componentsOutputDir}`\n );\n } else {\n console.log(\n `📄 Generated ${allComponents.length} component metadata file(s) in: ${componentsOutputDir}`\n );\n }\n }\n\n // Generate page type cartridge files\n if (allPageTypes.length > 0) {\n console.log(`✅ Found ${allPageTypes.length} decorated page type(s):`);\n for (const pageType of allPageTypes) {\n await generatePageTypeCartridge(pageType as Record<string, unknown>, pagesOutputDir, dryRun);\n }\n if (dryRun) {\n console.log(\n `📄 [DRY RUN] Would generate ${allPageTypes.length} page type metadata file(s) in: ${pagesOutputDir}`\n );\n } else {\n console.log(`📄 Generated ${allPageTypes.length} page type metadata file(s) in: ${pagesOutputDir}`);\n }\n }\n\n if (allAspects.length > 0) {\n console.log(`✅ Found ${allAspects.length} decorated aspect(s):`);\n for (const aspect of allAspects) {\n await generateAspectCartridge(aspect as Record<string, unknown>, aspectsOutputDir, dryRun);\n }\n if (dryRun) {\n console.log(\n `📄 [DRY RUN] Would generate ${allAspects.length} aspect metadata file(s) in: ${aspectsOutputDir}`\n );\n } else {\n console.log(`📄 Generated ${allAspects.length} aspect metadata file(s) in: ${aspectsOutputDir}`);\n }\n }\n\n // Run ESLint --fix to format generated JSON files according to project settings\n const shouldLintFix = options?.lintFix !== false; // Default to true\n if (\n !dryRun &&\n shouldLintFix &&\n (allComponents.length > 0 || allPageTypes.length > 0 || allAspects.length > 0)\n ) {\n lintGeneratedFiles(metadataDir, projectRoot);\n }\n\n // Return statistics\n return {\n componentsGenerated: allComponents.length,\n pageTypesGenerated: allPageTypes.length,\n aspectsGenerated: allAspects.length,\n totalFiles: allComponents.length + allPageTypes.length + allAspects.length,\n };\n } catch (error) {\n console.error('❌ Error:', (error as Error).message);\n process.exit(1);\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;AA0CA,MAAa,cAAc;AAC3B,MAAa,kBAAkB;AAG/B,MAAa,eAAe;CACxB,KAAK;CACL,MAAM;CACN,QAAQ;CACX;AAGD,MAAa,gBAAgB;CACzB,iBAAiB;CACjB,6BAA6B;CAC7B,kBAAkB;CACrB;AAGD,MAAa,oBAAoB;CAC7B,OAAO;CACP,mBAAmB;CACtB;;;;;;;;;;;;;;AChCD,SAAgB,iBACZ,UACA,QACA,WACA,QAEA,UACkB;CAClB,MAAM,WAAW,GAAG,YAAY,GAAGA;AASnC,QAPiC;EAC7B,SAAS,WAAW;EACpB,KAAK;EACL,MAAM,EAAE,OAAO,WAAW;EAC1B;EACA,GAAI,YAAY,EAAE,MAAM,UAAU;EACrC;;;;;;;;AAUL,SAAgB,yBAAyB,UAA8B;AACnE,KAAI,SAAS,eAAe,IACxB,OAAM,IAAI,MAAM,6CAA6C;;;;;;;;;;;;AAerE,eAAsB,YAAY,MAA0E;CACxG,MAAM,MAAM,KAAK;CAEjB,MAAMC,eAA4B;EAC9B,GAAG;EACH,SAAS;GACL,eAAe,SAAS,KAAK,KAAK;GAClC,GAAG,KAAK;GACX;EACJ;AAGD,KAAI,KAAK,MAAM;EACX,MAAM,WAAW,IAAI,iBAAiB;AACtC,SAAO,QAAQ,KAAK,KAAK,CAAC,SAAS,CAAC,KAAK,WAAW;AAChD,YAAS,OAAO,KAAK,OAAO,MAAM,CAAC;IACrC;AACF,eAAa,OAAO;AACpB,eAAa,UAAU;GACnB,GAAG,aAAa;GAChB,gBAAgB,cAAc;GACjC;;AAGL,KAAI;EACA,MAAM,WAAW,MAAM,MAAM,KAAK,aAAa;EAE/C,MAAM,OAAO,SAAS,QAAQ,IAAI,eAAe,EAAE,SAAS,cAAc,iBAAiB,GACrF,MAAM,SAAS,MAAM,GACrB,MAAM,SAAS,MAAM;EAG3B,MAAMC,UAAkC,EAAE;AAC1C,WAAS,QAAQ,SAAS,OAAO,QAAQ;AACrC,WAAQ,OAAO;IACjB;AAEF,SAAO;GACH,UAAU;IACN,YAAY,SAAS;IACrB,eAAe,SAAS;IACxB;IACH;GACD;GACH;UACI,OAAO;AACZ,QAAM,IAAI,MAAM,wBAAwB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GAAG;;;;;;;;;AChGzG,IAAa,kBAAb,cAAqC,MAAM;CACvC,YAAY,SAAiB;AACzB,QAAM,QAAQ;AACd,OAAK,OAAO;;;;;;;;;AAUpB,SAAgB,iBAAiB,UAAwB;AACrD,KAAI,CAAC,YAAY,OAAO,aAAa,SACjC,OAAM,IAAI,gBAAgB,sDAAsD;AAGpF,KAAI,SAAS,MAAM,CAAC,WAAW,EAC3B,OAAM,IAAI,gBAAgB,qCAAqC;AAInE,KAAI,CAAC,SAAS,SAAS,IAAI,CACvB,OAAM,IAAI,gBAAgB,iDAAiD;;;;;;;;AAUnF,SAAgB,sBAAsB,eAA6B;AAC/D,KAAI,CAAC,iBAAiB,OAAO,kBAAkB,SAC3C,OAAM,IAAI,gBAAgB,uDAAuD;AAGrF,KAAI,cAAc,MAAM,CAAC,WAAW,EAChC,OAAM,IAAI,gBAAgB,sCAAsC;CAIpE,MAAM,MAAM,QAAQ,cAAc,CAAC,aAAa;AAChD,KAAI,QAAQ,GACR,OAAM,IAAI,gBAAgB,uCAAuC,MAAM;;;;;;;;AAU/E,SAAgB,kBAAkB,WAAyB;AACvD,KAAI,CAAC,aAAa,OAAO,cAAc,SACnC,OAAM,IAAI,gBAAgB,oEAAoE;AAGlG,KAAI,UAAU,MAAM,CAAC,WAAW,EAC5B,OAAM,IAAI,gBAAgB,mDAAmD;AAIjF,KAAI,UAAU,SAAS,GACnB,OAAM,IAAI,gBAAgB,4DAA4D;;;;;;;;AAU9F,SAAgB,gBAAgB,SAAuB;AACnD,KAAI,CAAC,WAAW,OAAO,YAAY,SAC/B,OAAM,IAAI,gBAAgB,qDAAqD;AAGnF,KAAI,QAAQ,MAAM,CAAC,WAAW,EAC1B,OAAM,IAAI,gBAAgB,oCAAoC;AAKlE,KAAI,CADiB,oBACH,KAAK,QAAQ,CAC3B,OAAM,IAAI,gBACN,+GACH;;;;;;;;AAUT,SAAgB,mBAAmB,YAA0B;AACzD,KAAI,CAAC,cAAc,OAAO,eAAe,SACrC,OAAM,IAAI,gBAAgB,yDAAyD;AAGvF,KAAI,CAAC,WAAW,WAAW,IAAI,CAC3B,OAAM,IAAI,gBAAgB,8CAA8C;;;;;;;;;;;;AAchF,SAAgB,yBACZ,UACA,iBACA,wBACA,WACA,qBACI;AACJ,kBAAiB,SAAS;AAC1B,iBAAgB,gBAAgB;AAChC,uBAAsB,uBAAuB;AAC7C,mBAAkB,UAAU;AAC5B,oBAAmB,oBAAoB;;;;;;;;;;;ACpH3C,SAAS,YAAY,UAA0B;AAC3C,QAAO,KAAK,SAAS,SAAS;;;;;;;;;AAUlC,eAAe,aAAa,WAAmB,YAAmC;CAC9E,MAAM,UAAU,SAAS,OAAO,EAAE,MAAM,EAAE,OAAO,GAAG,EAAE,CAAC;CACvD,MAAM,SAAS,GAAG,kBAAkB,WAAW;AAE/C,SAAQ,KAAK,OAAO;AACpB,SAAQ,UAAU,WAAW,MAAM;AACnC,OAAM,QAAQ,UAAU;;;;;;;;;;AAW5B,SAAS,oBAAoB,UAAkB,YAAoB,MAAsB;AAErF,QAAO,WAAW,WAAW,YAAY,GAAG,WAAW,GADtC,YAAY,KAAK;;;;;;;;;;;AAatC,eAAe,MACX,UACA,YACA,MACA,WAE8C;CAC9C,MAAM,WAAW,oBAAoB,UAAU,YAAY,KAAK;CAChE,MAAM,OAAO,iBAAiB,UAAU,YAAY,WAAW,aAAa,MAAM;EAC9E,QAAQ,kBAAkB;EAC1B,QAAQ,kBAAkB;EAC7B,CAAC;AACF,MAAK,MAAM;CACX,MAAM,SAAS,MAAM,YAAY,KAAK;AACtC,0BAAyB,OAAO,SAAS;AACzC,QAAO;;;;;;;;;;;AAYX,eAAe,WACX,UACA,YACA,MACA,WAE8C;CAC9C,MAAM,WAAW,oBAAoB,UAAU,YAAY,KAAK;CAChE,MAAM,OAAO,iBAAiB,UAAU,YAAY,WAAW,aAAa,OAAO;AACnF,MAAK,MAAM;CACX,MAAM,SAAS,MAAM,YAAY,KAAK;AACtC,0BAAyB,OAAO,SAAS;AACzC,QAAO;;;;;;;;;;;AAYX,eAAe,SACX,UACA,iBACA,UACA,WAE8C;CAC9C,MAAM,aAAa,GAAG,gBAAgB,GAAG;AAEzC,KAAI;EACA,MAAM,WAAW,oBAAoB,UAAU,YAAY,SAAS;EACpE,MAAM,OAAO,iBAAiB,UAAU,YAAY,WAAW,aAAa,IAAI;AAChF,OAAK,MAAM;AAIX,OAAK,OAAO,GAAG,iBAAiB,SAAS;AAGzC,OAA
K,SAAS;AACd,OAAK,UAAU;GACX,GAAG,KAAK;GACR,gBAAgB,cAAc;GACjC;EAED,MAAM,SAAS,MAAM,YAAY,KAAK;AACtC,2BAAyB,OAAO,SAAS;AAEzC,MAAI,CAAC;GAAC;GAAK;GAAK;GAAI,CAAC,SAAS,OAAO,SAAS,WAAW,CACrD,OAAM,IAAI,MACN,cAAc,SAAS,YAAY,OAAO,SAAS,WAAW,IAAI,OAAO,SAAS,cAAc,GACnG;AAGL,SAAO;UACF,OAAO;AACZ,QAAM,IAAI,MAAM,cAAc,SAAS,YAAY,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GAAG;;;;;;;;;;;;;;;;;;;AAoBpH,eAAe,WACX,UACA,iBACA,WACA,WACqB;AAGrB,0BAAyB,UAAU,iBAAiB,WAAW,WAFzC,IAAI,gBAAgB,GAAG,gBAAgB,aAE2B;CAGxF,MAAM,cAAc,KAAK,KAAK,KAAK,QAAQ,UAAU,EAAE,YAAY,KAAK,KAAK,CAAC,MAAM;AAEpF,KAAI;AAEA,QAAM,aAAa,WAAW,YAAY;EAC1C,MAAM,OAAO,KAAK,SAAS,YAAY;AAIvC,QAAM,SAAS,UAAU,iBAAiB,aAAa,UAAU;EAGjE,MAAM,cAAc,MAAM,MAAM,UAAU,GAAG,gBAAgB,GAAG,mBAAmB,MAAM,UAAU;AACnG,MAAI,CAAC;GAAC;GAAK;GAAK;GAAI,CAAC,SAAS,YAAY,SAAS,WAAW,CAC1D,OAAM,IAAI,MACN,eAAe,KAAK,wBAAwB,YAAY,SAAS,WAAW,IAAI,YAAY,SAAS,cAAc,GACtH;EAIL,MAAM,eAAe,MAAM,WAAW,UAAU,GAAG,gBAAgB,GAAG,mBAAmB,MAAM,UAAU;AACzG,MAAI,CAAC,CAAC,KAAK,IAAI,CAAC,SAAS,aAAa,SAAS,WAAW,CACtD,OAAM,IAAI,MACN,mBAAmB,KAAK,8CAA8C,aAAa,SAAS,WAAW,IAAI,aAAa,SAAS,cAAc,GAClJ;AAKL,SAAO,EAAE,SADO,YAAY,KAAK,CAAC,QAAQ,QAAQ,GAAG,EACnC;UACb,OAAO;AACZ,MAAI,iBAAiB,MACjB,OAAM;AAEV,QAAM,IAAI,MAAM,eAAe,UAAU,WAAW,OAAO,MAAM,GAAG;WAC9D;AAEN,MAAI,GAAG,WAAW,YAAY,CAC1B,IAAG,WAAW,YAAY;;;;;;ACvNtC,IAAIC,iBAAiC;AAErC,SAAS,oBAAoB,kBAAmC;AAC5D,KAAI,mBAAmB,KACnB,QAAO;AAGX,KAAI;AACA,WAAS,0BAA0B;GAC/B,KAAK;GACL,KAAK,eAAe;GACpB,OAAO;GACV,CAAC;AACF,mBAAiB;SACb;AACJ,mBAAiB;;AAErB,QAAO;;;;;;;;;;;;AAaX,SAAS,qBAAqB,kBAA8C;AACxE,KAAI,CAAC,oBAAoB,iBAAiB,CACtC,OAAM,IAAI,MACN,qGACH;CAIL,MAAM,WAAW,KAAK,QAAQ,EAAE,uBAAuB,YAAY,CAAC,OAAO;AAE3E,KAAI;AAEA,WAAS,iCAAiC,SAAS,IAAI;GACnD,KAAK;GACL,KAAK,eAAe;GACpB,UAAU;GACV,OAAO;IAAC;IAAQ;IAAQ;IAAO;GAClC,CAAC;EACF,MAAM,SAAS,aAAa,UAAU,QAAQ;AAC9C,SAAO,KAAK,MAAM,OAAO;UACpB,OAAO;AACZ,QAAM,IAAI,MAAM,+CAAgD,MAAgB,UAAU;WACpF;AAEN,MAAI;AACA,OAAI,WAAW,SAAS,CACpB,YAAW,SAAS;UAEpB;;;;;;;;;;;;;AAgBhB,SAAgB,gBAAgB,UAAkB,aAA6B;CAE3E,MAAM,gBAAgB,SAAS,QAAQ,OAAO,IAAI;CAIlD,MAAM,aAAa,cADJ,qBAAqB,YAAY,CACR;AAGxC,MAAK,MAAM,SAAS,YAAY;EAE5B,MAAM,iBAAiB,MAAM,KAAK,QAAQ,OAAO,IAAI;AAGrD,MAAI,cAAc,SAAS,eAAe,IAAI,cAAc,SAAS,IAAI,iBAAiB,CACtF,QAAO,MAAM;EAIjB,MAAM,sBAAsB,eAAe,QAAQ,SAAS,GAAG;AAC/D,MAAI,cAAc,SAAS,oBAAoB,IAAI,cAAc,SAAS,IAAI,sBAAsB,CAChG,QAAO,MAAM;;AAKrB,SAAQ,KAAK,2CAA2C,WAAW;AACnE,QAAO;;;;;;;;;AAUX,SAAS,cACL,QACA,aAAa,IACqD;CAClE,MAAMC,SAA6E,EAAE;AAErF,MAAK,MAAM,SAAS,QAAQ;EAExB,IAAIC;AACJ,MAAI,MAAM,MACN,YAAW,cAAc;WAClB,MAAM,MAAM;GAEnB,MAAM,cAAc,MAAM,KAAK,WAAW,IAAI,GAAG,MAAM,OAAO,IAAI,MAAM;AACxE,cAAW,aAAa,GAAG,aAAa,cAAc,QAAQ,QAAQ,IAAI,GAAG;QAG7E,YAAW,cAAc;AAI7B,MAAI,MAAM,GACN,QAAO,KAAK;GACR,IAAI,MAAM;GACV,MAAM;GACN,MAAM,MAAM;GACZ,OAAO,MAAM;GAChB,CAAC;AAIN,MAAI,MAAM,YAAY,MAAM,SAAS,SAAS,GAAG;GAC7C,MAAM,YAAY,MAAM,OAAO,WAAW;AAC1C,UAAO,KAAK,GAAG,cAAc,MAAM,UAAU,UAAU,CAAC;;;AAIhE,QAAO;;;;;ACjJX,MAAM,mBAAmB;CAAC;CAAS;CAAQ;CAAgB;CAAQ;CAAS;CAAW;AAEvF,MAAM,0BAA0B;AAChC,MAAM,qBAAqB;AAkB3B,MAAMC,wBAAkD;CACpD;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACH;AAID,MAAMC,eAAuC;CACzC,QAAQ;CACR,QAAQ;CACR,QAAQ;CACR,QAAQ;CACR,SAAS;CACT,SAAS;CACT,MAAM;CACN,KAAK;CACL,WAAW;CACd;AAGD,SAAS,qBAAqB,eAAwB,aAAsB,WAA4B;AAEpG,KAAI,eAAe;AACf,MAAI,CAAC,sBAAsB,SAAS,cAA+B,EAAE;AACjE,WAAQ,MACJ,kCAAkC,cAAc,eAAe,aAAa,UAAU,sBAAsB,sBAAsB,KAAK,KAAK,GAC/I;AACD,WAAQ,KAAK,EAAE;;AAEnB,SAAO;;AAIX,KAAI,eAAe,aAAa,aAC5B,QAAO,aAAa;AAIxB,QAAO;;AAIX,SAAS,oBAAoB,WAA2B;AACpD,QAAO,UACF,QAAQ,YAAY,MAAM,CAC1B,QAAQ,OAAO,QAAQ,IAAI,aAAa,CAAC,CACzC,MAAM;;AAIf,SAAS,oBAAoB,MAAsB;AAE/C,KAAI,CAAC,QAAQ,KAAK,KAAK,CACnB,QAAO;AAGX,QAAO,KACF,MAAM,SAAS,CACf,KAAK,MAAM,UAAU;AAClB,MAAI,UAAU,EACV,QAAO,KA
AK,aAAa;AAE7B,SAAO,KAAK,OAAO,EAAE,CAAC,aAAa,GAAG,KAAK,MAAM,EAAE,CAAC,aAAa;GACnE,CACD,KAAK,GAAG;;AAGjB,SAAS,mBAAmB,UAA+B,aAAiC;AACxF,KAAI;EACA,MAAM,WAAW,SAAS,aAAa;AACvC,MAAI,SAIA,QAHiB,SAAS,SAAS,CAET,MAAM,IAAI,CAAC,GAAG,MAAM,IAAI,CAAC,GAAG,MAAM;SAG5D;AAIR,QAAO;;AAKX,SAAS,gBAAgB,YAA0B;AAC/C,KAAI,KAAK,gBAAgB,WAAW,CAChC,QAAO,WAAW,iBAAiB;UAC5B,KAAK,iBAAiB,WAAW,CACxC,QAAO,WAAW,iBAAiB;UAC5B,KAAK,cAAc,WAAW,CACrC,QAAO;UACA,KAAK,eAAe,WAAW,CACtC,QAAO;UACA,KAAK,0BAA0B,WAAW,CACjD,QAAO,kBAAkB,WAAW;UAC7B,KAAK,yBAAyB,WAAW,CAChD,QAAO,kBAAkB,WAAW;KAEpC,QAAO,WAAW,SAAS;;AAMnC,SAAS,kBAAkB,eAA6C;CACpE,MAAMC,SAAkC,EAAE;AAE1C,KAAI;EACA,MAAM,aAAa,cAAc,eAAe;AAEhD,OAAK,MAAM,YAAY,WACnB,KAAI,KAAK,qBAAqB,SAAS,EAAE;GACrC,MAAM,OAAO,SAAS,SAAS;GAC/B,MAAM,cAAc,SAAS,gBAAgB;AAE7C,OAAI,YACA,QAAO,QAAQ,gBAAgB,YAAY;;UAIlD,OAAO;AACZ,UAAQ,KAAK,2CAA4C,MAAgB,UAAU;AACnF,SAAO;;AAGX,QAAO;;AAKX,SAAS,kBAAkB,cAA8B;CACrD,MAAMC,SAAoB,EAAE;AAE5B,KAAI;EACA,MAAM,WAAW,aAAa,aAAa;AAE3C,OAAK,MAAM,WAAW,SAClB,QAAO,KAAK,gBAAgB,QAAQ,CAAC;UAEpC,OAAO;AACZ,UAAQ,KAAK,2CAA4C,MAAgB,UAAU;;AAGvF,QAAO;;AAIX,SAAS,mBAAmB,WAA+C;CACvE,MAAMD,SAAkC,EAAE;AAE1C,KAAI;EACA,MAAM,OAAO,UAAU,cAAc;AAErC,MAAI,KAAK,WAAW,EAChB,QAAO;EAIX,MAAM,WAAW,KAAK;AAEtB,MAAI,KAAK,0BAA0B,SAAS,EAAE;GAE1C,MAAM,aAAa,SAAS,eAAe;AAE3C,QAAK,MAAM,YAAY,WACnB,KAAI,KAAK,qBAAqB,SAAS,EAAE;IACrC,MAAM,OAAO,SAAS,SAAS;IAC/B,MAAM,cAAc,SAAS,gBAAgB;AAE7C,QAAI,YACA,QAAO,QAAQ,gBAAgB,YAAY;;aAIhD,KAAK,gBAAgB,SAAS,EAAE;AAEvC,UAAO,KAAK,gBAAgB,SAAS;AAGrC,OAAI,KAAK,SAAS,GAAG;IACjB,MAAM,YAAY,KAAK;AACvB,QAAI,KAAK,0BAA0B,UAAU,EAAE;KAC3C,MAAM,aAAa,UAAU,eAAe;AAE5C,UAAK,MAAM,YAAY,WACnB,KAAI,KAAK,qBAAqB,SAAS,EAAE;MACrC,MAAM,OAAO,SAAS,SAAS;MAC/B,MAAM,cAAc,SAAS,gBAAgB;AAE7C,UAAI,YACA,QAAO,QAAQ,gBAAgB,YAAY;;;;;AAQnE,SAAO;UACF,OAAO;AACZ,UAAQ,KAAK,iDAAkD,MAAgB,UAAU;AACzF,SAAO;;;AAIf,SAAS,4BAA4B,YAAwB,WAA8C;CACvG,MAAME,aAAwC,EAAE;AAEhD,KAAI;EAEA,MAAM,mBAAmB,WAAW,SAAS,UAAU;AACvD,MAAI,CAAC,iBACD,QAAO;EAIX,MAAM,aAAa,iBAAiB,eAAe;AAEnD,OAAK,MAAM,YAAY,YAAY;GAE/B,MAAM,qBAAqB,SAAS,aAAa,sBAAsB;AACvE,OAAI,CAAC,mBACD;GAGJ,MAAM,YAAY,SAAS,SAAS;GACpC,MAAM,SAAS,mBAAmB,mBAAmB;GAErD,MAAM,aAAa,CAAC,SAAS,kBAAkB;GAE/C,MAAM,eAAgB,OAAO,QAAmB,mBAAmB,UAAU,WAAW;GAExF,MAAMC,YAAqC;IACvC,IAAI,OAAO,MAAM;IACjB,MAAM,OAAO,QAAQ,oBAAoB,UAAU;IACnD,MAAM,qBAAqB,OAAO,MAAgB,cAAc,UAAU;IAC1E,UAAU,OAAO,aAAa,SAAY,OAAO,WAAW;IAC5D,aAAa,OAAO,eAAe,UAAU;IAChD;AAED,OAAI,OAAO,OACP,WAAU,SAAS,OAAO;AAG9B,OAAI,OAAO,iBAAiB,OACxB,WAAU,gBAAgB,OAAO;AAGrC,cAAW,KAAK,UAAU;;UAEzB,OAAO;AACZ,UAAQ,KAAK,oDAAoD,UAAU,IAAK,MAAgB,UAAU;;AAG9G,QAAO;;AAGX,SAAS,mCAAmC,YAAwB,WAA8C;CAC9G,MAAMC,oBAA+C,EAAE;AAEvD,KAAI;EAEA,MAAM,mBAAmB,WAAW,SAAS,UAAU;AACvD,MAAI,CAAC,iBACD,QAAO;EAIX,MAAM,uBAAuB,iBAAiB,aAAa,mBAAmB;AAC9E,MAAI,sBAAsB;GACtB,MAAM,OAAO,qBAAqB,cAAc;AAChD,OAAI,KAAK,SAAS,GAAG;IACjB,MAAM,WAAW,KAAK;AAGtB,QAAI,KAAK,yBAAyB,SAAS,EAAE;KACzC,MAAM,WAAW,SAAS,aAAa;AACvC,UAAK,MAAM,WAAW,SAClB,KAAI,KAAK,0BAA0B,QAAQ,EAAE;MACzC,MAAM,eAAe,mBAAmB,EACpC,oBAAoB,CAAC,QAAQ,EAChC,CAAyB;MAE1B,MAAMC,mBAA4C;OAC9C,IAAI,aAAa,MAAM;OACvB,MAAM,aAAa,QAAQ;OAC9B;AAGD,UAAI,aAAa,eACb,kBAAiB,kBAAkB,aAAa;AAGpD,UAAI,MAAM,QAAQ,aAAa,wBAAwB,CACnD,kBAAiB,4BAA4B,aAAa,wBAAwB,KAC7E,UAAU,EACP,SAAS,MACZ,EACJ;AAGL,UAAI,MAAM,QAAQ,aAAa,wBAAwB,CACnD,kBAAiB,4BAA4B,aAAa,wBAAwB,KAC7E,UAAU,EACP,SAAS,MACZ,EACJ;AAGL,UAAI,aAAa,kBAAkB,OAC/B,kBAAiB,iBAAiB,aAAa;AAGnD,UAAI,aAAa,kBAAkB,OAC/B,kBAAiB,iBAAiB,aAAa;AAGnD,UAAI,aAAa,kBAAkB,OAC/B,kBAAiB,iBAAiB,aAAa;AAGnD,UAAI,aAAa,6BACb,kBAAiB,iCACb,aAAa;AAGrB,wBAAkB,KAAK,iBAAiB;;;;;UAMvD,OAAO;AACZ,UAAQ,KACJ,4DAA4D,UAAU,IAAK,MAAgB,UAC9F;;AAGL,QAAO;;AAGX,eAAe,qBAAqB,UAAkB,cAA0C;AAC5F,KAAI;EACA,MAAM,UAAU,MAAM,SAAS,UAAU,QAAQ;E
ACjD,MAAMC,aAAwB,EAAE;AAGhC,MAAI,CAAC,QAAQ,SAAS,aAAa,CAC/B,QAAO;AAOX,MAAI;GAOA,MAAM,aALU,IAAI,QAAQ;IACxB,uBAAuB;IACvB,6BAA6B;IAChC,CAAC,CAEyB,iBAAiB,UAAU,QAAQ;GAE9D,MAAM,UAAU,WAAW,YAAY;AAEvC,QAAK,MAAM,oBAAoB,SAAS;IACpC,MAAM,qBAAqB,iBAAiB,aAAa,YAAY;AACrE,QAAI,CAAC,mBACD;IAGJ,MAAM,YAAY,iBAAiB,SAAS;AAC5C,QAAI,CAAC,UACD;IAGJ,MAAM,kBAAkB,mBAAmB,mBAAmB;IAE9D,MAAM,aAAa,4BAA4B,YAAY,UAAU;IACrE,MAAM,oBAAoB,mCAAmC,YAAY,UAAU;IAEnF,MAAM,oBAAoB;KACtB,QAAQ,gBAAgB,MAAM,UAAU,aAAa;KACrD,MAAM,gBAAgB,QAAQ,oBAAoB,UAAU;KAC5D,OAAO,gBAAgB,SAAS;KAChC,aAAa,gBAAgB,eAAe,qBAAqB;KACjE;KACA;KACH;AAED,eAAW,KAAK,kBAAkB;;WAEjC,OAAO;AACZ,WAAQ,KAAK,mCAAmC,SAAS,IAAK,MAAgB,QAAQ;;AAG1F,SAAO;UACF,OAAO;AACZ,UAAQ,KAAK,gCAAgC,SAAS,IAAK,MAAgB,QAAQ;AACnF,SAAO,EAAE;;;AAIjB,eAAe,oBAAoB,UAAkB,aAAyC;AAC1F,KAAI;EACA,MAAM,UAAU,MAAM,SAAS,UAAU,QAAQ;EACjD,MAAMC,YAAuB,EAAE;AAG/B,MAAI,CAAC,QAAQ,SAAS,YAAY,CAC9B,QAAO;AAGX,MAAI;GAOA,MAAM,aALU,IAAI,QAAQ;IACxB,uBAAuB;IACvB,6BAA6B;IAChC,CAAC,CAEyB,iBAAiB,UAAU,QAAQ;GAE9D,MAAM,UAAU,WAAW,YAAY;AAEvC,QAAK,MAAM,oBAAoB,SAAS;IACpC,MAAM,oBAAoB,iBAAiB,aAAa,WAAW;AACnE,QAAI,CAAC,kBACD;IAGJ,MAAM,YAAY,iBAAiB,SAAS;AAC5C,QAAI,CAAC,UACD;IAGJ,MAAM,iBAAiB,mBAAmB,kBAAkB;IAE5D,MAAM,aAAa,4BAA4B,YAAY,UAAU;IACrE,MAAM,oBAAoB,mCAAmC,YAAY,UAAU;IACnF,MAAM,QAAQ,gBAAgB,UAAU,YAAY;IAEpD,MAAM,mBAAmB;KACrB,QAAQ,eAAe,MAAM,UAAU,aAAa;KACpD,MAAM,eAAe,QAAQ,oBAAoB,UAAU;KAC3D,aAAa,eAAe,eAAe,qBAAqB;KAChE;KACA,sBAAsB,eAAe,wBAAwB,EAAE;KAC/D;KACA;KACH;AAED,cAAU,KAAK,iBAAiB;;WAE/B,OAAO;AACZ,WAAQ,KAAK,mCAAmC,SAAS,IAAK,MAAgB,QAAQ;;AAG1F,SAAO;UACF,OAAO;AACZ,UAAQ,KAAK,gCAAgC,SAAS,IAAK,MAAgB,QAAQ;AACnF,SAAO,EAAE;;;AAIjB,eAAe,kBAAkB,UAAkB,cAA0C;AACzF,KAAI;EACA,MAAM,UAAU,MAAM,SAAS,UAAU,QAAQ;EACjD,MAAMC,UAAqB,EAAE;AAG7B,MAAI,CAAC,SAAS,SAAS,QAAQ,IAAI,CAAC,QAAQ,MAAM,CAAC,WAAW,IAAI,CAC9D,QAAO;AAIX,MAAI,CAAC,SAAS,SAAS,YAAY,IAAI,CAAC,SAAS,SAAS,cAAc,CACpE,QAAO;AAGX,MAAI;GAEA,MAAM,aAAa,KAAK,MAAM,QAAQ;GAGtC,MAAM,WAAW,SAAS,UAAU,QAAQ;AAG5C,OAAI,CAAC,WAAW,QAAQ,CAAC,WAAW,sBAChC,QAAO;GAGX,MAAM,iBAAiB;IACnB,IAAI;IACJ,MAAM,WAAW;IACjB,aAAa,WAAW,eAAe,gBAAgB,WAAW;IAClE,sBAAsB,WAAW,yBAAyB,EAAE;IAC5D,sBAAsB,WAAW,0BAA0B,EAAE;IAChE;AAED,WAAQ,KAAK,eAAe;WACvB,YAAY;AACjB,WAAQ,KAAK,yCAAyC,SAAS,IAAK,WAAqB,QAAQ;;AAGrG,SAAO;UACF,OAAO;AACZ,UAAQ,KAAK,gCAAgC,SAAS,IAAK,MAAgB,QAAQ;AACnF,SAAO,EAAE;;;AAIjB,eAAe,2BACX,WACA,WACA,SAAS,OACI;CACb,MAAM,WAAW,oBAAoB,UAAU,OAAiB;CAChE,MAAM,WAAW,KAAK,WAAW,UAAU,MAAgB;CAC3D,MAAM,aAAa,KAAK,UAAU,GAAG,SAAS,OAAO;AAErD,KAAI,CAAC,QAAQ;AAET,MAAI;AACA,SAAM,MAAM,UAAU,EAAE,WAAW,MAAM,CAAC;UACtC;EAIR,MAAM,4BAA4B,CAC9B;GACI,IAAI,UAAU;GACd,MAAM,UAAU;GAChB,aAAa,UAAU;GACvB,uBAAuB,UAAU;GACpC,CACJ;EAED,MAAM,gBAAgB;GAClB,MAAM,UAAU;GAChB,aAAa,UAAU;GACvB,OAAO,UAAU;GACjB,WAAW;GACX,oBAAoB,UAAU,qBAAqB,EAAE;GACrD,6BAA6B;GAChC;AAED,QAAM,UAAU,YAAY,KAAK,UAAU,eAAe,MAAM,EAAE,CAAC;;CAGvE,MAAM,SAAS,SAAS,mBAAmB;AAC3C,SAAQ,IACJ,GAAG,OAAO,GAAG,OAAO,UAAU,OAAO,CAAC,IAAI,OAAO,UAAU,KAAK,CAAC,IAAI,OAAQ,UAAU,WAAyB,OAAO,CAAC,iBAAiB,SAAS,OACrJ;;AAGL,eAAe,0BACX,UACA,WACA,SAAS,OACI;CACb,MAAM,WAAW,oBAAoB,SAAS,KAAe;CAC7D,MAAM,aAAa,KAAK,WAAW,GAAG,SAAS,OAAO;AAEtD,KAAI,CAAC,QAAQ;EACT,MAAMC,gBAAyC;GAC3C,MAAM,SAAS;GACf,aAAa,SAAS;GACtB,WAAW;GACX,oBAAoB,SAAS,qBAAqB,EAAE;GACvD;AAGD,MAAI,SAAS,cAAe,SAAS,WAAyB,SAAS,EASnE,eAAc,8BARoB,CAC9B;GACI,IAAI,SAAS,UAAU;GACvB,MAAM,SAAS;GACf,aAAa,SAAS;GACtB,uBAAuB,SAAS;GACnC,CACJ;AAKL,MAAI,SAAS,qBACT,eAAc,yBAAyB,SAAS;AAGpD,MAAI,SAAS,MACT,eAAc,QAAQ,SAAS;AAGnC,QAAM,UAAU,YAAY,KAAK,UAAU,eAAe,MAAM,EAAE,CAAC;;CAGvE,MAAM,SAAS,SAAS,mBAAmB;AAC3C,SAAQ,IACJ,GAAG,OAAO,GAAG,OAAO,SAAS,KAAK,CAAC,IAAI,OAAO,SAAS,YAAY,CAAC,IAAI,OAAQ,SAAS,WAAyB,OAAO,CAAC,iBAAiB,SAAS,OACvJ;;AAGL,eAAe,wBACX,Q
ACA,WACA,SAAS,OACI;CACb,MAAM,WAAW,oBAAoB,OAAO,GAAa;CACzD,MAAM,aAAa,KAAK,WAAW,GAAG,SAAS,OAAO;AAEtD,KAAI,CAAC,QAAQ;EACT,MAAMA,gBAAyC;GAC3C,MAAM,OAAO;GACb,aAAa,OAAO;GACpB,WAAW;GACX,uBAAuB,OAAO,wBAAwB,EAAE;GAC3D;AAGD,MAAI,OAAO,qBACP,eAAc,yBAAyB,OAAO;AAGlD,QAAM,UAAU,YAAY,KAAK,UAAU,eAAe,MAAM,EAAE,CAAC;;CAGvE,MAAM,SAAS,SAAS,mBAAmB;AAC3C,SAAQ,IACJ,GAAG,OAAO,GAAG,OAAO,OAAO,KAAK,CAAC,IAAI,OAAO,OAAO,YAAY,CAAC,IAAI,OAAQ,OAAO,qBAAmC,OAAO,CAAC,iBAAiB,SAAS,OAC3J;;;;;;AAyCL,SAAS,mBAAmB,aAAqB,aAA2B;AACxE,KAAI;AACA,UAAQ,IAAI,qDAAqD;AAMjE,WAFgB,eAAe,YAAY,oDAEzB;GACd,KAAK;GACL,OAAO;GACP,UAAU;GACb,CAAC;AAEF,UAAQ,IAAI,sCAAsC;UAC7C,OAAO;EAGZ,MAAM,YAAY;AAIlB,MAAI,UAAU,WAAW,GAAG;GACxB,MAAM,SAAS,UAAU,UAAU,UAAU,UAAU;AACvD,WAAQ,KAAK,4CAA4C,SAAS;aAC3D,UAAU,UAAU,UAAU,OAAO,SAAS,QAAQ,CAC7D,SAAQ,KAAK,2FAA2F;MAGxG,SAAQ,IAAI,sCAAsC;;;AAM9D,eAAsB,iBAClB,kBACA,mBACA,SAC+B;AAC/B,KAAI;EACA,MAAM,YAAY,SAAS;EAC3B,MAAM,oBAAoB,aAAa,UAAU,SAAS;EAC1D,MAAM,SAAS,SAAS,UAAU;AAElC,MAAI,OACA,SAAQ,IAAI,mEAAmE;WACxE,kBACP,SAAQ,IAAI,8BAA8B,UAAU,OAAO,uBAAuB;MAElF,SAAQ,IAAI,oEAAoE;EAGpF,MAAM,cAAc,QAAQ,iBAAiB;EAC7C,MAAM,SAAS,KAAK,aAAa,MAAM;EACvC,MAAM,cAAc,QAAQ,kBAAkB;EAC9C,MAAM,sBAAsB,KAAK,aAAa,aAAa;EAC3D,MAAM,iBAAiB,KAAK,aAAa,QAAQ;EACjD,MAAM,mBAAmB,KAAK,aAAa,UAAU;AAGrD,MAAI,CAAC,QAAQ;AAET,OAAI,CAAC,mBAAmB;AACpB,YAAQ,IAAI,+CAA+C;AAC3D,SAAK,MAAM,aAAa;KAAC;KAAqB;KAAgB;KAAiB,CAC3E,KAAI;AACA,WAAM,GAAG,WAAW;MAAE,WAAW;MAAM,OAAO;MAAM,CAAC;AACrD,aAAQ,IAAI,iBAAiB,YAAY;YACrC;AAEJ,aAAQ,IAAI,wCAAwC,YAAY;;SAIxE,SAAQ,IAAI,8EAA8E;AAI9F,WAAQ,IAAI,oCAAoC;AAChD,QAAK,MAAM,aAAa;IAAC;IAAqB;IAAgB;IAAiB,CAC3E,KAAI;AACA,UAAM,MAAM,WAAW,EAAE,WAAW,MAAM,CAAC;YACtC,OAAO;AACZ,QAAI;AACA,WAAM,OAAO,UAAU;YAEnB;AACJ,aAAQ,MACJ,8CAA8C,UAAU,IAAK,MAAgB,UAChF;AACD,aAAQ,KAAK,EAAE;;;aAIpB,kBACP,SAAQ,IAAI,8BAA8B,UAAU,OAAO,mBAAmB;MAE9E,SAAQ,IAAI,6DAA6D;EAG7E,IAAIC,QAAkB,EAAE;AAExB,MAAI,qBAAqB,WAAW;AAEhC,WAAQ,UAAU,KAAK,OAAO,QAAQ,aAAa,GAAG,CAAC;AACvD,WAAQ,IAAI,iBAAiB,MAAM,OAAO,uBAAuB;SAC9D;GAEH,MAAM,gBAAgB,OAAO,QAA+B;IACxD,MAAM,UAAU,MAAM,QAAQ,KAAK,EAAE,eAAe,MAAM,CAAC;AAE3D,SAAK,MAAM,SAAS,SAAS;KACzB,MAAM,WAAW,KAAK,KAAK,MAAM,KAAK;AAEtC,SAAI,MAAM,aAAa,EACnB;UAAI,CAAC,iBAAiB,SAAS,MAAM,KAAK,CACtC,OAAM,cAAc,SAAS;gBAGjC,MAAM,QAAQ,KACbC,UAAQ,MAAM,KAAK,KAAK,SACrBA,UAAQ,MAAM,KAAK,KAAK,UACxBA,UAAQ,MAAM,KAAK,KAAK,SAE5B,OAAM,KAAK,SAAS;;;AAKhC,SAAM,cAAc,OAAO;;EAI/B,MAAMC,gBAA2B,EAAE;EACnC,MAAMC,eAA0B,EAAE;EAClC,MAAMC,aAAwB,EAAE;AAEhC,OAAK,MAAM,QAAQ,OAAO;GACtB,MAAM,aAAa,MAAM,qBAAqB,MAAM,YAAY;AAChE,iBAAc,KAAK,GAAG,WAAW;GAEjC,MAAM,YAAY,MAAM,oBAAoB,MAAM,YAAY;AAC9D,gBAAa,KAAK,GAAG,UAAU;GAE/B,MAAM,UAAU,MAAM,kBAAkB,MAAM,YAAY;AAC1D,cAAW,KAAK,GAAG,QAAQ;;AAG/B,MAAI,cAAc,WAAW,KAAK,aAAa,WAAW,KAAK,WAAW,WAAW,GAAG;AACpF,WAAQ,IAAI,kEAAkE;AAC9E,UAAO;IACH,qBAAqB;IACrB,oBAAoB;IACpB,kBAAkB;IAClB,YAAY;IACf;;AAIL,MAAI,cAAc,SAAS,GAAG;AAC1B,WAAQ,IAAI,WAAW,cAAc,OAAO,0BAA0B;AACtE,QAAK,MAAM,aAAa,cACpB,OAAM,2BAA2B,WAAsC,qBAAqB,OAAO;AAEvG,OAAI,OACA,SAAQ,IACJ,+BAA+B,cAAc,OAAO,kCAAkC,sBACzF;OAED,SAAQ,IACJ,gBAAgB,cAAc,OAAO,kCAAkC,sBAC1E;;AAKT,MAAI,aAAa,SAAS,GAAG;AACzB,WAAQ,IAAI,WAAW,aAAa,OAAO,0BAA0B;AACrE,QAAK,MAAM,YAAY,aACnB,OAAM,0BAA0B,UAAqC,gBAAgB,OAAO;AAEhG,OAAI,OACA,SAAQ,IACJ,+BAA+B,aAAa,OAAO,kCAAkC,iBACxF;OAED,SAAQ,IAAI,gBAAgB,aAAa,OAAO,kCAAkC,iBAAiB;;AAI3G,MAAI,WAAW,SAAS,GAAG;AACvB,WAAQ,IAAI,WAAW,WAAW,OAAO,uBAAuB;AAChE,QAAK,MAAM,UAAU,WACjB,OAAM,wBAAwB,QAAmC,kBAAkB,OAAO;AAE9F,OAAI,OACA,SAAQ,IACJ,+BAA+B,WAAW,OAAO,+BAA+B,mBACnF;OAED,SAAQ,IAAI,gBAAgB,WAAW,OAAO,+BAA+B,mBAAmB;;EAKxG,MAAM,gBAAgB,SAAS,YAAY;AAC3C,MACI,CAAC,UACD,kBACC,cAAc,SAAS,KAAK,aAAa,SAAS,KAAK,WAAW,SAAS,GAE5E,oBAAmB,aAAa,YAAY;AAIhD,SAAO;GACH,qBAAqB,cAAc;GACnC,oBAAoB,aAAa
;GACjC,kBAAkB,WAAW;GAC7B,YAAY,cAAc,SAAS,aAAa,SAAS,WAAW;GACvE;UACI,OAAO;AACZ,UAAQ,MAAM,YAAa,MAAgB,QAAQ;AACnD,UAAQ,KAAK,EAAE"}
|