@salesforce/storefront-next-dev 0.2.0-alpha.2 → 0.3.0-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/dist/cartridge-services/index.d.ts.map +1 -1
  2. package/dist/cartridge-services/index.js +171 -50
  3. package/dist/cartridge-services/index.js.map +1 -1
  4. package/dist/commands/create-bundle.js +12 -11
  5. package/dist/commands/create-instructions.js +7 -5
  6. package/dist/commands/create-storefront.js +18 -22
  7. package/dist/commands/deploy-cartridge.js +67 -26
  8. package/dist/commands/dev.js +6 -4
  9. package/dist/commands/extensions/create.js +2 -0
  10. package/dist/commands/extensions/install.js +3 -7
  11. package/dist/commands/extensions/list.js +2 -0
  12. package/dist/commands/extensions/remove.js +3 -7
  13. package/dist/commands/generate-cartridge.js +23 -2
  14. package/dist/commands/preview.js +15 -10
  15. package/dist/commands/push.js +25 -19
  16. package/dist/commands/validate-cartridge.js +51 -0
  17. package/dist/config.js +74 -47
  18. package/dist/configs/react-router.config.d.ts.map +1 -1
  19. package/dist/configs/react-router.config.js +36 -0
  20. package/dist/configs/react-router.config.js.map +1 -1
  21. package/dist/dependency-utils.js +14 -16
  22. package/dist/entry/server.d.ts.map +1 -1
  23. package/dist/entry/server.js +221 -11
  24. package/dist/entry/server.js.map +1 -1
  25. package/dist/generate-cartridge.js +106 -50
  26. package/dist/index.d.ts +127 -13
  27. package/dist/index.d.ts.map +1 -1
  28. package/dist/index.js +1147 -167
  29. package/dist/index.js.map +1 -1
  30. package/dist/local-dev-setup.js +13 -13
  31. package/dist/logger/index.d.ts +20 -0
  32. package/dist/logger/index.d.ts.map +1 -0
  33. package/dist/logger/index.js +69 -0
  34. package/dist/logger/index.js.map +1 -0
  35. package/dist/logger.js +79 -33
  36. package/dist/logger2.js +1 -0
  37. package/dist/manage-extensions.js +7 -13
  38. package/dist/mrt/ssr.mjs +60 -72
  39. package/dist/mrt/ssr.mjs.map +1 -1
  40. package/dist/mrt/streamingHandler.mjs +66 -78
  41. package/dist/mrt/streamingHandler.mjs.map +1 -1
  42. package/dist/react-router/Scripts.d.ts +1 -1
  43. package/dist/react-router/Scripts.d.ts.map +1 -1
  44. package/dist/react-router/Scripts.js +38 -2
  45. package/dist/react-router/Scripts.js.map +1 -1
  46. package/dist/server.js +296 -16
  47. package/dist/utils.js +4 -4
  48. package/dist/validate-cartridge.js +45 -0
  49. package/package.json +22 -5
@@ -1 +1 @@
1
- {"version":3,"file":"index.js","names":["isCliAvailable: boolean | null","result: Array<{ id: string; path: string; file: string; index?: boolean }>","fullPath: string","VALID_ATTRIBUTE_TYPES: readonly AttributeType[]","TYPE_MAPPING: Record<string, string>","result: Record<string, unknown>","result: unknown[]","attributes: Record<string, unknown>[]","attribute: Record<string, unknown>","regionDefinitions: Record<string, unknown>[]","regionDefinition: Record<string, unknown>","components: unknown[]","pageTypes: unknown[]","aspects: unknown[]","cartridgeData: Record<string, unknown>","files: string[]","allComponents: unknown[]","allPageTypes: unknown[]","allAspects: unknown[]"],"sources":["../../src/cartridge-services/react-router-config.ts","../../src/cartridge-services/generate-cartridge.ts"],"sourcesContent":["/**\n * Copyright 2026 Salesforce, Inc.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { join } from 'node:path';\nimport { existsSync, readFileSync, unlinkSync } from 'node:fs';\nimport { execSync } from 'node:child_process';\nimport { tmpdir } from 'node:os';\nimport { randomUUID } from 'node:crypto';\nimport { npmRunPathEnv } from 'npm-run-path';\nimport type { RouteConfigEntry } from '@react-router/dev/routes';\n\nlet isCliAvailable: boolean | null = null;\n\nfunction checkReactRouterCli(projectDirectory: string): boolean {\n if (isCliAvailable !== null) {\n return isCliAvailable;\n }\n\n try {\n execSync('react-router 
--version', {\n cwd: projectDirectory,\n env: npmRunPathEnv(),\n stdio: 'pipe',\n });\n isCliAvailable = true;\n } catch {\n isCliAvailable = false;\n }\n return isCliAvailable;\n}\n\n/**\n * Get the fully resolved routes from React Router by invoking its CLI.\n * This ensures we get the exact same route resolution as React Router uses internally,\n * including all presets, file-system routes, and custom route configurations.\n * @param projectDirectory - The project root directory\n * @returns Array of resolved route config entries\n * @example\n * const routes = getReactRouterRoutes('/path/to/project');\n * // Returns the same structure as `react-router routes --json`\n */\nfunction getReactRouterRoutes(projectDirectory: string): RouteConfigEntry[] {\n if (!checkReactRouterCli(projectDirectory)) {\n throw new Error(\n 'React Router CLI is not available. Please make sure @react-router/dev is installed and accessible.'\n );\n }\n\n // Use a temp file to avoid Node.js buffer limits (8KB default)\n const tempFile = join(tmpdir(), `react-router-routes-${randomUUID()}.json`);\n\n try {\n // Redirect output to temp file to avoid buffer truncation\n execSync(`react-router routes --json > \"${tempFile}\"`, {\n cwd: projectDirectory,\n env: npmRunPathEnv(),\n encoding: 'utf-8',\n stdio: ['pipe', 'pipe', 'pipe'],\n });\n const output = readFileSync(tempFile, 'utf-8');\n return JSON.parse(output) as RouteConfigEntry[];\n } catch (error) {\n throw new Error(`Failed to get routes from React Router CLI: ${(error as Error).message}`);\n } finally {\n // Clean up temp file\n try {\n if (existsSync(tempFile)) {\n unlinkSync(tempFile);\n }\n } catch {\n // Ignore cleanup errors\n }\n }\n}\n\n/**\n * Convert a file path to its corresponding route path using React Router's CLI.\n * This ensures we get the exact same route resolution as React Router uses internally.\n * @param filePath - Absolute path to the route file\n * @param projectRoot - The project root directory\n * @returns 
The route path (e.g., '/cart', '/product/:productId')\n * @example\n * const route = filePathToRoute('/path/to/project/src/routes/_app.cart.tsx', '/path/to/project');\n * // Returns: '/cart'\n */\nexport function filePathToRoute(filePath: string, projectRoot: string): string {\n // Normalize paths to POSIX-style\n const filePathPosix = filePath.replace(/\\\\/g, '/');\n\n // Get all routes from React Router CLI\n const routes = getReactRouterRoutes(projectRoot);\n const flatRoutes = flattenRoutes(routes);\n\n // Find the route that matches this file\n for (const route of flatRoutes) {\n // Normalize the route file path for comparison\n const routeFilePosix = route.file.replace(/\\\\/g, '/');\n\n // Check if the file path ends with the route file (handles relative vs. absolute paths)\n if (filePathPosix.endsWith(routeFilePosix) || filePathPosix.endsWith(`/${routeFilePosix}`)) {\n return route.path;\n }\n\n // Also check without leading ./\n const routeFileNormalized = routeFilePosix.replace(/^\\.\\//, '');\n if (filePathPosix.endsWith(routeFileNormalized) || filePathPosix.endsWith(`/${routeFileNormalized}`)) {\n return route.path;\n }\n }\n\n // Fallback: if no match found, return a warning path\n console.warn(`Warning: Could not find route for file: ${filePath}`);\n return '/unknown';\n}\n\n/**\n * Flatten a nested route tree into a flat array with computed paths.\n * Each route will have its full path computed from parent paths.\n * @param routes - The nested route config entries\n * @param parentPath - The parent path prefix (used internally for recursion)\n * @returns Flat array of routes with their full paths\n */\nfunction flattenRoutes(\n routes: RouteConfigEntry[],\n parentPath = ''\n): Array<{ id: string; path: string; file: string; index?: boolean }> {\n const result: Array<{ id: string; path: string; file: string; index?: boolean }> = [];\n\n for (const route of routes) {\n // Compute the full path\n let fullPath: string;\n if (route.index) {\n fullPath = 
parentPath || '/';\n } else if (route.path) {\n // Handle paths that already start with / (absolute paths from extensions)\n const pathSegment = route.path.startsWith('/') ? route.path : `/${route.path}`;\n fullPath = parentPath ? `${parentPath}${pathSegment}`.replace(/\\/+/g, '/') : pathSegment;\n } else {\n // Layout route without path - use parent path\n fullPath = parentPath || '/';\n }\n\n // Add this route if it has an id\n if (route.id) {\n result.push({\n id: route.id,\n path: fullPath,\n file: route.file,\n index: route.index,\n });\n }\n\n // Recursively process children\n if (route.children && route.children.length > 0) {\n const childPath = route.path ? fullPath : parentPath;\n result.push(...flattenRoutes(route.children, childPath));\n }\n }\n\n return result;\n}\n","#!/usr/bin/env node\n/**\n * Copyright 2026 Salesforce, Inc.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/* eslint-disable no-console */\nimport { readdir, readFile, writeFile, mkdir, access, rm } from 'node:fs/promises';\nimport { join, extname, resolve, basename } from 'node:path';\nimport { execSync } from 'node:child_process';\nimport { Project, Node, type SourceFile, type PropertyDeclaration, type Decorator } from 'ts-morph';\nimport { filePathToRoute } from './react-router-config.js';\n\n// Re-export `filePathToRoute`\nexport { filePathToRoute };\n\nconst SKIP_DIRECTORIES = ['build', 'dist', 'node_modules', '.git', '.next', 'coverage'];\n\nconst 
DEFAULT_COMPONENT_GROUP = 'odyssey_base';\nconst ARCH_TYPE_HEADLESS = 'headless';\n\ntype AttributeType =\n | 'string'\n | 'text'\n | 'markup'\n | 'integer'\n | 'boolean'\n | 'product'\n | 'category'\n | 'file'\n | 'page'\n | 'image'\n | 'url'\n | 'enum'\n | 'custom'\n | 'cms_record';\n\nconst VALID_ATTRIBUTE_TYPES: readonly AttributeType[] = [\n 'string',\n 'text',\n 'markup',\n 'integer',\n 'boolean',\n 'product',\n 'category',\n 'file',\n 'page',\n 'image',\n 'url',\n 'enum',\n 'custom',\n 'cms_record',\n] as const;\n\n// Type mapping for TypeScript types to B2C Commerce attribute types\n// Based on official schema: https://salesforcecommercecloud.github.io/b2c-dev-doc/docs/current/content/attributedefinition.json\nconst TYPE_MAPPING: Record<string, string> = {\n String: 'string',\n string: 'string',\n Number: 'integer',\n number: 'integer',\n Boolean: 'boolean',\n boolean: 'boolean',\n Date: 'string', // B2C Commerce doesn't have a native date type, use string\n URL: 'url',\n CMSRecord: 'cms_record',\n};\n\n// Resolve attribute type in order: decorator type -> ts-morph type inference -> fallback to string\nfunction resolveAttributeType(decoratorType?: string, tsMorphType?: string, fieldName?: string): string {\n // 1) If the type is set on the decorator, use that (with validation)\n if (decoratorType) {\n if (!VALID_ATTRIBUTE_TYPES.includes(decoratorType as AttributeType)) {\n console.error(\n `Error: Invalid attribute type '${decoratorType}' for field '${fieldName || 'unknown'}'. 
Valid types are: ${VALID_ATTRIBUTE_TYPES.join(', ')}`\n );\n process.exit(1);\n }\n return decoratorType;\n }\n\n // 2) Use the type from ts-morph type inference\n if (tsMorphType && TYPE_MAPPING[tsMorphType]) {\n return TYPE_MAPPING[tsMorphType];\n }\n\n // 3) Fall back to string\n return 'string';\n}\n\n// Convert field name to human-readable name\nfunction toHumanReadableName(fieldName: string): string {\n return fieldName\n .replace(/([A-Z])/g, ' $1') // Add space before capital letters\n .replace(/^./, (str) => str.toUpperCase()) // Capitalize first letter\n .trim();\n}\n\n// Convert name to camelCase filename (handles spaces and hyphens, preserves existing camelCase)\nfunction toCamelCaseFileName(name: string): string {\n // If the name is already camelCase (no spaces or hyphens), return as-is\n if (!/[\\s-]/.test(name)) {\n return name;\n }\n\n return name\n .split(/[\\s-]+/) // Split by whitespace and hyphens\n .map((word, index) => {\n if (index === 0) {\n return word.toLowerCase(); // First word is all lowercase\n }\n return word.charAt(0).toUpperCase() + word.slice(1).toLowerCase(); // Subsequent words are capitalized\n })\n .join(''); // Join without spaces or hyphens\n}\n\nfunction getTypeFromTsMorph(property: PropertyDeclaration, _sourceFile: SourceFile): string {\n try {\n const typeNode = property.getTypeNode();\n if (typeNode) {\n const typeText = typeNode.getText();\n // Extract the base type name from complex types\n const baseType = typeText.split('|')[0].split('&')[0].trim();\n return baseType;\n }\n } catch {\n // If type extraction fails, return string\n }\n\n return 'string';\n}\n\n// Helper function to parse any TypeScript expression into a JavaScript value\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction parseExpression(expression: any): unknown {\n if (Node.isStringLiteral(expression)) {\n return expression.getLiteralValue();\n } else if (Node.isNumericLiteral(expression)) {\n return 
expression.getLiteralValue();\n } else if (Node.isTrueLiteral(expression)) {\n return true;\n } else if (Node.isFalseLiteral(expression)) {\n return false;\n } else if (Node.isObjectLiteralExpression(expression)) {\n return parseNestedObject(expression);\n } else if (Node.isArrayLiteralExpression(expression)) {\n return parseArrayLiteral(expression);\n } else {\n return expression.getText();\n }\n}\n\n// Helper function to parse deeply nested object literals\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction parseNestedObject(objectLiteral: any): Record<string, unknown> {\n const result: Record<string, unknown> = {};\n\n try {\n const properties = objectLiteral.getProperties();\n\n for (const property of properties) {\n if (Node.isPropertyAssignment(property)) {\n const name = property.getName();\n const initializer = property.getInitializer();\n\n if (initializer) {\n result[name] = parseExpression(initializer);\n }\n }\n }\n } catch (error) {\n console.warn(`Warning: Could not parse nested object: ${(error as Error).message}`);\n return result; // Return the result even if there was an error\n }\n\n return result;\n}\n\n// Helper function to parse array literals\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction parseArrayLiteral(arrayLiteral: any): unknown[] {\n const result: unknown[] = [];\n\n try {\n const elements = arrayLiteral.getElements();\n\n for (const element of elements) {\n result.push(parseExpression(element));\n }\n } catch (error) {\n console.warn(`Warning: Could not parse array literal: ${(error as Error).message}`);\n }\n\n return result;\n}\n\n// Parse decorator arguments using ts-morph\nfunction parseDecoratorArgs(decorator: Decorator): Record<string, unknown> {\n const result: Record<string, unknown> = {};\n\n try {\n const args = decorator.getArguments();\n\n if (args.length === 0) {\n return result;\n }\n\n // Handle the first argument\n const firstArg = args[0];\n\n if 
(Node.isObjectLiteralExpression(firstArg)) {\n // First argument is an object literal - parse all its properties\n const properties = firstArg.getProperties();\n\n for (const property of properties) {\n if (Node.isPropertyAssignment(property)) {\n const name = property.getName();\n const initializer = property.getInitializer();\n\n if (initializer) {\n result[name] = parseExpression(initializer);\n }\n }\n }\n } else if (Node.isStringLiteral(firstArg)) {\n // First argument is a string literal - use it as the id\n result.id = parseExpression(firstArg);\n\n // Check if there's a second argument (options object)\n if (args.length > 1) {\n const secondArg = args[1];\n if (Node.isObjectLiteralExpression(secondArg)) {\n const properties = secondArg.getProperties();\n\n for (const property of properties) {\n if (Node.isPropertyAssignment(property)) {\n const name = property.getName();\n const initializer = property.getInitializer();\n\n if (initializer) {\n result[name] = parseExpression(initializer);\n }\n }\n }\n }\n }\n }\n\n return result;\n } catch (error) {\n console.warn(`Warning: Could not parse decorator arguments: ${(error as Error).message}`);\n return result;\n }\n}\n\nfunction extractAttributesFromSource(sourceFile: SourceFile, className: string): Record<string, unknown>[] {\n const attributes: Record<string, unknown>[] = [];\n\n try {\n // Find the class declaration\n const classDeclaration = sourceFile.getClass(className);\n if (!classDeclaration) {\n return attributes;\n }\n\n // Get all properties in the class\n const properties = classDeclaration.getProperties();\n\n for (const property of properties) {\n // Check if the property has an @AttributeDefinition decorator\n const attributeDecorator = property.getDecorator('AttributeDefinition');\n if (!attributeDecorator) {\n continue;\n }\n\n const fieldName = property.getName();\n const config = parseDecoratorArgs(attributeDecorator);\n\n const isRequired = !property.hasQuestionToken();\n\n const 
inferredType = (config.type as string) || getTypeFromTsMorph(property, sourceFile);\n\n const attribute: Record<string, unknown> = {\n id: config.id || fieldName,\n name: config.name || toHumanReadableName(fieldName),\n type: resolveAttributeType(config.type as string, inferredType, fieldName),\n required: config.required !== undefined ? config.required : isRequired,\n description: config.description || `Field: ${fieldName}`,\n };\n\n if (config.values) {\n attribute.values = config.values;\n }\n\n if (config.defaultValue !== undefined) {\n attribute.default_value = config.defaultValue;\n }\n\n attributes.push(attribute);\n }\n } catch (error) {\n console.warn(`Warning: Could not extract attributes from class ${className}: ${(error as Error).message}`);\n }\n\n return attributes;\n}\n\nfunction extractRegionDefinitionsFromSource(sourceFile: SourceFile, className: string): Record<string, unknown>[] {\n const regionDefinitions: Record<string, unknown>[] = [];\n\n try {\n // Find the class declaration\n const classDeclaration = sourceFile.getClass(className);\n if (!classDeclaration) {\n return regionDefinitions;\n }\n\n // Check for class-level @RegionDefinition decorator\n const classRegionDecorator = classDeclaration.getDecorator('RegionDefinition');\n if (classRegionDecorator) {\n const args = classRegionDecorator.getArguments();\n if (args.length > 0) {\n const firstArg = args[0];\n\n // Handle array literal argument (most common case)\n if (Node.isArrayLiteralExpression(firstArg)) {\n const elements = firstArg.getElements();\n for (const element of elements) {\n if (Node.isObjectLiteralExpression(element)) {\n const regionConfig = parseDecoratorArgs({\n getArguments: () => [element],\n } as unknown as Decorator);\n\n const regionDefinition: Record<string, unknown> = {\n id: regionConfig.id || 'region',\n name: regionConfig.name || 'Region',\n };\n\n // Add optional properties if they exist in the decorator\n if (regionConfig.componentTypes) {\n 
regionDefinition.component_types = regionConfig.componentTypes;\n }\n\n if (Array.isArray(regionConfig.componentTypeInclusions)) {\n regionDefinition.component_type_inclusions = regionConfig.componentTypeInclusions.map(\n (incl) => ({\n type_id: incl,\n })\n );\n }\n\n if (Array.isArray(regionConfig.componentTypeExclusions)) {\n regionDefinition.component_type_exclusions = regionConfig.componentTypeExclusions.map(\n (excl) => ({\n type_id: excl,\n })\n );\n }\n\n if (regionConfig.maxComponents !== undefined) {\n regionDefinition.max_components = regionConfig.maxComponents;\n }\n\n if (regionConfig.minComponents !== undefined) {\n regionDefinition.min_components = regionConfig.minComponents;\n }\n\n if (regionConfig.allowMultiple !== undefined) {\n regionDefinition.allow_multiple = regionConfig.allowMultiple;\n }\n\n if (regionConfig.defaultComponentConstructors) {\n regionDefinition.default_component_constructors =\n regionConfig.defaultComponentConstructors;\n }\n\n regionDefinitions.push(regionDefinition);\n }\n }\n }\n }\n }\n } catch (error) {\n console.warn(\n `Warning: Could not extract region definitions from class ${className}: ${(error as Error).message}`\n );\n }\n\n return regionDefinitions;\n}\n\nasync function processComponentFile(filePath: string, _projectRoot: string): Promise<unknown[]> {\n try {\n const content = await readFile(filePath, 'utf-8');\n const components: unknown[] = [];\n\n // Check if file contains @Component decorator\n if (!content.includes('@Component')) {\n return components;\n }\n\n // Convert file path to module path (currently unused but may be needed in future)\n // const relativePath = relative(join(projectRoot, 'src'), filePath);\n // const modulePath = relativePath.replace(/\\.tsx?$/, '').replace(/\\\\/g, '/');\n\n try {\n // Create a ts-morph project and add the source file\n const project = new Project({\n useInMemoryFileSystem: true,\n skipAddingFilesFromTsConfig: true,\n });\n\n const sourceFile = 
project.createSourceFile(filePath, content);\n\n const classes = sourceFile.getClasses();\n\n for (const classDeclaration of classes) {\n const componentDecorator = classDeclaration.getDecorator('Component');\n if (!componentDecorator) {\n continue;\n }\n\n const className = classDeclaration.getName();\n if (!className) {\n continue;\n }\n\n const componentConfig = parseDecoratorArgs(componentDecorator);\n\n const attributes = extractAttributesFromSource(sourceFile, className);\n const regionDefinitions = extractRegionDefinitionsFromSource(sourceFile, className);\n\n const componentMetadata = {\n typeId: componentConfig.id || className.toLowerCase(),\n name: componentConfig.name || toHumanReadableName(className),\n group: componentConfig.group || DEFAULT_COMPONENT_GROUP,\n description: componentConfig.description || `Custom component: ${className}`,\n regionDefinitions,\n attributes,\n };\n\n components.push(componentMetadata);\n }\n } catch (error) {\n console.warn(`Warning: Could not process file ${filePath}:`, (error as Error).message);\n }\n\n return components;\n } catch (error) {\n console.warn(`Warning: Could not read file ${filePath}:`, (error as Error).message);\n return [];\n }\n}\n\nasync function processPageTypeFile(filePath: string, projectRoot: string): Promise<unknown[]> {\n try {\n const content = await readFile(filePath, 'utf-8');\n const pageTypes: unknown[] = [];\n\n // Check if file contains @PageType decorator\n if (!content.includes('@PageType')) {\n return pageTypes;\n }\n\n try {\n // Create a ts-morph project and add the source file\n const project = new Project({\n useInMemoryFileSystem: true,\n skipAddingFilesFromTsConfig: true,\n });\n\n const sourceFile = project.createSourceFile(filePath, content);\n\n const classes = sourceFile.getClasses();\n\n for (const classDeclaration of classes) {\n const pageTypeDecorator = classDeclaration.getDecorator('PageType');\n if (!pageTypeDecorator) {\n continue;\n }\n\n const className = 
classDeclaration.getName();\n if (!className) {\n continue;\n }\n\n const pageTypeConfig = parseDecoratorArgs(pageTypeDecorator);\n\n const attributes = extractAttributesFromSource(sourceFile, className);\n const regionDefinitions = extractRegionDefinitionsFromSource(sourceFile, className);\n const route = filePathToRoute(filePath, projectRoot);\n\n const pageTypeMetadata = {\n typeId: pageTypeConfig.id || className.toLowerCase(),\n name: pageTypeConfig.name || toHumanReadableName(className),\n description: pageTypeConfig.description || `Custom page type: ${className}`,\n regionDefinitions,\n supportedAspectTypes: pageTypeConfig.supportedAspectTypes || [],\n attributes,\n route,\n };\n\n pageTypes.push(pageTypeMetadata);\n }\n } catch (error) {\n console.warn(`Warning: Could not process file ${filePath}:`, (error as Error).message);\n }\n\n return pageTypes;\n } catch (error) {\n console.warn(`Warning: Could not read file ${filePath}:`, (error as Error).message);\n return [];\n }\n}\n\nasync function processAspectFile(filePath: string, _projectRoot: string): Promise<unknown[]> {\n try {\n const content = await readFile(filePath, 'utf-8');\n const aspects: unknown[] = [];\n\n // Check if file is a JSON aspect file\n if (!filePath.endsWith('.json') || !content.trim().startsWith('{')) {\n return aspects;\n }\n\n // Check if file is in the aspects directory\n if (!filePath.includes('/aspects/') && !filePath.includes('\\\\aspects\\\\')) {\n return aspects;\n }\n\n try {\n // Parse the JSON content\n const aspectData = JSON.parse(content);\n\n // Extract filename without extension as the aspect ID\n const fileName = basename(filePath, '.json');\n\n // Validate that it looks like an aspect file\n if (!aspectData.name || !aspectData.attribute_definitions) {\n return aspects;\n }\n\n const aspectMetadata = {\n id: fileName,\n name: aspectData.name,\n description: aspectData.description || `Aspect type: ${aspectData.name}`,\n attributeDefinitions: 
aspectData.attribute_definitions || [],\n supportedObjectTypes: aspectData.supported_object_types || [],\n };\n\n aspects.push(aspectMetadata);\n } catch (parseError) {\n console.warn(`Warning: Could not parse JSON in file ${filePath}:`, (parseError as Error).message);\n }\n\n return aspects;\n } catch (error) {\n console.warn(`Warning: Could not read file ${filePath}:`, (error as Error).message);\n return [];\n }\n}\n\nasync function generateComponentCartridge(\n component: Record<string, unknown>,\n outputDir: string,\n dryRun = false\n): Promise<void> {\n const fileName = toCamelCaseFileName(component.typeId as string);\n const groupDir = join(outputDir, component.group as string);\n const outputPath = join(groupDir, `${fileName}.json`);\n\n if (!dryRun) {\n // Ensure the group directory exists\n try {\n await mkdir(groupDir, { recursive: true });\n } catch {\n // Directory might already exist, which is fine\n }\n\n const attributeDefinitionGroups = [\n {\n id: component.typeId,\n name: component.name,\n description: component.description,\n attribute_definitions: component.attributes,\n },\n ];\n\n const cartridgeData = {\n name: component.name,\n description: component.description,\n group: component.group,\n arch_type: ARCH_TYPE_HEADLESS,\n region_definitions: component.regionDefinitions || [],\n attribute_definition_groups: attributeDefinitionGroups,\n };\n\n await writeFile(outputPath, JSON.stringify(cartridgeData, null, 2));\n }\n\n const prefix = dryRun ? 
' - [DRY RUN]' : ' -';\n console.log(\n `${prefix} ${String(component.typeId)}: ${String(component.name)} (${String((component.attributes as unknown[]).length)} attributes) → ${fileName}.json`\n );\n}\n\nasync function generatePageTypeCartridge(\n pageType: Record<string, unknown>,\n outputDir: string,\n dryRun = false\n): Promise<void> {\n const fileName = toCamelCaseFileName(pageType.name as string);\n const outputPath = join(outputDir, `${fileName}.json`);\n\n if (!dryRun) {\n const cartridgeData: Record<string, unknown> = {\n name: pageType.name,\n description: pageType.description,\n arch_type: ARCH_TYPE_HEADLESS,\n region_definitions: pageType.regionDefinitions || [],\n };\n\n // Add attribute_definition_groups if there are attributes\n if (pageType.attributes && (pageType.attributes as unknown[]).length > 0) {\n const attributeDefinitionGroups = [\n {\n id: pageType.typeId || fileName,\n name: pageType.name,\n description: pageType.description,\n attribute_definitions: pageType.attributes,\n },\n ];\n cartridgeData.attribute_definition_groups = attributeDefinitionGroups;\n }\n\n // Add supported_aspect_types if specified\n if (pageType.supportedAspectTypes) {\n cartridgeData.supported_aspect_types = pageType.supportedAspectTypes;\n }\n\n if (pageType.route) {\n cartridgeData.route = pageType.route;\n }\n\n await writeFile(outputPath, JSON.stringify(cartridgeData, null, 2));\n }\n\n const prefix = dryRun ? 
' - [DRY RUN]' : ' -';\n console.log(\n `${prefix} ${String(pageType.name)}: ${String(pageType.description)} (${String((pageType.attributes as unknown[]).length)} attributes) → ${fileName}.json`\n );\n}\n\nasync function generateAspectCartridge(\n aspect: Record<string, unknown>,\n outputDir: string,\n dryRun = false\n): Promise<void> {\n const fileName = toCamelCaseFileName(aspect.id as string);\n const outputPath = join(outputDir, `${fileName}.json`);\n\n if (!dryRun) {\n const cartridgeData: Record<string, unknown> = {\n name: aspect.name,\n description: aspect.description,\n arch_type: ARCH_TYPE_HEADLESS,\n attribute_definitions: aspect.attributeDefinitions || [],\n };\n\n // Add supported_object_types if specified\n if (aspect.supportedObjectTypes) {\n cartridgeData.supported_object_types = aspect.supportedObjectTypes;\n }\n\n await writeFile(outputPath, JSON.stringify(cartridgeData, null, 2));\n }\n\n const prefix = dryRun ? ' - [DRY RUN]' : ' -';\n console.log(\n `${prefix} ${String(aspect.name)}: ${String(aspect.description)} (${String((aspect.attributeDefinitions as unknown[]).length)} attributes) → ${fileName}.json`\n );\n}\n\n/**\n * Options for generateMetadata function\n */\nexport interface GenerateMetadataOptions {\n /**\n * Optional array of specific file paths to process.\n * If provided, only these files will be processed and existing cartridge files will NOT be deleted.\n * If omitted, the entire src/ directory will be scanned and all existing cartridge files will be deleted first.\n */\n filePaths?: string[];\n\n /**\n * Whether to run ESLint with --fix on generated JSON files to format them according to project settings.\n * Defaults to true.\n */\n lintFix?: boolean;\n\n /**\n * If true, scans files and reports what would be generated without actually writing any files or deleting directories.\n * Defaults to false.\n */\n dryRun?: boolean;\n}\n\n/**\n * Result returned by generateMetadata function\n */\nexport interface GenerateMetadataResult 
{\n componentsGenerated: number;\n pageTypesGenerated: number;\n aspectsGenerated: number;\n totalFiles: number;\n}\n\n/**\n * Runs ESLint with --fix on the specified directory to format JSON files.\n * This ensures generated JSON files match the project's Prettier/ESLint configuration.\n */\nfunction lintGeneratedFiles(metadataDir: string, projectRoot: string): void {\n try {\n console.log('🔧 Running ESLint --fix on generated JSON files...');\n\n // Run ESLint from the project root directory so it picks up the correct config\n // Use --no-error-on-unmatched-pattern to handle cases where no JSON files exist yet\n const command = `npx eslint \"${metadataDir}/**/*.json\" --fix --no-error-on-unmatched-pattern`;\n\n execSync(command, {\n cwd: projectRoot,\n stdio: 'pipe', // Suppress output unless there's an error\n encoding: 'utf-8',\n });\n\n console.log('✅ JSON files formatted successfully');\n } catch (error) {\n // ESLint returns non-zero exit code even when --fix resolves all issues\n // We only warn if there are actual unfixable issues\n const execError = error as { status?: number; stderr?: string; stdout?: string };\n\n // Exit code 1 usually means there were linting issues (some may have been fixed)\n // Exit code 2 means configuration error or other fatal error\n if (execError.status === 2) {\n const errMsg = execError.stderr || execError.stdout || 'Unknown error';\n console.warn(`⚠️ Warning: Could not run ESLint --fix: ${errMsg}`);\n } else if (execError.stderr && execError.stderr.includes('error')) {\n console.warn(`⚠️ Warning: Some linting issues could not be auto-fixed. 
Run ESLint manually to review.`);\n } else {\n // Exit code 1 with no errors in stderr usually means all issues were fixed\n console.log('✅ JSON files formatted successfully');\n }\n }\n}\n\n// Main function\nexport async function generateMetadata(\n projectDirectory: string,\n metadataDirectory: string,\n options?: GenerateMetadataOptions\n): Promise<GenerateMetadataResult> {\n try {\n const filePaths = options?.filePaths;\n const isIncrementalMode = filePaths && filePaths.length > 0;\n const dryRun = options?.dryRun || false;\n\n if (dryRun) {\n console.log('🔍 [DRY RUN] Scanning for decorated components and page types...');\n } else if (isIncrementalMode) {\n console.log(`🔍 Generating metadata for ${filePaths.length} specified file(s)...`);\n } else {\n console.log('🔍 Generating metadata for decorated components and page types...');\n }\n\n const projectRoot = resolve(projectDirectory);\n const srcDir = join(projectRoot, 'src');\n const metadataDir = resolve(metadataDirectory);\n const componentsOutputDir = join(metadataDir, 'components');\n const pagesOutputDir = join(metadataDir, 'pages');\n const aspectsOutputDir = join(metadataDir, 'aspects');\n\n // Skip directory operations in dry run mode\n if (!dryRun) {\n // Only delete existing directories in full scan mode (not incremental)\n if (!isIncrementalMode) {\n console.log('🗑️ Cleaning existing output directories...');\n for (const outputDir of [componentsOutputDir, pagesOutputDir, aspectsOutputDir]) {\n try {\n await rm(outputDir, { recursive: true, force: true });\n console.log(` - Deleted: ${outputDir}`);\n } catch {\n // Directory might not exist, which is fine\n console.log(` - Directory not found (skipping): ${outputDir}`);\n }\n }\n } else {\n console.log('📝 Incremental mode: existing cartridge files will be preserved/overwritten');\n }\n\n // Create output directories if they don't exist\n console.log('📁 Creating output directories...');\n for (const outputDir of [componentsOutputDir, pagesOutputDir, 
aspectsOutputDir]) {\n try {\n await mkdir(outputDir, { recursive: true });\n } catch (error) {\n try {\n await access(outputDir);\n // Directory exists, that's fine\n } catch {\n console.error(\n `❌ Error: Failed to create output directory ${outputDir}: ${(error as Error).message}`\n );\n process.exit(1);\n }\n }\n }\n } else if (isIncrementalMode) {\n console.log(`📝 [DRY RUN] Would process ${filePaths.length} specific file(s)`);\n } else {\n console.log('📝 [DRY RUN] Would clean and regenerate all metadata files');\n }\n\n let files: string[] = [];\n\n if (isIncrementalMode && filePaths) {\n // Use the specified file paths (resolve them relative to project root)\n files = filePaths.map((fp) => resolve(projectRoot, fp));\n console.log(`📂 Processing ${files.length} specified file(s)...`);\n } else {\n // Full scan mode: scan entire src directory\n const scanDirectory = async (dir: string): Promise<void> => {\n const entries = await readdir(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = join(dir, entry.name);\n\n if (entry.isDirectory()) {\n if (!SKIP_DIRECTORIES.includes(entry.name)) {\n await scanDirectory(fullPath);\n }\n } else if (\n entry.isFile() &&\n (extname(entry.name) === '.ts' ||\n extname(entry.name) === '.tsx' ||\n extname(entry.name) === '.json')\n ) {\n files.push(fullPath);\n }\n }\n };\n\n await scanDirectory(srcDir);\n }\n\n // Process each file for both components and page types\n const allComponents: unknown[] = [];\n const allPageTypes: unknown[] = [];\n const allAspects: unknown[] = [];\n\n for (const file of files) {\n const components = await processComponentFile(file, projectRoot);\n allComponents.push(...components);\n\n const pageTypes = await processPageTypeFile(file, projectRoot);\n allPageTypes.push(...pageTypes);\n\n const aspects = await processAspectFile(file, projectRoot);\n allAspects.push(...aspects);\n }\n\n if (allComponents.length === 0 && allPageTypes.length === 0 && allAspects.length === 
0) {\n console.log('⚠️ No decorated components, page types, or aspect files found.');\n return {\n componentsGenerated: 0,\n pageTypesGenerated: 0,\n aspectsGenerated: 0,\n totalFiles: 0,\n };\n }\n\n // Generate component cartridge files\n if (allComponents.length > 0) {\n console.log(`✅ Found ${allComponents.length} decorated component(s):`);\n for (const component of allComponents) {\n await generateComponentCartridge(component as Record<string, unknown>, componentsOutputDir, dryRun);\n }\n if (dryRun) {\n console.log(\n `📄 [DRY RUN] Would generate ${allComponents.length} component metadata file(s) in: ${componentsOutputDir}`\n );\n } else {\n console.log(\n `📄 Generated ${allComponents.length} component metadata file(s) in: ${componentsOutputDir}`\n );\n }\n }\n\n // Generate page type cartridge files\n if (allPageTypes.length > 0) {\n console.log(`✅ Found ${allPageTypes.length} decorated page type(s):`);\n for (const pageType of allPageTypes) {\n await generatePageTypeCartridge(pageType as Record<string, unknown>, pagesOutputDir, dryRun);\n }\n if (dryRun) {\n console.log(\n `📄 [DRY RUN] Would generate ${allPageTypes.length} page type metadata file(s) in: ${pagesOutputDir}`\n );\n } else {\n console.log(`📄 Generated ${allPageTypes.length} page type metadata file(s) in: ${pagesOutputDir}`);\n }\n }\n\n if (allAspects.length > 0) {\n console.log(`✅ Found ${allAspects.length} decorated aspect(s):`);\n for (const aspect of allAspects) {\n await generateAspectCartridge(aspect as Record<string, unknown>, aspectsOutputDir, dryRun);\n }\n if (dryRun) {\n console.log(\n `📄 [DRY RUN] Would generate ${allAspects.length} aspect metadata file(s) in: ${aspectsOutputDir}`\n );\n } else {\n console.log(`📄 Generated ${allAspects.length} aspect metadata file(s) in: ${aspectsOutputDir}`);\n }\n }\n\n // Run ESLint --fix to format generated JSON files according to project settings\n const shouldLintFix = options?.lintFix !== false; // Default to true\n if (\n !dryRun &&\n 
shouldLintFix &&\n (allComponents.length > 0 || allPageTypes.length > 0 || allAspects.length > 0)\n ) {\n lintGeneratedFiles(metadataDir, projectRoot);\n }\n\n // Return statistics\n return {\n componentsGenerated: allComponents.length,\n pageTypesGenerated: allPageTypes.length,\n aspectsGenerated: allAspects.length,\n totalFiles: allComponents.length + allPageTypes.length + allAspects.length,\n };\n } catch (error) {\n console.error('❌ Error:', (error as Error).message);\n process.exit(1);\n }\n}\n"],"mappings":";;;;;;;;;;AAuBA,IAAIA,iBAAiC;AAErC,SAAS,oBAAoB,kBAAmC;AAC5D,KAAI,mBAAmB,KACnB,QAAO;AAGX,KAAI;AACA,WAAS,0BAA0B;GAC/B,KAAK;GACL,KAAK,eAAe;GACpB,OAAO;GACV,CAAC;AACF,mBAAiB;SACb;AACJ,mBAAiB;;AAErB,QAAO;;;;;;;;;;;;AAaX,SAAS,qBAAqB,kBAA8C;AACxE,KAAI,CAAC,oBAAoB,iBAAiB,CACtC,OAAM,IAAI,MACN,qGACH;CAIL,MAAM,WAAW,KAAK,QAAQ,EAAE,uBAAuB,YAAY,CAAC,OAAO;AAE3E,KAAI;AAEA,WAAS,iCAAiC,SAAS,IAAI;GACnD,KAAK;GACL,KAAK,eAAe;GACpB,UAAU;GACV,OAAO;IAAC;IAAQ;IAAQ;IAAO;GAClC,CAAC;EACF,MAAM,SAAS,aAAa,UAAU,QAAQ;AAC9C,SAAO,KAAK,MAAM,OAAO;UACpB,OAAO;AACZ,QAAM,IAAI,MAAM,+CAAgD,MAAgB,UAAU;WACpF;AAEN,MAAI;AACA,OAAI,WAAW,SAAS,CACpB,YAAW,SAAS;UAEpB;;;;;;;;;;;;;AAgBhB,SAAgB,gBAAgB,UAAkB,aAA6B;CAE3E,MAAM,gBAAgB,SAAS,QAAQ,OAAO,IAAI;CAIlD,MAAM,aAAa,cADJ,qBAAqB,YAAY,CACR;AAGxC,MAAK,MAAM,SAAS,YAAY;EAE5B,MAAM,iBAAiB,MAAM,KAAK,QAAQ,OAAO,IAAI;AAGrD,MAAI,cAAc,SAAS,eAAe,IAAI,cAAc,SAAS,IAAI,iBAAiB,CACtF,QAAO,MAAM;EAIjB,MAAM,sBAAsB,eAAe,QAAQ,SAAS,GAAG;AAC/D,MAAI,cAAc,SAAS,oBAAoB,IAAI,cAAc,SAAS,IAAI,sBAAsB,CAChG,QAAO,MAAM;;AAKrB,SAAQ,KAAK,2CAA2C,WAAW;AACnE,QAAO;;;;;;;;;AAUX,SAAS,cACL,QACA,aAAa,IACqD;CAClE,MAAMC,SAA6E,EAAE;AAErF,MAAK,MAAM,SAAS,QAAQ;EAExB,IAAIC;AACJ,MAAI,MAAM,MACN,YAAW,cAAc;WAClB,MAAM,MAAM;GAEnB,MAAM,cAAc,MAAM,KAAK,WAAW,IAAI,GAAG,MAAM,OAAO,IAAI,MAAM;AACxE,cAAW,aAAa,GAAG,aAAa,cAAc,QAAQ,QAAQ,IAAI,GAAG;QAG7E,YAAW,cAAc;AAI7B,MAAI,MAAM,GACN,QAAO,KAAK;GACR,IAAI,MAAM;GACV,MAAM;GACN,MAAM,MAAM;GACZ,OAAO,MAAM;GAChB,CAAC;AAIN,MAAI,MAAM,YAAY,MAAM,SAAS,SAAS,GAAG;GAC7C,MAAM,YAAY,MAAM,OAAO,WAAW;AAC1C,UAAO,
KAAK,GAAG,cAAc,MAAM,UAAU,UAAU,CAAC;;;AAIhE,QAAO;;;;;ACjJX,MAAM,mBAAmB;CAAC;CAAS;CAAQ;CAAgB;CAAQ;CAAS;CAAW;AAEvF,MAAM,0BAA0B;AAChC,MAAM,qBAAqB;AAkB3B,MAAMC,wBAAkD;CACpD;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACH;AAID,MAAMC,eAAuC;CACzC,QAAQ;CACR,QAAQ;CACR,QAAQ;CACR,QAAQ;CACR,SAAS;CACT,SAAS;CACT,MAAM;CACN,KAAK;CACL,WAAW;CACd;AAGD,SAAS,qBAAqB,eAAwB,aAAsB,WAA4B;AAEpG,KAAI,eAAe;AACf,MAAI,CAAC,sBAAsB,SAAS,cAA+B,EAAE;AACjE,WAAQ,MACJ,kCAAkC,cAAc,eAAe,aAAa,UAAU,sBAAsB,sBAAsB,KAAK,KAAK,GAC/I;AACD,WAAQ,KAAK,EAAE;;AAEnB,SAAO;;AAIX,KAAI,eAAe,aAAa,aAC5B,QAAO,aAAa;AAIxB,QAAO;;AAIX,SAAS,oBAAoB,WAA2B;AACpD,QAAO,UACF,QAAQ,YAAY,MAAM,CAC1B,QAAQ,OAAO,QAAQ,IAAI,aAAa,CAAC,CACzC,MAAM;;AAIf,SAAS,oBAAoB,MAAsB;AAE/C,KAAI,CAAC,QAAQ,KAAK,KAAK,CACnB,QAAO;AAGX,QAAO,KACF,MAAM,SAAS,CACf,KAAK,MAAM,UAAU;AAClB,MAAI,UAAU,EACV,QAAO,KAAK,aAAa;AAE7B,SAAO,KAAK,OAAO,EAAE,CAAC,aAAa,GAAG,KAAK,MAAM,EAAE,CAAC,aAAa;GACnE,CACD,KAAK,GAAG;;AAGjB,SAAS,mBAAmB,UAA+B,aAAiC;AACxF,KAAI;EACA,MAAM,WAAW,SAAS,aAAa;AACvC,MAAI,SAIA,QAHiB,SAAS,SAAS,CAET,MAAM,IAAI,CAAC,GAAG,MAAM,IAAI,CAAC,GAAG,MAAM;SAG5D;AAIR,QAAO;;AAKX,SAAS,gBAAgB,YAA0B;AAC/C,KAAI,KAAK,gBAAgB,WAAW,CAChC,QAAO,WAAW,iBAAiB;UAC5B,KAAK,iBAAiB,WAAW,CACxC,QAAO,WAAW,iBAAiB;UAC5B,KAAK,cAAc,WAAW,CACrC,QAAO;UACA,KAAK,eAAe,WAAW,CACtC,QAAO;UACA,KAAK,0BAA0B,WAAW,CACjD,QAAO,kBAAkB,WAAW;UAC7B,KAAK,yBAAyB,WAAW,CAChD,QAAO,kBAAkB,WAAW;KAEpC,QAAO,WAAW,SAAS;;AAMnC,SAAS,kBAAkB,eAA6C;CACpE,MAAMC,SAAkC,EAAE;AAE1C,KAAI;EACA,MAAM,aAAa,cAAc,eAAe;AAEhD,OAAK,MAAM,YAAY,WACnB,KAAI,KAAK,qBAAqB,SAAS,EAAE;GACrC,MAAM,OAAO,SAAS,SAAS;GAC/B,MAAM,cAAc,SAAS,gBAAgB;AAE7C,OAAI,YACA,QAAO,QAAQ,gBAAgB,YAAY;;UAIlD,OAAO;AACZ,UAAQ,KAAK,2CAA4C,MAAgB,UAAU;AACnF,SAAO;;AAGX,QAAO;;AAKX,SAAS,kBAAkB,cAA8B;CACrD,MAAMC,SAAoB,EAAE;AAE5B,KAAI;EACA,MAAM,WAAW,aAAa,aAAa;AAE3C,OAAK,MAAM,WAAW,SAClB,QAAO,KAAK,gBAAgB,QAAQ,CAAC;UAEpC,OAAO;AACZ,UAAQ,KAAK,2CAA4C,MAAgB,UAAU;;AAGvF,QAAO;;AAIX,SAAS,mBAAmB,WAA+C;CACvE,MAAMD,SAAkC,EAAE;AAE1C,KAAI;EACA,MAAM,OAAO,UAAU,cAAc;AAErC,MAAI,KAAK,WAAW,EAChB,QAAO;EAIX,
MAAM,WAAW,KAAK;AAEtB,MAAI,KAAK,0BAA0B,SAAS,EAAE;GAE1C,MAAM,aAAa,SAAS,eAAe;AAE3C,QAAK,MAAM,YAAY,WACnB,KAAI,KAAK,qBAAqB,SAAS,EAAE;IACrC,MAAM,OAAO,SAAS,SAAS;IAC/B,MAAM,cAAc,SAAS,gBAAgB;AAE7C,QAAI,YACA,QAAO,QAAQ,gBAAgB,YAAY;;aAIhD,KAAK,gBAAgB,SAAS,EAAE;AAEvC,UAAO,KAAK,gBAAgB,SAAS;AAGrC,OAAI,KAAK,SAAS,GAAG;IACjB,MAAM,YAAY,KAAK;AACvB,QAAI,KAAK,0BAA0B,UAAU,EAAE;KAC3C,MAAM,aAAa,UAAU,eAAe;AAE5C,UAAK,MAAM,YAAY,WACnB,KAAI,KAAK,qBAAqB,SAAS,EAAE;MACrC,MAAM,OAAO,SAAS,SAAS;MAC/B,MAAM,cAAc,SAAS,gBAAgB;AAE7C,UAAI,YACA,QAAO,QAAQ,gBAAgB,YAAY;;;;;AAQnE,SAAO;UACF,OAAO;AACZ,UAAQ,KAAK,iDAAkD,MAAgB,UAAU;AACzF,SAAO;;;AAIf,SAAS,4BAA4B,YAAwB,WAA8C;CACvG,MAAME,aAAwC,EAAE;AAEhD,KAAI;EAEA,MAAM,mBAAmB,WAAW,SAAS,UAAU;AACvD,MAAI,CAAC,iBACD,QAAO;EAIX,MAAM,aAAa,iBAAiB,eAAe;AAEnD,OAAK,MAAM,YAAY,YAAY;GAE/B,MAAM,qBAAqB,SAAS,aAAa,sBAAsB;AACvE,OAAI,CAAC,mBACD;GAGJ,MAAM,YAAY,SAAS,SAAS;GACpC,MAAM,SAAS,mBAAmB,mBAAmB;GAErD,MAAM,aAAa,CAAC,SAAS,kBAAkB;GAE/C,MAAM,eAAgB,OAAO,QAAmB,mBAAmB,UAAU,WAAW;GAExF,MAAMC,YAAqC;IACvC,IAAI,OAAO,MAAM;IACjB,MAAM,OAAO,QAAQ,oBAAoB,UAAU;IACnD,MAAM,qBAAqB,OAAO,MAAgB,cAAc,UAAU;IAC1E,UAAU,OAAO,aAAa,SAAY,OAAO,WAAW;IAC5D,aAAa,OAAO,eAAe,UAAU;IAChD;AAED,OAAI,OAAO,OACP,WAAU,SAAS,OAAO;AAG9B,OAAI,OAAO,iBAAiB,OACxB,WAAU,gBAAgB,OAAO;AAGrC,cAAW,KAAK,UAAU;;UAEzB,OAAO;AACZ,UAAQ,KAAK,oDAAoD,UAAU,IAAK,MAAgB,UAAU;;AAG9G,QAAO;;AAGX,SAAS,mCAAmC,YAAwB,WAA8C;CAC9G,MAAMC,oBAA+C,EAAE;AAEvD,KAAI;EAEA,MAAM,mBAAmB,WAAW,SAAS,UAAU;AACvD,MAAI,CAAC,iBACD,QAAO;EAIX,MAAM,uBAAuB,iBAAiB,aAAa,mBAAmB;AAC9E,MAAI,sBAAsB;GACtB,MAAM,OAAO,qBAAqB,cAAc;AAChD,OAAI,KAAK,SAAS,GAAG;IACjB,MAAM,WAAW,KAAK;AAGtB,QAAI,KAAK,yBAAyB,SAAS,EAAE;KACzC,MAAM,WAAW,SAAS,aAAa;AACvC,UAAK,MAAM,WAAW,SAClB,KAAI,KAAK,0BAA0B,QAAQ,EAAE;MACzC,MAAM,eAAe,mBAAmB,EACpC,oBAAoB,CAAC,QAAQ,EAChC,CAAyB;MAE1B,MAAMC,mBAA4C;OAC9C,IAAI,aAAa,MAAM;OACvB,MAAM,aAAa,QAAQ;OAC9B;AAGD,UAAI,aAAa,eACb,kBAAiB,kBAAkB,aAAa;AAGpD,UAAI,MAAM,QAAQ,aAAa,wBAAwB,CACnD,kBAAiB,4BAA4B,aAAa,wBAAwB,KAC7E,UAAU,EACP,SAAS,MACZ,EACJ;AAGL,UAAI,MAAM,QAAQ,aAAa,wBAAwB,CACnD,kBAAiB,4BAA4B,aAAa,wBAA
wB,KAC7E,UAAU,EACP,SAAS,MACZ,EACJ;AAGL,UAAI,aAAa,kBAAkB,OAC/B,kBAAiB,iBAAiB,aAAa;AAGnD,UAAI,aAAa,kBAAkB,OAC/B,kBAAiB,iBAAiB,aAAa;AAGnD,UAAI,aAAa,kBAAkB,OAC/B,kBAAiB,iBAAiB,aAAa;AAGnD,UAAI,aAAa,6BACb,kBAAiB,iCACb,aAAa;AAGrB,wBAAkB,KAAK,iBAAiB;;;;;UAMvD,OAAO;AACZ,UAAQ,KACJ,4DAA4D,UAAU,IAAK,MAAgB,UAC9F;;AAGL,QAAO;;AAGX,eAAe,qBAAqB,UAAkB,cAA0C;AAC5F,KAAI;EACA,MAAM,UAAU,MAAM,SAAS,UAAU,QAAQ;EACjD,MAAMC,aAAwB,EAAE;AAGhC,MAAI,CAAC,QAAQ,SAAS,aAAa,CAC/B,QAAO;AAOX,MAAI;GAOA,MAAM,aALU,IAAI,QAAQ;IACxB,uBAAuB;IACvB,6BAA6B;IAChC,CAAC,CAEyB,iBAAiB,UAAU,QAAQ;GAE9D,MAAM,UAAU,WAAW,YAAY;AAEvC,QAAK,MAAM,oBAAoB,SAAS;IACpC,MAAM,qBAAqB,iBAAiB,aAAa,YAAY;AACrE,QAAI,CAAC,mBACD;IAGJ,MAAM,YAAY,iBAAiB,SAAS;AAC5C,QAAI,CAAC,UACD;IAGJ,MAAM,kBAAkB,mBAAmB,mBAAmB;IAE9D,MAAM,aAAa,4BAA4B,YAAY,UAAU;IACrE,MAAM,oBAAoB,mCAAmC,YAAY,UAAU;IAEnF,MAAM,oBAAoB;KACtB,QAAQ,gBAAgB,MAAM,UAAU,aAAa;KACrD,MAAM,gBAAgB,QAAQ,oBAAoB,UAAU;KAC5D,OAAO,gBAAgB,SAAS;KAChC,aAAa,gBAAgB,eAAe,qBAAqB;KACjE;KACA;KACH;AAED,eAAW,KAAK,kBAAkB;;WAEjC,OAAO;AACZ,WAAQ,KAAK,mCAAmC,SAAS,IAAK,MAAgB,QAAQ;;AAG1F,SAAO;UACF,OAAO;AACZ,UAAQ,KAAK,gCAAgC,SAAS,IAAK,MAAgB,QAAQ;AACnF,SAAO,EAAE;;;AAIjB,eAAe,oBAAoB,UAAkB,aAAyC;AAC1F,KAAI;EACA,MAAM,UAAU,MAAM,SAAS,UAAU,QAAQ;EACjD,MAAMC,YAAuB,EAAE;AAG/B,MAAI,CAAC,QAAQ,SAAS,YAAY,CAC9B,QAAO;AAGX,MAAI;GAOA,MAAM,aALU,IAAI,QAAQ;IACxB,uBAAuB;IACvB,6BAA6B;IAChC,CAAC,CAEyB,iBAAiB,UAAU,QAAQ;GAE9D,MAAM,UAAU,WAAW,YAAY;AAEvC,QAAK,MAAM,oBAAoB,SAAS;IACpC,MAAM,oBAAoB,iBAAiB,aAAa,WAAW;AACnE,QAAI,CAAC,kBACD;IAGJ,MAAM,YAAY,iBAAiB,SAAS;AAC5C,QAAI,CAAC,UACD;IAGJ,MAAM,iBAAiB,mBAAmB,kBAAkB;IAE5D,MAAM,aAAa,4BAA4B,YAAY,UAAU;IACrE,MAAM,oBAAoB,mCAAmC,YAAY,UAAU;IACnF,MAAM,QAAQ,gBAAgB,UAAU,YAAY;IAEpD,MAAM,mBAAmB;KACrB,QAAQ,eAAe,MAAM,UAAU,aAAa;KACpD,MAAM,eAAe,QAAQ,oBAAoB,UAAU;KAC3D,aAAa,eAAe,eAAe,qBAAqB;KAChE;KACA,sBAAsB,eAAe,wBAAwB,EAAE;KAC/D;KACA;KACH;AAED,cAAU,KAAK,iBAAiB;;WAE/B,OAAO;AACZ,WAAQ,KAAK,mCAAmC,SAAS,IAAK,MAAgB,QAAQ;;AAG1F,SAAO;UACF,OAAO;AACZ,UAAQ,KAAK,gCAAgC,SAAS,IAAK,MAAgB,QAAQ;AACnF,SAAO,EAAE;;;AAIjB,eAAe,kBAAkB,UAAkB,cAA0C;A
ACzF,KAAI;EACA,MAAM,UAAU,MAAM,SAAS,UAAU,QAAQ;EACjD,MAAMC,UAAqB,EAAE;AAG7B,MAAI,CAAC,SAAS,SAAS,QAAQ,IAAI,CAAC,QAAQ,MAAM,CAAC,WAAW,IAAI,CAC9D,QAAO;AAIX,MAAI,CAAC,SAAS,SAAS,YAAY,IAAI,CAAC,SAAS,SAAS,cAAc,CACpE,QAAO;AAGX,MAAI;GAEA,MAAM,aAAa,KAAK,MAAM,QAAQ;GAGtC,MAAM,WAAW,SAAS,UAAU,QAAQ;AAG5C,OAAI,CAAC,WAAW,QAAQ,CAAC,WAAW,sBAChC,QAAO;GAGX,MAAM,iBAAiB;IACnB,IAAI;IACJ,MAAM,WAAW;IACjB,aAAa,WAAW,eAAe,gBAAgB,WAAW;IAClE,sBAAsB,WAAW,yBAAyB,EAAE;IAC5D,sBAAsB,WAAW,0BAA0B,EAAE;IAChE;AAED,WAAQ,KAAK,eAAe;WACvB,YAAY;AACjB,WAAQ,KAAK,yCAAyC,SAAS,IAAK,WAAqB,QAAQ;;AAGrG,SAAO;UACF,OAAO;AACZ,UAAQ,KAAK,gCAAgC,SAAS,IAAK,MAAgB,QAAQ;AACnF,SAAO,EAAE;;;AAIjB,eAAe,2BACX,WACA,WACA,SAAS,OACI;CACb,MAAM,WAAW,oBAAoB,UAAU,OAAiB;CAChE,MAAM,WAAW,KAAK,WAAW,UAAU,MAAgB;CAC3D,MAAM,aAAa,KAAK,UAAU,GAAG,SAAS,OAAO;AAErD,KAAI,CAAC,QAAQ;AAET,MAAI;AACA,SAAM,MAAM,UAAU,EAAE,WAAW,MAAM,CAAC;UACtC;EAIR,MAAM,4BAA4B,CAC9B;GACI,IAAI,UAAU;GACd,MAAM,UAAU;GAChB,aAAa,UAAU;GACvB,uBAAuB,UAAU;GACpC,CACJ;EAED,MAAM,gBAAgB;GAClB,MAAM,UAAU;GAChB,aAAa,UAAU;GACvB,OAAO,UAAU;GACjB,WAAW;GACX,oBAAoB,UAAU,qBAAqB,EAAE;GACrD,6BAA6B;GAChC;AAED,QAAM,UAAU,YAAY,KAAK,UAAU,eAAe,MAAM,EAAE,CAAC;;CAGvE,MAAM,SAAS,SAAS,mBAAmB;AAC3C,SAAQ,IACJ,GAAG,OAAO,GAAG,OAAO,UAAU,OAAO,CAAC,IAAI,OAAO,UAAU,KAAK,CAAC,IAAI,OAAQ,UAAU,WAAyB,OAAO,CAAC,iBAAiB,SAAS,OACrJ;;AAGL,eAAe,0BACX,UACA,WACA,SAAS,OACI;CACb,MAAM,WAAW,oBAAoB,SAAS,KAAe;CAC7D,MAAM,aAAa,KAAK,WAAW,GAAG,SAAS,OAAO;AAEtD,KAAI,CAAC,QAAQ;EACT,MAAMC,gBAAyC;GAC3C,MAAM,SAAS;GACf,aAAa,SAAS;GACtB,WAAW;GACX,oBAAoB,SAAS,qBAAqB,EAAE;GACvD;AAGD,MAAI,SAAS,cAAe,SAAS,WAAyB,SAAS,EASnE,eAAc,8BARoB,CAC9B;GACI,IAAI,SAAS,UAAU;GACvB,MAAM,SAAS;GACf,aAAa,SAAS;GACtB,uBAAuB,SAAS;GACnC,CACJ;AAKL,MAAI,SAAS,qBACT,eAAc,yBAAyB,SAAS;AAGpD,MAAI,SAAS,MACT,eAAc,QAAQ,SAAS;AAGnC,QAAM,UAAU,YAAY,KAAK,UAAU,eAAe,MAAM,EAAE,CAAC;;CAGvE,MAAM,SAAS,SAAS,mBAAmB;AAC3C,SAAQ,IACJ,GAAG,OAAO,GAAG,OAAO,SAAS,KAAK,CAAC,IAAI,OAAO,SAAS,YAAY,CAAC,IAAI,OAAQ,SAAS,WAAyB,OAAO,CAAC,iBAAiB,SAAS,OACvJ;;AAGL,eAAe,wBACX,QACA,WACA,SAAS,OACI;CACb,MAAM,WAAW,oBAAoB,OAAO,GAAa;CACzD
,MAAM,aAAa,KAAK,WAAW,GAAG,SAAS,OAAO;AAEtD,KAAI,CAAC,QAAQ;EACT,MAAMA,gBAAyC;GAC3C,MAAM,OAAO;GACb,aAAa,OAAO;GACpB,WAAW;GACX,uBAAuB,OAAO,wBAAwB,EAAE;GAC3D;AAGD,MAAI,OAAO,qBACP,eAAc,yBAAyB,OAAO;AAGlD,QAAM,UAAU,YAAY,KAAK,UAAU,eAAe,MAAM,EAAE,CAAC;;CAGvE,MAAM,SAAS,SAAS,mBAAmB;AAC3C,SAAQ,IACJ,GAAG,OAAO,GAAG,OAAO,OAAO,KAAK,CAAC,IAAI,OAAO,OAAO,YAAY,CAAC,IAAI,OAAQ,OAAO,qBAAmC,OAAO,CAAC,iBAAiB,SAAS,OAC3J;;;;;;AAyCL,SAAS,mBAAmB,aAAqB,aAA2B;AACxE,KAAI;AACA,UAAQ,IAAI,qDAAqD;AAMjE,WAFgB,eAAe,YAAY,oDAEzB;GACd,KAAK;GACL,OAAO;GACP,UAAU;GACb,CAAC;AAEF,UAAQ,IAAI,sCAAsC;UAC7C,OAAO;EAGZ,MAAM,YAAY;AAIlB,MAAI,UAAU,WAAW,GAAG;GACxB,MAAM,SAAS,UAAU,UAAU,UAAU,UAAU;AACvD,WAAQ,KAAK,4CAA4C,SAAS;aAC3D,UAAU,UAAU,UAAU,OAAO,SAAS,QAAQ,CAC7D,SAAQ,KAAK,2FAA2F;MAGxG,SAAQ,IAAI,sCAAsC;;;AAM9D,eAAsB,iBAClB,kBACA,mBACA,SAC+B;AAC/B,KAAI;EACA,MAAM,YAAY,SAAS;EAC3B,MAAM,oBAAoB,aAAa,UAAU,SAAS;EAC1D,MAAM,SAAS,SAAS,UAAU;AAElC,MAAI,OACA,SAAQ,IAAI,mEAAmE;WACxE,kBACP,SAAQ,IAAI,8BAA8B,UAAU,OAAO,uBAAuB;MAElF,SAAQ,IAAI,oEAAoE;EAGpF,MAAM,cAAc,QAAQ,iBAAiB;EAC7C,MAAM,SAAS,KAAK,aAAa,MAAM;EACvC,MAAM,cAAc,QAAQ,kBAAkB;EAC9C,MAAM,sBAAsB,KAAK,aAAa,aAAa;EAC3D,MAAM,iBAAiB,KAAK,aAAa,QAAQ;EACjD,MAAM,mBAAmB,KAAK,aAAa,UAAU;AAGrD,MAAI,CAAC,QAAQ;AAET,OAAI,CAAC,mBAAmB;AACpB,YAAQ,IAAI,+CAA+C;AAC3D,SAAK,MAAM,aAAa;KAAC;KAAqB;KAAgB;KAAiB,CAC3E,KAAI;AACA,WAAM,GAAG,WAAW;MAAE,WAAW;MAAM,OAAO;MAAM,CAAC;AACrD,aAAQ,IAAI,iBAAiB,YAAY;YACrC;AAEJ,aAAQ,IAAI,wCAAwC,YAAY;;SAIxE,SAAQ,IAAI,8EAA8E;AAI9F,WAAQ,IAAI,oCAAoC;AAChD,QAAK,MAAM,aAAa;IAAC;IAAqB;IAAgB;IAAiB,CAC3E,KAAI;AACA,UAAM,MAAM,WAAW,EAAE,WAAW,MAAM,CAAC;YACtC,OAAO;AACZ,QAAI;AACA,WAAM,OAAO,UAAU;YAEnB;AACJ,aAAQ,MACJ,8CAA8C,UAAU,IAAK,MAAgB,UAChF;AACD,aAAQ,KAAK,EAAE;;;aAIpB,kBACP,SAAQ,IAAI,8BAA8B,UAAU,OAAO,mBAAmB;MAE9E,SAAQ,IAAI,6DAA6D;EAG7E,IAAIC,QAAkB,EAAE;AAExB,MAAI,qBAAqB,WAAW;AAEhC,WAAQ,UAAU,KAAK,OAAO,QAAQ,aAAa,GAAG,CAAC;AACvD,WAAQ,IAAI,iBAAiB,MAAM,OAAO,uBAAuB;SAC9D;GAEH,MAAM,gBAAgB,OAAO,QAA+B;IACxD,MAAM,UAAU,MAAM,QAAQ,KAAK,EAAE,eAAe,MAAM,CAAC;AAE3D,SAAK,MAAM,SAAS,SAAS;KACzB,MAAM,WAAW,KAAK,KAAK,MAAM,
KAAK;AAEtC,SAAI,MAAM,aAAa,EACnB;UAAI,CAAC,iBAAiB,SAAS,MAAM,KAAK,CACtC,OAAM,cAAc,SAAS;gBAGjC,MAAM,QAAQ,KACb,QAAQ,MAAM,KAAK,KAAK,SACrB,QAAQ,MAAM,KAAK,KAAK,UACxB,QAAQ,MAAM,KAAK,KAAK,SAE5B,OAAM,KAAK,SAAS;;;AAKhC,SAAM,cAAc,OAAO;;EAI/B,MAAMC,gBAA2B,EAAE;EACnC,MAAMC,eAA0B,EAAE;EAClC,MAAMC,aAAwB,EAAE;AAEhC,OAAK,MAAM,QAAQ,OAAO;GACtB,MAAM,aAAa,MAAM,qBAAqB,MAAM,YAAY;AAChE,iBAAc,KAAK,GAAG,WAAW;GAEjC,MAAM,YAAY,MAAM,oBAAoB,MAAM,YAAY;AAC9D,gBAAa,KAAK,GAAG,UAAU;GAE/B,MAAM,UAAU,MAAM,kBAAkB,MAAM,YAAY;AAC1D,cAAW,KAAK,GAAG,QAAQ;;AAG/B,MAAI,cAAc,WAAW,KAAK,aAAa,WAAW,KAAK,WAAW,WAAW,GAAG;AACpF,WAAQ,IAAI,kEAAkE;AAC9E,UAAO;IACH,qBAAqB;IACrB,oBAAoB;IACpB,kBAAkB;IAClB,YAAY;IACf;;AAIL,MAAI,cAAc,SAAS,GAAG;AAC1B,WAAQ,IAAI,WAAW,cAAc,OAAO,0BAA0B;AACtE,QAAK,MAAM,aAAa,cACpB,OAAM,2BAA2B,WAAsC,qBAAqB,OAAO;AAEvG,OAAI,OACA,SAAQ,IACJ,+BAA+B,cAAc,OAAO,kCAAkC,sBACzF;OAED,SAAQ,IACJ,gBAAgB,cAAc,OAAO,kCAAkC,sBAC1E;;AAKT,MAAI,aAAa,SAAS,GAAG;AACzB,WAAQ,IAAI,WAAW,aAAa,OAAO,0BAA0B;AACrE,QAAK,MAAM,YAAY,aACnB,OAAM,0BAA0B,UAAqC,gBAAgB,OAAO;AAEhG,OAAI,OACA,SAAQ,IACJ,+BAA+B,aAAa,OAAO,kCAAkC,iBACxF;OAED,SAAQ,IAAI,gBAAgB,aAAa,OAAO,kCAAkC,iBAAiB;;AAI3G,MAAI,WAAW,SAAS,GAAG;AACvB,WAAQ,IAAI,WAAW,WAAW,OAAO,uBAAuB;AAChE,QAAK,MAAM,UAAU,WACjB,OAAM,wBAAwB,QAAmC,kBAAkB,OAAO;AAE9F,OAAI,OACA,SAAQ,IACJ,+BAA+B,WAAW,OAAO,+BAA+B,mBACnF;OAED,SAAQ,IAAI,gBAAgB,WAAW,OAAO,+BAA+B,mBAAmB;;EAKxG,MAAM,gBAAgB,SAAS,YAAY;AAC3C,MACI,CAAC,UACD,kBACC,cAAc,SAAS,KAAK,aAAa,SAAS,KAAK,WAAW,SAAS,GAE5E,oBAAmB,aAAa,YAAY;AAIhD,SAAO;GACH,qBAAqB,cAAc;GACnC,oBAAoB,aAAa;GACjC,kBAAkB,WAAW;GAC7B,YAAY,cAAc,SAAS,aAAa,SAAS,WAAW;GACvE;UACI,OAAO;AACZ,UAAQ,MAAM,YAAa,MAAgB,QAAQ;AACnD,UAAQ,KAAK,EAAE"}
1
+ {"version":3,"file":"index.js","names":["LEVEL_PRIORITY: Record<LogLevel, number>","overrideLevel: LogLevel | undefined","isCliAvailable: boolean | null","result: Array<{ id: string; path: string; file: string; index?: boolean }>","fullPath: string","VALID_ATTRIBUTE_TYPES: readonly AttributeType[]","TYPE_MAPPING: Record<string, string>","result: Record<string, unknown>","result: unknown[]","attributes: Record<string, unknown>[]","attribute: Record<string, unknown>","regionDefinitions: Record<string, unknown>[]","regionDefinition: Record<string, unknown>","components: unknown[]","pageTypes: unknown[]","aspects: unknown[]","cartridgeData: Record<string, unknown>","files: string[]","allComponents: unknown[]","allPageTypes: unknown[]","allAspects: unknown[]"],"sources":["../../src/utils/logger.ts","../../src/cartridge-services/react-router-config.ts","../../src/cartridge-services/generate-cartridge.ts"],"sourcesContent":["/**\n * Copyright 2026 Salesforce, Inc.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/* eslint-disable no-console */\nimport os from 'os';\nimport chalk from 'chalk';\nimport { createRequire } from 'module';\nimport type { ServerMode } from '../server/modes';\nimport pkg from '../../package.json' with { type: 'json' };\n\n/**\n * Get the local network IPv4 address\n */\nexport function getNetworkAddress(): string | undefined {\n const interfaces = os.networkInterfaces();\n for (const name of Object.keys(interfaces)) {\n const 
iface = interfaces[name];\n if (!iface) continue;\n for (const alias of iface) {\n if (alias.family === 'IPv4' && !alias.internal) {\n return alias.address;\n }\n }\n }\n return undefined;\n}\n\n/**\n * Get the version of a package from the project's package.json\n */\nexport function getPackageVersion(packageName: string, projectDir: string): string {\n try {\n const require = createRequire(import.meta.url);\n const pkgPath = require.resolve(`${packageName}/package.json`, { paths: [projectDir] });\n const pkgJson = require(pkgPath) as { version: string };\n return pkgJson.version;\n } catch {\n return 'unknown';\n }\n}\n\n/**\n * Centralized, level-gated logger for the SDK.\n *\n * Log level is controlled by `SFCC_LOG_LEVEL` env var (`error` | `warn` | `info` | `debug`).\n * Falls back to: `DEBUG` targeting sfnext -> `debug`, `NODE_ENV=production` -> `warn`, otherwise `info`.\n */\n\nexport type LogLevel = 'error' | 'warn' | 'info' | 'debug';\n\nconst LEVEL_PRIORITY: Record<LogLevel, number> = {\n error: 0,\n warn: 1,\n info: 2,\n debug: 3,\n};\n\nlet overrideLevel: LogLevel | undefined;\n\n/**\n * Returns true when the `DEBUG` env var targets sfnext or is a general enable flag.\n * Avoids accidentally enabling debug mode when DEBUG is set for unrelated libraries\n * (e.g. `DEBUG=express:*`).\n */\nfunction debugEnablesSfnext(): boolean {\n const raw = process.env.DEBUG?.trim();\n if (!raw) return false;\n const normalized = raw.toLowerCase();\n if (['1', 'true', 'yes', 'on'].includes(normalized)) return true;\n return raw.split(',').some((token) => {\n const value = token.trim();\n return value === '*' || value === 'sfnext' || value === 'sfnext:*';\n });\n}\n\nfunction resolveLevel(): LogLevel {\n if (overrideLevel) return overrideLevel;\n const envLevel = process.env.MRT_LOG_LEVEL ?? 
process.env.SFCC_LOG_LEVEL;\n if (envLevel && envLevel in LEVEL_PRIORITY) return envLevel as LogLevel;\n if (debugEnablesSfnext()) return 'debug';\n if (process.env.NODE_ENV === 'production') return 'warn';\n return 'info';\n}\n\nfunction shouldLog(level: LogLevel): boolean {\n return LEVEL_PRIORITY[level] <= LEVEL_PRIORITY[resolveLevel()];\n}\n\nexport const logger = {\n error(msg: string, ...args: unknown[]): void {\n if (!shouldLog('error')) return;\n console.error(chalk.red('[sfnext:error]'), msg, ...args);\n },\n warn(msg: string, ...args: unknown[]): void {\n if (!shouldLog('warn')) return;\n console.warn(chalk.yellow('[sfnext:warn]'), msg, ...args);\n },\n info(msg: string, ...args: unknown[]): void {\n if (!shouldLog('info')) return;\n console.log(chalk.cyan('[sfnext:info]'), msg, ...args);\n },\n debug(msg: string, ...args: unknown[]): void {\n if (!shouldLog('debug')) return;\n console.log(chalk.gray('[sfnext:debug]'), msg, ...args);\n },\n setLevel(level: LogLevel | undefined): void {\n overrideLevel = level;\n },\n getLevel(): LogLevel {\n return resolveLevel();\n },\n};\n\n/**\n * Print the server information banner with URLs and versions\n */\nexport function printServerInfo(mode: ServerMode, port: number, startTime: number, projectDir: string): void {\n const elapsed = Date.now() - startTime;\n const sfnextVersion = pkg.version;\n const reactVersion = getPackageVersion('react', projectDir);\n const reactRouterVersion = getPackageVersion('react-router', projectDir);\n const viteVersion = getPackageVersion('vite', projectDir);\n\n const modeLabel = mode === 'development' ? 
'Development Mode' : 'Preview Mode';\n\n console.log();\n console.log(` ${chalk.cyan.bold('⚡ SFCC Storefront Next')} ${chalk.dim(`v${sfnextVersion}`)}`);\n console.log(` ${chalk.green.bold(modeLabel)}`);\n console.log();\n const logLevel = resolveLevel();\n const logLevelColors: Record<LogLevel, (s: string) => string> = {\n error: chalk.red,\n warn: chalk.yellow,\n info: chalk.cyan,\n debug: chalk.gray,\n };\n\n console.log(\n ` ${chalk.dim('react')} ${chalk.green(`v${reactVersion}`)} ${chalk.dim('|')} ` +\n `${chalk.dim('react-router')} ${chalk.green(`v${reactRouterVersion}`)} ${chalk.dim('|')} ` +\n `${chalk.dim('vite')} ${chalk.green(`v${viteVersion}`)}`\n );\n console.log(\n ` ${chalk.dim('log level')} ${logLevelColors[logLevel](logLevel)} ${chalk.dim('|')} ` +\n `${chalk.green(`ready in ${elapsed}ms`)}`\n );\n console.log();\n}\n\n/**\n * Print server configuration details (proxy, static, etc.)\n */\nexport function printServerConfig(config: {\n mode: ServerMode;\n port: number;\n enableProxy?: boolean;\n enableStaticServing?: boolean;\n enableCompression?: boolean;\n proxyPath?: string;\n proxyHost?: string;\n shortCode?: string;\n organizationId?: string;\n clientId?: string;\n}): void {\n const {\n port,\n enableProxy,\n enableStaticServing,\n enableCompression,\n proxyPath,\n proxyHost,\n shortCode,\n organizationId,\n clientId,\n } = config;\n\n console.log(` ${chalk.bold('Environment Configuration:')}`);\n\n if (enableProxy && proxyPath && proxyHost && shortCode) {\n console.log(\n ` ${chalk.green('✓')} ${chalk.bold('Proxy:')} ${chalk.cyan(`localhost:${port}${proxyPath}`)} ${chalk.dim('→')} ${chalk.cyan(proxyHost)}`\n );\n console.log(` ${chalk.dim('Short Code: ')}${chalk.dim(shortCode)}`);\n if (organizationId) {\n console.log(` ${chalk.dim('Organization ID: ')}${chalk.dim(organizationId)}`);\n }\n if (clientId) {\n console.log(` ${chalk.dim('Client ID: ')}${chalk.dim(clientId)}`);\n }\n } else {\n console.log(` ${chalk.bold('Proxy: ')} 
${chalk.dim('disabled')}`);\n }\n\n if (enableStaticServing) {\n console.log(` ${chalk.bold('Static: ')} ${chalk.dim('enabled')}`);\n }\n\n if (enableCompression) {\n console.log(` ${chalk.bold('Compression: ')} ${chalk.dim('enabled')}`);\n }\n\n // URLs\n const localUrl = `http://localhost:${port}`;\n const showNetwork = process.env.SHOW_NETWORK === 'true';\n const networkAddress = showNetwork ? getNetworkAddress() : undefined;\n const networkUrl = networkAddress ? `http://${networkAddress}:${port}` : undefined;\n\n console.log();\n console.log(` ${chalk.bold('Local: ')} ${chalk.cyan(localUrl)}`);\n if (networkUrl) {\n console.log(` ${chalk.bold('Network:')} ${chalk.cyan(networkUrl)}`);\n }\n\n console.log();\n console.log(` ${chalk.dim('Press')} ${chalk.bold('Ctrl+C')} ${chalk.dim('to stop the server')}`);\n console.log();\n}\n\n/**\n * Print shutdown message\n */\nexport function printShutdownMessage(): void {\n console.log(`\\n ${chalk.yellow('⚡')} ${chalk.dim('Server shutting down...')}\\n`);\n}\n","/**\n * Copyright 2026 Salesforce, Inc.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { join } from 'node:path';\nimport { existsSync, readFileSync, unlinkSync } from 'node:fs';\nimport { execSync } from 'node:child_process';\nimport { tmpdir } from 'node:os';\nimport { randomUUID } from 'node:crypto';\nimport { npmRunPathEnv } from 'npm-run-path';\nimport type { RouteConfigEntry } from '@react-router/dev/routes';\nimport { logger } from 
'../logger';\n\nlet isCliAvailable: boolean | null = null;\n\nfunction checkReactRouterCli(projectDirectory: string): boolean {\n if (isCliAvailable !== null) {\n return isCliAvailable;\n }\n\n try {\n execSync('react-router --version', {\n cwd: projectDirectory,\n env: npmRunPathEnv(),\n stdio: 'pipe',\n });\n isCliAvailable = true;\n } catch {\n isCliAvailable = false;\n }\n return isCliAvailable;\n}\n\n/**\n * Get the fully resolved routes from React Router by invoking its CLI.\n * This ensures we get the exact same route resolution as React Router uses internally,\n * including all presets, file-system routes, and custom route configurations.\n * @param projectDirectory - The project root directory\n * @returns Array of resolved route config entries\n * @example\n * const routes = getReactRouterRoutes('/path/to/project');\n * // Returns the same structure as `react-router routes --json`\n */\nfunction getReactRouterRoutes(projectDirectory: string): RouteConfigEntry[] {\n if (!checkReactRouterCli(projectDirectory)) {\n throw new Error(\n 'React Router CLI is not available. 
Please make sure @react-router/dev is installed and accessible.'\n );\n }\n\n // Use a temp file to avoid Node.js buffer limits (8KB default)\n const tempFile = join(tmpdir(), `react-router-routes-${randomUUID()}.json`);\n\n try {\n // Redirect output to temp file to avoid buffer truncation\n execSync(`react-router routes --json > \"${tempFile}\"`, {\n cwd: projectDirectory,\n env: npmRunPathEnv(),\n encoding: 'utf-8',\n stdio: ['pipe', 'pipe', 'pipe'],\n });\n const output = readFileSync(tempFile, 'utf-8');\n return JSON.parse(output) as RouteConfigEntry[];\n } catch (error) {\n throw new Error(`Failed to get routes from React Router CLI: ${(error as Error).message}`);\n } finally {\n // Clean up temp file\n try {\n if (existsSync(tempFile)) {\n unlinkSync(tempFile);\n }\n } catch {\n // Ignore cleanup errors\n }\n }\n}\n\n/**\n * Convert a file path to its corresponding route path using React Router's CLI.\n * This ensures we get the exact same route resolution as React Router uses internally.\n * @param filePath - Absolute path to the route file\n * @param projectRoot - The project root directory\n * @returns The route path (e.g., '/cart', '/product/:productId')\n * @example\n * const route = filePathToRoute('/path/to/project/src/routes/_app.cart.tsx', '/path/to/project');\n * // Returns: '/cart'\n */\nexport function filePathToRoute(filePath: string, projectRoot: string): string {\n // Normalize paths to POSIX-style\n const filePathPosix = filePath.replace(/\\\\/g, '/');\n\n // Get all routes from React Router CLI\n const routes = getReactRouterRoutes(projectRoot);\n const flatRoutes = flattenRoutes(routes);\n\n // Find the route that matches this file\n for (const route of flatRoutes) {\n // Normalize the route file path for comparison\n const routeFilePosix = route.file.replace(/\\\\/g, '/');\n\n // Check if the file path ends with the route file (handles relative vs. 
absolute paths)\n if (filePathPosix.endsWith(routeFilePosix) || filePathPosix.endsWith(`/${routeFilePosix}`)) {\n return route.path;\n }\n\n // Also check without leading ./\n const routeFileNormalized = routeFilePosix.replace(/^\\.\\//, '');\n if (filePathPosix.endsWith(routeFileNormalized) || filePathPosix.endsWith(`/${routeFileNormalized}`)) {\n return route.path;\n }\n }\n\n // Fallback: if no match found, return a warning path\n logger.warn(`Could not find route for file: ${filePath}`);\n return '/unknown';\n}\n\n/**\n * Flatten a nested route tree into a flat array with computed paths.\n * Each route will have its full path computed from parent paths.\n * @param routes - The nested route config entries\n * @param parentPath - The parent path prefix (used internally for recursion)\n * @returns Flat array of routes with their full paths\n */\nfunction flattenRoutes(\n routes: RouteConfigEntry[],\n parentPath = ''\n): Array<{ id: string; path: string; file: string; index?: boolean }> {\n const result: Array<{ id: string; path: string; file: string; index?: boolean }> = [];\n\n for (const route of routes) {\n // Compute the full path\n let fullPath: string;\n if (route.index) {\n fullPath = parentPath || '/';\n } else if (route.path) {\n // Handle paths that already start with / (absolute paths from extensions)\n const pathSegment = route.path.startsWith('/') ? route.path : `/${route.path}`;\n fullPath = parentPath ? `${parentPath}${pathSegment}`.replace(/\\/+/g, '/') : pathSegment;\n } else {\n // Layout route without path - use parent path\n fullPath = parentPath || '/';\n }\n\n // Add this route if it has an id\n if (route.id) {\n result.push({\n id: route.id,\n path: fullPath,\n file: route.file,\n index: route.index,\n });\n }\n\n // Recursively process children\n if (route.children && route.children.length > 0) {\n const childPath = route.path ? 
fullPath : parentPath;\n result.push(...flattenRoutes(route.children, childPath));\n }\n }\n\n return result;\n}\n","#!/usr/bin/env node\n/**\n * Copyright 2026 Salesforce, Inc.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { readdir, readFile, writeFile, mkdir, access, rm } from 'node:fs/promises';\nimport { join, extname, resolve, basename } from 'node:path';\nimport { execSync } from 'node:child_process';\nimport { Project, Node, type SourceFile, type PropertyDeclaration, type Decorator, type Expression } from 'ts-morph';\nimport { filePathToRoute } from './react-router-config.js';\nimport { logger } from '../logger';\n\n// Re-export `filePathToRoute`\nexport { filePathToRoute };\n\nconst SKIP_DIRECTORIES = ['build', 'dist', 'node_modules', '.git', '.next', 'coverage'];\n\nconst DEFAULT_COMPONENT_GROUP = 'odyssey_base';\nconst ARCH_TYPE_HEADLESS = 'headless';\n\ntype AttributeType =\n | 'string'\n | 'text'\n | 'markup'\n | 'integer'\n | 'boolean'\n | 'product'\n | 'category'\n | 'file'\n | 'page'\n | 'image'\n | 'url'\n | 'enum'\n | 'custom'\n | 'cms_record';\n\nconst VALID_ATTRIBUTE_TYPES: readonly AttributeType[] = [\n 'string',\n 'text',\n 'markup',\n 'integer',\n 'boolean',\n 'product',\n 'category',\n 'file',\n 'page',\n 'image',\n 'url',\n 'enum',\n 'custom',\n 'cms_record',\n] as const;\n\n// Type mapping for TypeScript types to B2C Commerce attribute types\n// Based on official schema: 
https://salesforcecommercecloud.github.io/b2c-dev-doc/docs/current/content/attributedefinition.json\nconst TYPE_MAPPING: Record<string, string> = {\n String: 'string',\n string: 'string',\n Number: 'integer',\n number: 'integer',\n Boolean: 'boolean',\n boolean: 'boolean',\n Date: 'string', // B2C Commerce doesn't have a native date type, use string\n URL: 'url',\n CMSRecord: 'cms_record',\n};\n\n// Resolve attribute type in order: decorator type -> ts-morph type inference -> fallback to string\nfunction resolveAttributeType(decoratorType?: string, tsMorphType?: string, fieldName?: string): string {\n // 1) If the type is set on the decorator, use that (with validation)\n if (decoratorType) {\n if (!VALID_ATTRIBUTE_TYPES.includes(decoratorType as AttributeType)) {\n logger.error(\n `Invalid attribute type '${decoratorType}' for field '${fieldName || 'unknown'}'. Valid types are: ${VALID_ATTRIBUTE_TYPES.join(', ')}`\n );\n process.exit(1);\n }\n return decoratorType;\n }\n\n // 2) Use the type from ts-morph type inference\n if (tsMorphType && TYPE_MAPPING[tsMorphType]) {\n return TYPE_MAPPING[tsMorphType];\n }\n\n // 3) Fall back to string\n return 'string';\n}\n\n// Convert field name to human-readable name\nfunction toHumanReadableName(fieldName: string): string {\n return fieldName\n .replace(/([A-Z])/g, ' $1') // Add space before capital letters\n .replace(/^./, (str) => str.toUpperCase()) // Capitalize first letter\n .trim();\n}\n\n// Convert name to camelCase filename (handles spaces and hyphens, preserves existing camelCase)\nfunction toCamelCaseFileName(name: string): string {\n // If the name is already camelCase (no spaces or hyphens), return as-is\n if (!/[\\s-]/.test(name)) {\n return name;\n }\n\n return name\n .split(/[\\s-]+/) // Split by whitespace and hyphens\n .map((word, index) => {\n if (index === 0) {\n return word.toLowerCase(); // First word is all lowercase\n }\n return word.charAt(0).toUpperCase() + word.slice(1).toLowerCase(); // Subsequent 
words are capitalized\n })\n .join(''); // Join without spaces or hyphens\n}\n\nfunction getTypeFromTsMorph(property: PropertyDeclaration, _sourceFile: SourceFile): string {\n try {\n const typeNode = property.getTypeNode();\n if (typeNode) {\n const typeText = typeNode.getText();\n // Extract the base type name from complex types\n const baseType = typeText.split('|')[0].split('&')[0].trim();\n return baseType;\n }\n } catch {\n // If type extraction fails, return string\n }\n\n return 'string';\n}\n\n/**\n * Resolve a variable's initializer expression from the same source file,\n * unwrapping `as const` type assertions.\n */\nfunction resolveVariableInitializer(sourceFile: SourceFile, name: string): Expression | undefined {\n const varDecl = sourceFile.getVariableDeclaration(name);\n if (!varDecl) return undefined;\n let initializer = varDecl.getInitializer();\n if (initializer && Node.isAsExpression(initializer)) {\n initializer = initializer.getExpression();\n }\n return initializer;\n}\n\n/**\n * Check whether an AST node is a type that `parseExpression` can resolve to a\n * concrete JS value (as opposed to falling through to `getText()`).\n */\nfunction isResolvableLiteral(node: Expression): boolean {\n return (\n Node.isStringLiteral(node) ||\n Node.isNumericLiteral(node) ||\n Node.isTrueLiteral(node) ||\n Node.isFalseLiteral(node) ||\n Node.isObjectLiteralExpression(node) ||\n Node.isArrayLiteralExpression(node)\n );\n}\n\nclass UnresolvedConstantReferenceError extends Error {\n constructor(reference: string) {\n super(\n `Cannot resolve constant reference '${reference}'. 
` +\n `Ensure the variable is declared in the same file as a literal value.`\n );\n this.name = 'UnresolvedConstantReferenceError';\n }\n}\n\n// Helper function to parse any TypeScript expression into a JavaScript value\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction parseExpression(expression: any): unknown {\n if (Node.isStringLiteral(expression)) {\n return expression.getLiteralValue();\n } else if (Node.isNumericLiteral(expression)) {\n return expression.getLiteralValue();\n } else if (Node.isTrueLiteral(expression)) {\n return true;\n } else if (Node.isFalseLiteral(expression)) {\n return false;\n } else if (Node.isObjectLiteralExpression(expression)) {\n return parseNestedObject(expression);\n } else if (Node.isArrayLiteralExpression(expression)) {\n return parseArrayLiteral(expression);\n } else if (Node.isPropertyAccessExpression(expression)) {\n const obj = expression.getExpression();\n const propName = expression.getName();\n if (Node.isIdentifier(obj)) {\n const resolved = resolveVariableInitializer(expression.getSourceFile(), obj.getText());\n if (resolved && Node.isObjectLiteralExpression(resolved)) {\n const prop = resolved.getProperty(propName);\n if (prop && Node.isPropertyAssignment(prop)) {\n const propInit = prop.getInitializer();\n if (propInit) return parseExpression(propInit);\n }\n }\n throw new UnresolvedConstantReferenceError(expression.getText());\n }\n return expression.getText();\n } else if (Node.isIdentifier(expression)) {\n const resolved = resolveVariableInitializer(expression.getSourceFile(), expression.getText());\n if (resolved && isResolvableLiteral(resolved)) {\n return parseExpression(resolved);\n }\n return expression.getText();\n } else {\n return expression.getText();\n }\n}\n\n// Helper function to parse deeply nested object literals\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction parseNestedObject(objectLiteral: any): Record<string, unknown> {\n const result: 
Record<string, unknown> = {};\n\n try {\n const properties = objectLiteral.getProperties();\n\n for (const property of properties) {\n if (Node.isPropertyAssignment(property)) {\n const name = property.getName();\n const initializer = property.getInitializer();\n\n if (initializer) {\n result[name] = parseExpression(initializer);\n }\n }\n }\n } catch (error) {\n logger.warn(`Could not parse nested object: ${(error as Error).message}`);\n return result; // Return the result even if there was an error\n }\n\n return result;\n}\n\n// Helper function to parse array literals\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction parseArrayLiteral(arrayLiteral: any): unknown[] {\n const result: unknown[] = [];\n\n try {\n const elements = arrayLiteral.getElements();\n\n for (const element of elements) {\n result.push(parseExpression(element));\n }\n } catch (error) {\n logger.warn(`Could not parse array literal: ${(error as Error).message}`);\n }\n\n return result;\n}\n\n// Parse decorator arguments using ts-morph\nfunction parseDecoratorArgs(decorator: Decorator): Record<string, unknown> {\n const result: Record<string, unknown> = {};\n try {\n const args = decorator.getArguments();\n\n if (args.length === 0) {\n return result;\n }\n\n // Handle the first argument\n const firstArg = args[0];\n\n if (Node.isObjectLiteralExpression(firstArg)) {\n // First argument is an object literal - parse all its properties\n const properties = firstArg.getProperties();\n\n for (const property of properties) {\n if (Node.isPropertyAssignment(property)) {\n const name = property.getName();\n const initializer = property.getInitializer();\n\n if (initializer) {\n result[name] = parseExpression(initializer);\n }\n }\n }\n } else if (Node.isStringLiteral(firstArg)) {\n // First argument is a string literal - use it as the id\n result.id = parseExpression(firstArg);\n\n // Check if there's a second argument (options object)\n if (args.length > 1) {\n const secondArg = 
args[1];\n if (Node.isObjectLiteralExpression(secondArg)) {\n const properties = secondArg.getProperties();\n\n for (const property of properties) {\n if (Node.isPropertyAssignment(property)) {\n const name = property.getName();\n const initializer = property.getInitializer();\n\n if (initializer) {\n result[name] = parseExpression(initializer);\n }\n }\n }\n }\n }\n }\n\n return result;\n } catch (error) {\n if (error instanceof UnresolvedConstantReferenceError) {\n throw error;\n }\n logger.warn(`Could not parse decorator arguments: ${(error as Error).message}`);\n return result;\n }\n}\n\nfunction extractAttributesFromSource(sourceFile: SourceFile, className: string): Record<string, unknown>[] {\n const attributes: Record<string, unknown>[] = [];\n\n try {\n // Find the class declaration\n const classDeclaration = sourceFile.getClass(className);\n if (!classDeclaration) {\n return attributes;\n }\n\n // Get all properties in the class\n const properties = classDeclaration.getProperties();\n\n for (const property of properties) {\n // Check if the property has an @AttributeDefinition decorator\n const attributeDecorator = property.getDecorator('AttributeDefinition');\n if (!attributeDecorator) {\n continue;\n }\n\n const fieldName = property.getName();\n const config = parseDecoratorArgs(attributeDecorator);\n\n const isRequired = !property.hasQuestionToken();\n\n const inferredType = (config.type as string) || getTypeFromTsMorph(property, sourceFile);\n\n const attribute: Record<string, unknown> = {\n id: config.id || fieldName,\n name: config.name || toHumanReadableName(fieldName),\n type: resolveAttributeType(config.type as string, inferredType, fieldName),\n required: config.required !== undefined ? 
config.required : isRequired,\n description: config.description || `Field: ${fieldName}`,\n };\n\n if (config.values) {\n attribute.values = config.values;\n }\n\n if (config.defaultValue !== undefined) {\n attribute.default_value = config.defaultValue;\n }\n\n attributes.push(attribute);\n }\n } catch (error) {\n if (error instanceof UnresolvedConstantReferenceError) {\n throw error;\n }\n logger.warn(`Could not extract attributes from class ${className}: ${(error as Error).message}`);\n }\n\n return attributes;\n}\n\nfunction normalizeComponentTypeId(typeId: string, defaultGroup: string): string {\n return typeId.includes('.') ? typeId : `${defaultGroup}.${typeId}`;\n}\n\nfunction extractRegionDefinitionsFromSource(\n sourceFile: SourceFile,\n className: string,\n defaultComponentGroup = DEFAULT_COMPONENT_GROUP\n): Record<string, unknown>[] {\n const regionDefinitions: Record<string, unknown>[] = [];\n\n try {\n // Find the class declaration\n const classDeclaration = sourceFile.getClass(className);\n if (!classDeclaration) {\n return regionDefinitions;\n }\n\n // Check for class-level @RegionDefinition decorator\n const classRegionDecorator = classDeclaration.getDecorator('RegionDefinition');\n if (classRegionDecorator) {\n const args = classRegionDecorator.getArguments();\n if (args.length > 0) {\n const firstArg = args[0];\n\n // Handle array literal argument (most common case)\n if (Node.isArrayLiteralExpression(firstArg)) {\n const elements = firstArg.getElements();\n for (const element of elements) {\n if (Node.isObjectLiteralExpression(element)) {\n const regionConfig = parseDecoratorArgs({\n getArguments: () => [element],\n } as unknown as Decorator);\n\n const regionDefinition: Record<string, unknown> = {\n id: regionConfig.id || 'region',\n name: regionConfig.name || 'Region',\n };\n\n // Add optional properties if they exist in the decorator\n if (regionConfig.componentTypes) {\n regionDefinition.component_types = regionConfig.componentTypes;\n }\n\n 
if (Array.isArray(regionConfig.componentTypeInclusions)) {\n regionDefinition.component_type_inclusions = regionConfig.componentTypeInclusions.map(\n (incl) => ({\n type_id: normalizeComponentTypeId(String(incl), defaultComponentGroup),\n })\n );\n }\n\n if (Array.isArray(regionConfig.componentTypeExclusions)) {\n regionDefinition.component_type_exclusions = regionConfig.componentTypeExclusions.map(\n (excl) => ({\n type_id: normalizeComponentTypeId(String(excl), defaultComponentGroup),\n })\n );\n }\n\n if (regionConfig.maxComponents !== undefined) {\n regionDefinition.max_components = regionConfig.maxComponents;\n }\n\n if (regionConfig.minComponents !== undefined) {\n regionDefinition.min_components = regionConfig.minComponents;\n }\n\n if (regionConfig.allowMultiple !== undefined) {\n regionDefinition.allow_multiple = regionConfig.allowMultiple;\n }\n\n if (regionConfig.defaultComponentConstructors) {\n regionDefinition.default_component_constructors =\n regionConfig.defaultComponentConstructors;\n }\n\n regionDefinitions.push(regionDefinition);\n }\n }\n }\n }\n }\n } catch (error) {\n logger.warn(\n `Warning: Could not extract region definitions from class ${className}: ${(error as Error).message}`\n );\n }\n\n return regionDefinitions;\n}\n\nasync function processComponentFile(filePath: string, _projectRoot: string): Promise<unknown[]> {\n try {\n const content = await readFile(filePath, 'utf-8');\n const components: unknown[] = [];\n\n // Check if file contains @Component decorator\n if (!content.includes('@Component')) {\n return components;\n }\n\n // Convert file path to module path (currently unused but may be needed in future)\n // const relativePath = relative(join(projectRoot, 'src'), filePath);\n // const modulePath = relativePath.replace(/\\.tsx?$/, '').replace(/\\\\/g, '/');\n\n try {\n // Create a ts-morph project and add the source file\n const project = new Project({\n useInMemoryFileSystem: true,\n skipAddingFilesFromTsConfig: true,\n });\n\n 
const sourceFile = project.createSourceFile(filePath, content);\n\n const classes = sourceFile.getClasses();\n\n for (const classDeclaration of classes) {\n const componentDecorator = classDeclaration.getDecorator('Component');\n if (!componentDecorator) {\n continue;\n }\n\n const className = classDeclaration.getName();\n if (!className) {\n continue;\n }\n\n const componentConfig = parseDecoratorArgs(componentDecorator);\n const componentGroup = String(componentConfig.group || DEFAULT_COMPONENT_GROUP);\n\n const attributes = extractAttributesFromSource(sourceFile, className);\n const regionDefinitions = extractRegionDefinitionsFromSource(sourceFile, className, componentGroup);\n\n const componentMetadata = {\n typeId: componentConfig.id || className.toLowerCase(),\n name: componentConfig.name || toHumanReadableName(className),\n group: componentGroup,\n description: componentConfig.description || `Custom component: ${className}`,\n regionDefinitions,\n attributes,\n };\n\n components.push(componentMetadata);\n }\n } catch (error) {\n if (error instanceof UnresolvedConstantReferenceError) {\n throw error;\n }\n logger.warn(`Could not process file ${filePath}:`, (error as Error).message);\n }\n\n return components;\n } catch (error) {\n if (error instanceof UnresolvedConstantReferenceError) {\n throw error;\n }\n logger.warn(`Could not read file ${filePath}:`, (error as Error).message);\n return [];\n }\n}\n\nasync function processPageTypeFile(filePath: string, projectRoot: string): Promise<unknown[]> {\n try {\n const content = await readFile(filePath, 'utf-8');\n const pageTypes: unknown[] = [];\n\n // Check if file contains @PageType decorator\n if (!content.includes('@PageType')) {\n return pageTypes;\n }\n\n try {\n // Create a ts-morph project and add the source file\n const project = new Project({\n useInMemoryFileSystem: true,\n skipAddingFilesFromTsConfig: true,\n });\n\n const sourceFile = project.createSourceFile(filePath, content);\n\n const classes = 
sourceFile.getClasses();\n\n for (const classDeclaration of classes) {\n const pageTypeDecorator = classDeclaration.getDecorator('PageType');\n if (!pageTypeDecorator) {\n continue;\n }\n\n const className = classDeclaration.getName();\n if (!className) {\n continue;\n }\n\n const pageTypeConfig = parseDecoratorArgs(pageTypeDecorator);\n\n const attributes = extractAttributesFromSource(sourceFile, className);\n const regionDefinitions = extractRegionDefinitionsFromSource(sourceFile, className);\n const route = filePathToRoute(filePath, projectRoot);\n\n const pageTypeMetadata = {\n typeId: pageTypeConfig.id || className.toLowerCase(),\n name: pageTypeConfig.name || toHumanReadableName(className),\n description: pageTypeConfig.description || `Custom page type: ${className}`,\n regionDefinitions,\n supportedAspectTypes: pageTypeConfig.supportedAspectTypes || [],\n attributes,\n route,\n };\n\n pageTypes.push(pageTypeMetadata);\n }\n } catch (error) {\n logger.warn(`Could not process file ${filePath}:`, (error as Error).message);\n }\n\n return pageTypes;\n } catch (error) {\n logger.warn(`Could not read file ${filePath}:`, (error as Error).message);\n return [];\n }\n}\n\nasync function processAspectFile(filePath: string, _projectRoot: string): Promise<unknown[]> {\n try {\n const content = await readFile(filePath, 'utf-8');\n const aspects: unknown[] = [];\n\n // Check if file is a JSON aspect file\n if (!filePath.endsWith('.json') || !content.trim().startsWith('{')) {\n return aspects;\n }\n\n // Check if file is in the aspects directory\n if (!filePath.includes('/aspects/') && !filePath.includes('\\\\aspects\\\\')) {\n return aspects;\n }\n\n try {\n // Parse the JSON content\n const aspectData = JSON.parse(content);\n\n // Extract filename without extension as the aspect ID\n const fileName = basename(filePath, '.json');\n\n // Validate that it looks like an aspect file\n if (!aspectData.name || !aspectData.attribute_definitions) {\n return aspects;\n }\n\n const 
aspectMetadata = {\n id: fileName,\n name: aspectData.name,\n description: aspectData.description || `Aspect type: ${aspectData.name}`,\n attributeDefinitions: aspectData.attribute_definitions || [],\n supportedObjectTypes: aspectData.supported_object_types || [],\n };\n\n aspects.push(aspectMetadata);\n } catch (parseError) {\n logger.warn(`Could not parse JSON in file ${filePath}:`, (parseError as Error).message);\n }\n\n return aspects;\n } catch (error) {\n logger.warn(`Could not read file ${filePath}:`, (error as Error).message);\n return [];\n }\n}\n\nasync function generateComponentCartridge(\n component: Record<string, unknown>,\n outputDir: string,\n dryRun = false\n): Promise<void> {\n const fileName = toCamelCaseFileName(component.typeId as string);\n const groupDir = join(outputDir, component.group as string);\n const outputPath = join(groupDir, `${fileName}.json`);\n\n if (!dryRun) {\n // Ensure the group directory exists\n try {\n await mkdir(groupDir, { recursive: true });\n } catch {\n // Directory might already exist, which is fine\n }\n\n const attributeDefinitionGroups = [\n {\n id: component.typeId,\n name: component.name,\n description: component.description,\n attribute_definitions: component.attributes,\n },\n ];\n\n const cartridgeData = {\n name: component.name,\n description: component.description,\n group: component.group,\n arch_type: ARCH_TYPE_HEADLESS,\n region_definitions: component.regionDefinitions || [],\n attribute_definition_groups: attributeDefinitionGroups,\n };\n\n await writeFile(outputPath, JSON.stringify(cartridgeData, null, 2));\n }\n\n const prefix = dryRun ? 
' - [DRY RUN]' : ' -';\n logger.debug(\n `${prefix} ${String(component.typeId)}: ${String(component.name)} (${String((component.attributes as unknown[]).length)} attributes) → ${fileName}.json`\n );\n}\n\nasync function generatePageTypeCartridge(\n pageType: Record<string, unknown>,\n outputDir: string,\n dryRun = false\n): Promise<void> {\n const fileName = toCamelCaseFileName(pageType.name as string);\n const outputPath = join(outputDir, `${fileName}.json`);\n\n if (!dryRun) {\n const cartridgeData: Record<string, unknown> = {\n name: pageType.name,\n description: pageType.description,\n arch_type: ARCH_TYPE_HEADLESS,\n region_definitions: pageType.regionDefinitions || [],\n };\n\n // Add attribute_definition_groups if there are attributes\n if (pageType.attributes && (pageType.attributes as unknown[]).length > 0) {\n const attributeDefinitionGroups = [\n {\n id: pageType.typeId || fileName,\n name: pageType.name,\n description: pageType.description,\n attribute_definitions: pageType.attributes,\n },\n ];\n cartridgeData.attribute_definition_groups = attributeDefinitionGroups;\n }\n\n // Add supported_aspect_types if specified\n if (pageType.supportedAspectTypes) {\n cartridgeData.supported_aspect_types = pageType.supportedAspectTypes;\n }\n\n if (pageType.route) {\n cartridgeData.route = pageType.route;\n }\n\n await writeFile(outputPath, JSON.stringify(cartridgeData, null, 2));\n }\n\n const prefix = dryRun ? 
' - [DRY RUN]' : ' -';\n logger.debug(\n `${prefix} ${String(pageType.name)}: ${String(pageType.description)} (${String((pageType.attributes as unknown[]).length)} attributes) → ${fileName}.json`\n );\n}\n\nasync function generateAspectCartridge(\n aspect: Record<string, unknown>,\n outputDir: string,\n dryRun = false\n): Promise<void> {\n const fileName = toCamelCaseFileName(aspect.id as string);\n const outputPath = join(outputDir, `${fileName}.json`);\n\n if (!dryRun) {\n const cartridgeData: Record<string, unknown> = {\n name: aspect.name,\n description: aspect.description,\n arch_type: ARCH_TYPE_HEADLESS,\n attribute_definitions: aspect.attributeDefinitions || [],\n };\n\n // Add supported_object_types if specified\n if (aspect.supportedObjectTypes) {\n cartridgeData.supported_object_types = aspect.supportedObjectTypes;\n }\n\n await writeFile(outputPath, JSON.stringify(cartridgeData, null, 2));\n }\n\n const prefix = dryRun ? ' - [DRY RUN]' : ' -';\n logger.debug(\n `${prefix} ${String(aspect.name)}: ${String(aspect.description)} (${String((aspect.attributeDefinitions as unknown[]).length)} attributes) → ${fileName}.json`\n );\n}\n\n/**\n * Options for generateMetadata function\n */\nexport interface GenerateMetadataOptions {\n /**\n * Optional array of specific file paths to process.\n * If provided, only these files will be processed and existing cartridge files will NOT be deleted.\n * If omitted, the entire src/ directory will be scanned and all existing cartridge files will be deleted first.\n */\n filePaths?: string[];\n\n /**\n * Whether to run ESLint with --fix on generated JSON files to format them according to project settings.\n * Defaults to true.\n */\n lintFix?: boolean;\n\n /**\n * If true, scans files and reports what would be generated without actually writing any files or deleting directories.\n * Defaults to false.\n */\n dryRun?: boolean;\n}\n\n/**\n * Result returned by generateMetadata function\n */\nexport interface 
GenerateMetadataResult {\n componentsGenerated: number;\n pageTypesGenerated: number;\n aspectsGenerated: number;\n totalFiles: number;\n}\n\n/**\n * Runs ESLint with --fix on the specified directory to format JSON files.\n * This ensures generated JSON files match the project's Prettier/ESLint configuration.\n */\nfunction lintGeneratedFiles(metadataDir: string, projectRoot: string): void {\n try {\n logger.debug('🔧 Running ESLint --fix on generated JSON files...');\n\n // Run ESLint from the project root directory so it picks up the correct config\n // Use --no-error-on-unmatched-pattern to handle cases where no JSON files exist yet\n const command = `npx eslint \"${metadataDir}/**/*.json\" --fix --no-error-on-unmatched-pattern`;\n\n execSync(command, {\n cwd: projectRoot,\n stdio: 'pipe', // Suppress output unless there's an error\n encoding: 'utf-8',\n });\n\n logger.debug('✅ JSON files formatted successfully');\n } catch (error) {\n // ESLint returns non-zero exit code even when --fix resolves all issues\n // We only warn if there are actual unfixable issues\n const execError = error as { status?: number; stderr?: string; stdout?: string };\n\n // Exit code 1 usually means there were linting issues (some may have been fixed)\n // Exit code 2 means configuration error or other fatal error\n if (execError.status === 2) {\n const errMsg = execError.stderr || execError.stdout || 'Unknown error';\n logger.warn(`⚠️ Could not run ESLint --fix: ${errMsg}`);\n } else if (execError.stderr && execError.stderr.includes('error')) {\n logger.warn(`⚠️ Some linting issues could not be auto-fixed. 
Run ESLint manually to review.`);\n } else {\n // Exit code 1 with no errors in stderr usually means all issues were fixed\n logger.debug('✅ JSON files formatted successfully');\n }\n }\n}\n\n// Main function\nexport async function generateMetadata(\n projectDirectory: string,\n metadataDirectory: string,\n options?: GenerateMetadataOptions\n): Promise<GenerateMetadataResult> {\n try {\n const filePaths = options?.filePaths;\n const isIncrementalMode = filePaths && filePaths.length > 0;\n const dryRun = options?.dryRun || false;\n\n if (dryRun) {\n logger.debug('🔍 [DRY RUN] Scanning for decorated components and page types...');\n } else if (isIncrementalMode) {\n logger.debug(`🔍 Generating metadata for ${filePaths.length} specified file(s)...`);\n } else {\n logger.debug('🔍 Generating metadata for decorated components and page types...');\n }\n\n const projectRoot = resolve(projectDirectory);\n const srcDir = join(projectRoot, 'src');\n const metadataDir = resolve(metadataDirectory);\n const componentsOutputDir = join(metadataDir, 'components');\n const pagesOutputDir = join(metadataDir, 'pages');\n const aspectsOutputDir = join(metadataDir, 'aspects');\n\n // Skip directory operations in dry run mode\n if (!dryRun) {\n // Only delete existing directories in full scan mode (not incremental)\n if (!isIncrementalMode) {\n logger.debug('🗑️ Cleaning existing output directories...');\n for (const outputDir of [componentsOutputDir, pagesOutputDir, aspectsOutputDir]) {\n try {\n await rm(outputDir, { recursive: true, force: true });\n logger.debug(` - Deleted: ${outputDir}`);\n } catch {\n // Directory might not exist, which is fine\n logger.debug(` - Directory not found (skipping): ${outputDir}`);\n }\n }\n } else {\n logger.debug('📝 Incremental mode: existing cartridge files will be preserved/overwritten');\n }\n\n // Create output directories if they don't exist\n logger.debug('Creating output directories...');\n for (const outputDir of [componentsOutputDir, 
pagesOutputDir, aspectsOutputDir]) {\n try {\n await mkdir(outputDir, { recursive: true });\n } catch (error) {\n try {\n await access(outputDir);\n // Directory exists, that's fine\n } catch {\n const err = error as Error;\n logger.error(`❌ Failed to create output directory ${outputDir}: ${err.message}`);\n process.exit(1);\n throw err;\n }\n }\n }\n } else if (isIncrementalMode) {\n logger.debug(`📝 [DRY RUN] Would process ${filePaths.length} specific file(s)`);\n } else {\n logger.debug('📝 [DRY RUN] Would clean and regenerate all metadata files');\n }\n\n let files: string[] = [];\n\n if (isIncrementalMode && filePaths) {\n // Use the specified file paths (resolve them relative to project root)\n files = filePaths.map((fp) => resolve(projectRoot, fp));\n logger.debug(`📂 Processing ${files.length} specified file(s)...`);\n } else {\n // Full scan mode: scan entire src directory\n const scanDirectory = async (dir: string): Promise<void> => {\n const entries = await readdir(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = join(dir, entry.name);\n\n if (entry.isDirectory()) {\n if (!SKIP_DIRECTORIES.includes(entry.name)) {\n await scanDirectory(fullPath);\n }\n } else if (\n entry.isFile() &&\n (extname(entry.name) === '.ts' ||\n extname(entry.name) === '.tsx' ||\n extname(entry.name) === '.json')\n ) {\n files.push(fullPath);\n }\n }\n };\n\n await scanDirectory(srcDir);\n }\n\n // Process each file for both components and page types\n const allComponents: unknown[] = [];\n const allPageTypes: unknown[] = [];\n const allAspects: unknown[] = [];\n\n for (const file of files) {\n const components = await processComponentFile(file, projectRoot);\n allComponents.push(...components);\n\n const pageTypes = await processPageTypeFile(file, projectRoot);\n allPageTypes.push(...pageTypes);\n\n const aspects = await processAspectFile(file, projectRoot);\n allAspects.push(...aspects);\n }\n\n if (allComponents.length === 0 && 
allPageTypes.length === 0 && allAspects.length === 0) {\n logger.info('⚠️ No decorated components, page types, or aspect files found.');\n return {\n componentsGenerated: 0,\n pageTypesGenerated: 0,\n aspectsGenerated: 0,\n totalFiles: 0,\n };\n }\n\n // Generate component cartridge files\n if (allComponents.length > 0) {\n logger.debug(`✅ Found ${allComponents.length} decorated component(s)`);\n for (const component of allComponents) {\n await generateComponentCartridge(component as Record<string, unknown>, componentsOutputDir, dryRun);\n }\n if (dryRun) {\n logger.info(`[DRY RUN] Would generate ${allComponents.length} component metadata file(s)`);\n } else {\n logger.info(`Generated ${allComponents.length} component metadata file(s)`);\n }\n }\n\n // Generate page type cartridge files\n if (allPageTypes.length > 0) {\n logger.debug(`✅ Found ${allPageTypes.length} decorated page type(s)`);\n for (const pageType of allPageTypes) {\n await generatePageTypeCartridge(pageType as Record<string, unknown>, pagesOutputDir, dryRun);\n }\n if (dryRun) {\n logger.info(`[DRY RUN] Would generate ${allPageTypes.length} page type metadata file(s)`);\n } else {\n logger.info(`Generated ${allPageTypes.length} page type metadata file(s)`);\n }\n }\n\n if (allAspects.length > 0) {\n logger.debug(`✅ Found ${allAspects.length} decorated aspect(s)`);\n for (const aspect of allAspects) {\n await generateAspectCartridge(aspect as Record<string, unknown>, aspectsOutputDir, dryRun);\n }\n if (dryRun) {\n logger.info(`[DRY RUN] Would generate ${allAspects.length} aspect metadata file(s)`);\n } else {\n logger.info(`Generated ${allAspects.length} aspect metadata file(s)`);\n }\n }\n\n // Run ESLint --fix to format generated JSON files according to project settings\n const shouldLintFix = options?.lintFix !== false; // Default to true\n if (\n !dryRun &&\n shouldLintFix &&\n (allComponents.length > 0 || allPageTypes.length > 0 || allAspects.length > 0)\n ) {\n lintGeneratedFiles(metadataDir, 
projectRoot);\n }\n\n // Return statistics\n return {\n componentsGenerated: allComponents.length,\n pageTypesGenerated: allPageTypes.length,\n aspectsGenerated: allAspects.length,\n totalFiles: allComponents.length + allPageTypes.length + allAspects.length,\n };\n } catch (error) {\n const err = error as Error;\n logger.error('❌ Error:', err.message);\n process.exit(1);\n throw err;\n }\n}\n"],"mappings":";;;;;;;;;;;AA8DA,MAAMA,iBAA2C;CAC7C,OAAO;CACP,MAAM;CACN,MAAM;CACN,OAAO;CACV;AAED,IAAIC;;;;;;AAOJ,SAAS,qBAA8B;CACnC,MAAM,MAAM,QAAQ,IAAI,OAAO,MAAM;AACrC,KAAI,CAAC,IAAK,QAAO;CACjB,MAAM,aAAa,IAAI,aAAa;AACpC,KAAI;EAAC;EAAK;EAAQ;EAAO;EAAK,CAAC,SAAS,WAAW,CAAE,QAAO;AAC5D,QAAO,IAAI,MAAM,IAAI,CAAC,MAAM,UAAU;EAClC,MAAM,QAAQ,MAAM,MAAM;AAC1B,SAAO,UAAU,OAAO,UAAU,YAAY,UAAU;GAC1D;;AAGN,SAAS,eAAyB;AAC9B,KAAI,cAAe,QAAO;CAC1B,MAAM,WAAW,QAAQ,IAAI,iBAAiB,QAAQ,IAAI;AAC1D,KAAI,YAAY,YAAY,eAAgB,QAAO;AACnD,KAAI,oBAAoB,CAAE,QAAO;AACjC,KAAI,QAAQ,IAAI,aAAa,aAAc,QAAO;AAClD,QAAO;;AAGX,SAAS,UAAU,OAA0B;AACzC,QAAO,eAAe,UAAU,eAAe,cAAc;;AAGjE,MAAa,SAAS;CAClB,MAAM,KAAa,GAAG,MAAuB;AACzC,MAAI,CAAC,UAAU,QAAQ,CAAE;AACzB,UAAQ,MAAM,MAAM,IAAI,iBAAiB,EAAE,KAAK,GAAG,KAAK;;CAE5D,KAAK,KAAa,GAAG,MAAuB;AACxC,MAAI,CAAC,UAAU,OAAO,CAAE;AACxB,UAAQ,KAAK,MAAM,OAAO,gBAAgB,EAAE,KAAK,GAAG,KAAK;;CAE7D,KAAK,KAAa,GAAG,MAAuB;AACxC,MAAI,CAAC,UAAU,OAAO,CAAE;AACxB,UAAQ,IAAI,MAAM,KAAK,gBAAgB,EAAE,KAAK,GAAG,KAAK;;CAE1D,MAAM,KAAa,GAAG,MAAuB;AACzC,MAAI,CAAC,UAAU,QAAQ,CAAE;AACzB,UAAQ,IAAI,MAAM,KAAK,iBAAiB,EAAE,KAAK,GAAG,KAAK;;CAE3D,SAAS,OAAmC;AACxC,kBAAgB;;CAEpB,WAAqB;AACjB,SAAO,cAAc;;CAE5B;;;;ACnGD,IAAIC,iBAAiC;AAErC,SAAS,oBAAoB,kBAAmC;AAC5D,KAAI,mBAAmB,KACnB,QAAO;AAGX,KAAI;AACA,WAAS,0BAA0B;GAC/B,KAAK;GACL,KAAK,eAAe;GACpB,OAAO;GACV,CAAC;AACF,mBAAiB;SACb;AACJ,mBAAiB;;AAErB,QAAO;;;;;;;;;;;;AAaX,SAAS,qBAAqB,kBAA8C;AACxE,KAAI,CAAC,oBAAoB,iBAAiB,CACtC,OAAM,IAAI,MACN,qGACH;CAIL,MAAM,WAAW,KAAK,QAAQ,EAAE,uBAAuB,YAAY,CAAC,OAAO;AAE3E,KAAI;AAEA,WAAS,iCAAiC,SAAS,IAAI;GACnD,KAAK;GACL,KAAK,eAAe;GACpB,UAAU;GACV,OAAO;IAAC;IAAQ;IAAQ;IAAO;GAClC,CAAC;
EACF,MAAM,SAAS,aAAa,UAAU,QAAQ;AAC9C,SAAO,KAAK,MAAM,OAAO;UACpB,OAAO;AACZ,QAAM,IAAI,MAAM,+CAAgD,MAAgB,UAAU;WACpF;AAEN,MAAI;AACA,OAAI,WAAW,SAAS,CACpB,YAAW,SAAS;UAEpB;;;;;;;;;;;;;AAgBhB,SAAgB,gBAAgB,UAAkB,aAA6B;CAE3E,MAAM,gBAAgB,SAAS,QAAQ,OAAO,IAAI;CAIlD,MAAM,aAAa,cADJ,qBAAqB,YAAY,CACR;AAGxC,MAAK,MAAM,SAAS,YAAY;EAE5B,MAAM,iBAAiB,MAAM,KAAK,QAAQ,OAAO,IAAI;AAGrD,MAAI,cAAc,SAAS,eAAe,IAAI,cAAc,SAAS,IAAI,iBAAiB,CACtF,QAAO,MAAM;EAIjB,MAAM,sBAAsB,eAAe,QAAQ,SAAS,GAAG;AAC/D,MAAI,cAAc,SAAS,oBAAoB,IAAI,cAAc,SAAS,IAAI,sBAAsB,CAChG,QAAO,MAAM;;AAKrB,QAAO,KAAK,kCAAkC,WAAW;AACzD,QAAO;;;;;;;;;AAUX,SAAS,cACL,QACA,aAAa,IACqD;CAClE,MAAMC,SAA6E,EAAE;AAErF,MAAK,MAAM,SAAS,QAAQ;EAExB,IAAIC;AACJ,MAAI,MAAM,MACN,YAAW,cAAc;WAClB,MAAM,MAAM;GAEnB,MAAM,cAAc,MAAM,KAAK,WAAW,IAAI,GAAG,MAAM,OAAO,IAAI,MAAM;AACxE,cAAW,aAAa,GAAG,aAAa,cAAc,QAAQ,QAAQ,IAAI,GAAG;QAG7E,YAAW,cAAc;AAI7B,MAAI,MAAM,GACN,QAAO,KAAK;GACR,IAAI,MAAM;GACV,MAAM;GACN,MAAM,MAAM;GACZ,OAAO,MAAM;GAChB,CAAC;AAIN,MAAI,MAAM,YAAY,MAAM,SAAS,SAAS,GAAG;GAC7C,MAAM,YAAY,MAAM,OAAO,WAAW;AAC1C,UAAO,KAAK,GAAG,cAAc,MAAM,UAAU,UAAU,CAAC;;;AAIhE,QAAO;;;;;AClJX,MAAM,mBAAmB;CAAC;CAAS;CAAQ;CAAgB;CAAQ;CAAS;CAAW;AAEvF,MAAM,0BAA0B;AAChC,MAAM,qBAAqB;AAkB3B,MAAMC,wBAAkD;CACpD;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACH;AAID,MAAMC,eAAuC;CACzC,QAAQ;CACR,QAAQ;CACR,QAAQ;CACR,QAAQ;CACR,SAAS;CACT,SAAS;CACT,MAAM;CACN,KAAK;CACL,WAAW;CACd;AAGD,SAAS,qBAAqB,eAAwB,aAAsB,WAA4B;AAEpG,KAAI,eAAe;AACf,MAAI,CAAC,sBAAsB,SAAS,cAA+B,EAAE;AACjE,UAAO,MACH,2BAA2B,cAAc,eAAe,aAAa,UAAU,sBAAsB,sBAAsB,KAAK,KAAK,GACxI;AACD,WAAQ,KAAK,EAAE;;AAEnB,SAAO;;AAIX,KAAI,eAAe,aAAa,aAC5B,QAAO,aAAa;AAIxB,QAAO;;AAIX,SAAS,oBAAoB,WAA2B;AACpD,QAAO,UACF,QAAQ,YAAY,MAAM,CAC1B,QAAQ,OAAO,QAAQ,IAAI,aAAa,CAAC,CACzC,MAAM;;AAIf,SAAS,oBAAoB,MAAsB;AAE/C,KAAI,CAAC,QAAQ,KAAK,KAAK,CACnB,QAAO;AAGX,QAAO,KACF,MAAM,SAAS,CACf,KAAK,MAAM,UAAU;AAClB,MAAI,UAAU,EACV,QAAO,KAAK,aAAa;AAE7B,SAAO,KAAK,OAAO,EAAE,CAAC,aAAa,GAAG,KAAK,MAAM,EAAE,CAAC,aAAa;GACnE,CACD,KAAK,GAAG;;AAGjB,SAAS,mBAAmB,UAA+B,aAAiC;AACxF,KAAI;EACA,M
AAM,WAAW,SAAS,aAAa;AACvC,MAAI,SAIA,QAHiB,SAAS,SAAS,CAET,MAAM,IAAI,CAAC,GAAG,MAAM,IAAI,CAAC,GAAG,MAAM;SAG5D;AAIR,QAAO;;;;;;AAOX,SAAS,2BAA2B,YAAwB,MAAsC;CAC9F,MAAM,UAAU,WAAW,uBAAuB,KAAK;AACvD,KAAI,CAAC,QAAS,QAAO;CACrB,IAAI,cAAc,QAAQ,gBAAgB;AAC1C,KAAI,eAAe,KAAK,eAAe,YAAY,CAC/C,eAAc,YAAY,eAAe;AAE7C,QAAO;;;;;;AAOX,SAAS,oBAAoB,MAA2B;AACpD,QACI,KAAK,gBAAgB,KAAK,IAC1B,KAAK,iBAAiB,KAAK,IAC3B,KAAK,cAAc,KAAK,IACxB,KAAK,eAAe,KAAK,IACzB,KAAK,0BAA0B,KAAK,IACpC,KAAK,yBAAyB,KAAK;;AAI3C,IAAM,mCAAN,cAA+C,MAAM;CACjD,YAAY,WAAmB;AAC3B,QACI,sCAAsC,UAAU,yEAEnD;AACD,OAAK,OAAO;;;AAMpB,SAAS,gBAAgB,YAA0B;AAC/C,KAAI,KAAK,gBAAgB,WAAW,CAChC,QAAO,WAAW,iBAAiB;UAC5B,KAAK,iBAAiB,WAAW,CACxC,QAAO,WAAW,iBAAiB;UAC5B,KAAK,cAAc,WAAW,CACrC,QAAO;UACA,KAAK,eAAe,WAAW,CACtC,QAAO;UACA,KAAK,0BAA0B,WAAW,CACjD,QAAO,kBAAkB,WAAW;UAC7B,KAAK,yBAAyB,WAAW,CAChD,QAAO,kBAAkB,WAAW;UAC7B,KAAK,2BAA2B,WAAW,EAAE;EACpD,MAAM,MAAM,WAAW,eAAe;EACtC,MAAM,WAAW,WAAW,SAAS;AACrC,MAAI,KAAK,aAAa,IAAI,EAAE;GACxB,MAAM,WAAW,2BAA2B,WAAW,eAAe,EAAE,IAAI,SAAS,CAAC;AACtF,OAAI,YAAY,KAAK,0BAA0B,SAAS,EAAE;IACtD,MAAM,OAAO,SAAS,YAAY,SAAS;AAC3C,QAAI,QAAQ,KAAK,qBAAqB,KAAK,EAAE;KACzC,MAAM,WAAW,KAAK,gBAAgB;AACtC,SAAI,SAAU,QAAO,gBAAgB,SAAS;;;AAGtD,SAAM,IAAI,iCAAiC,WAAW,SAAS,CAAC;;AAEpE,SAAO,WAAW,SAAS;YACpB,KAAK,aAAa,WAAW,EAAE;EACtC,MAAM,WAAW,2BAA2B,WAAW,eAAe,EAAE,WAAW,SAAS,CAAC;AAC7F,MAAI,YAAY,oBAAoB,SAAS,CACzC,QAAO,gBAAgB,SAAS;AAEpC,SAAO,WAAW,SAAS;OAE3B,QAAO,WAAW,SAAS;;AAMnC,SAAS,kBAAkB,eAA6C;CACpE,MAAMC,SAAkC,EAAE;AAE1C,KAAI;EACA,MAAM,aAAa,cAAc,eAAe;AAEhD,OAAK,MAAM,YAAY,WACnB,KAAI,KAAK,qBAAqB,SAAS,EAAE;GACrC,MAAM,OAAO,SAAS,SAAS;GAC/B,MAAM,cAAc,SAAS,gBAAgB;AAE7C,OAAI,YACA,QAAO,QAAQ,gBAAgB,YAAY;;UAIlD,OAAO;AACZ,SAAO,KAAK,kCAAmC,MAAgB,UAAU;AACzE,SAAO;;AAGX,QAAO;;AAKX,SAAS,kBAAkB,cAA8B;CACrD,MAAMC,SAAoB,EAAE;AAE5B,KAAI;EACA,MAAM,WAAW,aAAa,aAAa;AAE3C,OAAK,MAAM,WAAW,SAClB,QAAO,KAAK,gBAAgB,QAAQ,CAAC;UAEpC,OAAO;AACZ,SAAO,KAAK,kCAAmC,MAAgB,UAAU;;AAG7E,QAAO;;AAIX,SAAS,mBAAmB,WAA+C;CACvE,MAAMD,SAAkC,EAAE;AAC1C,KAAI;EACA,MAAM,OAAO,UAAU,cAAc;AAErC,MAAI,KAAK,WAAW,EACh
B,QAAO;EAIX,MAAM,WAAW,KAAK;AAEtB,MAAI,KAAK,0BAA0B,SAAS,EAAE;GAE1C,MAAM,aAAa,SAAS,eAAe;AAE3C,QAAK,MAAM,YAAY,WACnB,KAAI,KAAK,qBAAqB,SAAS,EAAE;IACrC,MAAM,OAAO,SAAS,SAAS;IAC/B,MAAM,cAAc,SAAS,gBAAgB;AAE7C,QAAI,YACA,QAAO,QAAQ,gBAAgB,YAAY;;aAIhD,KAAK,gBAAgB,SAAS,EAAE;AAEvC,UAAO,KAAK,gBAAgB,SAAS;AAGrC,OAAI,KAAK,SAAS,GAAG;IACjB,MAAM,YAAY,KAAK;AACvB,QAAI,KAAK,0BAA0B,UAAU,EAAE;KAC3C,MAAM,aAAa,UAAU,eAAe;AAE5C,UAAK,MAAM,YAAY,WACnB,KAAI,KAAK,qBAAqB,SAAS,EAAE;MACrC,MAAM,OAAO,SAAS,SAAS;MAC/B,MAAM,cAAc,SAAS,gBAAgB;AAE7C,UAAI,YACA,QAAO,QAAQ,gBAAgB,YAAY;;;;;AAQnE,SAAO;UACF,OAAO;AACZ,MAAI,iBAAiB,iCACjB,OAAM;AAEV,SAAO,KAAK,wCAAyC,MAAgB,UAAU;AAC/E,SAAO;;;AAIf,SAAS,4BAA4B,YAAwB,WAA8C;CACvG,MAAME,aAAwC,EAAE;AAEhD,KAAI;EAEA,MAAM,mBAAmB,WAAW,SAAS,UAAU;AACvD,MAAI,CAAC,iBACD,QAAO;EAIX,MAAM,aAAa,iBAAiB,eAAe;AAEnD,OAAK,MAAM,YAAY,YAAY;GAE/B,MAAM,qBAAqB,SAAS,aAAa,sBAAsB;AACvE,OAAI,CAAC,mBACD;GAGJ,MAAM,YAAY,SAAS,SAAS;GACpC,MAAM,SAAS,mBAAmB,mBAAmB;GAErD,MAAM,aAAa,CAAC,SAAS,kBAAkB;GAE/C,MAAM,eAAgB,OAAO,QAAmB,mBAAmB,UAAU,WAAW;GAExF,MAAMC,YAAqC;IACvC,IAAI,OAAO,MAAM;IACjB,MAAM,OAAO,QAAQ,oBAAoB,UAAU;IACnD,MAAM,qBAAqB,OAAO,MAAgB,cAAc,UAAU;IAC1E,UAAU,OAAO,aAAa,SAAY,OAAO,WAAW;IAC5D,aAAa,OAAO,eAAe,UAAU;IAChD;AAED,OAAI,OAAO,OACP,WAAU,SAAS,OAAO;AAG9B,OAAI,OAAO,iBAAiB,OACxB,WAAU,gBAAgB,OAAO;AAGrC,cAAW,KAAK,UAAU;;UAEzB,OAAO;AACZ,MAAI,iBAAiB,iCACjB,OAAM;AAEV,SAAO,KAAK,2CAA2C,UAAU,IAAK,MAAgB,UAAU;;AAGpG,QAAO;;AAGX,SAAS,yBAAyB,QAAgB,cAA8B;AAC5E,QAAO,OAAO,SAAS,IAAI,GAAG,SAAS,GAAG,aAAa,GAAG;;AAG9D,SAAS,mCACL,YACA,WACA,wBAAwB,yBACC;CACzB,MAAMC,oBAA+C,EAAE;AAEvD,KAAI;EAEA,MAAM,mBAAmB,WAAW,SAAS,UAAU;AACvD,MAAI,CAAC,iBACD,QAAO;EAIX,MAAM,uBAAuB,iBAAiB,aAAa,mBAAmB;AAC9E,MAAI,sBAAsB;GACtB,MAAM,OAAO,qBAAqB,cAAc;AAChD,OAAI,KAAK,SAAS,GAAG;IACjB,MAAM,WAAW,KAAK;AAGtB,QAAI,KAAK,yBAAyB,SAAS,EAAE;KACzC,MAAM,WAAW,SAAS,aAAa;AACvC,UAAK,MAAM,WAAW,SAClB,KAAI,KAAK,0BAA0B,QAAQ,EAAE;MACzC,MAAM,eAAe,mBAAmB,EACpC,oBAAoB,CAAC,QAAQ,EAChC,CAAyB;MAE1B,MAAMC,mBAA4C;OAC9C,IAAI,aAAa,MAAM;OACvB,MAAM,aAAa,QAAQ;OAC9B;AAGD,UAAI,aAAa,eACb,kBAAiB,kBAAkB,
aAAa;AAGpD,UAAI,MAAM,QAAQ,aAAa,wBAAwB,CACnD,kBAAiB,4BAA4B,aAAa,wBAAwB,KAC7E,UAAU,EACP,SAAS,yBAAyB,OAAO,KAAK,EAAE,sBAAsB,EACzE,EACJ;AAGL,UAAI,MAAM,QAAQ,aAAa,wBAAwB,CACnD,kBAAiB,4BAA4B,aAAa,wBAAwB,KAC7E,UAAU,EACP,SAAS,yBAAyB,OAAO,KAAK,EAAE,sBAAsB,EACzE,EACJ;AAGL,UAAI,aAAa,kBAAkB,OAC/B,kBAAiB,iBAAiB,aAAa;AAGnD,UAAI,aAAa,kBAAkB,OAC/B,kBAAiB,iBAAiB,aAAa;AAGnD,UAAI,aAAa,kBAAkB,OAC/B,kBAAiB,iBAAiB,aAAa;AAGnD,UAAI,aAAa,6BACb,kBAAiB,iCACb,aAAa;AAGrB,wBAAkB,KAAK,iBAAiB;;;;;UAMvD,OAAO;AACZ,SAAO,KACH,4DAA4D,UAAU,IAAK,MAAgB,UAC9F;;AAGL,QAAO;;AAGX,eAAe,qBAAqB,UAAkB,cAA0C;AAC5F,KAAI;EACA,MAAM,UAAU,MAAM,SAAS,UAAU,QAAQ;EACjD,MAAMC,aAAwB,EAAE;AAGhC,MAAI,CAAC,QAAQ,SAAS,aAAa,CAC/B,QAAO;AAOX,MAAI;GAOA,MAAM,aALU,IAAI,QAAQ;IACxB,uBAAuB;IACvB,6BAA6B;IAChC,CAAC,CAEyB,iBAAiB,UAAU,QAAQ;GAE9D,MAAM,UAAU,WAAW,YAAY;AAEvC,QAAK,MAAM,oBAAoB,SAAS;IACpC,MAAM,qBAAqB,iBAAiB,aAAa,YAAY;AACrE,QAAI,CAAC,mBACD;IAGJ,MAAM,YAAY,iBAAiB,SAAS;AAC5C,QAAI,CAAC,UACD;IAGJ,MAAM,kBAAkB,mBAAmB,mBAAmB;IAC9D,MAAM,iBAAiB,OAAO,gBAAgB,SAAS,wBAAwB;IAE/E,MAAM,aAAa,4BAA4B,YAAY,UAAU;IACrE,MAAM,oBAAoB,mCAAmC,YAAY,WAAW,eAAe;IAEnG,MAAM,oBAAoB;KACtB,QAAQ,gBAAgB,MAAM,UAAU,aAAa;KACrD,MAAM,gBAAgB,QAAQ,oBAAoB,UAAU;KAC5D,OAAO;KACP,aAAa,gBAAgB,eAAe,qBAAqB;KACjE;KACA;KACH;AAED,eAAW,KAAK,kBAAkB;;WAEjC,OAAO;AACZ,OAAI,iBAAiB,iCACjB,OAAM;AAEV,UAAO,KAAK,0BAA0B,SAAS,IAAK,MAAgB,QAAQ;;AAGhF,SAAO;UACF,OAAO;AACZ,MAAI,iBAAiB,iCACjB,OAAM;AAEV,SAAO,KAAK,uBAAuB,SAAS,IAAK,MAAgB,QAAQ;AACzE,SAAO,EAAE;;;AAIjB,eAAe,oBAAoB,UAAkB,aAAyC;AAC1F,KAAI;EACA,MAAM,UAAU,MAAM,SAAS,UAAU,QAAQ;EACjD,MAAMC,YAAuB,EAAE;AAG/B,MAAI,CAAC,QAAQ,SAAS,YAAY,CAC9B,QAAO;AAGX,MAAI;GAOA,MAAM,aALU,IAAI,QAAQ;IACxB,uBAAuB;IACvB,6BAA6B;IAChC,CAAC,CAEyB,iBAAiB,UAAU,QAAQ;GAE9D,MAAM,UAAU,WAAW,YAAY;AAEvC,QAAK,MAAM,oBAAoB,SAAS;IACpC,MAAM,oBAAoB,iBAAiB,aAAa,WAAW;AACnE,QAAI,CAAC,kBACD;IAGJ,MAAM,YAAY,iBAAiB,SAAS;AAC5C,QAAI,CAAC,UACD;IAGJ,MAAM,iBAAiB,mBAAmB,kBAAkB;IAE5D,MAAM,aAAa,4BAA4B,YAAY,UAAU;IACrE,MAAM,oBAAoB,mCAAmC,YAAY,UAAU;IACnF,MAAM,QAAQ,gBAAgB,UAAU,YAAY;IAEpD,MAAM,mBAAmB;KACrB,QAAQ,eAAe,MAAM
,UAAU,aAAa;KACpD,MAAM,eAAe,QAAQ,oBAAoB,UAAU;KAC3D,aAAa,eAAe,eAAe,qBAAqB;KAChE;KACA,sBAAsB,eAAe,wBAAwB,EAAE;KAC/D;KACA;KACH;AAED,cAAU,KAAK,iBAAiB;;WAE/B,OAAO;AACZ,UAAO,KAAK,0BAA0B,SAAS,IAAK,MAAgB,QAAQ;;AAGhF,SAAO;UACF,OAAO;AACZ,SAAO,KAAK,uBAAuB,SAAS,IAAK,MAAgB,QAAQ;AACzE,SAAO,EAAE;;;AAIjB,eAAe,kBAAkB,UAAkB,cAA0C;AACzF,KAAI;EACA,MAAM,UAAU,MAAM,SAAS,UAAU,QAAQ;EACjD,MAAMC,UAAqB,EAAE;AAG7B,MAAI,CAAC,SAAS,SAAS,QAAQ,IAAI,CAAC,QAAQ,MAAM,CAAC,WAAW,IAAI,CAC9D,QAAO;AAIX,MAAI,CAAC,SAAS,SAAS,YAAY,IAAI,CAAC,SAAS,SAAS,cAAc,CACpE,QAAO;AAGX,MAAI;GAEA,MAAM,aAAa,KAAK,MAAM,QAAQ;GAGtC,MAAM,WAAW,SAAS,UAAU,QAAQ;AAG5C,OAAI,CAAC,WAAW,QAAQ,CAAC,WAAW,sBAChC,QAAO;GAGX,MAAM,iBAAiB;IACnB,IAAI;IACJ,MAAM,WAAW;IACjB,aAAa,WAAW,eAAe,gBAAgB,WAAW;IAClE,sBAAsB,WAAW,yBAAyB,EAAE;IAC5D,sBAAsB,WAAW,0BAA0B,EAAE;IAChE;AAED,WAAQ,KAAK,eAAe;WACvB,YAAY;AACjB,UAAO,KAAK,gCAAgC,SAAS,IAAK,WAAqB,QAAQ;;AAG3F,SAAO;UACF,OAAO;AACZ,SAAO,KAAK,uBAAuB,SAAS,IAAK,MAAgB,QAAQ;AACzE,SAAO,EAAE;;;AAIjB,eAAe,2BACX,WACA,WACA,SAAS,OACI;CACb,MAAM,WAAW,oBAAoB,UAAU,OAAiB;CAChE,MAAM,WAAW,KAAK,WAAW,UAAU,MAAgB;CAC3D,MAAM,aAAa,KAAK,UAAU,GAAG,SAAS,OAAO;AAErD,KAAI,CAAC,QAAQ;AAET,MAAI;AACA,SAAM,MAAM,UAAU,EAAE,WAAW,MAAM,CAAC;UACtC;EAIR,MAAM,4BAA4B,CAC9B;GACI,IAAI,UAAU;GACd,MAAM,UAAU;GAChB,aAAa,UAAU;GACvB,uBAAuB,UAAU;GACpC,CACJ;EAED,MAAM,gBAAgB;GAClB,MAAM,UAAU;GAChB,aAAa,UAAU;GACvB,OAAO,UAAU;GACjB,WAAW;GACX,oBAAoB,UAAU,qBAAqB,EAAE;GACrD,6BAA6B;GAChC;AAED,QAAM,UAAU,YAAY,KAAK,UAAU,eAAe,MAAM,EAAE,CAAC;;CAGvE,MAAM,SAAS,SAAS,mBAAmB;AAC3C,QAAO,MACH,GAAG,OAAO,GAAG,OAAO,UAAU,OAAO,CAAC,IAAI,OAAO,UAAU,KAAK,CAAC,IAAI,OAAQ,UAAU,WAAyB,OAAO,CAAC,iBAAiB,SAAS,OACrJ;;AAGL,eAAe,0BACX,UACA,WACA,SAAS,OACI;CACb,MAAM,WAAW,oBAAoB,SAAS,KAAe;CAC7D,MAAM,aAAa,KAAK,WAAW,GAAG,SAAS,OAAO;AAEtD,KAAI,CAAC,QAAQ;EACT,MAAMC,gBAAyC;GAC3C,MAAM,SAAS;GACf,aAAa,SAAS;GACtB,WAAW;GACX,oBAAoB,SAAS,qBAAqB,EAAE;GACvD;AAGD,MAAI,SAAS,cAAe,SAAS,WAAyB,SAAS,EASnE,eAAc,8BARoB,CAC9B;GACI,IAAI,SAAS,UAAU;GACvB,MAAM,SAAS;GACf,aAAa,SAAS;GACtB,uBAAuB,SAAS;GACnC,CACJ;AAKL,MAAI,SAAS,qBACT,eAAc,yBAAyB,SAAS;A
AGpD,MAAI,SAAS,MACT,eAAc,QAAQ,SAAS;AAGnC,QAAM,UAAU,YAAY,KAAK,UAAU,eAAe,MAAM,EAAE,CAAC;;CAGvE,MAAM,SAAS,SAAS,mBAAmB;AAC3C,QAAO,MACH,GAAG,OAAO,GAAG,OAAO,SAAS,KAAK,CAAC,IAAI,OAAO,SAAS,YAAY,CAAC,IAAI,OAAQ,SAAS,WAAyB,OAAO,CAAC,iBAAiB,SAAS,OACvJ;;AAGL,eAAe,wBACX,QACA,WACA,SAAS,OACI;CACb,MAAM,WAAW,oBAAoB,OAAO,GAAa;CACzD,MAAM,aAAa,KAAK,WAAW,GAAG,SAAS,OAAO;AAEtD,KAAI,CAAC,QAAQ;EACT,MAAMA,gBAAyC;GAC3C,MAAM,OAAO;GACb,aAAa,OAAO;GACpB,WAAW;GACX,uBAAuB,OAAO,wBAAwB,EAAE;GAC3D;AAGD,MAAI,OAAO,qBACP,eAAc,yBAAyB,OAAO;AAGlD,QAAM,UAAU,YAAY,KAAK,UAAU,eAAe,MAAM,EAAE,CAAC;;CAGvE,MAAM,SAAS,SAAS,mBAAmB;AAC3C,QAAO,MACH,GAAG,OAAO,GAAG,OAAO,OAAO,KAAK,CAAC,IAAI,OAAO,OAAO,YAAY,CAAC,IAAI,OAAQ,OAAO,qBAAmC,OAAO,CAAC,iBAAiB,SAAS,OAC3J;;;;;;AAyCL,SAAS,mBAAmB,aAAqB,aAA2B;AACxE,KAAI;AACA,SAAO,MAAM,qDAAqD;AAMlE,WAFgB,eAAe,YAAY,oDAEzB;GACd,KAAK;GACL,OAAO;GACP,UAAU;GACb,CAAC;AAEF,SAAO,MAAM,sCAAsC;UAC9C,OAAO;EAGZ,MAAM,YAAY;AAIlB,MAAI,UAAU,WAAW,GAAG;GACxB,MAAM,SAAS,UAAU,UAAU,UAAU,UAAU;AACvD,UAAO,KAAK,mCAAmC,SAAS;aACjD,UAAU,UAAU,UAAU,OAAO,SAAS,QAAQ,CAC7D,QAAO,KAAK,kFAAkF;MAG9F,QAAO,MAAM,sCAAsC;;;AAM/D,eAAsB,iBAClB,kBACA,mBACA,SAC+B;AAC/B,KAAI;EACA,MAAM,YAAY,SAAS;EAC3B,MAAM,oBAAoB,aAAa,UAAU,SAAS;EAC1D,MAAM,SAAS,SAAS,UAAU;AAElC,MAAI,OACA,QAAO,MAAM,mEAAmE;WACzE,kBACP,QAAO,MAAM,8BAA8B,UAAU,OAAO,uBAAuB;MAEnF,QAAO,MAAM,oEAAoE;EAGrF,MAAM,cAAc,QAAQ,iBAAiB;EAC7C,MAAM,SAAS,KAAK,aAAa,MAAM;EACvC,MAAM,cAAc,QAAQ,kBAAkB;EAC9C,MAAM,sBAAsB,KAAK,aAAa,aAAa;EAC3D,MAAM,iBAAiB,KAAK,aAAa,QAAQ;EACjD,MAAM,mBAAmB,KAAK,aAAa,UAAU;AAGrD,MAAI,CAAC,QAAQ;AAET,OAAI,CAAC,mBAAmB;AACpB,WAAO,MAAM,+CAA+C;AAC5D,SAAK,MAAM,aAAa;KAAC;KAAqB;KAAgB;KAAiB,CAC3E,KAAI;AACA,WAAM,GAAG,WAAW;MAAE,WAAW;MAAM,OAAO;MAAM,CAAC;AACrD,YAAO,MAAM,iBAAiB,YAAY;YACtC;AAEJ,YAAO,MAAM,wCAAwC,YAAY;;SAIzE,QAAO,MAAM,8EAA8E;AAI/F,UAAO,MAAM,iCAAiC;AAC9C,QAAK,MAAM,aAAa;IAAC;IAAqB;IAAgB;IAAiB,CAC3E,KAAI;AACA,UAAM,MAAM,WAAW,EAAE,WAAW,MAAM,CAAC;YACtC,OAAO;AACZ,QAAI;AACA,WAAM,OAAO,UAAU;YAEnB;KACJ,MAAM,MAAM;AACZ,YAAO,MAAM,uCAAuC,UAAU,IAAI,IAAI,UAAU;AAChF,aAAQ,KAAK,EAAE;AACf,WAAM;;;aAIX,
kBACP,QAAO,MAAM,8BAA8B,UAAU,OAAO,mBAAmB;MAE/E,QAAO,MAAM,6DAA6D;EAG9E,IAAIC,QAAkB,EAAE;AAExB,MAAI,qBAAqB,WAAW;AAEhC,WAAQ,UAAU,KAAK,OAAO,QAAQ,aAAa,GAAG,CAAC;AACvD,UAAO,MAAM,iBAAiB,MAAM,OAAO,uBAAuB;SAC/D;GAEH,MAAM,gBAAgB,OAAO,QAA+B;IACxD,MAAM,UAAU,MAAM,QAAQ,KAAK,EAAE,eAAe,MAAM,CAAC;AAE3D,SAAK,MAAM,SAAS,SAAS;KACzB,MAAM,WAAW,KAAK,KAAK,MAAM,KAAK;AAEtC,SAAI,MAAM,aAAa,EACnB;UAAI,CAAC,iBAAiB,SAAS,MAAM,KAAK,CACtC,OAAM,cAAc,SAAS;gBAGjC,MAAM,QAAQ,KACb,QAAQ,MAAM,KAAK,KAAK,SACrB,QAAQ,MAAM,KAAK,KAAK,UACxB,QAAQ,MAAM,KAAK,KAAK,SAE5B,OAAM,KAAK,SAAS;;;AAKhC,SAAM,cAAc,OAAO;;EAI/B,MAAMC,gBAA2B,EAAE;EACnC,MAAMC,eAA0B,EAAE;EAClC,MAAMC,aAAwB,EAAE;AAEhC,OAAK,MAAM,QAAQ,OAAO;GACtB,MAAM,aAAa,MAAM,qBAAqB,MAAM,YAAY;AAChE,iBAAc,KAAK,GAAG,WAAW;GAEjC,MAAM,YAAY,MAAM,oBAAoB,MAAM,YAAY;AAC9D,gBAAa,KAAK,GAAG,UAAU;GAE/B,MAAM,UAAU,MAAM,kBAAkB,MAAM,YAAY;AAC1D,cAAW,KAAK,GAAG,QAAQ;;AAG/B,MAAI,cAAc,WAAW,KAAK,aAAa,WAAW,KAAK,WAAW,WAAW,GAAG;AACpF,UAAO,KAAK,kEAAkE;AAC9E,UAAO;IACH,qBAAqB;IACrB,oBAAoB;IACpB,kBAAkB;IAClB,YAAY;IACf;;AAIL,MAAI,cAAc,SAAS,GAAG;AAC1B,UAAO,MAAM,WAAW,cAAc,OAAO,yBAAyB;AACtE,QAAK,MAAM,aAAa,cACpB,OAAM,2BAA2B,WAAsC,qBAAqB,OAAO;AAEvG,OAAI,OACA,QAAO,KAAK,4BAA4B,cAAc,OAAO,6BAA6B;OAE1F,QAAO,KAAK,aAAa,cAAc,OAAO,6BAA6B;;AAKnF,MAAI,aAAa,SAAS,GAAG;AACzB,UAAO,MAAM,WAAW,aAAa,OAAO,yBAAyB;AACrE,QAAK,MAAM,YAAY,aACnB,OAAM,0BAA0B,UAAqC,gBAAgB,OAAO;AAEhG,OAAI,OACA,QAAO,KAAK,4BAA4B,aAAa,OAAO,6BAA6B;OAEzF,QAAO,KAAK,aAAa,aAAa,OAAO,6BAA6B;;AAIlF,MAAI,WAAW,SAAS,GAAG;AACvB,UAAO,MAAM,WAAW,WAAW,OAAO,sBAAsB;AAChE,QAAK,MAAM,UAAU,WACjB,OAAM,wBAAwB,QAAmC,kBAAkB,OAAO;AAE9F,OAAI,OACA,QAAO,KAAK,4BAA4B,WAAW,OAAO,0BAA0B;OAEpF,QAAO,KAAK,aAAa,WAAW,OAAO,0BAA0B;;EAK7E,MAAM,gBAAgB,SAAS,YAAY;AAC3C,MACI,CAAC,UACD,kBACC,cAAc,SAAS,KAAK,aAAa,SAAS,KAAK,WAAW,SAAS,GAE5E,oBAAmB,aAAa,YAAY;AAIhD,SAAO;GACH,qBAAqB,cAAc;GACnC,oBAAoB,aAAa;GACjC,kBAAkB,WAAW;GAC7B,YAAY,cAAc,SAAS,aAAa,SAAS,WAAW;GACvE;UACI,OAAO;EACZ,MAAM,MAAM;AACZ,SAAO,MAAM,YAAY,IAAI,QAAQ;AACrC,UAAQ,KAAK,EAAE;AACf,QAAM"}
@@ -1,8 +1,9 @@
1
- import { r as info, s as success } from "../logger.js";
1
+ import { t as logger } from "../logger.js";
2
+ import "../logger2.js";
2
3
  import { i as getMrtConfig, n as getDefaultBuildDir, r as getDefaultMessage } from "../utils.js";
4
+ import { a as buildMrtConfig } from "../config.js";
3
5
  import { t as commonFlags } from "../flags.js";
4
6
  import { t as createBundle } from "../bundle.js";
5
- import { a as buildMrtConfig } from "../config.js";
6
7
  import { Command, Flags } from "@oclif/core";
7
8
  import path from "path";
8
9
  import fs from "fs-extra";
@@ -24,10 +25,10 @@ async function createBundleCommand(options) {
24
25
  const outputDirectory = options.outputDirectory ?? path.join(options.projectDirectory, ".bundle");
25
26
  await fs.ensureDir(outputDirectory);
26
27
  const message = options.message ?? getDefaultMessage(options.projectDirectory);
27
- const config = buildMrtConfig(buildDirectory, options.projectDirectory);
28
- info(`Creating bundle for project: ${projectSlug}`);
29
- info(`Build directory: ${buildDirectory}`);
30
- info(`Output directory: ${outputDirectory}`);
28
+ const config = await buildMrtConfig(buildDirectory, options.projectDirectory);
29
+ logger.info(`Creating bundle for project: ${projectSlug}`);
30
+ logger.info(`Build directory: ${buildDirectory}`);
31
+ logger.info(`Output directory: ${outputDirectory}`);
31
32
  const bundle = await createBundle({
32
33
  message,
33
34
  ssr_parameters: config.ssrParameters,
@@ -52,11 +53,11 @@ async function createBundleCommand(options) {
52
53
  data_size: bundleData.length
53
54
  };
54
55
  await fs.writeJson(bundleJsonPath, bundleMetadata, { spaces: 2 });
55
- success(`Bundle created successfully!`);
56
- info(`Bundle tgz file: ${bundleTgzPath}`);
57
- info(`Bundle metadata: ${bundleJsonPath}`);
58
- info(`Uncompressed size: ${(bundleData.length / 1024 / 1024).toFixed(2)} MB`);
59
- info(`Compressed size: ${(compressedData.length / 1024 / 1024).toFixed(2)} MB`);
56
+ logger.info(`Bundle created successfully!`);
57
+ logger.info(`Bundle tgz file: ${bundleTgzPath}`);
58
+ logger.info(`Bundle metadata: ${bundleJsonPath}`);
59
+ logger.info(`Uncompressed size: ${(bundleData.length / 1024 / 1024).toFixed(2)} MB`);
60
+ logger.info(`Compressed size: ${(compressedData.length / 1024 / 1024).toFixed(2)} MB`);
60
61
  }
61
62
 
62
63
  //#endregion
@@ -1,3 +1,5 @@
1
+ import { t as logger } from "../logger.js";
2
+ import "../logger2.js";
1
3
  import { Command, Flags } from "@oclif/core";
2
4
  import path from "path";
3
5
  import fs from "fs";
@@ -85,10 +87,10 @@ const findMarkedFiles = (projectRoot, markerValue) => {
85
87
  }
86
88
  };
87
89
  searchFiles(projectRoot);
88
- console.log(`Found ${mergeFiles.length} files to merge for marker value ${markerValue}:`);
89
- console.log(mergeFiles.join("\n"));
90
- console.log(`Found ${newFiles.length} files to add for marker value ${markerValue}:`);
91
- console.log(newFiles.join("\n"));
90
+ logger.info(`Found ${mergeFiles.length} files to merge for marker value ${markerValue}:`);
91
+ logger.info(mergeFiles.join("\n"));
92
+ logger.info(`Found ${newFiles.length} files to add for marker value ${markerValue}:`);
93
+ logger.info(newFiles.join("\n"));
92
94
  return {
93
95
  mergeFiles,
94
96
  newFiles
@@ -111,7 +113,7 @@ const genertaeAndWriteInstructions = (templateFile, context, outputFile) => {
111
113
  const templateContent = fs.readFileSync(templateFile, "utf8");
112
114
  const mdcContent = Handlebars.compile(templateContent)(context);
113
115
  fs.writeFileSync(outputFile, mdcContent, "utf8");
114
- console.log(`MDC instructions written to ${outputFile}`);
116
+ logger.info(`MDC instructions written to ${outputFile}`);
115
117
  };
116
118
 
117
119
  //#endregion
@@ -1,4 +1,5 @@
1
- import { c as warn, n as error } from "../logger.js";
1
+ import { t as logger } from "../logger.js";
2
+ import "../logger2.js";
2
3
  import { t as generateEnvFile } from "../utils.js";
3
4
  import { a as trimExtensions, i as validateNoCycles, n as resolveDependenciesForMultiple } from "../dependency-utils.js";
4
5
  import { t as prepareForLocalDev } from "../local-dev-setup.js";
@@ -17,7 +18,7 @@ const createStorefront = async (options = {}) => {
17
18
  try {
18
19
  execSync("git --version", { stdio: "ignore" });
19
20
  } catch (e) {
20
- error(`❌ git isn't installed or found in your PATH. Install git before running this command: ${String(e)}`);
21
+ logger.error(`❌ git is not installed or found in your PATH. Install git before running this command: ${String(e)}`);
21
22
  process.exit(1);
22
23
  }
23
24
  let storefront = options.name;
@@ -28,10 +29,10 @@ const createStorefront = async (options = {}) => {
28
29
  initial: DEFAULT_STOREFRONT
29
30
  })).storefront;
30
31
  if (!storefront) {
31
- error("Storefront name is required.");
32
+ logger.error("Storefront name is required.");
32
33
  process.exit(1);
33
34
  }
34
- console.log("\n");
35
+ logger.info("\n");
35
36
  const outputPath = options.outputDir ? path.join(options.outputDir, storefront) : storefront;
36
37
  let template = options.template;
37
38
  if (!template) {
@@ -47,7 +48,7 @@ const createStorefront = async (options = {}) => {
47
48
  value: "custom"
48
49
  }]
49
50
  })).template;
50
- console.log("\n");
51
+ logger.info("\n");
51
52
  if (template === "custom") {
52
53
  const { githubUrl } = await prompts({
53
54
  type: "text",
@@ -55,18 +56,18 @@ const createStorefront = async (options = {}) => {
55
56
  message: "🌐 What is the Github URL for your template?\n"
56
57
  });
57
58
  if (!githubUrl) {
58
- error("Github URL is required.");
59
+ logger.error("Github URL is required.");
59
60
  process.exit(1);
60
61
  }
61
62
  template = githubUrl;
62
63
  }
63
64
  }
64
65
  if (!template) {
65
- error("Template is required.");
66
+ logger.error("Template is required.");
66
67
  process.exit(1);
67
68
  }
68
69
  if (options.templateBranch !== void 0 && options.templateBranch.trim() === "") {
69
- error("--template-branch cannot be empty.");
70
+ logger.error("--template-branch cannot be empty.");
70
71
  process.exit(1);
71
72
  }
72
73
  if (isLocalPath(template)) {
@@ -107,7 +108,7 @@ const createStorefront = async (options = {}) => {
107
108
  defaults: options.defaults
108
109
  });
109
110
  }
110
- console.log("\n");
111
+ logger.info("\n");
111
112
  if (fs.existsSync(path.join(outputPath, "src", "extensions", "config.json"))) {
112
113
  const extensionConfigText = fs.readFileSync(path.join(outputPath, "src", "extensions", "config.json"), "utf8");
113
114
  const extensionConfig = JSON.parse(extensionConfigText);
@@ -115,7 +116,7 @@ const createStorefront = async (options = {}) => {
115
116
  try {
116
117
  validateNoCycles(extensionConfig);
117
118
  } catch (e) {
118
- error(`Extension configuration error: ${e.message}`);
119
+ logger.error(`Extension configuration error: ${e.message}`);
119
120
  process.exit(1);
120
121
  }
121
122
  let selectedExtensions;
@@ -140,17 +141,19 @@ const createStorefront = async (options = {}) => {
140
141
  });
141
142
  if (dependentExts.length > 0) {
142
143
  const addedName = extensionConfig.extensions[addedExt]?.name || addedExt;
143
- warn(`${dependentExts.map((ext) => extensionConfig.extensions[ext]?.name || ext).join(", ")} requires ${addedName}. ${addedName} has been automatically added.`);
144
+ const dependentNames = dependentExts.map((ext) => extensionConfig.extensions[ext]?.name || ext).join(", ");
145
+ logger.warn(`${dependentNames} requires ${addedName}. ${addedName} has been automatically added.`);
144
146
  }
145
147
  }
146
- trimExtensions(outputPath, Object.fromEntries(resolvedExtensions.map((ext) => [ext, true])), { extensions: extensionConfig.extensions }, options?.verbose || false);
148
+ trimExtensions(outputPath, Object.fromEntries(resolvedExtensions.map((ext) => [ext, true])), { extensions: extensionConfig.extensions });
147
149
  }
148
150
  }
149
- const configMeta = JSON.parse(fs.readFileSync(path.join(outputPath, "src", "config", "config-meta.json"), "utf8"));
151
+ const configMetaPath = fs.existsSync(path.join(outputPath, "config-meta.json")) ? path.join(outputPath, "config-meta.json") : path.join(outputPath, "src", "config", "config-meta.json");
152
+ const configMeta = JSON.parse(fs.readFileSync(configMetaPath, "utf8"));
150
153
  const envDefaultPath = path.join(outputPath, ".env.default");
151
154
  let envDefaultValues = {};
152
155
  if (fs.existsSync(envDefaultPath)) envDefaultValues = dotenv.parse(fs.readFileSync(envDefaultPath, "utf8"));
153
- console.log("\n⚙️ We will now configure your storefront before it will be ready to run.\n");
156
+ logger.info("\n⚙️ We will now configure your storefront before it will be ready to run.\n");
154
157
  const configOverrides = {};
155
158
  for (const config of configMeta.configs) if (options.defaults) configOverrides[config.key] = envDefaultValues[config.key] ?? "";
156
159
  else {
@@ -175,7 +178,7 @@ const createStorefront = async (options = {}) => {
175
178
  - Build the storefront: pnpm run build
176
179
  - Run the development server: pnpm run dev
177
180
  `;
178
- console.log(BANNER);
181
+ logger.info(BANNER);
179
182
  };
180
183
 
181
184
  //#endregion
@@ -187,17 +190,11 @@ var CreateStorefront = class CreateStorefront extends Command {
187
190
  static description = "Create a storefront project";
188
191
  static examples = [
189
192
  "<%= config.bin %> <%= command.id %>",
190
- "<%= config.bin %> <%= command.id %> -v",
191
193
  "<%= config.bin %> <%= command.id %> -n my-storefront -t https://github.com/org/template -b release-0.2.x",
192
194
  "<%= config.bin %> <%= command.id %> -n my-storefront -t /path/to/local/template",
193
195
  "<%= config.bin %> <%= command.id %> -l /path/to/monorepo/packages"
194
196
  ];
195
197
  static flags = {
196
- verbose: Flags.boolean({
197
- char: "v",
198
- description: "Verbose mode",
199
- default: false
200
- }),
201
198
  name: Flags.string({
202
199
  char: "n",
203
200
  description: "Storefront project name"
@@ -227,7 +224,6 @@ var CreateStorefront = class CreateStorefront extends Command {
227
224
  async run() {
228
225
  const { flags } = await this.parse(CreateStorefront);
229
226
  await createStorefront({
230
- verbose: flags.verbose,
231
227
  name: flags.name,
232
228
  template: flags.template,
233
229
  templateBranch: flags["template-branch"],