@eldrforge/kodrdriv 1.2.134 → 1.2.137
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.cursor/rules/no-local-dependencies.md +6 -0
- package/README.md +1 -0
- package/dist/application.js +32 -42
- package/dist/application.js.map +1 -1
- package/dist/arguments.js +3 -3
- package/dist/arguments.js.map +1 -1
- package/dist/constants.js +5 -7
- package/dist/constants.js.map +1 -1
- package/dist/logging.js +4 -32
- package/dist/logging.js.map +1 -1
- package/dist/types.js +1 -0
- package/dist/types.js.map +1 -1
- package/package.json +13 -8
- package/dist/commands/audio-commit.js +0 -152
- package/dist/commands/audio-commit.js.map +0 -1
- package/dist/commands/audio-review.js +0 -274
- package/dist/commands/audio-review.js.map +0 -1
- package/dist/commands/clean.js +0 -49
- package/dist/commands/clean.js.map +0 -1
- package/dist/commands/commit.js +0 -680
- package/dist/commands/commit.js.map +0 -1
- package/dist/commands/development.js +0 -467
- package/dist/commands/development.js.map +0 -1
- package/dist/commands/link.js +0 -646
- package/dist/commands/link.js.map +0 -1
- package/dist/commands/precommit.js +0 -99
- package/dist/commands/precommit.js.map +0 -1
- package/dist/commands/publish.js +0 -1432
- package/dist/commands/publish.js.map +0 -1
- package/dist/commands/release.js +0 -376
- package/dist/commands/release.js.map +0 -1
- package/dist/commands/review.js +0 -733
- package/dist/commands/review.js.map +0 -1
- package/dist/commands/select-audio.js +0 -46
- package/dist/commands/select-audio.js.map +0 -1
- package/dist/commands/tree.js +0 -2363
- package/dist/commands/tree.js.map +0 -1
- package/dist/commands/unlink.js +0 -537
- package/dist/commands/unlink.js.map +0 -1
- package/dist/commands/updates.js +0 -211
- package/dist/commands/updates.js.map +0 -1
- package/dist/commands/versions.js +0 -221
- package/dist/commands/versions.js.map +0 -1
- package/dist/content/diff.js +0 -346
- package/dist/content/diff.js.map +0 -1
- package/dist/content/files.js +0 -190
- package/dist/content/files.js.map +0 -1
- package/dist/content/log.js +0 -72
- package/dist/content/log.js.map +0 -1
- package/dist/util/aiAdapter.js +0 -28
- package/dist/util/aiAdapter.js.map +0 -1
- package/dist/util/fileLock.js +0 -241
- package/dist/util/fileLock.js.map +0 -1
- package/dist/util/general.js +0 -379
- package/dist/util/general.js.map +0 -1
- package/dist/util/gitMutex.js +0 -161
- package/dist/util/gitMutex.js.map +0 -1
- package/dist/util/interactive.js +0 -32
- package/dist/util/interactive.js.map +0 -1
- package/dist/util/loggerAdapter.js +0 -41
- package/dist/util/loggerAdapter.js.map +0 -1
- package/dist/util/performance.js +0 -134
- package/dist/util/performance.js.map +0 -1
- package/dist/util/precommitOptimizations.js +0 -310
- package/dist/util/precommitOptimizations.js.map +0 -1
- package/dist/util/stopContext.js +0 -146
- package/dist/util/stopContext.js.map +0 -1
- package/dist/util/storageAdapter.js +0 -31
- package/dist/util/storageAdapter.js.map +0 -1
- package/dist/util/validation.js +0 -45
- package/dist/util/validation.js.map +0 -1
- package/dist/utils/branchState.js +0 -700
- package/dist/utils/branchState.js.map +0 -1
package/dist/util/general.js.map
DELETED
@@ -1 +0,0 @@
- [single-line source map removed; it inlined the original src/util/general.ts source, which provided the version/branch utilities: getVersionFromBranch, calculateBranchDependentVersion, findDevelopmentBranch, haveSamePrereleaseTag, checkIfTagExists, confirmVersionInteractively, the npm-registry version checks (getNpmPublishedVersion, isVersionPublishedOnNpm), getTagInfo, the timestamped-filename helpers, and the branch/version validation helpers (getExpectedVersionPattern, validateVersionForBranch)]
|
package/dist/util/gitMutex.js
DELETED
@@ -1,161 +0,0 @@
import * as path from 'path';
import { statSync } from 'fs';
import { execSync } from 'child_process';
import { RepositoryFileLockManager } from './fileLock.js';
import { getLogger } from '../logging.js';

function _define_property(obj, key, value) {
    if (key in obj) {
        Object.defineProperty(obj, key, {
            value: value,
            enumerable: true,
            configurable: true,
            writable: true
        });
    } else {
        obj[key] = value;
    }
    return obj;
}
/**
 * Manages per-repository locks for git operations (cross-process safe)
 * Prevents concurrent git operations in the same repository (which cause .git/index.lock conflicts)
 * while still allowing parallel operations across different repositories
 *
 * Uses file-based locks to coordinate across multiple processes (e.g., parallel tree execution)
 */ class RepositoryMutexManager {
    /**
     * Execute a git operation with repository-level locking
     * @param packagePath Path to the package (will find its git repo root)
     * @param operation The async operation to execute under lock
     * @param operationName Optional name for logging
     * @returns Result of the operation
     */ async withGitLock(packagePath, operation, operationName) {
        const repoPath = getGitRepositoryRoot(packagePath);
        if (!repoPath) {
            // Not in a git repository, execute without lock
            this.logger.debug(`No git repository found for ${packagePath}, executing without lock`);
            return await operation();
        }
        return await this.lockManager.withGitLock(repoPath, operation, operationName);
    }
    /**
     * Destroy all locks and clean up resources
     */ destroy() {
        this.lockManager.destroy();
    }
    constructor(){
        _define_property(this, "lockManager", void 0);
        _define_property(this, "logger", getLogger());
        this.lockManager = new RepositoryFileLockManager();
    }
}
/**
 * Find the git repository root for a given path
 * Walks up the directory tree until it finds a .git directory
 * @param startPath Starting path (can be a file or directory)
 * @returns Absolute path to git repository root, or null if not in a git repo
 */ function getGitRepositoryRoot(startPath) {
    let currentPath = path.resolve(startPath);
    // If startPath is a file, start from its directory
    try {
        const stats = statSync(currentPath);
        if (stats.isFile()) {
            currentPath = path.dirname(currentPath);
        }
    } catch {
        // If stat fails, assume it's a directory and continue
    }
    // First try using git command as it's the most reliable
    try {
        const root = execSync('git rev-parse --show-toplevel', {
            cwd: currentPath,
            stdio: [
                'ignore',
                'pipe',
                'ignore'
            ],
            encoding: 'utf-8'
        }).trim();
        return root;
    } catch {
        // Fallback to manual walk-up if git command fails (e.g. git not in path or other issues)
        const root = path.parse(currentPath).root;
        while(currentPath !== root){
            const gitPath = path.join(currentPath, '.git');
            try {
                const stats = statSync(gitPath);
                if (stats.isDirectory() || stats.isFile()) {
                    // Found .git (can be directory or file for submodules)
                    return currentPath;
                }
            } catch {
                // .git doesn't exist at this level, continue up
            }
            // Move up one directory
            const parentPath = path.dirname(currentPath);
            if (parentPath === currentPath) {
                break;
            }
            currentPath = parentPath;
        }
    }
    return null;
}
/**
 * Check if a path is within a git repository
 * @param checkPath Path to check
 * @returns true if path is in a git repository
 */ function isInGitRepository(checkPath) {
    // If it's not a directory that exists, it's not in a git repository
    try {
        const stats = statSync(checkPath);
        if (!stats.isDirectory()) {
            return false;
        }
    } catch {
        return false;
    }
    // Try using git command first
    try {
        execSync('git rev-parse --is-inside-work-tree', {
            cwd: checkPath,
            stdio: [
                'ignore',
                'ignore',
                'ignore'
            ]
        });
        return true;
    } catch {
        // If git command fails, it's definitely not a git repo according to git
        return false;
    }
}
// Global singleton instance
let globalGitMutexManager = null;
/**
 * Get the global git mutex manager instance
 * Creates one if it doesn't exist
 */ function getGitMutexManager() {
    if (!globalGitMutexManager) {
        globalGitMutexManager = new RepositoryMutexManager();
    }
    return globalGitMutexManager;
}
/**
 * Helper function to wrap git operations with automatic locking
 * Uses the global git mutex manager
 *
 * @example
 * await runGitWithLock(packagePath, async () => {
 *     await run('git add package.json');
 *     await run('git commit -m "Update version"');
 * }, 'version bump commit');
 */ async function runGitWithLock(packagePath, operation, operationName) {
    const manager = getGitMutexManager();
    return await manager.withGitLock(packagePath, operation, operationName);
}

export { RepositoryMutexManager, getGitMutexManager, getGitRepositoryRoot, isInGitRepository, runGitWithLock };
//# sourceMappingURL=gitMutex.js.map
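For context, a minimal sketch of how the exported helpers from the deleted gitMutex.js were intended to be used, based on the module's own JSDoc @example; the commitVersionBump wrapper and the direct execSync calls are illustrative assumptions, not code shipped in the package:

    import { execSync } from 'child_process';
    import { runGitWithLock, isInGitRepository } from './gitMutex.js';

    // Hypothetical caller: serialize a version-bump commit against other git
    // operations in the same repository, as the module's @example suggests.
    async function commitVersionBump(packagePath) {
        if (!isInGitRepository(packagePath)) {
            throw new Error(`${packagePath} is not inside a git repository`);
        }
        return runGitWithLock(packagePath, async () => {
            execSync('git add package.json', { cwd: packagePath });
            execSync('git commit -m "Update version"', { cwd: packagePath });
        }, 'version bump commit');
    }

The lock is taken per repository root, so concurrent calls against packages in different repositories still run in parallel.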
package/dist/util/gitMutex.js.map
DELETED
@@ -1 +0,0 @@
- [single-line source map removed; it inlined the original src/util/gitMutex.ts source, including the areInSameRepository and destroyGitMutexManager helpers that the compiled gitMutex.js above did not export]
|
package/dist/util/interactive.js
DELETED
@@ -1,32 +0,0 @@
#!/usr/bin/env node
import { getDryRunLogger } from '../logging.js';
export { STANDARD_CHOICES, SecureTempFile, cleanupTempFile, createSecureTempFile, editContentInEditor, getLLMFeedbackInEditor, getUserChoice, getUserTextInput, requireTTY } from '@eldrforge/ai-service';

/**
 * Generic LLM improvement function that can be configured for different content types
 *
 * This is kodrdriv-specific orchestration logic that combines multiple ai-service
 * primitives into a higher-level workflow.
 *
 * @param currentContent The current content to improve
 * @param runConfig Runtime configuration
 * @param promptConfig Prompt configuration
 * @param promptContext Prompt context
 * @param outputDirectory Output directory for debug files
 * @param improvementConfig Configuration for this specific improvement type
 * @returns Promise resolving to the improved content
 */ async function improveContentWithLLM(currentContent, runConfig, promptConfig, promptContext, outputDirectory, improvementConfig) {
    const logger = getDryRunLogger(false);
    logger.info(`INTERACTIVE_LLM_IMPROVING: Requesting LLM to improve content | Content Type: ${improvementConfig.contentType} | Service: AI | Purpose: Enhance quality`);
    // Create the improved prompt using the provided function
    const improvedPromptResult = await improvementConfig.createImprovedPrompt(promptConfig, currentContent, promptContext);
    // Call the LLM with the improved prompt
    const improvedResponse = await improvementConfig.callLLM(improvedPromptResult, runConfig, outputDirectory);
    // Process the response if a processor is provided
    const finalResult = improvementConfig.processResponse ? improvementConfig.processResponse(improvedResponse) : improvedResponse;
    logger.info(`INTERACTIVE_LLM_IMPROVED: LLM provided improved content | Content Type: ${improvementConfig.contentType} | Status: enhanced`);
    return finalResult;
}

export { improveContentWithLLM };
//# sourceMappingURL=interactive.js.map
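A sketch of how the deleted improveContentWithLLM orchestrator was shaped to be called. The field names (contentType, createImprovedPrompt, callLLM, processResponse) are the ones the function actually reads; the commit-message config, stub bodies, and runConfig values are illustrative assumptions only:

    import { improveContentWithLLM } from './interactive.js';

    // Hypothetical improvement config for commit messages.
    const commitMessageImprovement = {
        contentType: 'commit message',
        createImprovedPrompt: async (promptConfig, currentContent, promptContext) => ({
            prompt: `Improve this commit message:\n${currentContent}`,
            config: promptConfig,
            context: promptContext
        }),
        callLLM: async (request, runConfig, outputDirectory) => {
            // A real integration would call the ai-service client here and write
            // request/response debug files into outputDirectory.
            return `${request.prompt} (improved)`;
        },
        processResponse: (response) => response.trim()
    };

    const improved = await improveContentWithLLM(
        'fix stuff',           // currentContent
        { model: 'gpt-4o' },   // runConfig (illustrative)
        {},                    // promptConfig
        {},                    // promptContext
        './output',            // outputDirectory
        commitMessageImprovement
    );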
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"interactive.js","sources":["../../src/util/interactive.ts"],"sourcesContent":["#!/usr/bin/env node\n/**\n * Kodrdriv-specific interactive utilities\n *\n * This module contains application-specific interactive workflows that build on\n * top of the generic interactive functions from @eldrforge/ai-service.\n *\n * Re-exports from ai-service for convenience, plus kodrdriv-specific functions.\n */\n\nimport { getDryRunLogger } from '../logging';\n\n// Re-export everything from ai-service for backwards compatibility\nexport {\n getUserChoice,\n getUserTextInput,\n editContentInEditor,\n getLLMFeedbackInEditor,\n requireTTY,\n SecureTempFile,\n createSecureTempFile,\n cleanupTempFile,\n STANDARD_CHOICES,\n type Choice,\n type InteractiveOptions,\n type EditorResult,\n} from '@eldrforge/ai-service';\n\n// Kodrdriv-specific types and functions below\n\nexport interface LLMImprovementConfig {\n /** The type of content being improved (for filenames and logging) */\n contentType: string;\n /** Function that creates a prompt for improvement */\n createImprovedPrompt: (\n promptConfig: any,\n improvementContent: any,\n promptContext: any\n ) => Promise<any>;\n /** Function that calls LLM with the improved prompt */\n callLLM: (\n request: any,\n runConfig: any,\n outputDirectory: string\n ) => Promise<any>;\n /** Function that validates/processes the LLM response */\n processResponse?: (response: any) => any;\n}\n\n/**\n * Generic LLM improvement function that can be configured for different content types\n *\n * This is kodrdriv-specific orchestration logic that combines multiple ai-service\n * primitives into a higher-level workflow.\n *\n * @param currentContent The current content to improve\n * @param runConfig Runtime configuration\n * @param promptConfig Prompt configuration\n * @param promptContext Prompt context\n * @param outputDirectory Output directory for debug files\n * @param improvementConfig Configuration for this specific improvement type\n * @returns Promise resolving to the improved content\n */\nexport async function improveContentWithLLM<T>(\n currentContent: T,\n runConfig: any,\n promptConfig: any,\n promptContext: any,\n outputDirectory: string,\n improvementConfig: LLMImprovementConfig\n): Promise<T> {\n const logger = getDryRunLogger(false);\n\n logger.info(`INTERACTIVE_LLM_IMPROVING: Requesting LLM to improve content | Content Type: ${improvementConfig.contentType} | Service: AI | Purpose: Enhance quality`);\n\n // Create the improved prompt using the provided function\n const improvedPromptResult = await improvementConfig.createImprovedPrompt(\n promptConfig,\n currentContent,\n promptContext\n );\n\n // Call the LLM with the improved prompt\n const improvedResponse = await improvementConfig.callLLM(improvedPromptResult, runConfig, outputDirectory);\n\n // Process the response if a processor is provided\n const finalResult = improvementConfig.processResponse\n ? 
improvementConfig.processResponse(improvedResponse)\n : improvedResponse;\n\n logger.info(`INTERACTIVE_LLM_IMPROVED: LLM provided improved content | Content Type: ${improvementConfig.contentType} | Status: enhanced`);\n return finalResult;\n}\n"],"names":["improveContentWithLLM","currentContent","runConfig","promptConfig","promptContext","outputDirectory","improvementConfig","logger","getDryRunLogger","info","contentType","improvedPromptResult","createImprovedPrompt","improvedResponse","callLLM","finalResult","processResponse"],"mappings":";;;;AAiDA,CAAA,CAAA,CAAA;;;;;;;;;;;;;AAaC,CAAA,CAAA,CAAA,CACM,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAeA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAClBC,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAiB,EACjBC,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAc,CAAA,CACdC,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAiB,CAAA,CACjBC,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAkB,CAAA,CAClBC,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAuB,CAAA,CACvBC,iBAAuC,CAAA,CAAA,CAAA;AAEvC,CAAA,CAAA,CAAA,CAAA,MAAMC,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,IAASC,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAgB,KAAA,CAAA,CAAA;IAE/BD,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAOE,CAAAA,CAAAA,CAAAA,CAAI,CAAC,CAAC,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAA6E,CAAA,CAAEH,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAkBI,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAW,CAAC,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAyC,CAAC,CAAA,CAAA;;AAGpK,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAMC,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,IAAuB,CAAA,CAAA,CAAA,CAAA,CAAA,CAAML,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAkBM,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAoB,CACrET,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,GACAF,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CACAG,aAAAA,CAAAA,CAAAA;;AAIJ,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAMS,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,IAAmB,CAAA,CAAA,CAAA,CAAA,CAAA,CAAMP,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAkBQ,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAO,CAACH,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,GAAsBT,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAWG,eAAAA,CAAAA,CAAAA;;AAG1F,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA
,CAAMU,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,IAAcT,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAkBU,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAe,CAAA,CAAA,CAC/CV,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAkBU,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAe,CAACH,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,GAClCA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA;IAENN,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAOE,CAAAA,CAAAA,CAAAA,CAAI,CAAC,CAAC,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAwE,CAAA,CAAEH,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAkBI,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAW,CAAC,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAmB,CAAC,CAAA,CAAA;IACzI,OAAOK,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA;AACX,CAAA;;"}
package/dist/util/loggerAdapter.js
DELETED

@@ -1,41 +0,0 @@
-import { getDryRunLogger } from '../logging.js';
-
-/**
- * Create a Logger implementation using kodrdriv logging
- */ function createLoggerAdapter(dryRun) {
-    const logger = getDryRunLogger(dryRun);
-    return {
-        info (message, ...meta) {
-            logger.info(message, ...meta);
-        },
-        error (message, ...meta) {
-            logger.error(message, ...meta);
-        },
-        warn (message, ...meta) {
-            logger.warn(message, ...meta);
-        },
-        debug (message, ...meta) {
-            logger.debug(message, ...meta);
-        },
-        // Additional methods required by riotprompt
-        verbose (message, ...meta) {
-            // Use debug for verbose if available, otherwise info
-            if ('verbose' in logger && typeof logger.verbose === 'function') {
-                logger.verbose(message, ...meta);
-            } else {
-                logger.debug(message, ...meta);
-            }
-        },
-        silly (message, ...meta) {
-            // Use debug for silly if available, otherwise skip
-            if ('silly' in logger && typeof logger.silly === 'function') {
-                logger.silly(message, ...meta);
-            } else {
-                logger.debug(message, ...meta);
-            }
-        }
-    };
-}
-
-export { createLoggerAdapter };
-//# sourceMappingURL=loggerAdapter.js.map
package/dist/util/loggerAdapter.js.map
DELETED

@@ -1 +0,0 @@
-
{"version":3,"file":"loggerAdapter.js","sources":["../../src/util/loggerAdapter.ts"],"sourcesContent":["/**\n * Adapter for ai-service Logger using kodrdriv logging\n */\n\nimport type { Logger } from '@eldrforge/ai-service';\nimport { getDryRunLogger } from '../logging';\n\n/**\n * Create a Logger implementation using kodrdriv logging\n */\nexport function createLoggerAdapter(dryRun: boolean): Logger {\n const logger = getDryRunLogger(dryRun);\n\n return {\n info(message: string, ...meta: unknown[]): void {\n logger.info(message, ...meta);\n },\n\n error(message: string, ...meta: unknown[]): void {\n logger.error(message, ...meta);\n },\n\n warn(message: string, ...meta: unknown[]): void {\n logger.warn(message, ...meta);\n },\n\n debug(message: string, ...meta: unknown[]): void {\n logger.debug(message, ...meta);\n },\n\n // Additional methods required by riotprompt\n verbose(message: string, ...meta: unknown[]): void {\n // Use debug for verbose if available, otherwise info\n if ('verbose' in logger && typeof logger.verbose === 'function') {\n (logger as any).verbose(message, ...meta);\n } else {\n logger.debug(message, ...meta);\n }\n },\n\n silly(message: string, ...meta: unknown[]): void {\n // Use debug for silly if available, otherwise skip\n if ('silly' in logger && typeof logger.silly === 'function') {\n (logger as any).silly(message, ...meta);\n } else {\n logger.debug(message, ...meta);\n }\n },\n } as Logger;\n}\n\n"],"names":["createLoggerAdapter","dryRun","logger","getDryRunLogger","info","message","meta","error","warn","debug","verbose","silly"],"mappings":";;AAOA;;IAGO,SAASA,mBAAAA,CAAoBC,MAAe,EAAA;AAC/C,IAAA,MAAMC,SAASC,eAAAA,CAAgBF,MAAAA,CAAAA;IAE/B,OAAO;QACHG,IAAAA,CAAAA,CAAKC,OAAe,EAAE,GAAGC,IAAe,EAAA;YACpCJ,MAAAA,CAAOE,IAAI,CAACC,OAAAA,EAAAA,GAAYC,IAAAA,CAAAA;AAC5B,QAAA,CAAA;QAEAC,KAAAA,CAAAA,CAAMF,OAAe,EAAE,GAAGC,IAAe,EAAA;YACrCJ,MAAAA,CAAOK,KAAK,CAACF,OAAAA,EAAAA,GAAYC,IAAAA,CAAAA;AAC7B,QAAA,CAAA;QAEAE,IAAAA,CAAAA,CAAKH,OAAe,EAAE,GAAGC,IAAe,EAAA;YACpCJ,MAAAA,CAAOM,IAAI,CAACH,OAAAA,EAAAA,GAAYC,IAAAA,CAAAA;AAC5B,QAAA,CAAA;QAEAG,KAAAA,CAAAA,CAAMJ,OAAe,EAAE,GAAGC,IAAe,EAAA;YACrCJ,MAAAA,CAAOO,KAAK,CAACJ,OAAAA,EAAAA,GAAYC,IAAAA,CAAAA;AAC7B,QAAA,CAAA;;QAGAI,OAAAA,CAAAA,CAAQL,OAAe,EAAE,GAAGC,IAAe,EAAA;;AAEvC,YAAA,IAAI,aAAaJ,MAAAA,IAAU,OAAOA,MAAAA,CAAOQ,OAAO,KAAK,UAAA,EAAY;gBAC5DR,MAAAA,CAAeQ,OAAO,CAACL,OAAAA,EAAAA,GAAYC,IAAAA,CAAAA;YACxC,CAAA,MAAO;gBACHJ,MAAAA,CAAOO,KAAK,CAACJ,OAAAA,EAAAA,GAAYC,IAAAA,CAAAA;AAC7B,YAAA;AACJ,QAAA,CAAA;QAEAK,KAAAA,CAAAA,CAAMN,OAAe,EAAE,GAAGC,IAAe,EAAA;;AAErC,YAAA,IAAI,WAAWJ,MAAAA,IAAU,OAAOA,MAAAA,CAAOS,KAAK,KAAK,UAAA,EAAY;gBACxDT,MAAAA,CAAeS,KAAK,CAACN,OAAAA,EAAAA,GAAYC,IAAAA,CAAAA;YACtC,CAAA,MAAO;gBACHJ,MAAAA,CAAOO,KAAK,CAACJ,OAAAA,EAAAA,GAAYC,IAAAA,CAAAA;AAC7B,YAAA;AACJ,QAAA;AACJ,KAAA;AACJ;;;;"}
package/dist/util/performance.js
DELETED

@@ -1,134 +0,0 @@
-import path__default from 'path';
-import { getLogger } from '../logging.js';
-import { safeJsonParse, validatePackageJson } from '@eldrforge/git-tools';
-
-/* eslint-disable @typescript-eslint/no-unused-vars */ function _define_property(obj, key, value) {
-    if (key in obj) {
-        Object.defineProperty(obj, key, {
-            value: value,
-            enumerable: true,
-            configurable: true,
-            writable: true
-        });
-    } else {
-        obj[key] = value;
-    }
-    return obj;
-}
-// Performance timing helper
-class PerformanceTimer {
-    static start(logger, operation) {
-        logger.verbose(`⏱️ Starting: ${operation}`);
-        return new PerformanceTimer(logger);
-    }
-    end(operation) {
-        const duration = Date.now() - this.startTime;
-        this.logger.verbose(`⏱️ Completed: ${operation} (${duration}ms)`);
-        return duration;
-    }
-    constructor(logger){
-        _define_property(this, "startTime", void 0);
-        _define_property(this, "logger", void 0);
-        this.logger = logger;
-        this.startTime = Date.now();
-    }
-}
-const EXCLUDED_DIRECTORIES = [
-    'node_modules',
-    'dist',
-    'build',
-    'coverage',
-    '.git',
-    '.next',
-    '.nuxt',
-    'out',
-    'public',
-    'static',
-    'assets'
-];
-// Batch read multiple package.json files in parallel
-const batchReadPackageJsonFiles = async (packageJsonPaths, storage, rootDir)=>{
-    const logger = getLogger();
-    const timer = PerformanceTimer.start(logger, `Batch reading ${packageJsonPaths.length} package.json files`);
-    const readPromises = packageJsonPaths.map(async (packageJsonPath)=>{
-        try {
-            const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
-            const parsed = safeJsonParse(packageJsonContent, packageJsonPath);
-            const packageJson = validatePackageJson(parsed, packageJsonPath, false);
-            const relativePath = path__default.relative(rootDir, path__default.dirname(packageJsonPath));
-            return {
-                path: packageJsonPath,
-                packageJson,
-                relativePath: relativePath || '.'
-            };
-        } catch (error) {
-            logger.debug(`Skipped invalid package.json at ${packageJsonPath}: ${error.message}`);
-            return null;
-        }
-    });
-    const results = await Promise.all(readPromises);
-    const validResults = results.filter((result)=>result !== null);
-    timer.end(`Successfully read ${validResults.length}/${packageJsonPaths.length} package.json files`);
-    return validResults;
-};
-// Optimized recursive package.json finder with parallel processing
-const findAllPackageJsonFiles = async (rootDir, storage)=>{
-    const logger = getLogger();
-    const timer = PerformanceTimer.start(logger, 'Optimized scanning for package.json files');
-    const scanForPaths = async (currentDir, depth = 0)=>{
-        // Prevent infinite recursion and overly deep scanning
-        if (depth > 5) {
-            return [];
-        }
-        try {
-            if (!await storage.exists(currentDir) || !await storage.isDirectory(currentDir)) {
-                return [];
-            }
-            const items = await storage.listFiles(currentDir);
-            const foundPaths = [];
-            // Check for package.json in current directory
-            if (items.includes('package.json')) {
-                const packageJsonPath = path__default.join(currentDir, 'package.json');
-                foundPaths.push(packageJsonPath);
-            }
-            // Process subdirectories in parallel
-            const subdirPromises = [];
-            for (const item of items){
-                if (EXCLUDED_DIRECTORIES.includes(item)) {
-                    continue;
-                }
-                const itemPath = path__default.join(currentDir, item);
-                subdirPromises.push((async ()=>{
-                    try {
-                        if (await storage.isDirectory(itemPath)) {
-                            return await scanForPaths(itemPath, depth + 1);
-                        }
-                    } catch (error) {
-                        logger.debug(`Skipped directory ${itemPath}: ${error.message}`);
-                    }
-                    return [];
-                })());
-            }
-            if (subdirPromises.length > 0) {
-                const subdirResults = await Promise.all(subdirPromises);
-                for (const subdirPaths of subdirResults){
-                    foundPaths.push(...subdirPaths);
-                }
-            }
-            return foundPaths;
-        } catch (error) {
-            logger.debug(`Failed to scan directory ${currentDir}: ${error.message}`);
-            return [];
-        }
-    };
-    const pathsTimer = PerformanceTimer.start(logger, 'Finding all package.json paths');
-    const allPaths = await scanForPaths(rootDir);
-    pathsTimer.end(`Found ${allPaths.length} package.json file paths`);
-    // Phase 2: Batch read all package.json files in parallel
-    const packageJsonFiles = await batchReadPackageJsonFiles(allPaths, storage, rootDir);
-    timer.end(`Found ${packageJsonFiles.length} valid package.json files`);
-    return packageJsonFiles;
-};
-
-export { PerformanceTimer, batchReadPackageJsonFiles, findAllPackageJsonFiles };
-//# sourceMappingURL=performance.js.map
package/dist/util/performance.js.map
DELETED

@@ -1 +0,0 @@
-
{"version":3,"file":"performance.js","sources":["../../src/util/performance.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-unused-vars */\nimport path from 'path';\nimport { getLogger } from '../logging';\nimport { safeJsonParse, validatePackageJson } from '@eldrforge/git-tools';\n\n// Performance timing helper\nexport class PerformanceTimer {\n private startTime: number;\n private logger: any;\n\n constructor(logger: any) {\n this.logger = logger;\n this.startTime = Date.now();\n }\n\n static start(logger: any, operation: string): PerformanceTimer {\n logger.verbose(`⏱️ Starting: ${operation}`);\n return new PerformanceTimer(logger);\n }\n\n end(operation: string): number {\n const duration = Date.now() - this.startTime;\n this.logger.verbose(`⏱️ Completed: ${operation} (${duration}ms)`);\n return duration;\n }\n}\n\nexport interface PackageJson {\n name?: string;\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n peerDependencies?: Record<string, string>;\n}\n\nexport interface PackageJsonLocation {\n path: string;\n packageJson: PackageJson;\n relativePath: string;\n}\n\nconst EXCLUDED_DIRECTORIES = [\n 'node_modules',\n 'dist',\n 'build',\n 'coverage',\n '.git',\n '.next',\n '.nuxt',\n 'out',\n 'public',\n 'static',\n 'assets'\n];\n\n// Batch read multiple package.json files in parallel\nexport const batchReadPackageJsonFiles = async (\n packageJsonPaths: string[],\n storage: any,\n rootDir: string\n): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, `Batch reading ${packageJsonPaths.length} package.json files`);\n\n const readPromises = packageJsonPaths.map(async (packageJsonPath): Promise<PackageJsonLocation | null> => {\n try {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath, false);\n const relativePath = path.relative(rootDir, path.dirname(packageJsonPath));\n\n return {\n path: packageJsonPath,\n packageJson,\n relativePath: relativePath || '.'\n };\n } catch (error: any) {\n logger.debug(`Skipped invalid package.json at ${packageJsonPath}: ${error.message}`);\n return null;\n }\n });\n\n const results = await Promise.all(readPromises);\n const validResults = results.filter((result): result is PackageJsonLocation => result !== null);\n\n timer.end(`Successfully read ${validResults.length}/${packageJsonPaths.length} package.json files`);\n return validResults;\n};\n\n// Optimized recursive package.json finder with parallel processing\nexport const findAllPackageJsonFiles = async (rootDir: string, storage: any): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Optimized scanning for package.json files');\n\n // Phase 1: Find all package.json file paths in parallel\n const packageJsonPaths: string[] = [];\n\n const scanForPaths = async (currentDir: string, depth: number = 0): Promise<string[]> => {\n // Prevent infinite recursion and overly deep scanning\n if (depth > 5) {\n return [];\n }\n\n try {\n if (!await storage.exists(currentDir) || !await storage.isDirectory(currentDir)) {\n return [];\n }\n\n const items = await storage.listFiles(currentDir);\n const foundPaths: string[] = [];\n\n // Check for package.json in current directory\n if (items.includes('package.json')) {\n const packageJsonPath = path.join(currentDir, 
'package.json');\n foundPaths.push(packageJsonPath);\n }\n\n // Process subdirectories in parallel\n const subdirPromises: Promise<string[]>[] = [];\n for (const item of items) {\n if (EXCLUDED_DIRECTORIES.includes(item)) {\n continue;\n }\n\n const itemPath = path.join(currentDir, item);\n subdirPromises.push(\n (async () => {\n try {\n if (await storage.isDirectory(itemPath)) {\n return await scanForPaths(itemPath, depth + 1);\n }\n } catch (error: any) {\n logger.debug(`Skipped directory ${itemPath}: ${error.message}`);\n }\n return [];\n })()\n );\n }\n\n if (subdirPromises.length > 0) {\n const subdirResults = await Promise.all(subdirPromises);\n for (const subdirPaths of subdirResults) {\n foundPaths.push(...subdirPaths);\n }\n }\n\n return foundPaths;\n } catch (error: any) {\n logger.debug(`Failed to scan directory ${currentDir}: ${error.message}`);\n return [];\n }\n };\n\n const pathsTimer = PerformanceTimer.start(logger, 'Finding all package.json paths');\n const allPaths = await scanForPaths(rootDir);\n pathsTimer.end(`Found ${allPaths.length} package.json file paths`);\n\n // Phase 2: Batch read all package.json files in parallel\n const packageJsonFiles = await batchReadPackageJsonFiles(allPaths, storage, rootDir);\n\n timer.end(`Found ${packageJsonFiles.length} valid package.json files`);\n return packageJsonFiles;\n};\n\n// Optimized package scanning with parallel processing\nexport const scanDirectoryForPackages = async (rootDir: string, storage: any): Promise<Map<string, string>> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, `Optimized package scanning: ${rootDir}`);\n const packageMap = new Map<string, string>(); // packageName -> relativePath\n\n const absoluteRootDir = path.resolve(process.cwd(), rootDir);\n logger.verbose(`Scanning directory for packages: ${absoluteRootDir}`);\n\n try {\n // Quick existence and directory check\n const existsTimer = PerformanceTimer.start(logger, `Checking directory: ${absoluteRootDir}`);\n if (!await storage.exists(absoluteRootDir) || !await storage.isDirectory(absoluteRootDir)) {\n existsTimer.end(`Directory not found or not a directory: ${absoluteRootDir}`);\n timer.end(`Directory invalid: ${rootDir}`);\n return packageMap;\n }\n existsTimer.end(`Directory verified: ${absoluteRootDir}`);\n\n // Get all items and process in parallel\n const listTimer = PerformanceTimer.start(logger, `Listing contents: ${absoluteRootDir}`);\n const items = await storage.listFiles(absoluteRootDir);\n listTimer.end(`Listed ${items.length} items`);\n\n // Create batched promises for better performance\n const BATCH_SIZE = 10; // Process directories in batches to avoid overwhelming filesystem\n const batches = [];\n\n for (let i = 0; i < items.length; i += BATCH_SIZE) {\n const batch = items.slice(i, i + BATCH_SIZE);\n batches.push(batch);\n }\n\n const processTimer = PerformanceTimer.start(logger, `Processing ${batches.length} batches of directories`);\n\n for (const batch of batches) {\n const batchPromises = batch.map(async (item: string) => {\n const itemPath = path.join(absoluteRootDir, item);\n try {\n if (await storage.isDirectory(itemPath)) {\n const packageJsonPath = path.join(itemPath, 'package.json');\n\n if (await storage.exists(packageJsonPath)) {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath);\n\n if (packageJson.name) {\n const relativePath 
= path.relative(process.cwd(), itemPath);\n return { name: packageJson.name, path: relativePath };\n }\n }\n }\n } catch (error: any) {\n logger.debug(`Skipped ${itemPath}: ${error.message || error}`);\n }\n return null;\n });\n\n const batchResults = await Promise.all(batchPromises);\n\n for (const result of batchResults) {\n if (result) {\n packageMap.set(result.name, result.path);\n logger.debug(`Found package: ${result.name} at ${result.path}`);\n }\n }\n }\n\n processTimer.end(`Processed ${items.length} directories in ${batches.length} batches`);\n logger.verbose(`Found ${packageMap.size} packages in ${items.length} subdirectories`);\n } catch (error) {\n logger.warn(`PERFORMANCE_DIR_READ_FAILED: Unable to read directory | Directory: ${absoluteRootDir} | Error: ${error}`);\n }\n\n timer.end(`Found ${packageMap.size} packages in: ${rootDir}`);\n return packageMap;\n};\n\n// Parallel scope processing for better performance\nexport const findPackagesByScope = async (\n dependencies: Record<string, string>,\n scopeRoots: Record<string, string>,\n storage: any\n): Promise<Map<string, string>> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Finding packages by scope (optimized)');\n const workspacePackages = new Map<string, string>();\n\n logger.silly(`Checking dependencies against scope roots: ${JSON.stringify(scopeRoots)}`);\n\n // Process all scopes in parallel for maximum performance\n const scopeTimer = PerformanceTimer.start(logger, 'Parallel scope scanning');\n const scopePromises = Object.entries(scopeRoots).map(async ([scope, rootDir]) => {\n logger.verbose(`Scanning scope ${scope} at root directory: ${rootDir}`);\n const scopePackages = await scanDirectoryForPackages(rootDir, storage);\n\n // Filter packages that match the scope\n const matchingPackages: Array<[string, string]> = [];\n for (const [packageName, packagePath] of scopePackages) {\n if (packageName.startsWith(scope)) {\n matchingPackages.push([packageName, packagePath]);\n logger.debug(`Registered package: ${packageName} -> ${packagePath}`);\n }\n }\n return { scope, packages: matchingPackages };\n });\n\n const allScopeResults = await Promise.all(scopePromises);\n\n // Aggregate all packages from all scopes\n const allPackages = new Map<string, string>();\n for (const { scope, packages } of allScopeResults) {\n for (const [packageName, packagePath] of packages) {\n allPackages.set(packageName, packagePath);\n }\n }\n\n scopeTimer.end(`Scanned ${Object.keys(scopeRoots).length} scope roots, found ${allPackages.size} packages`);\n\n // Match dependencies to available packages\n const matchTimer = PerformanceTimer.start(logger, 'Matching dependencies to packages');\n for (const [depName, depVersion] of Object.entries(dependencies)) {\n logger.debug(`Processing dependency: ${depName}@${depVersion}`);\n\n if (allPackages.has(depName)) {\n const packagePath = allPackages.get(depName)!;\n workspacePackages.set(depName, packagePath);\n logger.verbose(`Found sibling package: ${depName} at ${packagePath}`);\n }\n }\n matchTimer.end(`Matched ${workspacePackages.size} dependencies to workspace packages`);\n\n timer.end(`Found ${workspacePackages.size} packages to link`);\n return workspacePackages;\n};\n\n// Utility to collect all dependencies from package.json files efficiently\nexport const collectAllDependencies = (packageJsonFiles: PackageJsonLocation[]): Record<string, string> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Collecting all 
dependencies');\n\n const allDependencies: Record<string, string> = {};\n for (const { packageJson } of packageJsonFiles) {\n Object.assign(allDependencies, packageJson.dependencies);\n Object.assign(allDependencies, packageJson.devDependencies);\n Object.assign(allDependencies, packageJson.peerDependencies);\n }\n\n timer.end(`Collected ${Object.keys(allDependencies).length} unique dependencies`);\n return allDependencies;\n};\n\n// Utility to check for file: dependencies\nexport const checkForFileDependencies = (packageJsonFiles: PackageJsonLocation[]): void => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Checking for file: dependencies');\n const filesWithFileDepedencies: Array<{path: string, dependencies: string[]}> = [];\n\n for (const { path: packagePath, packageJson, relativePath } of packageJsonFiles) {\n const fileDeps: string[] = [];\n\n // Check all dependency types for file: paths\n const allDeps = {\n ...packageJson.dependencies,\n ...packageJson.devDependencies,\n ...packageJson.peerDependencies\n };\n\n for (const [name, version] of Object.entries(allDeps)) {\n if (version.startsWith('file:')) {\n fileDeps.push(`${name}: ${version}`);\n }\n }\n\n if (fileDeps.length > 0) {\n filesWithFileDepedencies.push({\n path: relativePath,\n dependencies: fileDeps\n });\n }\n }\n\n if (filesWithFileDepedencies.length > 0) {\n logger.warn('FILE_DEPS_WARNING: Found file: dependencies that should not be committed | Count: ' + filesWithFileDepedencies.length + ' | Impact: May cause build issues');\n for (const file of filesWithFileDepedencies) {\n logger.warn(`FILE_DEPS_PACKAGE: Package with file dependencies | Path: ${file.path}`);\n for (const dep of file.dependencies) {\n logger.warn(`FILE_DEPS_DETAIL: File dependency detected | Dependency: ${dep}`);\n }\n }\n logger.warn('');\n logger.warn('FILE_DEPS_RESOLUTION: Action required before committing | Command: kodrdriv unlink | Purpose: Restore registry versions');\n logger.warn('FILE_DEPS_PREVENTION: Alternative option | Action: Add pre-commit hook | Purpose: Prevent accidental commits of linked dependencies');\n }\n\n timer.end(`Checked ${packageJsonFiles.length} files, found ${filesWithFileDepedencies.length} with file: 
dependencies`);\n};\n"],"names":["PerformanceTimer","start","logger","operation","verbose","end","duration","Date","now","startTime","EXCLUDED_DIRECTORIES","batchReadPackageJsonFiles","packageJsonPaths","storage","rootDir","getLogger","timer","length","readPromises","map","packageJsonPath","packageJsonContent","readFile","parsed","safeJsonParse","packageJson","validatePackageJson","relativePath","path","relative","dirname","error","debug","message","results","Promise","all","validResults","filter","result","findAllPackageJsonFiles","scanForPaths","currentDir","depth","exists","isDirectory","items","listFiles","foundPaths","includes","join","push","subdirPromises","item","itemPath","subdirResults","subdirPaths","pathsTimer","allPaths","packageJsonFiles"],"mappings":";;;;AAAA,uDAAoD,SAAA,gBAAA,CAAA,GAAA,EAAA,GAAA,EAAA,KAAA,EAAA;;;;;;;;;;;;;AAKpD;AACO,MAAMA,gBAAAA,CAAAA;AAST,IAAA,OAAOC,KAAAA,CAAMC,MAAW,EAAEC,SAAiB,EAAoB;AAC3DD,QAAAA,MAAAA,CAAOE,OAAO,CAAC,CAAC,cAAc,EAAED,SAAAA,CAAAA,CAAW,CAAA;AAC3C,QAAA,OAAO,IAAIH,gBAAAA,CAAiBE,MAAAA,CAAAA;AAChC,IAAA;AAEAG,IAAAA,GAAAA,CAAIF,SAAiB,EAAU;AAC3B,QAAA,MAAMG,WAAWC,IAAAA,CAAKC,GAAG,EAAA,GAAK,IAAI,CAACC,SAAS;AAC5C,QAAA,IAAI,CAACP,MAAM,CAACE,OAAO,CAAC,CAAC,eAAe,EAAED,SAAAA,CAAU,EAAE,EAAEG,QAAAA,CAAS,GAAG,CAAC,CAAA;QACjE,OAAOA,QAAAA;AACX,IAAA;AAdA,IAAA,WAAA,CAAYJ,MAAW,CAAE;AAHzB,QAAA,gBAAA,CAAA,IAAA,EAAQO,aAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQP,UAAR,MAAA,CAAA;QAGI,IAAI,CAACA,MAAM,GAAGA,MAAAA;AACd,QAAA,IAAI,CAACO,SAAS,GAAGF,IAAAA,CAAKC,GAAG,EAAA;AAC7B,IAAA;AAYJ;AAeA,MAAME,oBAAAA,GAAuB;AACzB,IAAA,cAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,UAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,OAAA;AACA,IAAA,KAAA;AACA,IAAA,QAAA;AACA,IAAA,QAAA;AACA,IAAA;AACH,CAAA;AAED;AACO,MAAMC,yBAAAA,GAA4B,OACrCC,gBAAAA,EACAC,OAAAA,EACAC,OAAAA,GAAAA;AAEA,IAAA,MAAMZ,MAAAA,GAASa,SAAAA,EAAAA;AACf,IAAA,MAAMC,KAAAA,GAAQhB,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,CAAC,cAAc,EAAEU,gBAAAA,CAAiBK,MAAM,CAAC,mBAAmB,CAAC,CAAA;AAE1G,IAAA,MAAMC,YAAAA,GAAeN,gBAAAA,CAAiBO,GAAG,CAAC,OAAOC,eAAAA,GAAAA;QAC7C,IAAI;AACA,YAAA,MAAMC,kBAAAA,GAAqB,MAAMR,OAAAA,CAAQS,QAAQ,CAACF,eAAAA,EAAiB,OAAA,CAAA;YACnE,MAAMG,MAAAA,GAASC,cAAcH,kBAAAA,EAAoBD,eAAAA,CAAAA;YACjD,MAAMK,WAAAA,GAAcC,mBAAAA,CAAoBH,MAAAA,EAAQH,eAAAA,EAAiB,KAAA,CAAA;AACjE,YAAA,MAAMO,eAAeC,aAAAA,CAAKC,QAAQ,CAACf,OAAAA,EAASc,aAAAA,CAAKE,OAAO,CAACV,eAAAA,CAAAA,CAAAA;YAEzD,OAAO;gBACHQ,IAAAA,EAAMR,eAAAA;AACNK,gBAAAA,WAAAA;AACAE,gBAAAA,YAAAA,EAAcA,YAAAA,IAAgB;AAClC,aAAA;AACJ,QAAA,CAAA,CAAE,OAAOI,KAAAA,EAAY;YACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,gCAAgC,EAAEZ,gBAAgB,EAAE,EAAEW,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;YACnF,OAAO,IAAA;AACX,QAAA;AACJ,IAAA,CAAA,CAAA;AAEA,IAAA,MAAMC,OAAAA,GAAU,MAAMC,OAAAA,CAAQC,GAAG,CAAClB,YAAAA,CAAAA;AAClC,IAAA,MAAMmB,eAAeH,OAAAA,CAAQI,MAAM,CAAC,CAACC,SAA0CA,MAAAA,KAAW,IAAA,CAAA;AAE1FvB,IAAAA,KAAAA,CAAMX,GAAG,CAAC,CAAC,kBAAkB,EAAEgC,YAAAA,CAAapB,MAAM,CAAC,CAAC,EAAEL,gBAAAA,CAAiBK,MAAM,CAAC,mBAAmB,CAAC,CAAA;IAClG,OAAOoB,YAAAA;AACX;AAEA;AACO,MAAMG,uBAAAA,GAA0B,OAAO1B,OAAAA,EAAiBD,OAAAA,GAAAA;AAC3D,IAAA,MAAMX,MAAAA,GAASa,SAAAA,EAAAA;AACf,IAAA,MAAMC,KAAAA,GAAQhB,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,2CAAA,CAAA;AAK7C,IAAA,MAAMuC,YAAAA,GAAe,OAAOC,UAAAA,EAAoBC,KAAAA,GAAgB,CAAC,GAAA;;AAE7D,QAAA,IAAIA,QAAQ,CAAA,EAAG;AACX,YAAA,OAAO,EAAE;AACb,QAAA;QAEA,IAAI;YACA,IAAI,CAAC,MAAM9B,OAAAA,CAAQ+B,MAAM,CAACF,UAAAA,CAAAA,IAAe,CAAC,MAAM7B,OAAAA,CAAQgC,WAAW,CAACH,UAAAA,CAAAA,EAAa;AAC7E,gBAAA,OAAO,EAAE;AACb,YAAA;AAEA,YAAA,MAAMI,KAAAA,GAAQ,MAAMjC,OAAAA,CAAQkC,SAAS,CAACL,UAAAA,CAAAA;AACtC,YAAA,MAAMM,aAAuB,EAAE;;YAG/B,IAAIF,KAAAA,CAAMG,QAAQ,CAAC,cAAA,CAAA,EAAiB;AA
ChC,gBAAA,MAAM7B,eAAAA,GAAkBQ,aAAAA,CAAKsB,IAAI,CAACR,UAAAA,EAAY,cAAA,CAAA;AAC9CM,gBAAAA,UAAAA,CAAWG,IAAI,CAAC/B,eAAAA,CAAAA;AACpB,YAAA;;AAGA,YAAA,MAAMgC,iBAAsC,EAAE;YAC9C,KAAK,MAAMC,QAAQP,KAAAA,CAAO;gBACtB,IAAIpC,oBAAAA,CAAqBuC,QAAQ,CAACI,IAAAA,CAAAA,EAAO;AACrC,oBAAA;AACJ,gBAAA;AAEA,gBAAA,MAAMC,QAAAA,GAAW1B,aAAAA,CAAKsB,IAAI,CAACR,UAAAA,EAAYW,IAAAA,CAAAA;gBACvCD,cAAAA,CAAeD,IAAI,CACd,CAAA,UAAA;oBACG,IAAI;AACA,wBAAA,IAAI,MAAMtC,OAAAA,CAAQgC,WAAW,CAACS,QAAAA,CAAAA,EAAW;4BACrC,OAAO,MAAMb,YAAAA,CAAaa,QAAAA,EAAUX,KAAAA,GAAQ,CAAA,CAAA;AAChD,wBAAA;AACJ,oBAAA,CAAA,CAAE,OAAOZ,KAAAA,EAAY;wBACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,kBAAkB,EAAEsB,SAAS,EAAE,EAAEvB,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;AAClE,oBAAA;AACA,oBAAA,OAAO,EAAE;gBACb,CAAA,GAAA,CAAA;AAER,YAAA;YAEA,IAAImB,cAAAA,CAAenC,MAAM,GAAG,CAAA,EAAG;AAC3B,gBAAA,MAAMsC,aAAAA,GAAgB,MAAMpB,OAAAA,CAAQC,GAAG,CAACgB,cAAAA,CAAAA;gBACxC,KAAK,MAAMI,eAAeD,aAAAA,CAAe;AACrCP,oBAAAA,UAAAA,CAAWG,IAAI,CAAA,GAAIK,WAAAA,CAAAA;AACvB,gBAAA;AACJ,YAAA;YAEA,OAAOR,UAAAA;AACX,QAAA,CAAA,CAAE,OAAOjB,KAAAA,EAAY;YACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,yBAAyB,EAAEU,WAAW,EAAE,EAAEX,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;AACvE,YAAA,OAAO,EAAE;AACb,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMwB,UAAAA,GAAazD,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,gCAAA,CAAA;IAClD,MAAMwD,QAAAA,GAAW,MAAMjB,YAAAA,CAAa3B,OAAAA,CAAAA;IACpC2C,UAAAA,CAAWpD,GAAG,CAAC,CAAC,MAAM,EAAEqD,QAAAA,CAASzC,MAAM,CAAC,wBAAwB,CAAC,CAAA;;AAGjE,IAAA,MAAM0C,gBAAAA,GAAmB,MAAMhD,yBAAAA,CAA0B+C,QAAAA,EAAU7C,OAAAA,EAASC,OAAAA,CAAAA;IAE5EE,KAAAA,CAAMX,GAAG,CAAC,CAAC,MAAM,EAAEsD,gBAAAA,CAAiB1C,MAAM,CAAC,yBAAyB,CAAC,CAAA;IACrE,OAAO0C,gBAAAA;AACX;;;;"}