@eldrforge/kodrdriv 1.2.19 → 1.2.20

This diff reflects the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (46)
  1. package/INTEGRATION-SUMMARY.md +232 -0
  2. package/TEST-STATUS.md +168 -0
  3. package/dist/application.js +3 -0
  4. package/dist/application.js.map +1 -1
  5. package/dist/arguments.js +1 -1
  6. package/dist/arguments.js.map +1 -1
  7. package/dist/commands/commit.js +2 -2
  8. package/dist/commands/commit.js.map +1 -1
  9. package/dist/commands/development.js +1 -2
  10. package/dist/commands/development.js.map +1 -1
  11. package/dist/commands/link.js +1 -2
  12. package/dist/commands/link.js.map +1 -1
  13. package/dist/commands/publish.js +1 -3
  14. package/dist/commands/publish.js.map +1 -1
  15. package/dist/commands/release.js +2 -2
  16. package/dist/commands/release.js.map +1 -1
  17. package/dist/commands/tree.js +7 -8
  18. package/dist/commands/tree.js.map +1 -1
  19. package/dist/commands/unlink.js +1 -2
  20. package/dist/commands/unlink.js.map +1 -1
  21. package/dist/commands/updates.js +1 -1
  22. package/dist/commands/updates.js.map +1 -1
  23. package/dist/commands/versions.js +1 -1
  24. package/dist/commands/versions.js.map +1 -1
  25. package/dist/constants.js +1 -1
  26. package/dist/content/diff.js +1 -1
  27. package/dist/content/diff.js.map +1 -1
  28. package/dist/content/log.js +1 -1
  29. package/dist/content/log.js.map +1 -1
  30. package/dist/util/general.js +2 -3
  31. package/dist/util/general.js.map +1 -1
  32. package/dist/util/github.js +1 -1
  33. package/dist/util/github.js.map +1 -1
  34. package/dist/util/openai.js +1 -1
  35. package/dist/util/openai.js.map +1 -1
  36. package/dist/util/performance.js +1 -1
  37. package/dist/util/performance.js.map +1 -1
  38. package/dist/util/safety.js +1 -1
  39. package/dist/util/safety.js.map +1 -1
  40. package/dist/util/validation.js +4 -39
  41. package/dist/util/validation.js.map +1 -1
  42. package/package.json +2 -1
  43. package/dist/util/child.js +0 -174
  44. package/dist/util/child.js.map +0 -1
  45. package/dist/util/git.js +0 -836
  46. package/dist/util/git.js.map +0 -1
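The hunk below is the single-line change to package/dist/util/openai.js.map. Within the embedded sourcesContent, the visible difference is the source of the safeJsonParse import, which moves from the local ./validation module to the @eldrforge/git-tools package:

- import { safeJsonParse } from './validation';
+ import { safeJsonParse } from '@eldrforge/git-tools';

This is consistent with the +2/-1 change to package/package.json, which likely adds @eldrforge/git-tools as a dependency alongside the version bump.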
@@ -1 +1 @@
- {"version":3,"file":"openai.js","sources":["../../src/util/openai.ts"],"sourcesContent":["import { OpenAI } from 'openai';\nimport { ChatCompletionMessageParam } from 'openai/resources';\nimport * as Storage from './storage';\nimport { getLogger } from '../logging';\nimport { archiveAudio } from './general';\nimport { Config } from '../types';\nimport { safeJsonParse } from './validation';\n// eslint-disable-next-line no-restricted-imports\nimport fs from 'fs';\n\nexport interface Transcription {\n text: string;\n}\n\n/**\n * Get the appropriate model to use based on command-specific configuration\n * Command-specific model overrides the global model setting\n */\nexport function getModelForCommand(config: Config, commandName: string): string {\n let commandModel: string | undefined;\n\n switch (commandName) {\n case 'commit':\n case 'audio-commit':\n commandModel = config.commit?.model;\n break;\n case 'release':\n commandModel = config.release?.model;\n break;\n case 'review':\n case 'audio-review':\n commandModel = config.review?.model;\n break;\n default:\n // For other commands, just use global model\n break;\n }\n\n // Return command-specific model if available, otherwise global model\n return commandModel || config.model || 'gpt-4o-mini';\n}\n\n/**\n * Get the appropriate OpenAI reasoning level based on command-specific configuration\n * Command-specific reasoning overrides the global reasoning setting\n */\nexport function getOpenAIReasoningForCommand(config: Config, commandName: string): 'low' | 'medium' | 'high' {\n let commandReasoning: 'low' | 'medium' | 'high' | undefined;\n\n switch (commandName) {\n case 'commit':\n case 'audio-commit':\n commandReasoning = config.commit?.openaiReasoning;\n break;\n case 'release':\n commandReasoning = config.release?.openaiReasoning;\n break;\n case 'review':\n case 'audio-review':\n commandReasoning = config.review?.openaiReasoning;\n break;\n default:\n // For other commands, just use global reasoning\n break;\n }\n\n // Return command-specific reasoning if available, otherwise global reasoning\n return commandReasoning || config.openaiReasoning || 'low';\n}\n\n/**\n * Get the appropriate OpenAI max output tokens based on command-specific configuration\n * Command-specific max output tokens overrides the global setting\n */\nexport function getOpenAIMaxOutputTokensForCommand(config: Config, commandName: string): number {\n let commandMaxOutputTokens: number | undefined;\n\n switch (commandName) {\n case 'commit':\n case 'audio-commit':\n commandMaxOutputTokens = config.commit?.openaiMaxOutputTokens;\n break;\n case 'release':\n commandMaxOutputTokens = config.release?.openaiMaxOutputTokens;\n break;\n case 'review':\n case 'audio-review':\n commandMaxOutputTokens = config.review?.openaiMaxOutputTokens;\n break;\n default:\n // For other commands, just use global max output tokens\n break;\n }\n\n // Return command-specific max output tokens if available, otherwise global setting\n return commandMaxOutputTokens || config.openaiMaxOutputTokens || 10000;\n}\n\nexport class OpenAIError extends Error {\n constructor(message: string, public readonly isTokenLimitError: boolean = false) {\n super(message);\n this.name = 'OpenAIError';\n }\n}\n\n// Check if an error is a token limit exceeded error\nexport function isTokenLimitError(error: any): boolean {\n if (!error?.message) return false;\n\n const message = error.message.toLowerCase();\n return message.includes('maximum context length') ||\n message.includes('context_length_exceeded') ||\n 
message.includes('token limit') ||\n message.includes('too many tokens') ||\n message.includes('reduce the length');\n}\n\n// Check if an error is a rate limit error\nexport function isRateLimitError(error: any): boolean {\n if (!error?.message && !error?.code && !error?.status) return false;\n\n // Check for OpenAI specific rate limit indicators\n if (error.status === 429 || error.code === 'rate_limit_exceeded') {\n return true;\n }\n\n // Only check message if it exists\n if (error.message) {\n const message = error.message.toLowerCase();\n return message.includes('rate limit exceeded') ||\n message.includes('too many requests') ||\n message.includes('quota exceeded') ||\n (message.includes('rate') && message.includes('limit'));\n }\n\n return false;\n}\n\nexport async function createCompletion(messages: ChatCompletionMessageParam[], options: { responseFormat?: any, model?: string, debug?: boolean, debugFile?: string, debugRequestFile?: string, debugResponseFile?: string, maxTokens?: number, openaiReasoning?: 'low' | 'medium' | 'high', openaiMaxOutputTokens?: number } = { model: \"gpt-4o-mini\" }): Promise<string | any> {\n const logger = getLogger();\n const storage = Storage.create({ log: logger.debug });\n let openai: OpenAI | null = null;\n try {\n const apiKey = process.env.OPENAI_API_KEY;\n if (!apiKey) {\n throw new OpenAIError('OPENAI_API_KEY environment variable is not set');\n }\n\n // Create the client which we'll close in the finally block.\n const timeoutMs = parseInt(process.env.OPENAI_TIMEOUT_MS || '300000'); // Default to 5 minutes\n openai = new OpenAI({\n apiKey: apiKey,\n timeout: timeoutMs,\n });\n\n const modelToUse = options.model || \"gpt-4o-mini\";\n\n // Calculate request size\n const requestSize = JSON.stringify(messages).length;\n const requestSizeKB = (requestSize / 1024).toFixed(2);\n\n // Log model, reasoning level, and request size\n const reasoningInfo = options.openaiReasoning ? ` | Reasoning: ${options.openaiReasoning}` : '';\n logger.info('🤖 Making request to OpenAI');\n logger.info(' Model: %s%s', modelToUse, reasoningInfo);\n logger.info(' Request size: %s KB (%s bytes)', requestSizeKB, requestSize.toLocaleString());\n\n logger.debug('Sending prompt to OpenAI: %j', messages);\n\n // Use openaiMaxOutputTokens if specified (highest priority), otherwise fall back to maxTokens, or default to 10000\n const maxCompletionTokens = options.openaiMaxOutputTokens ?? options.maxTokens ?? 
10000;\n\n // Save request debug file if enabled\n if (options.debug && (options.debugRequestFile || options.debugFile)) {\n const requestData = {\n model: modelToUse,\n messages,\n max_completion_tokens: maxCompletionTokens,\n response_format: options.responseFormat,\n reasoning_effort: options.openaiReasoning,\n };\n const debugFile = options.debugRequestFile || options.debugFile;\n await storage.writeFile(debugFile!, JSON.stringify(requestData, null, 2), 'utf8');\n logger.debug('Wrote request debug file to %s', debugFile);\n }\n\n // Prepare the API call options\n const apiOptions: any = {\n model: modelToUse,\n messages,\n max_completion_tokens: maxCompletionTokens,\n response_format: options.responseFormat,\n };\n\n // Add reasoning parameter if specified and model supports it\n if (options.openaiReasoning && (modelToUse.includes('gpt-5') || modelToUse.includes('o3'))) {\n apiOptions.reasoning_effort = options.openaiReasoning;\n }\n\n // Add timeout wrapper to the OpenAI API call\n const startTime = Date.now();\n const completionPromise = openai.chat.completions.create(apiOptions);\n\n // Create timeout promise with proper cleanup to prevent memory leaks\n let timeoutId: NodeJS.Timeout | null = null;\n const timeoutPromise = new Promise<never>((_, reject) => {\n const timeoutMs = parseInt(process.env.OPENAI_TIMEOUT_MS || '300000'); // Default to 5 minutes\n timeoutId = setTimeout(() => reject(new OpenAIError(`OpenAI API call timed out after ${timeoutMs/1000} seconds`)), timeoutMs);\n });\n\n let completion;\n try {\n completion = await Promise.race([completionPromise, timeoutPromise]);\n } finally {\n // Clear the timeout to prevent memory leaks\n if (timeoutId !== null) {\n clearTimeout(timeoutId);\n }\n }\n\n const elapsedTime = Date.now() - startTime;\n\n // Save response debug file if enabled\n if (options.debug && (options.debugResponseFile || options.debugFile)) {\n const debugFile = options.debugResponseFile || options.debugFile;\n await storage.writeFile(debugFile!, JSON.stringify(completion, null, 2), 'utf8');\n logger.debug('Wrote response debug file to %s', debugFile);\n }\n\n const response = completion.choices[0]?.message?.content?.trim();\n if (!response) {\n throw new OpenAIError('No response received from OpenAI');\n }\n\n // Calculate and log response size\n const responseSize = response.length;\n const responseSizeKB = (responseSize / 1024).toFixed(2);\n logger.info(' Response size: %s KB (%s bytes)', responseSizeKB, responseSize.toLocaleString());\n\n // Log elapsed time\n const elapsedTimeFormatted = elapsedTime >= 1000\n ? 
`${(elapsedTime / 1000).toFixed(1)}s`\n : `${elapsedTime}ms`;\n logger.info(' Time: %s', elapsedTimeFormatted);\n\n // Log token usage if available\n if (completion.usage) {\n logger.info(' Token usage: %s prompt + %s completion = %s total',\n completion.usage.prompt_tokens?.toLocaleString() || '?',\n completion.usage.completion_tokens?.toLocaleString() || '?',\n completion.usage.total_tokens?.toLocaleString() || '?'\n );\n }\n\n logger.debug('Received response from OpenAI: %s...', response.substring(0, 30));\n if (options.responseFormat) {\n return safeJsonParse(response, 'OpenAI API response');\n } else {\n return response;\n }\n\n } catch (error: any) {\n logger.error('Error calling OpenAI API: %s %s', error.message, error.stack);\n const isTokenError = isTokenLimitError(error);\n throw new OpenAIError(`Failed to create completion: ${error.message}`, isTokenError);\n } finally {\n // OpenAI client cleanup is handled automatically by the library\n // No manual cleanup needed for newer versions\n }\n}\n\n// Create completion with automatic retry on token limit errors\nexport async function createCompletionWithRetry(\n messages: ChatCompletionMessageParam[],\n options: { responseFormat?: any, model?: string, debug?: boolean, debugFile?: string, debugRequestFile?: string, debugResponseFile?: string, maxTokens?: number, openaiReasoning?: 'low' | 'medium' | 'high', openaiMaxOutputTokens?: number } = { model: \"gpt-4o-mini\" },\n retryCallback?: (attempt: number) => Promise<ChatCompletionMessageParam[]>\n): Promise<string | any> {\n const logger = getLogger();\n const maxRetries = 3;\n\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\n try {\n const messagesToSend = attempt === 1 ? messages : (retryCallback ? await retryCallback(attempt) : messages);\n return await createCompletion(messagesToSend, options);\n } catch (error: any) {\n if (error instanceof OpenAIError && error.isTokenLimitError && attempt < maxRetries && retryCallback) {\n logger.warn('Token limit exceeded on attempt %d/%d, retrying with reduced content...', attempt, maxRetries);\n // Add exponential backoff for token limit errors\n const backoffMs = Math.min(1000 * Math.pow(2, attempt - 1), 10000);\n await new Promise(resolve => setTimeout(resolve, backoffMs));\n continue;\n } else if (isRateLimitError(error) && attempt < maxRetries) {\n // Handle rate limiting with exponential backoff\n const backoffMs = Math.min(2000 * Math.pow(2, attempt - 1), 15000); // More reasonable backoff: 2s, 4s, 8s, max 15s\n logger.warn(`Rate limit hit on attempt ${attempt}/${maxRetries}, waiting ${backoffMs}ms before retry...`);\n await new Promise(resolve => setTimeout(resolve, backoffMs));\n continue;\n }\n throw error;\n }\n }\n\n // This should never be reached, but TypeScript requires it\n throw new OpenAIError('Max retries exceeded');\n}\n\nexport async function transcribeAudio(filePath: string, options: { model?: string, debug?: boolean, debugFile?: string, debugRequestFile?: string, debugResponseFile?: string, outputDirectory?: string } = { model: \"whisper-1\" }): Promise<Transcription> {\n const logger = getLogger();\n const storage = Storage.create({ log: logger.debug });\n let openai: OpenAI | null = null;\n let audioStream: fs.ReadStream | null = null;\n let streamClosed = false;\n\n // Helper function to safely close the stream\n const closeAudioStream = () => {\n if (audioStream && !streamClosed) {\n try {\n // Only call destroy if it exists and the stream isn't already destroyed\n if (typeof audioStream.destroy === 
'function' && !audioStream.destroyed) {\n audioStream.destroy();\n }\n streamClosed = true;\n logger.debug('Audio stream closed successfully');\n } catch (streamErr) {\n logger.debug('Failed to destroy audio read stream: %s', (streamErr as Error).message);\n streamClosed = true; // Mark as closed even if destroy failed\n }\n }\n };\n\n try {\n const apiKey = process.env.OPENAI_API_KEY;\n if (!apiKey) {\n throw new OpenAIError('OPENAI_API_KEY environment variable is not set');\n }\n\n openai = new OpenAI({\n apiKey: apiKey,\n });\n\n logger.debug('Transcribing audio file: %s', filePath);\n\n // Save request debug file if enabled\n if (options.debug && (options.debugRequestFile || options.debugFile)) {\n const requestData = {\n model: options.model || \"whisper-1\",\n file: filePath, // Can't serialize the stream, so just save the file path\n response_format: \"json\",\n };\n const debugFile = options.debugRequestFile || options.debugFile;\n await storage.writeFile(debugFile!, JSON.stringify(requestData, null, 2), 'utf8');\n logger.debug('Wrote request debug file to %s', debugFile);\n }\n\n audioStream = await storage.readStream(filePath);\n\n // Set up error handler for the stream to ensure cleanup on stream errors\n // Only add handler if the stream has the 'on' method (real streams)\n if (audioStream && typeof audioStream.on === 'function') {\n audioStream.on('error', (streamError) => {\n logger.error('Audio stream error: %s', streamError.message);\n closeAudioStream();\n });\n }\n\n let transcription;\n try {\n transcription = await openai.audio.transcriptions.create({\n model: options.model || \"whisper-1\",\n file: audioStream,\n response_format: \"json\",\n });\n // Close the stream immediately after successful API call to prevent race conditions\n closeAudioStream();\n } catch (apiError) {\n // Close the stream immediately if the API call fails\n closeAudioStream();\n throw apiError;\n }\n\n // Save response debug file if enabled\n if (options.debug && (options.debugResponseFile || options.debugFile)) {\n const debugFile = options.debugResponseFile || options.debugFile;\n await storage.writeFile(debugFile!, JSON.stringify(transcription, null, 2), 'utf8');\n logger.debug('Wrote response debug file to %s', debugFile);\n }\n\n const response = transcription;\n if (!response) {\n throw new OpenAIError('No transcription received from OpenAI');\n }\n\n logger.debug('Received transcription from OpenAI: %s', response);\n\n // Archive the audio file and transcription\n try {\n const outputDir = options.outputDirectory || 'output';\n await archiveAudio(filePath, response.text, outputDir);\n } catch (archiveError: any) {\n // Don't fail the transcription if archiving fails, just log the error\n logger.warn('Failed to archive audio file: %s', archiveError.message);\n }\n\n return response;\n\n } catch (error: any) {\n logger.error('Error transcribing audio file: %s %s', error.message, error.stack);\n throw new OpenAIError(`Failed to transcribe audio: ${error.message}`);\n } finally {\n // Ensure the audio stream is properly closed to release file handles\n closeAudioStream();\n // OpenAI client cleanup is handled automatically by the library\n // No manual cleanup needed for newer versions\n 
}\n}\n"],"names":["getModelForCommand","config","commandName","commandModel","commit","model","release","review","getOpenAIReasoningForCommand","commandReasoning","openaiReasoning","getOpenAIMaxOutputTokensForCommand","commandMaxOutputTokens","openaiMaxOutputTokens","OpenAIError","Error","message","isTokenLimitError","name","error","toLowerCase","includes","isRateLimitError","code","status","createCompletion","messages","options","logger","getLogger","storage","Storage","log","debug","openai","completion","apiKey","process","env","OPENAI_API_KEY","timeoutMs","parseInt","OPENAI_TIMEOUT_MS","OpenAI","timeout","modelToUse","requestSize","JSON","stringify","length","requestSizeKB","toFixed","reasoningInfo","info","toLocaleString","maxCompletionTokens","maxTokens","debugRequestFile","debugFile","requestData","max_completion_tokens","response_format","responseFormat","reasoning_effort","writeFile","apiOptions","startTime","Date","now","completionPromise","chat","completions","create","timeoutId","timeoutPromise","Promise","_","reject","setTimeout","race","clearTimeout","elapsedTime","debugResponseFile","response","choices","content","trim","responseSize","responseSizeKB","elapsedTimeFormatted","usage","prompt_tokens","completion_tokens","total_tokens","substring","safeJsonParse","stack","isTokenError","createCompletionWithRetry","retryCallback","maxRetries","attempt","messagesToSend","warn","backoffMs","Math","min","pow","resolve","transcribeAudio","filePath","audioStream","streamClosed","closeAudioStream","destroy","destroyed","streamErr","file","readStream","on","streamError","transcription","audio","transcriptions","apiError","outputDir","outputDirectory","archiveAudio","text","archiveError"],"mappings":";;;;;;;;;;;;;;;;;;;AAcA;;;AAGC,IACM,SAASA,kBAAAA,CAAmBC,MAAc,EAAEC,WAAmB,EAAA;IAClE,IAAIC,YAAAA;IAEJ,OAAQD,WAAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACcD,YAAAA,IAAAA,cAAAA;AAAfE,YAAAA,YAAAA,GAAAA,CAAeF,iBAAAA,MAAAA,CAAOG,MAAM,MAAA,IAAA,IAAbH,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeI,KAAK;AACnC,YAAA;QACJ,KAAK,SAAA;AACcJ,YAAAA,IAAAA,eAAAA;AAAfE,YAAAA,YAAAA,GAAAA,CAAeF,kBAAAA,MAAAA,CAAOK,OAAO,MAAA,IAAA,IAAdL,eAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,gBAAgBI,KAAK;AACpC,YAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACcJ,YAAAA,IAAAA,cAAAA;AAAfE,YAAAA,YAAAA,GAAAA,CAAeF,iBAAAA,MAAAA,CAAOM,MAAM,MAAA,IAAA,IAAbN,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeI,KAAK;AACnC,YAAA;AAIR;;IAGA,OAAOF,YAAAA,IAAgBF,MAAAA,CAAOI,KAAK,IAAI,aAAA;AAC3C;AAEA;;;AAGC,IACM,SAASG,4BAAAA,CAA6BP,MAAc,EAAEC,WAAmB,EAAA;IAC5E,IAAIO,gBAAAA;IAEJ,OAAQP,WAAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACkBD,YAAAA,IAAAA,cAAAA;AAAnBQ,YAAAA,gBAAAA,GAAAA,CAAmBR,iBAAAA,MAAAA,CAAOG,MAAM,MAAA,IAAA,IAAbH,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeS,eAAe;AACjD,YAAA;QACJ,KAAK,SAAA;AACkBT,YAAAA,IAAAA,eAAAA;AAAnBQ,YAAAA,gBAAAA,GAAAA,CAAmBR,kBAAAA,MAAAA,CAAOK,OAAO,MAAA,IAAA,IAAdL,eAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,gBAAgBS,eAAe;AAClD,YAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACkBT,YAAAA,IAAAA,cAAAA;AAAnBQ,YAAAA,gBAAAA,GAAAA,CAAmBR,iBAAAA,MAAAA,CAAOM,MAAM,MAAA,IAAA,IAAbN,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeS,eAAe;AACjD,YAAA;AAIR;;IAGA,OAAOD,gBAAAA,IAAoBR,MAAAA,CAAOS,eAAe,IAAI,KAAA;AACzD;AAEA;;;AAGC,IACM,SAASC,kCAAAA,CAAmCV,MAAc,EAAEC,WAAmB,EAAA;IAClF,IAAIU,sBAAAA;IAEJ,OAAQV,WAAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACwBD,YAAAA,IAAAA,cAAAA;AAAzBW,YAAAA,sBAAAA,GAAAA,CAAyBX,iBAAAA,MAAAA,CAAOG,MAAM,MAAA,IAAA,IAAbH,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeY,qBAAqB;AAC7D,YAAA;QACJ,KAAK,SAAA;AACwBZ,YAAAA,IAAAA,eAAAA;AAAzBW,YAAAA,sBAAAA,GAAAA,CAAyBX,kBAAAA,MAAAA,CAAOK,OAAO,MAAA,IAAA,IAAdL,eAAAA,KAAAA,MAA
AA,GAAAA,MAAAA,GAAAA,gBAAgBY,qBAAqB;AAC9D,YAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACwBZ,YAAAA,IAAAA,cAAAA;AAAzBW,YAAAA,sBAAAA,GAAAA,CAAyBX,iBAAAA,MAAAA,CAAOM,MAAM,MAAA,IAAA,IAAbN,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeY,qBAAqB;AAC7D,YAAA;AAIR;;IAGA,OAAOD,sBAAAA,IAA0BX,MAAAA,CAAOY,qBAAqB,IAAI,KAAA;AACrE;AAEO,MAAMC,WAAAA,SAAoBC,KAAAA,CAAAA;AAC7B,IAAA,WAAA,CAAYC,OAAe,EAAE,iBAAgBC,GAA6B,KAAK,CAAE;QAC7E,KAAK,CAACD,oEADmCC,iBAAAA,GAAAA,iBAAAA;QAEzC,IAAI,CAACC,IAAI,GAAG,aAAA;AAChB,IAAA;AACJ;AAEA;AACO,SAASD,kBAAkBE,KAAU,EAAA;AACxC,IAAA,IAAI,EAACA,KAAAA,KAAAA,IAAAA,IAAAA,KAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,KAAAA,CAAOH,OAAO,GAAE,OAAO,KAAA;AAE5B,IAAA,MAAMA,OAAAA,GAAUG,KAAAA,CAAMH,OAAO,CAACI,WAAW,EAAA;AACzC,IAAA,OAAOJ,QAAQK,QAAQ,CAAC,6BACjBL,OAAAA,CAAQK,QAAQ,CAAC,yBAAA,CAAA,IACjBL,OAAAA,CAAQK,QAAQ,CAAC,kBACjBL,OAAAA,CAAQK,QAAQ,CAAC,iBAAA,CAAA,IACjBL,OAAAA,CAAQK,QAAQ,CAAC,mBAAA,CAAA;AAC5B;AAEA;AACO,SAASC,iBAAiBH,KAAU,EAAA;AACvC,IAAA,IAAI,EAACA,KAAAA,KAAAA,IAAAA,IAAAA,KAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,MAAOH,OAAO,CAAA,IAAI,EAACG,KAAAA,KAAAA,IAAAA,IAAAA,KAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,KAAAA,CAAOI,IAAI,KAAI,EAACJ,KAAAA,KAAAA,IAAAA,IAAAA,4BAAAA,KAAAA,CAAOK,MAAM,GAAE,OAAO,KAAA;;AAG9D,IAAA,IAAIL,MAAMK,MAAM,KAAK,OAAOL,KAAAA,CAAMI,IAAI,KAAK,qBAAA,EAAuB;QAC9D,OAAO,IAAA;AACX,IAAA;;IAGA,IAAIJ,KAAAA,CAAMH,OAAO,EAAE;AACf,QAAA,MAAMA,OAAAA,GAAUG,KAAAA,CAAMH,OAAO,CAACI,WAAW,EAAA;AACzC,QAAA,OAAOJ,QAAQK,QAAQ,CAAC,0BACjBL,OAAAA,CAAQK,QAAQ,CAAC,mBAAA,CAAA,IACjBL,OAAAA,CAAQK,QAAQ,CAAC,qBAChBL,OAAAA,CAAQK,QAAQ,CAAC,MAAA,CAAA,IAAWL,OAAAA,CAAQK,QAAQ,CAAC,OAAA,CAAA;AACzD,IAAA;IAEA,OAAO,KAAA;AACX;AAEO,eAAeI,gBAAAA,CAAiBC,QAAsC,EAAEC,OAAAA,GAAiP;IAAEtB,KAAAA,EAAO;AAAc,CAAC,EAAA;AACpV,IAAA,MAAMuB,MAAAA,GAASC,SAAAA,EAAAA;IACf,MAAMC,OAAAA,GAAUC,MAAc,CAAC;AAAEC,QAAAA,GAAAA,EAAKJ,OAAOK;AAAM,KAAA,CAAA;AACnD,IAAA,IAAIC,MAAAA,GAAwB,IAAA;IAC5B,IAAI;AAuFiBC,QAAAA,IAAAA,oCAAAA,EAAAA,4BAAAA,EAAAA,oBAAAA;AAtFjB,QAAA,MAAMC,MAAAA,GAASC,OAAAA,CAAQC,GAAG,CAACC,cAAc;AACzC,QAAA,IAAI,CAACH,MAAAA,EAAQ;AACT,YAAA,MAAM,IAAItB,WAAAA,CAAY,gDAAA,CAAA;AAC1B,QAAA;;QAGA,MAAM0B,SAAAA,GAAYC,SAASJ,OAAAA,CAAQC,GAAG,CAACI,iBAAiB,IAAI;AAC5DR,QAAAA,MAAAA,GAAS,IAAIS,MAAAA,CAAO;YAChBP,MAAAA,EAAQA,MAAAA;YACRQ,OAAAA,EAASJ;AACb,SAAA,CAAA;QAEA,MAAMK,UAAAA,GAAalB,OAAAA,CAAQtB,KAAK,IAAI,aAAA;;AAGpC,QAAA,MAAMyC,WAAAA,GAAcC,IAAAA,CAAKC,SAAS,CAACtB,UAAUuB,MAAM;AACnD,QAAA,MAAMC,gBAAgB,CAACJ,cAAc,IAAG,EAAGK,OAAO,CAAC,CAAA,CAAA;;QAGnD,MAAMC,aAAAA,GAAgBzB,OAAAA,CAAQjB,eAAe,GAAG,CAAC,cAAc,EAAEiB,OAAAA,CAAQjB,eAAe,CAAA,CAAE,GAAG,EAAA;AAC7FkB,QAAAA,MAAAA,CAAOyB,IAAI,CAAC,6BAAA,CAAA;QACZzB,MAAAA,CAAOyB,IAAI,CAAC,gBAAA,EAAkBR,UAAAA,EAAYO,aAAAA,CAAAA;AAC1CxB,QAAAA,MAAAA,CAAOyB,IAAI,CAAC,mCAAA,EAAqCH,aAAAA,EAAeJ,YAAYQ,cAAc,EAAA,CAAA;QAE1F1B,MAAAA,CAAOK,KAAK,CAAC,8BAAA,EAAgCP,QAAAA,CAAAA;YAGjBC,8BAAAA,EAAAA,IAAAA;;AAA5B,QAAA,MAAM4B,mBAAAA,GAAsB5B,CAAAA,IAAAA,GAAAA,CAAAA,iCAAAA,OAAAA,CAAQd,qBAAqB,MAAA,IAAA,IAA7Bc,8BAAAA,KAAAA,KAAAA,CAAAA,GAAAA,8BAAAA,GAAiCA,OAAAA,CAAQ6B,SAAS,MAAA,IAAA,IAAlD7B,kBAAAA,IAAAA,GAAsD,KAAA;;QAGlF,IAAIA,OAAAA,CAAQM,KAAK,KAAKN,OAAAA,CAAQ8B,gBAAgB,IAAI9B,OAAAA,CAAQ+B,SAAQ,CAAA,EAAI;AAClE,YAAA,MAAMC,WAAAA,GAAc;gBAChBtD,KAAAA,EAAOwC,UAAAA;AACPnB,gBAAAA,QAAAA;gBACAkC,qBAAAA,EAAuBL,mBAAAA;AACvBM,gBAAAA,eAAAA,EAAiBlC,QAAQmC,cAAc;AACvCC,gBAAAA,gBAAAA,EAAkBpC,QAAQjB;AAC9B,aAAA;AACA,YAAA,MAAMgD,SAAAA,GAAY/B,OAAAA,CAAQ8B,gBAAgB,IAAI9B,QAAQ+B,SAAS;YAC/D,MAAM5B,OAAAA,CAAQkC,SAAS,CAACN,SAAAA,EAAYX,KAAKC,SAAS,CAACW,WAAAA,EAAa,IAAA,EAAM,CAAA,CAAA,EAAI,MAAA,CAAA;YAC1E/B,MAAAA,CAAOK,KAAK,CAAC,gCAAA,EAAkCyB,SAAAA,CAAAA;AACnD,QAAA;;AAGA,QAAA,MAAMO,UAAAA,GAAkB;YACpB5D,KAAAA,EAAOwC,U
AAAA;AACPnB,YAAAA,QAAAA;YACAkC,qBAAAA,EAAuBL,mBAAAA;AACvBM,YAAAA,eAAAA,EAAiBlC,QAAQmC;AAC7B,SAAA;;AAGA,QAAA,IAAInC,OAAAA,CAAQjB,eAAe,KAAKmC,UAAAA,CAAWxB,QAAQ,CAAC,OAAA,CAAA,IAAYwB,UAAAA,CAAWxB,QAAQ,CAAC,IAAA,CAAI,CAAA,EAAI;YACxF4C,UAAAA,CAAWF,gBAAgB,GAAGpC,OAAAA,CAAQjB,eAAe;AACzD,QAAA;;QAGA,MAAMwD,SAAAA,GAAYC,KAAKC,GAAG,EAAA;AAC1B,QAAA,MAAMC,oBAAoBnC,MAAAA,CAAOoC,IAAI,CAACC,WAAW,CAACC,MAAM,CAACP,UAAAA,CAAAA;;AAGzD,QAAA,IAAIQ,SAAAA,GAAmC,IAAA;AACvC,QAAA,MAAMC,cAAAA,GAAiB,IAAIC,OAAAA,CAAe,CAACC,CAAAA,EAAGC,MAAAA,GAAAA;YAC1C,MAAMrC,SAAAA,GAAYC,SAASJ,OAAAA,CAAQC,GAAG,CAACI,iBAAiB,IAAI;AAC5D+B,YAAAA,SAAAA,GAAYK,UAAAA,CAAW,IAAMD,MAAAA,CAAO,IAAI/D,WAAAA,CAAY,CAAC,gCAAgC,EAAE0B,SAAAA,GAAU,IAAA,CAAK,QAAQ,CAAC,CAAA,CAAA,EAAIA,SAAAA,CAAAA;AACvH,QAAA,CAAA,CAAA;QAEA,IAAIL,UAAAA;QACJ,IAAI;YACAA,UAAAA,GAAa,MAAMwC,OAAAA,CAAQI,IAAI,CAAC;AAACV,gBAAAA,iBAAAA;AAAmBK,gBAAAA;AAAe,aAAA,CAAA;QACvE,CAAA,QAAU;;AAEN,YAAA,IAAID,cAAc,IAAA,EAAM;gBACpBO,YAAAA,CAAaP,SAAAA,CAAAA;AACjB,YAAA;AACJ,QAAA;QAEA,MAAMQ,WAAAA,GAAcd,IAAAA,CAAKC,GAAG,EAAA,GAAKF,SAAAA;;QAGjC,IAAIvC,OAAAA,CAAQM,KAAK,KAAKN,OAAAA,CAAQuD,iBAAiB,IAAIvD,OAAAA,CAAQ+B,SAAQ,CAAA,EAAI;AACnE,YAAA,MAAMA,SAAAA,GAAY/B,OAAAA,CAAQuD,iBAAiB,IAAIvD,QAAQ+B,SAAS;YAChE,MAAM5B,OAAAA,CAAQkC,SAAS,CAACN,SAAAA,EAAYX,KAAKC,SAAS,CAACb,UAAAA,EAAY,IAAA,EAAM,CAAA,CAAA,EAAI,MAAA,CAAA;YACzEP,MAAAA,CAAOK,KAAK,CAAC,iCAAA,EAAmCyB,SAAAA,CAAAA;AACpD,QAAA;AAEA,QAAA,MAAMyB,YAAWhD,oBAAAA,GAAAA,UAAAA,CAAWiD,OAAO,CAAC,CAAA,CAAE,cAArBjD,oBAAAA,KAAAA,KAAAA,CAAAA,GAAAA,KAAAA,CAAAA,GAAAA,CAAAA,4BAAAA,GAAAA,qBAAuBnB,OAAO,MAAA,IAAA,IAA9BmB,oDAAAA,oCAAAA,GAAAA,4BAAAA,CAAgCkD,OAAO,MAAA,IAAA,IAAvClD,oCAAAA,KAAAA,KAAAA,CAAAA,GAAAA,KAAAA,CAAAA,GAAAA,qCAAyCmD,IAAI,EAAA;AAC9D,QAAA,IAAI,CAACH,QAAAA,EAAU;AACX,YAAA,MAAM,IAAIrE,WAAAA,CAAY,kCAAA,CAAA;AAC1B,QAAA;;QAGA,MAAMyE,YAAAA,GAAeJ,SAASlC,MAAM;AACpC,QAAA,MAAMuC,iBAAiB,CAACD,eAAe,IAAG,EAAGpC,OAAO,CAAC,CAAA,CAAA;AACrDvB,QAAAA,MAAAA,CAAOyB,IAAI,CAAC,oCAAA,EAAsCmC,cAAAA,EAAgBD,aAAajC,cAAc,EAAA,CAAA;;AAG7F,QAAA,MAAMmC,uBAAuBR,WAAAA,IAAe,IAAA,GACtC,GAAG,CAACA,cAAc,IAAG,EAAG9B,OAAO,CAAC,GAAG,CAAC,CAAC,GACrC,CAAA,EAAG8B,WAAAA,CAAY,EAAE,CAAC;QACxBrD,MAAAA,CAAOyB,IAAI,CAAC,aAAA,EAAeoC,oBAAAA,CAAAA;;QAG3B,IAAItD,UAAAA,CAAWuD,KAAK,EAAE;AAEdvD,YAAAA,IAAAA,+BAAAA,EACAA,mCAAAA,EACAA,8BAAAA;AAHJP,YAAAA,MAAAA,CAAOyB,IAAI,CAAC,sDAAA,EACRlB,CAAAA,CAAAA,+BAAAA,GAAAA,WAAWuD,KAAK,CAACC,aAAa,MAAA,IAAA,IAA9BxD,sDAAAA,+BAAAA,CAAgCmB,cAAc,EAAA,KAAM,GAAA,EACpDnB,EAAAA,mCAAAA,GAAAA,UAAAA,CAAWuD,KAAK,CAACE,iBAAiB,MAAA,IAAA,IAAlCzD,mCAAAA,KAAAA,KAAAA,CAAAA,GAAAA,KAAAA,CAAAA,GAAAA,mCAAAA,CAAoCmB,cAAc,OAAM,GAAA,EACxDnB,CAAAA,CAAAA,8BAAAA,GAAAA,UAAAA,CAAWuD,KAAK,CAACG,YAAY,cAA7B1D,8BAAAA,KAAAA,KAAAA,CAAAA,GAAAA,KAAAA,CAAAA,GAAAA,8BAAAA,CAA+BmB,cAAc,EAAA,KAAM,GAAA,CAAA;AAE3D,QAAA;AAEA1B,QAAAA,MAAAA,CAAOK,KAAK,CAAC,sCAAA,EAAwCkD,QAAAA,CAASW,SAAS,CAAC,CAAA,EAAG,EAAA,CAAA,CAAA;QAC3E,IAAInE,OAAAA,CAAQmC,cAAc,EAAE;AACxB,YAAA,OAAOiC,cAAcZ,QAAAA,EAAU,qBAAA,CAAA;QACnC,CAAA,MAAO;YACH,OAAOA,QAAAA;AACX,QAAA;AAEJ,IAAA,CAAA,CAAE,OAAOhE,KAAAA,EAAY;AACjBS,QAAAA,MAAAA,CAAOT,KAAK,CAAC,iCAAA,EAAmCA,MAAMH,OAAO,EAAEG,MAAM6E,KAAK,CAAA;AAC1E,QAAA,MAAMC,eAAehF,iBAAAA,CAAkBE,KAAAA,CAAAA;QACvC,MAAM,IAAIL,YAAY,CAAC,6BAA6B,EAAEK,KAAAA,CAAMH,OAAO,EAAE,EAAEiF,YAAAA,CAAAA;IAC3E,CAAA,QAAU;;;AAGV,IAAA;AACJ;AAEA;AACO,eAAeC,yBAAAA,CAClBxE,QAAsC,EACtCC,OAAAA,GAAiP;IAAEtB,KAAAA,EAAO;AAAc,CAAC,EACzQ8F,aAA0E,EAAA;AAE1E,IAAA,MAAMvE,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAMuE,UAAAA,GAAa,CAAA;AAEnB,IAAA,IAAK,IAAIC,OAAAA,GAAU,CAAA,EAAGA,OAAAA,IAAWD,YAAYC,OAAAA,EAAAA,CAAW;QACpD,IAAI;AACA,YAAA,MAAMC,iBAAiBD,OAAAA,KAAY,CAAA,GAAI3E,WAAYyE,
aAAAA,GAAgB,MAAMA,cAAcE,OAAAA,CAAAA,GAAW3E,QAAAA;YAClG,OAAO,MAAMD,iBAAiB6E,cAAAA,EAAgB3E,OAAAA,CAAAA;AAClD,QAAA,CAAA,CAAE,OAAOR,KAAAA,EAAY;AACjB,YAAA,IAAIA,iBAAiBL,WAAAA,IAAeK,KAAAA,CAAMF,iBAAiB,IAAIoF,OAAAA,GAAUD,cAAcD,aAAAA,EAAe;gBAClGvE,MAAAA,CAAO2E,IAAI,CAAC,yEAAA,EAA2EF,OAAAA,EAASD,UAAAA,CAAAA;;gBAEhG,MAAMI,SAAAA,GAAYC,IAAAA,CAAKC,GAAG,CAAC,IAAA,GAAOD,KAAKE,GAAG,CAAC,CAAA,EAAGN,OAAAA,GAAU,CAAA,CAAA,EAAI,KAAA,CAAA;AAC5D,gBAAA,MAAM,IAAI1B,OAAAA,CAAQiC,CAAAA,OAAAA,GAAW9B,WAAW8B,OAAAA,EAASJ,SAAAA,CAAAA,CAAAA;AACjD,gBAAA;AACJ,YAAA,CAAA,MAAO,IAAIlF,gBAAAA,CAAiBH,KAAAA,CAAAA,IAAUkF,OAAAA,GAAUD,UAAAA,EAAY;;AAExD,gBAAA,MAAMI,SAAAA,GAAYC,IAAAA,CAAKC,GAAG,CAAC,IAAA,GAAOD,IAAAA,CAAKE,GAAG,CAAC,CAAA,EAAGN,OAAAA,GAAU,CAAA,CAAA,EAAI,KAAA,CAAA,CAAA;AAC5DzE,gBAAAA,MAAAA,CAAO2E,IAAI,CAAC,CAAC,0BAA0B,EAAEF,OAAAA,CAAQ,CAAC,EAAED,UAAAA,CAAW,UAAU,EAAEI,SAAAA,CAAU,kBAAkB,CAAC,CAAA;AACxG,gBAAA,MAAM,IAAI7B,OAAAA,CAAQiC,CAAAA,OAAAA,GAAW9B,WAAW8B,OAAAA,EAASJ,SAAAA,CAAAA,CAAAA;AACjD,gBAAA;AACJ,YAAA;YACA,MAAMrF,KAAAA;AACV,QAAA;AACJ,IAAA;;AAGA,IAAA,MAAM,IAAIL,WAAAA,CAAY,sBAAA,CAAA;AAC1B;AAEO,eAAe+F,eAAAA,CAAgBC,QAAgB,EAAEnF,OAAAA,GAAoJ;IAAEtB,KAAAA,EAAO;AAAY,CAAC,EAAA;AAC9N,IAAA,MAAMuB,MAAAA,GAASC,SAAAA,EAAAA;IACf,MAAMC,OAAAA,GAAUC,MAAc,CAAC;AAAEC,QAAAA,GAAAA,EAAKJ,OAAOK;AAAM,KAAA,CAAA;AACnD,IAAA,IAAIC,MAAAA,GAAwB,IAAA;AAC5B,IAAA,IAAI6E,WAAAA,GAAoC,IAAA;AACxC,IAAA,IAAIC,YAAAA,GAAe,KAAA;;AAGnB,IAAA,MAAMC,gBAAAA,GAAmB,IAAA;QACrB,IAAIF,WAAAA,IAAe,CAACC,YAAAA,EAAc;YAC9B,IAAI;;gBAEA,IAAI,OAAOD,YAAYG,OAAO,KAAK,cAAc,CAACH,WAAAA,CAAYI,SAAS,EAAE;AACrEJ,oBAAAA,WAAAA,CAAYG,OAAO,EAAA;AACvB,gBAAA;gBACAF,YAAAA,GAAe,IAAA;AACfpF,gBAAAA,MAAAA,CAAOK,KAAK,CAAC,kCAAA,CAAA;AACjB,YAAA,CAAA,CAAE,OAAOmF,SAAAA,EAAW;AAChBxF,gBAAAA,MAAAA,CAAOK,KAAK,CAAC,yCAAA,EAA4CmF,UAAoBpG,OAAO,CAAA;AACpFgG,gBAAAA,YAAAA,GAAe;AACnB,YAAA;AACJ,QAAA;AACJ,IAAA,CAAA;IAEA,IAAI;AACA,QAAA,MAAM5E,MAAAA,GAASC,OAAAA,CAAQC,GAAG,CAACC,cAAc;AACzC,QAAA,IAAI,CAACH,MAAAA,EAAQ;AACT,YAAA,MAAM,IAAItB,WAAAA,CAAY,gDAAA,CAAA;AAC1B,QAAA;AAEAoB,QAAAA,MAAAA,GAAS,IAAIS,MAAAA,CAAO;YAChBP,MAAAA,EAAQA;AACZ,SAAA,CAAA;QAEAR,MAAAA,CAAOK,KAAK,CAAC,6BAAA,EAA+B6E,QAAAA,CAAAA;;QAG5C,IAAInF,OAAAA,CAAQM,KAAK,KAAKN,OAAAA,CAAQ8B,gBAAgB,IAAI9B,OAAAA,CAAQ+B,SAAQ,CAAA,EAAI;AAClE,YAAA,MAAMC,WAAAA,GAAc;gBAChBtD,KAAAA,EAAOsB,OAAAA,CAAQtB,KAAK,IAAI,WAAA;gBACxBgH,IAAAA,EAAMP,QAAAA;gBACNjD,eAAAA,EAAiB;AACrB,aAAA;AACA,YAAA,MAAMH,SAAAA,GAAY/B,OAAAA,CAAQ8B,gBAAgB,IAAI9B,QAAQ+B,SAAS;YAC/D,MAAM5B,OAAAA,CAAQkC,SAAS,CAACN,SAAAA,EAAYX,KAAKC,SAAS,CAACW,WAAAA,EAAa,IAAA,EAAM,CAAA,CAAA,EAAI,MAAA,CAAA;YAC1E/B,MAAAA,CAAOK,KAAK,CAAC,gCAAA,EAAkCyB,SAAAA,CAAAA;AACnD,QAAA;QAEAqD,WAAAA,GAAc,MAAMjF,OAAAA,CAAQwF,UAAU,CAACR,QAAAA,CAAAA;;;AAIvC,QAAA,IAAIC,WAAAA,IAAe,OAAOA,WAAAA,CAAYQ,EAAE,KAAK,UAAA,EAAY;YACrDR,WAAAA,CAAYQ,EAAE,CAAC,OAAA,EAAS,CAACC,WAAAA,GAAAA;AACrB5F,gBAAAA,MAAAA,CAAOT,KAAK,CAAC,wBAAA,EAA0BqG,WAAAA,CAAYxG,OAAO,CAAA;AAC1DiG,gBAAAA,gBAAAA,EAAAA;AACJ,YAAA,CAAA,CAAA;AACJ,QAAA;QAEA,IAAIQ,aAAAA;QACJ,IAAI;AACAA,YAAAA,aAAAA,GAAgB,MAAMvF,MAAAA,CAAOwF,KAAK,CAACC,cAAc,CAACnD,MAAM,CAAC;gBACrDnE,KAAAA,EAAOsB,OAAAA,CAAQtB,KAAK,IAAI,WAAA;gBACxBgH,IAAAA,EAAMN,WAAAA;gBACNlD,eAAAA,EAAiB;AACrB,aAAA,CAAA;;AAEAoD,YAAAA,gBAAAA,EAAAA;AACJ,QAAA,CAAA,CAAE,OAAOW,QAAAA,EAAU;;AAEfX,YAAAA,gBAAAA,EAAAA;YACA,MAAMW,QAAAA;AACV,QAAA;;QAGA,IAAIjG,OAAAA,CAAQM,KAAK,KAAKN,OAAAA,CAAQuD,iBAAiB,IAAIvD,OAAAA,CAAQ+B,SAAQ,CAAA,EAAI;AACnE,YAAA,MAAMA,SAAAA,GAAY/B,OAAAA,CAAQuD,iBAAiB,IAAIvD,QAAQ+B,SAAS;YAChE,MAAM5B,OAAAA,CAAQkC,SAAS,CAACN,SAAAA,EAAYX,KAAKC,SAAS,CAACyE,aAAAA,EAAe,IAAA,EAAM,CAAA,CAAA,EAAI,MAAA,CAAA;YAC5E7F,MAAAA,CAAOK,KAAK,CAAC,iCAAA,EAAmCyB,S
AAAA,CAAAA;AACpD,QAAA;AAEA,QAAA,MAAMyB,QAAAA,GAAWsC,aAAAA;AACjB,QAAA,IAAI,CAACtC,QAAAA,EAAU;AACX,YAAA,MAAM,IAAIrE,WAAAA,CAAY,uCAAA,CAAA;AAC1B,QAAA;QAEAc,MAAAA,CAAOK,KAAK,CAAC,wCAAA,EAA0CkD,QAAAA,CAAAA;;QAGvD,IAAI;YACA,MAAM0C,SAAAA,GAAYlG,OAAAA,CAAQmG,eAAe,IAAI,QAAA;AAC7C,YAAA,MAAMC,YAAAA,CAAajB,QAAAA,EAAU3B,QAAAA,CAAS6C,IAAI,EAAEH,SAAAA,CAAAA;AAChD,QAAA,CAAA,CAAE,OAAOI,YAAAA,EAAmB;;AAExBrG,YAAAA,MAAAA,CAAO2E,IAAI,CAAC,kCAAA,EAAoC0B,YAAAA,CAAajH,OAAO,CAAA;AACxE,QAAA;QAEA,OAAOmE,QAAAA;AAEX,IAAA,CAAA,CAAE,OAAOhE,KAAAA,EAAY;AACjBS,QAAAA,MAAAA,CAAOT,KAAK,CAAC,sCAAA,EAAwCA,MAAMH,OAAO,EAAEG,MAAM6E,KAAK,CAAA;AAC/E,QAAA,MAAM,IAAIlF,WAAAA,CAAY,CAAC,4BAA4B,EAAEK,KAAAA,CAAMH,OAAO,CAAA,CAAE,CAAA;IACxE,CAAA,QAAU;;AAENiG,QAAAA,gBAAAA,EAAAA;;;AAGJ,IAAA;AACJ;;;;"}
+ {"version":3,"file":"openai.js","sources":["../../src/util/openai.ts"],"sourcesContent":["import { OpenAI } from 'openai';\nimport { ChatCompletionMessageParam } from 'openai/resources';\nimport * as Storage from './storage';\nimport { getLogger } from '../logging';\nimport { archiveAudio } from './general';\nimport { Config } from '../types';\nimport { safeJsonParse } from '@eldrforge/git-tools';\n// eslint-disable-next-line no-restricted-imports\nimport fs from 'fs';\n\nexport interface Transcription {\n text: string;\n}\n\n/**\n * Get the appropriate model to use based on command-specific configuration\n * Command-specific model overrides the global model setting\n */\nexport function getModelForCommand(config: Config, commandName: string): string {\n let commandModel: string | undefined;\n\n switch (commandName) {\n case 'commit':\n case 'audio-commit':\n commandModel = config.commit?.model;\n break;\n case 'release':\n commandModel = config.release?.model;\n break;\n case 'review':\n case 'audio-review':\n commandModel = config.review?.model;\n break;\n default:\n // For other commands, just use global model\n break;\n }\n\n // Return command-specific model if available, otherwise global model\n return commandModel || config.model || 'gpt-4o-mini';\n}\n\n/**\n * Get the appropriate OpenAI reasoning level based on command-specific configuration\n * Command-specific reasoning overrides the global reasoning setting\n */\nexport function getOpenAIReasoningForCommand(config: Config, commandName: string): 'low' | 'medium' | 'high' {\n let commandReasoning: 'low' | 'medium' | 'high' | undefined;\n\n switch (commandName) {\n case 'commit':\n case 'audio-commit':\n commandReasoning = config.commit?.openaiReasoning;\n break;\n case 'release':\n commandReasoning = config.release?.openaiReasoning;\n break;\n case 'review':\n case 'audio-review':\n commandReasoning = config.review?.openaiReasoning;\n break;\n default:\n // For other commands, just use global reasoning\n break;\n }\n\n // Return command-specific reasoning if available, otherwise global reasoning\n return commandReasoning || config.openaiReasoning || 'low';\n}\n\n/**\n * Get the appropriate OpenAI max output tokens based on command-specific configuration\n * Command-specific max output tokens overrides the global setting\n */\nexport function getOpenAIMaxOutputTokensForCommand(config: Config, commandName: string): number {\n let commandMaxOutputTokens: number | undefined;\n\n switch (commandName) {\n case 'commit':\n case 'audio-commit':\n commandMaxOutputTokens = config.commit?.openaiMaxOutputTokens;\n break;\n case 'release':\n commandMaxOutputTokens = config.release?.openaiMaxOutputTokens;\n break;\n case 'review':\n case 'audio-review':\n commandMaxOutputTokens = config.review?.openaiMaxOutputTokens;\n break;\n default:\n // For other commands, just use global max output tokens\n break;\n }\n\n // Return command-specific max output tokens if available, otherwise global setting\n return commandMaxOutputTokens || config.openaiMaxOutputTokens || 10000;\n}\n\nexport class OpenAIError extends Error {\n constructor(message: string, public readonly isTokenLimitError: boolean = false) {\n super(message);\n this.name = 'OpenAIError';\n }\n}\n\n// Check if an error is a token limit exceeded error\nexport function isTokenLimitError(error: any): boolean {\n if (!error?.message) return false;\n\n const message = error.message.toLowerCase();\n return message.includes('maximum context length') ||\n message.includes('context_length_exceeded') 
||\n message.includes('token limit') ||\n message.includes('too many tokens') ||\n message.includes('reduce the length');\n}\n\n// Check if an error is a rate limit error\nexport function isRateLimitError(error: any): boolean {\n if (!error?.message && !error?.code && !error?.status) return false;\n\n // Check for OpenAI specific rate limit indicators\n if (error.status === 429 || error.code === 'rate_limit_exceeded') {\n return true;\n }\n\n // Only check message if it exists\n if (error.message) {\n const message = error.message.toLowerCase();\n return message.includes('rate limit exceeded') ||\n message.includes('too many requests') ||\n message.includes('quota exceeded') ||\n (message.includes('rate') && message.includes('limit'));\n }\n\n return false;\n}\n\nexport async function createCompletion(messages: ChatCompletionMessageParam[], options: { responseFormat?: any, model?: string, debug?: boolean, debugFile?: string, debugRequestFile?: string, debugResponseFile?: string, maxTokens?: number, openaiReasoning?: 'low' | 'medium' | 'high', openaiMaxOutputTokens?: number } = { model: \"gpt-4o-mini\" }): Promise<string | any> {\n const logger = getLogger();\n const storage = Storage.create({ log: logger.debug });\n let openai: OpenAI | null = null;\n try {\n const apiKey = process.env.OPENAI_API_KEY;\n if (!apiKey) {\n throw new OpenAIError('OPENAI_API_KEY environment variable is not set');\n }\n\n // Create the client which we'll close in the finally block.\n const timeoutMs = parseInt(process.env.OPENAI_TIMEOUT_MS || '300000'); // Default to 5 minutes\n openai = new OpenAI({\n apiKey: apiKey,\n timeout: timeoutMs,\n });\n\n const modelToUse = options.model || \"gpt-4o-mini\";\n\n // Calculate request size\n const requestSize = JSON.stringify(messages).length;\n const requestSizeKB = (requestSize / 1024).toFixed(2);\n\n // Log model, reasoning level, and request size\n const reasoningInfo = options.openaiReasoning ? ` | Reasoning: ${options.openaiReasoning}` : '';\n logger.info('🤖 Making request to OpenAI');\n logger.info(' Model: %s%s', modelToUse, reasoningInfo);\n logger.info(' Request size: %s KB (%s bytes)', requestSizeKB, requestSize.toLocaleString());\n\n logger.debug('Sending prompt to OpenAI: %j', messages);\n\n // Use openaiMaxOutputTokens if specified (highest priority), otherwise fall back to maxTokens, or default to 10000\n const maxCompletionTokens = options.openaiMaxOutputTokens ?? options.maxTokens ?? 
10000;\n\n // Save request debug file if enabled\n if (options.debug && (options.debugRequestFile || options.debugFile)) {\n const requestData = {\n model: modelToUse,\n messages,\n max_completion_tokens: maxCompletionTokens,\n response_format: options.responseFormat,\n reasoning_effort: options.openaiReasoning,\n };\n const debugFile = options.debugRequestFile || options.debugFile;\n await storage.writeFile(debugFile!, JSON.stringify(requestData, null, 2), 'utf8');\n logger.debug('Wrote request debug file to %s', debugFile);\n }\n\n // Prepare the API call options\n const apiOptions: any = {\n model: modelToUse,\n messages,\n max_completion_tokens: maxCompletionTokens,\n response_format: options.responseFormat,\n };\n\n // Add reasoning parameter if specified and model supports it\n if (options.openaiReasoning && (modelToUse.includes('gpt-5') || modelToUse.includes('o3'))) {\n apiOptions.reasoning_effort = options.openaiReasoning;\n }\n\n // Add timeout wrapper to the OpenAI API call\n const startTime = Date.now();\n const completionPromise = openai.chat.completions.create(apiOptions);\n\n // Create timeout promise with proper cleanup to prevent memory leaks\n let timeoutId: NodeJS.Timeout | null = null;\n const timeoutPromise = new Promise<never>((_, reject) => {\n const timeoutMs = parseInt(process.env.OPENAI_TIMEOUT_MS || '300000'); // Default to 5 minutes\n timeoutId = setTimeout(() => reject(new OpenAIError(`OpenAI API call timed out after ${timeoutMs/1000} seconds`)), timeoutMs);\n });\n\n let completion;\n try {\n completion = await Promise.race([completionPromise, timeoutPromise]);\n } finally {\n // Clear the timeout to prevent memory leaks\n if (timeoutId !== null) {\n clearTimeout(timeoutId);\n }\n }\n\n const elapsedTime = Date.now() - startTime;\n\n // Save response debug file if enabled\n if (options.debug && (options.debugResponseFile || options.debugFile)) {\n const debugFile = options.debugResponseFile || options.debugFile;\n await storage.writeFile(debugFile!, JSON.stringify(completion, null, 2), 'utf8');\n logger.debug('Wrote response debug file to %s', debugFile);\n }\n\n const response = completion.choices[0]?.message?.content?.trim();\n if (!response) {\n throw new OpenAIError('No response received from OpenAI');\n }\n\n // Calculate and log response size\n const responseSize = response.length;\n const responseSizeKB = (responseSize / 1024).toFixed(2);\n logger.info(' Response size: %s KB (%s bytes)', responseSizeKB, responseSize.toLocaleString());\n\n // Log elapsed time\n const elapsedTimeFormatted = elapsedTime >= 1000\n ? 
`${(elapsedTime / 1000).toFixed(1)}s`\n : `${elapsedTime}ms`;\n logger.info(' Time: %s', elapsedTimeFormatted);\n\n // Log token usage if available\n if (completion.usage) {\n logger.info(' Token usage: %s prompt + %s completion = %s total',\n completion.usage.prompt_tokens?.toLocaleString() || '?',\n completion.usage.completion_tokens?.toLocaleString() || '?',\n completion.usage.total_tokens?.toLocaleString() || '?'\n );\n }\n\n logger.debug('Received response from OpenAI: %s...', response.substring(0, 30));\n if (options.responseFormat) {\n return safeJsonParse(response, 'OpenAI API response');\n } else {\n return response;\n }\n\n } catch (error: any) {\n logger.error('Error calling OpenAI API: %s %s', error.message, error.stack);\n const isTokenError = isTokenLimitError(error);\n throw new OpenAIError(`Failed to create completion: ${error.message}`, isTokenError);\n } finally {\n // OpenAI client cleanup is handled automatically by the library\n // No manual cleanup needed for newer versions\n }\n}\n\n// Create completion with automatic retry on token limit errors\nexport async function createCompletionWithRetry(\n messages: ChatCompletionMessageParam[],\n options: { responseFormat?: any, model?: string, debug?: boolean, debugFile?: string, debugRequestFile?: string, debugResponseFile?: string, maxTokens?: number, openaiReasoning?: 'low' | 'medium' | 'high', openaiMaxOutputTokens?: number } = { model: \"gpt-4o-mini\" },\n retryCallback?: (attempt: number) => Promise<ChatCompletionMessageParam[]>\n): Promise<string | any> {\n const logger = getLogger();\n const maxRetries = 3;\n\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\n try {\n const messagesToSend = attempt === 1 ? messages : (retryCallback ? await retryCallback(attempt) : messages);\n return await createCompletion(messagesToSend, options);\n } catch (error: any) {\n if (error instanceof OpenAIError && error.isTokenLimitError && attempt < maxRetries && retryCallback) {\n logger.warn('Token limit exceeded on attempt %d/%d, retrying with reduced content...', attempt, maxRetries);\n // Add exponential backoff for token limit errors\n const backoffMs = Math.min(1000 * Math.pow(2, attempt - 1), 10000);\n await new Promise(resolve => setTimeout(resolve, backoffMs));\n continue;\n } else if (isRateLimitError(error) && attempt < maxRetries) {\n // Handle rate limiting with exponential backoff\n const backoffMs = Math.min(2000 * Math.pow(2, attempt - 1), 15000); // More reasonable backoff: 2s, 4s, 8s, max 15s\n logger.warn(`Rate limit hit on attempt ${attempt}/${maxRetries}, waiting ${backoffMs}ms before retry...`);\n await new Promise(resolve => setTimeout(resolve, backoffMs));\n continue;\n }\n throw error;\n }\n }\n\n // This should never be reached, but TypeScript requires it\n throw new OpenAIError('Max retries exceeded');\n}\n\nexport async function transcribeAudio(filePath: string, options: { model?: string, debug?: boolean, debugFile?: string, debugRequestFile?: string, debugResponseFile?: string, outputDirectory?: string } = { model: \"whisper-1\" }): Promise<Transcription> {\n const logger = getLogger();\n const storage = Storage.create({ log: logger.debug });\n let openai: OpenAI | null = null;\n let audioStream: fs.ReadStream | null = null;\n let streamClosed = false;\n\n // Helper function to safely close the stream\n const closeAudioStream = () => {\n if (audioStream && !streamClosed) {\n try {\n // Only call destroy if it exists and the stream isn't already destroyed\n if (typeof audioStream.destroy === 
'function' && !audioStream.destroyed) {\n audioStream.destroy();\n }\n streamClosed = true;\n logger.debug('Audio stream closed successfully');\n } catch (streamErr) {\n logger.debug('Failed to destroy audio read stream: %s', (streamErr as Error).message);\n streamClosed = true; // Mark as closed even if destroy failed\n }\n }\n };\n\n try {\n const apiKey = process.env.OPENAI_API_KEY;\n if (!apiKey) {\n throw new OpenAIError('OPENAI_API_KEY environment variable is not set');\n }\n\n openai = new OpenAI({\n apiKey: apiKey,\n });\n\n logger.debug('Transcribing audio file: %s', filePath);\n\n // Save request debug file if enabled\n if (options.debug && (options.debugRequestFile || options.debugFile)) {\n const requestData = {\n model: options.model || \"whisper-1\",\n file: filePath, // Can't serialize the stream, so just save the file path\n response_format: \"json\",\n };\n const debugFile = options.debugRequestFile || options.debugFile;\n await storage.writeFile(debugFile!, JSON.stringify(requestData, null, 2), 'utf8');\n logger.debug('Wrote request debug file to %s', debugFile);\n }\n\n audioStream = await storage.readStream(filePath);\n\n // Set up error handler for the stream to ensure cleanup on stream errors\n // Only add handler if the stream has the 'on' method (real streams)\n if (audioStream && typeof audioStream.on === 'function') {\n audioStream.on('error', (streamError) => {\n logger.error('Audio stream error: %s', streamError.message);\n closeAudioStream();\n });\n }\n\n let transcription;\n try {\n transcription = await openai.audio.transcriptions.create({\n model: options.model || \"whisper-1\",\n file: audioStream,\n response_format: \"json\",\n });\n // Close the stream immediately after successful API call to prevent race conditions\n closeAudioStream();\n } catch (apiError) {\n // Close the stream immediately if the API call fails\n closeAudioStream();\n throw apiError;\n }\n\n // Save response debug file if enabled\n if (options.debug && (options.debugResponseFile || options.debugFile)) {\n const debugFile = options.debugResponseFile || options.debugFile;\n await storage.writeFile(debugFile!, JSON.stringify(transcription, null, 2), 'utf8');\n logger.debug('Wrote response debug file to %s', debugFile);\n }\n\n const response = transcription;\n if (!response) {\n throw new OpenAIError('No transcription received from OpenAI');\n }\n\n logger.debug('Received transcription from OpenAI: %s', response);\n\n // Archive the audio file and transcription\n try {\n const outputDir = options.outputDirectory || 'output';\n await archiveAudio(filePath, response.text, outputDir);\n } catch (archiveError: any) {\n // Don't fail the transcription if archiving fails, just log the error\n logger.warn('Failed to archive audio file: %s', archiveError.message);\n }\n\n return response;\n\n } catch (error: any) {\n logger.error('Error transcribing audio file: %s %s', error.message, error.stack);\n throw new OpenAIError(`Failed to transcribe audio: ${error.message}`);\n } finally {\n // Ensure the audio stream is properly closed to release file handles\n closeAudioStream();\n // OpenAI client cleanup is handled automatically by the library\n // No manual cleanup needed for newer versions\n 
}\n}\n"],"names":["getModelForCommand","config","commandName","commandModel","commit","model","release","review","getOpenAIReasoningForCommand","commandReasoning","openaiReasoning","getOpenAIMaxOutputTokensForCommand","commandMaxOutputTokens","openaiMaxOutputTokens","OpenAIError","Error","message","isTokenLimitError","name","error","toLowerCase","includes","isRateLimitError","code","status","createCompletion","messages","options","logger","getLogger","storage","Storage","log","debug","openai","completion","apiKey","process","env","OPENAI_API_KEY","timeoutMs","parseInt","OPENAI_TIMEOUT_MS","OpenAI","timeout","modelToUse","requestSize","JSON","stringify","length","requestSizeKB","toFixed","reasoningInfo","info","toLocaleString","maxCompletionTokens","maxTokens","debugRequestFile","debugFile","requestData","max_completion_tokens","response_format","responseFormat","reasoning_effort","writeFile","apiOptions","startTime","Date","now","completionPromise","chat","completions","create","timeoutId","timeoutPromise","Promise","_","reject","setTimeout","race","clearTimeout","elapsedTime","debugResponseFile","response","choices","content","trim","responseSize","responseSizeKB","elapsedTimeFormatted","usage","prompt_tokens","completion_tokens","total_tokens","substring","safeJsonParse","stack","isTokenError","createCompletionWithRetry","retryCallback","maxRetries","attempt","messagesToSend","warn","backoffMs","Math","min","pow","resolve","transcribeAudio","filePath","audioStream","streamClosed","closeAudioStream","destroy","destroyed","streamErr","file","readStream","on","streamError","transcription","audio","transcriptions","apiError","outputDir","outputDirectory","archiveAudio","text","archiveError"],"mappings":";;;;;;;;;;;;;;;;;;;AAcA;;;AAGC,IACM,SAASA,kBAAAA,CAAmBC,MAAc,EAAEC,WAAmB,EAAA;IAClE,IAAIC,YAAAA;IAEJ,OAAQD,WAAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACcD,YAAAA,IAAAA,cAAAA;AAAfE,YAAAA,YAAAA,GAAAA,CAAeF,iBAAAA,MAAAA,CAAOG,MAAM,MAAA,IAAA,IAAbH,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeI,KAAK;AACnC,YAAA;QACJ,KAAK,SAAA;AACcJ,YAAAA,IAAAA,eAAAA;AAAfE,YAAAA,YAAAA,GAAAA,CAAeF,kBAAAA,MAAAA,CAAOK,OAAO,MAAA,IAAA,IAAdL,eAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,gBAAgBI,KAAK;AACpC,YAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACcJ,YAAAA,IAAAA,cAAAA;AAAfE,YAAAA,YAAAA,GAAAA,CAAeF,iBAAAA,MAAAA,CAAOM,MAAM,MAAA,IAAA,IAAbN,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeI,KAAK;AACnC,YAAA;AAIR;;IAGA,OAAOF,YAAAA,IAAgBF,MAAAA,CAAOI,KAAK,IAAI,aAAA;AAC3C;AAEA;;;AAGC,IACM,SAASG,4BAAAA,CAA6BP,MAAc,EAAEC,WAAmB,EAAA;IAC5E,IAAIO,gBAAAA;IAEJ,OAAQP,WAAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACkBD,YAAAA,IAAAA,cAAAA;AAAnBQ,YAAAA,gBAAAA,GAAAA,CAAmBR,iBAAAA,MAAAA,CAAOG,MAAM,MAAA,IAAA,IAAbH,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeS,eAAe;AACjD,YAAA;QACJ,KAAK,SAAA;AACkBT,YAAAA,IAAAA,eAAAA;AAAnBQ,YAAAA,gBAAAA,GAAAA,CAAmBR,kBAAAA,MAAAA,CAAOK,OAAO,MAAA,IAAA,IAAdL,eAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,gBAAgBS,eAAe;AAClD,YAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACkBT,YAAAA,IAAAA,cAAAA;AAAnBQ,YAAAA,gBAAAA,GAAAA,CAAmBR,iBAAAA,MAAAA,CAAOM,MAAM,MAAA,IAAA,IAAbN,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeS,eAAe;AACjD,YAAA;AAIR;;IAGA,OAAOD,gBAAAA,IAAoBR,MAAAA,CAAOS,eAAe,IAAI,KAAA;AACzD;AAEA;;;AAGC,IACM,SAASC,kCAAAA,CAAmCV,MAAc,EAAEC,WAAmB,EAAA;IAClF,IAAIU,sBAAAA;IAEJ,OAAQV,WAAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACwBD,YAAAA,IAAAA,cAAAA;AAAzBW,YAAAA,sBAAAA,GAAAA,CAAyBX,iBAAAA,MAAAA,CAAOG,MAAM,MAAA,IAAA,IAAbH,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeY,qBAAqB;AAC7D,YAAA;QACJ,KAAK,SAAA;AACwBZ,YAAAA,IAAAA,eAAAA;AAAzBW,YAAAA,sBAAAA,GAAAA,CAAyBX,kBAAAA,MAAAA,CAAOK,OAAO,MAAA,IAAA,IAAdL,eAAAA,KAAAA,MAA
AA,GAAAA,MAAAA,GAAAA,gBAAgBY,qBAAqB;AAC9D,YAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACwBZ,YAAAA,IAAAA,cAAAA;AAAzBW,YAAAA,sBAAAA,GAAAA,CAAyBX,iBAAAA,MAAAA,CAAOM,MAAM,MAAA,IAAA,IAAbN,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeY,qBAAqB;AAC7D,YAAA;AAIR;;IAGA,OAAOD,sBAAAA,IAA0BX,MAAAA,CAAOY,qBAAqB,IAAI,KAAA;AACrE;AAEO,MAAMC,WAAAA,SAAoBC,KAAAA,CAAAA;AAC7B,IAAA,WAAA,CAAYC,OAAe,EAAE,iBAAgBC,GAA6B,KAAK,CAAE;QAC7E,KAAK,CAACD,oEADmCC,iBAAAA,GAAAA,iBAAAA;QAEzC,IAAI,CAACC,IAAI,GAAG,aAAA;AAChB,IAAA;AACJ;AAEA;AACO,SAASD,kBAAkBE,KAAU,EAAA;AACxC,IAAA,IAAI,EAACA,KAAAA,KAAAA,IAAAA,IAAAA,KAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,KAAAA,CAAOH,OAAO,GAAE,OAAO,KAAA;AAE5B,IAAA,MAAMA,OAAAA,GAAUG,KAAAA,CAAMH,OAAO,CAACI,WAAW,EAAA;AACzC,IAAA,OAAOJ,QAAQK,QAAQ,CAAC,6BACjBL,OAAAA,CAAQK,QAAQ,CAAC,yBAAA,CAAA,IACjBL,OAAAA,CAAQK,QAAQ,CAAC,kBACjBL,OAAAA,CAAQK,QAAQ,CAAC,iBAAA,CAAA,IACjBL,OAAAA,CAAQK,QAAQ,CAAC,mBAAA,CAAA;AAC5B;AAEA;AACO,SAASC,iBAAiBH,KAAU,EAAA;AACvC,IAAA,IAAI,EAACA,KAAAA,KAAAA,IAAAA,IAAAA,KAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,MAAOH,OAAO,CAAA,IAAI,EAACG,KAAAA,KAAAA,IAAAA,IAAAA,KAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,KAAAA,CAAOI,IAAI,KAAI,EAACJ,KAAAA,KAAAA,IAAAA,IAAAA,4BAAAA,KAAAA,CAAOK,MAAM,GAAE,OAAO,KAAA;;AAG9D,IAAA,IAAIL,MAAMK,MAAM,KAAK,OAAOL,KAAAA,CAAMI,IAAI,KAAK,qBAAA,EAAuB;QAC9D,OAAO,IAAA;AACX,IAAA;;IAGA,IAAIJ,KAAAA,CAAMH,OAAO,EAAE;AACf,QAAA,MAAMA,OAAAA,GAAUG,KAAAA,CAAMH,OAAO,CAACI,WAAW,EAAA;AACzC,QAAA,OAAOJ,QAAQK,QAAQ,CAAC,0BACjBL,OAAAA,CAAQK,QAAQ,CAAC,mBAAA,CAAA,IACjBL,OAAAA,CAAQK,QAAQ,CAAC,qBAChBL,OAAAA,CAAQK,QAAQ,CAAC,MAAA,CAAA,IAAWL,OAAAA,CAAQK,QAAQ,CAAC,OAAA,CAAA;AACzD,IAAA;IAEA,OAAO,KAAA;AACX;AAEO,eAAeI,gBAAAA,CAAiBC,QAAsC,EAAEC,OAAAA,GAAiP;IAAEtB,KAAAA,EAAO;AAAc,CAAC,EAAA;AACpV,IAAA,MAAMuB,MAAAA,GAASC,SAAAA,EAAAA;IACf,MAAMC,OAAAA,GAAUC,MAAc,CAAC;AAAEC,QAAAA,GAAAA,EAAKJ,OAAOK;AAAM,KAAA,CAAA;AACnD,IAAA,IAAIC,MAAAA,GAAwB,IAAA;IAC5B,IAAI;AAuFiBC,QAAAA,IAAAA,oCAAAA,EAAAA,4BAAAA,EAAAA,oBAAAA;AAtFjB,QAAA,MAAMC,MAAAA,GAASC,OAAAA,CAAQC,GAAG,CAACC,cAAc;AACzC,QAAA,IAAI,CAACH,MAAAA,EAAQ;AACT,YAAA,MAAM,IAAItB,WAAAA,CAAY,gDAAA,CAAA;AAC1B,QAAA;;QAGA,MAAM0B,SAAAA,GAAYC,SAASJ,OAAAA,CAAQC,GAAG,CAACI,iBAAiB,IAAI;AAC5DR,QAAAA,MAAAA,GAAS,IAAIS,MAAAA,CAAO;YAChBP,MAAAA,EAAQA,MAAAA;YACRQ,OAAAA,EAASJ;AACb,SAAA,CAAA;QAEA,MAAMK,UAAAA,GAAalB,OAAAA,CAAQtB,KAAK,IAAI,aAAA;;AAGpC,QAAA,MAAMyC,WAAAA,GAAcC,IAAAA,CAAKC,SAAS,CAACtB,UAAUuB,MAAM;AACnD,QAAA,MAAMC,gBAAgB,CAACJ,cAAc,IAAG,EAAGK,OAAO,CAAC,CAAA,CAAA;;QAGnD,MAAMC,aAAAA,GAAgBzB,OAAAA,CAAQjB,eAAe,GAAG,CAAC,cAAc,EAAEiB,OAAAA,CAAQjB,eAAe,CAAA,CAAE,GAAG,EAAA;AAC7FkB,QAAAA,MAAAA,CAAOyB,IAAI,CAAC,6BAAA,CAAA;QACZzB,MAAAA,CAAOyB,IAAI,CAAC,gBAAA,EAAkBR,UAAAA,EAAYO,aAAAA,CAAAA;AAC1CxB,QAAAA,MAAAA,CAAOyB,IAAI,CAAC,mCAAA,EAAqCH,aAAAA,EAAeJ,YAAYQ,cAAc,EAAA,CAAA;QAE1F1B,MAAAA,CAAOK,KAAK,CAAC,8BAAA,EAAgCP,QAAAA,CAAAA;YAGjBC,8BAAAA,EAAAA,IAAAA;;AAA5B,QAAA,MAAM4B,mBAAAA,GAAsB5B,CAAAA,IAAAA,GAAAA,CAAAA,iCAAAA,OAAAA,CAAQd,qBAAqB,MAAA,IAAA,IAA7Bc,8BAAAA,KAAAA,KAAAA,CAAAA,GAAAA,8BAAAA,GAAiCA,OAAAA,CAAQ6B,SAAS,MAAA,IAAA,IAAlD7B,kBAAAA,IAAAA,GAAsD,KAAA;;QAGlF,IAAIA,OAAAA,CAAQM,KAAK,KAAKN,OAAAA,CAAQ8B,gBAAgB,IAAI9B,OAAAA,CAAQ+B,SAAQ,CAAA,EAAI;AAClE,YAAA,MAAMC,WAAAA,GAAc;gBAChBtD,KAAAA,EAAOwC,UAAAA;AACPnB,gBAAAA,QAAAA;gBACAkC,qBAAAA,EAAuBL,mBAAAA;AACvBM,gBAAAA,eAAAA,EAAiBlC,QAAQmC,cAAc;AACvCC,gBAAAA,gBAAAA,EAAkBpC,QAAQjB;AAC9B,aAAA;AACA,YAAA,MAAMgD,SAAAA,GAAY/B,OAAAA,CAAQ8B,gBAAgB,IAAI9B,QAAQ+B,SAAS;YAC/D,MAAM5B,OAAAA,CAAQkC,SAAS,CAACN,SAAAA,EAAYX,KAAKC,SAAS,CAACW,WAAAA,EAAa,IAAA,EAAM,CAAA,CAAA,EAAI,MAAA,CAAA;YAC1E/B,MAAAA,CAAOK,KAAK,CAAC,gCAAA,EAAkCyB,SAAAA,CAAAA;AACnD,QAAA;;AAGA,QAAA,MAAMO,UAAAA,GAAkB;YACpB5D,KAAAA,EAAOwC,U
AAAA;AACPnB,YAAAA,QAAAA;YACAkC,qBAAAA,EAAuBL,mBAAAA;AACvBM,YAAAA,eAAAA,EAAiBlC,QAAQmC;AAC7B,SAAA;;AAGA,QAAA,IAAInC,OAAAA,CAAQjB,eAAe,KAAKmC,UAAAA,CAAWxB,QAAQ,CAAC,OAAA,CAAA,IAAYwB,UAAAA,CAAWxB,QAAQ,CAAC,IAAA,CAAI,CAAA,EAAI;YACxF4C,UAAAA,CAAWF,gBAAgB,GAAGpC,OAAAA,CAAQjB,eAAe;AACzD,QAAA;;QAGA,MAAMwD,SAAAA,GAAYC,KAAKC,GAAG,EAAA;AAC1B,QAAA,MAAMC,oBAAoBnC,MAAAA,CAAOoC,IAAI,CAACC,WAAW,CAACC,MAAM,CAACP,UAAAA,CAAAA;;AAGzD,QAAA,IAAIQ,SAAAA,GAAmC,IAAA;AACvC,QAAA,MAAMC,cAAAA,GAAiB,IAAIC,OAAAA,CAAe,CAACC,CAAAA,EAAGC,MAAAA,GAAAA;YAC1C,MAAMrC,SAAAA,GAAYC,SAASJ,OAAAA,CAAQC,GAAG,CAACI,iBAAiB,IAAI;AAC5D+B,YAAAA,SAAAA,GAAYK,UAAAA,CAAW,IAAMD,MAAAA,CAAO,IAAI/D,WAAAA,CAAY,CAAC,gCAAgC,EAAE0B,SAAAA,GAAU,IAAA,CAAK,QAAQ,CAAC,CAAA,CAAA,EAAIA,SAAAA,CAAAA;AACvH,QAAA,CAAA,CAAA;QAEA,IAAIL,UAAAA;QACJ,IAAI;YACAA,UAAAA,GAAa,MAAMwC,OAAAA,CAAQI,IAAI,CAAC;AAACV,gBAAAA,iBAAAA;AAAmBK,gBAAAA;AAAe,aAAA,CAAA;QACvE,CAAA,QAAU;;AAEN,YAAA,IAAID,cAAc,IAAA,EAAM;gBACpBO,YAAAA,CAAaP,SAAAA,CAAAA;AACjB,YAAA;AACJ,QAAA;QAEA,MAAMQ,WAAAA,GAAcd,IAAAA,CAAKC,GAAG,EAAA,GAAKF,SAAAA;;QAGjC,IAAIvC,OAAAA,CAAQM,KAAK,KAAKN,OAAAA,CAAQuD,iBAAiB,IAAIvD,OAAAA,CAAQ+B,SAAQ,CAAA,EAAI;AACnE,YAAA,MAAMA,SAAAA,GAAY/B,OAAAA,CAAQuD,iBAAiB,IAAIvD,QAAQ+B,SAAS;YAChE,MAAM5B,OAAAA,CAAQkC,SAAS,CAACN,SAAAA,EAAYX,KAAKC,SAAS,CAACb,UAAAA,EAAY,IAAA,EAAM,CAAA,CAAA,EAAI,MAAA,CAAA;YACzEP,MAAAA,CAAOK,KAAK,CAAC,iCAAA,EAAmCyB,SAAAA,CAAAA;AACpD,QAAA;AAEA,QAAA,MAAMyB,YAAWhD,oBAAAA,GAAAA,UAAAA,CAAWiD,OAAO,CAAC,CAAA,CAAE,cAArBjD,oBAAAA,KAAAA,KAAAA,CAAAA,GAAAA,KAAAA,CAAAA,GAAAA,CAAAA,4BAAAA,GAAAA,qBAAuBnB,OAAO,MAAA,IAAA,IAA9BmB,oDAAAA,oCAAAA,GAAAA,4BAAAA,CAAgCkD,OAAO,MAAA,IAAA,IAAvClD,oCAAAA,KAAAA,KAAAA,CAAAA,GAAAA,KAAAA,CAAAA,GAAAA,qCAAyCmD,IAAI,EAAA;AAC9D,QAAA,IAAI,CAACH,QAAAA,EAAU;AACX,YAAA,MAAM,IAAIrE,WAAAA,CAAY,kCAAA,CAAA;AAC1B,QAAA;;QAGA,MAAMyE,YAAAA,GAAeJ,SAASlC,MAAM;AACpC,QAAA,MAAMuC,iBAAiB,CAACD,eAAe,IAAG,EAAGpC,OAAO,CAAC,CAAA,CAAA;AACrDvB,QAAAA,MAAAA,CAAOyB,IAAI,CAAC,oCAAA,EAAsCmC,cAAAA,EAAgBD,aAAajC,cAAc,EAAA,CAAA;;AAG7F,QAAA,MAAMmC,uBAAuBR,WAAAA,IAAe,IAAA,GACtC,GAAG,CAACA,cAAc,IAAG,EAAG9B,OAAO,CAAC,GAAG,CAAC,CAAC,GACrC,CAAA,EAAG8B,WAAAA,CAAY,EAAE,CAAC;QACxBrD,MAAAA,CAAOyB,IAAI,CAAC,aAAA,EAAeoC,oBAAAA,CAAAA;;QAG3B,IAAItD,UAAAA,CAAWuD,KAAK,EAAE;AAEdvD,YAAAA,IAAAA,+BAAAA,EACAA,mCAAAA,EACAA,8BAAAA;AAHJP,YAAAA,MAAAA,CAAOyB,IAAI,CAAC,sDAAA,EACRlB,CAAAA,CAAAA,+BAAAA,GAAAA,WAAWuD,KAAK,CAACC,aAAa,MAAA,IAAA,IAA9BxD,sDAAAA,+BAAAA,CAAgCmB,cAAc,EAAA,KAAM,GAAA,EACpDnB,EAAAA,mCAAAA,GAAAA,UAAAA,CAAWuD,KAAK,CAACE,iBAAiB,MAAA,IAAA,IAAlCzD,mCAAAA,KAAAA,KAAAA,CAAAA,GAAAA,KAAAA,CAAAA,GAAAA,mCAAAA,CAAoCmB,cAAc,OAAM,GAAA,EACxDnB,CAAAA,CAAAA,8BAAAA,GAAAA,UAAAA,CAAWuD,KAAK,CAACG,YAAY,cAA7B1D,8BAAAA,KAAAA,KAAAA,CAAAA,GAAAA,KAAAA,CAAAA,GAAAA,8BAAAA,CAA+BmB,cAAc,EAAA,KAAM,GAAA,CAAA;AAE3D,QAAA;AAEA1B,QAAAA,MAAAA,CAAOK,KAAK,CAAC,sCAAA,EAAwCkD,QAAAA,CAASW,SAAS,CAAC,CAAA,EAAG,EAAA,CAAA,CAAA;QAC3E,IAAInE,OAAAA,CAAQmC,cAAc,EAAE;AACxB,YAAA,OAAOiC,cAAcZ,QAAAA,EAAU,qBAAA,CAAA;QACnC,CAAA,MAAO;YACH,OAAOA,QAAAA;AACX,QAAA;AAEJ,IAAA,CAAA,CAAE,OAAOhE,KAAAA,EAAY;AACjBS,QAAAA,MAAAA,CAAOT,KAAK,CAAC,iCAAA,EAAmCA,MAAMH,OAAO,EAAEG,MAAM6E,KAAK,CAAA;AAC1E,QAAA,MAAMC,eAAehF,iBAAAA,CAAkBE,KAAAA,CAAAA;QACvC,MAAM,IAAIL,YAAY,CAAC,6BAA6B,EAAEK,KAAAA,CAAMH,OAAO,EAAE,EAAEiF,YAAAA,CAAAA;IAC3E,CAAA,QAAU;;;AAGV,IAAA;AACJ;AAEA;AACO,eAAeC,yBAAAA,CAClBxE,QAAsC,EACtCC,OAAAA,GAAiP;IAAEtB,KAAAA,EAAO;AAAc,CAAC,EACzQ8F,aAA0E,EAAA;AAE1E,IAAA,MAAMvE,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAMuE,UAAAA,GAAa,CAAA;AAEnB,IAAA,IAAK,IAAIC,OAAAA,GAAU,CAAA,EAAGA,OAAAA,IAAWD,YAAYC,OAAAA,EAAAA,CAAW;QACpD,IAAI;AACA,YAAA,MAAMC,iBAAiBD,OAAAA,KAAY,CAAA,GAAI3E,WAAYyE,
aAAAA,GAAgB,MAAMA,cAAcE,OAAAA,CAAAA,GAAW3E,QAAAA;YAClG,OAAO,MAAMD,iBAAiB6E,cAAAA,EAAgB3E,OAAAA,CAAAA;AAClD,QAAA,CAAA,CAAE,OAAOR,KAAAA,EAAY;AACjB,YAAA,IAAIA,iBAAiBL,WAAAA,IAAeK,KAAAA,CAAMF,iBAAiB,IAAIoF,OAAAA,GAAUD,cAAcD,aAAAA,EAAe;gBAClGvE,MAAAA,CAAO2E,IAAI,CAAC,yEAAA,EAA2EF,OAAAA,EAASD,UAAAA,CAAAA;;gBAEhG,MAAMI,SAAAA,GAAYC,IAAAA,CAAKC,GAAG,CAAC,IAAA,GAAOD,KAAKE,GAAG,CAAC,CAAA,EAAGN,OAAAA,GAAU,CAAA,CAAA,EAAI,KAAA,CAAA;AAC5D,gBAAA,MAAM,IAAI1B,OAAAA,CAAQiC,CAAAA,OAAAA,GAAW9B,WAAW8B,OAAAA,EAASJ,SAAAA,CAAAA,CAAAA;AACjD,gBAAA;AACJ,YAAA,CAAA,MAAO,IAAIlF,gBAAAA,CAAiBH,KAAAA,CAAAA,IAAUkF,OAAAA,GAAUD,UAAAA,EAAY;;AAExD,gBAAA,MAAMI,SAAAA,GAAYC,IAAAA,CAAKC,GAAG,CAAC,IAAA,GAAOD,IAAAA,CAAKE,GAAG,CAAC,CAAA,EAAGN,OAAAA,GAAU,CAAA,CAAA,EAAI,KAAA,CAAA,CAAA;AAC5DzE,gBAAAA,MAAAA,CAAO2E,IAAI,CAAC,CAAC,0BAA0B,EAAEF,OAAAA,CAAQ,CAAC,EAAED,UAAAA,CAAW,UAAU,EAAEI,SAAAA,CAAU,kBAAkB,CAAC,CAAA;AACxG,gBAAA,MAAM,IAAI7B,OAAAA,CAAQiC,CAAAA,OAAAA,GAAW9B,WAAW8B,OAAAA,EAASJ,SAAAA,CAAAA,CAAAA;AACjD,gBAAA;AACJ,YAAA;YACA,MAAMrF,KAAAA;AACV,QAAA;AACJ,IAAA;;AAGA,IAAA,MAAM,IAAIL,WAAAA,CAAY,sBAAA,CAAA;AAC1B;AAEO,eAAe+F,eAAAA,CAAgBC,QAAgB,EAAEnF,OAAAA,GAAoJ;IAAEtB,KAAAA,EAAO;AAAY,CAAC,EAAA;AAC9N,IAAA,MAAMuB,MAAAA,GAASC,SAAAA,EAAAA;IACf,MAAMC,OAAAA,GAAUC,MAAc,CAAC;AAAEC,QAAAA,GAAAA,EAAKJ,OAAOK;AAAM,KAAA,CAAA;AACnD,IAAA,IAAIC,MAAAA,GAAwB,IAAA;AAC5B,IAAA,IAAI6E,WAAAA,GAAoC,IAAA;AACxC,IAAA,IAAIC,YAAAA,GAAe,KAAA;;AAGnB,IAAA,MAAMC,gBAAAA,GAAmB,IAAA;QACrB,IAAIF,WAAAA,IAAe,CAACC,YAAAA,EAAc;YAC9B,IAAI;;gBAEA,IAAI,OAAOD,YAAYG,OAAO,KAAK,cAAc,CAACH,WAAAA,CAAYI,SAAS,EAAE;AACrEJ,oBAAAA,WAAAA,CAAYG,OAAO,EAAA;AACvB,gBAAA;gBACAF,YAAAA,GAAe,IAAA;AACfpF,gBAAAA,MAAAA,CAAOK,KAAK,CAAC,kCAAA,CAAA;AACjB,YAAA,CAAA,CAAE,OAAOmF,SAAAA,EAAW;AAChBxF,gBAAAA,MAAAA,CAAOK,KAAK,CAAC,yCAAA,EAA4CmF,UAAoBpG,OAAO,CAAA;AACpFgG,gBAAAA,YAAAA,GAAe;AACnB,YAAA;AACJ,QAAA;AACJ,IAAA,CAAA;IAEA,IAAI;AACA,QAAA,MAAM5E,MAAAA,GAASC,OAAAA,CAAQC,GAAG,CAACC,cAAc;AACzC,QAAA,IAAI,CAACH,MAAAA,EAAQ;AACT,YAAA,MAAM,IAAItB,WAAAA,CAAY,gDAAA,CAAA;AAC1B,QAAA;AAEAoB,QAAAA,MAAAA,GAAS,IAAIS,MAAAA,CAAO;YAChBP,MAAAA,EAAQA;AACZ,SAAA,CAAA;QAEAR,MAAAA,CAAOK,KAAK,CAAC,6BAAA,EAA+B6E,QAAAA,CAAAA;;QAG5C,IAAInF,OAAAA,CAAQM,KAAK,KAAKN,OAAAA,CAAQ8B,gBAAgB,IAAI9B,OAAAA,CAAQ+B,SAAQ,CAAA,EAAI;AAClE,YAAA,MAAMC,WAAAA,GAAc;gBAChBtD,KAAAA,EAAOsB,OAAAA,CAAQtB,KAAK,IAAI,WAAA;gBACxBgH,IAAAA,EAAMP,QAAAA;gBACNjD,eAAAA,EAAiB;AACrB,aAAA;AACA,YAAA,MAAMH,SAAAA,GAAY/B,OAAAA,CAAQ8B,gBAAgB,IAAI9B,QAAQ+B,SAAS;YAC/D,MAAM5B,OAAAA,CAAQkC,SAAS,CAACN,SAAAA,EAAYX,KAAKC,SAAS,CAACW,WAAAA,EAAa,IAAA,EAAM,CAAA,CAAA,EAAI,MAAA,CAAA;YAC1E/B,MAAAA,CAAOK,KAAK,CAAC,gCAAA,EAAkCyB,SAAAA,CAAAA;AACnD,QAAA;QAEAqD,WAAAA,GAAc,MAAMjF,OAAAA,CAAQwF,UAAU,CAACR,QAAAA,CAAAA;;;AAIvC,QAAA,IAAIC,WAAAA,IAAe,OAAOA,WAAAA,CAAYQ,EAAE,KAAK,UAAA,EAAY;YACrDR,WAAAA,CAAYQ,EAAE,CAAC,OAAA,EAAS,CAACC,WAAAA,GAAAA;AACrB5F,gBAAAA,MAAAA,CAAOT,KAAK,CAAC,wBAAA,EAA0BqG,WAAAA,CAAYxG,OAAO,CAAA;AAC1DiG,gBAAAA,gBAAAA,EAAAA;AACJ,YAAA,CAAA,CAAA;AACJ,QAAA;QAEA,IAAIQ,aAAAA;QACJ,IAAI;AACAA,YAAAA,aAAAA,GAAgB,MAAMvF,MAAAA,CAAOwF,KAAK,CAACC,cAAc,CAACnD,MAAM,CAAC;gBACrDnE,KAAAA,EAAOsB,OAAAA,CAAQtB,KAAK,IAAI,WAAA;gBACxBgH,IAAAA,EAAMN,WAAAA;gBACNlD,eAAAA,EAAiB;AACrB,aAAA,CAAA;;AAEAoD,YAAAA,gBAAAA,EAAAA;AACJ,QAAA,CAAA,CAAE,OAAOW,QAAAA,EAAU;;AAEfX,YAAAA,gBAAAA,EAAAA;YACA,MAAMW,QAAAA;AACV,QAAA;;QAGA,IAAIjG,OAAAA,CAAQM,KAAK,KAAKN,OAAAA,CAAQuD,iBAAiB,IAAIvD,OAAAA,CAAQ+B,SAAQ,CAAA,EAAI;AACnE,YAAA,MAAMA,SAAAA,GAAY/B,OAAAA,CAAQuD,iBAAiB,IAAIvD,QAAQ+B,SAAS;YAChE,MAAM5B,OAAAA,CAAQkC,SAAS,CAACN,SAAAA,EAAYX,KAAKC,SAAS,CAACyE,aAAAA,EAAe,IAAA,EAAM,CAAA,CAAA,EAAI,MAAA,CAAA;YAC5E7F,MAAAA,CAAOK,KAAK,CAAC,iCAAA,EAAmCyB,S
AAAA,CAAAA;AACpD,QAAA;AAEA,QAAA,MAAMyB,QAAAA,GAAWsC,aAAAA;AACjB,QAAA,IAAI,CAACtC,QAAAA,EAAU;AACX,YAAA,MAAM,IAAIrE,WAAAA,CAAY,uCAAA,CAAA;AAC1B,QAAA;QAEAc,MAAAA,CAAOK,KAAK,CAAC,wCAAA,EAA0CkD,QAAAA,CAAAA;;QAGvD,IAAI;YACA,MAAM0C,SAAAA,GAAYlG,OAAAA,CAAQmG,eAAe,IAAI,QAAA;AAC7C,YAAA,MAAMC,YAAAA,CAAajB,QAAAA,EAAU3B,QAAAA,CAAS6C,IAAI,EAAEH,SAAAA,CAAAA;AAChD,QAAA,CAAA,CAAE,OAAOI,YAAAA,EAAmB;;AAExBrG,YAAAA,MAAAA,CAAO2E,IAAI,CAAC,kCAAA,EAAoC0B,YAAAA,CAAajH,OAAO,CAAA;AACxE,QAAA;QAEA,OAAOmE,QAAAA;AAEX,IAAA,CAAA,CAAE,OAAOhE,KAAAA,EAAY;AACjBS,QAAAA,MAAAA,CAAOT,KAAK,CAAC,sCAAA,EAAwCA,MAAMH,OAAO,EAAEG,MAAM6E,KAAK,CAAA;AAC/E,QAAA,MAAM,IAAIlF,WAAAA,CAAY,CAAC,4BAA4B,EAAEK,KAAAA,CAAMH,OAAO,CAAA,CAAE,CAAA;IACxE,CAAA,QAAU;;AAENiG,QAAAA,gBAAAA,EAAAA;;;AAGJ,IAAA;AACJ;;;;"}
@@ -1,6 +1,6 @@
  import path__default from 'path';
  import { getLogger } from '../logging.js';
- import { safeJsonParse, validatePackageJson } from './validation.js';
+ import { safeJsonParse, validatePackageJson } from '@eldrforge/git-tools';
 
  /* eslint-disable @typescript-eslint/no-unused-vars */ function _define_property(obj, key, value) {
  if (key in obj) {
@@ -1 +1 @@
- {"version":3,"file":"performance.js","sources":["../../src/util/performance.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-unused-vars */\nimport path from 'path';\nimport { getLogger } from '../logging';\nimport { safeJsonParse, validatePackageJson } from './validation';\n\n// Performance timing helper\nexport class PerformanceTimer {\n private startTime: number;\n private logger: any;\n\n constructor(logger: any) {\n this.logger = logger;\n this.startTime = Date.now();\n }\n\n static start(logger: any, operation: string): PerformanceTimer {\n logger.verbose(`⏱️ Starting: ${operation}`);\n return new PerformanceTimer(logger);\n }\n\n end(operation: string): number {\n const duration = Date.now() - this.startTime;\n this.logger.verbose(`⏱️ Completed: ${operation} (${duration}ms)`);\n return duration;\n }\n}\n\nexport interface PackageJson {\n name?: string;\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n peerDependencies?: Record<string, string>;\n}\n\nexport interface PackageJsonLocation {\n path: string;\n packageJson: PackageJson;\n relativePath: string;\n}\n\nconst EXCLUDED_DIRECTORIES = [\n 'node_modules',\n 'dist',\n 'build',\n 'coverage',\n '.git',\n '.next',\n '.nuxt',\n 'out',\n 'public',\n 'static',\n 'assets'\n];\n\n// Batch read multiple package.json files in parallel\nexport const batchReadPackageJsonFiles = async (\n packageJsonPaths: string[],\n storage: any,\n rootDir: string\n): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, `Batch reading ${packageJsonPaths.length} package.json files`);\n\n const readPromises = packageJsonPaths.map(async (packageJsonPath): Promise<PackageJsonLocation | null> => {\n try {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath, false);\n const relativePath = path.relative(rootDir, path.dirname(packageJsonPath));\n\n return {\n path: packageJsonPath,\n packageJson,\n relativePath: relativePath || '.'\n };\n } catch (error: any) {\n logger.debug(`Skipped invalid package.json at ${packageJsonPath}: ${error.message}`);\n return null;\n }\n });\n\n const results = await Promise.all(readPromises);\n const validResults = results.filter((result): result is PackageJsonLocation => result !== null);\n\n timer.end(`Successfully read ${validResults.length}/${packageJsonPaths.length} package.json files`);\n return validResults;\n};\n\n// Optimized recursive package.json finder with parallel processing\nexport const findAllPackageJsonFiles = async (rootDir: string, storage: any): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Optimized scanning for package.json files');\n\n // Phase 1: Find all package.json file paths in parallel\n const packageJsonPaths: string[] = [];\n\n const scanForPaths = async (currentDir: string, depth: number = 0): Promise<string[]> => {\n // Prevent infinite recursion and overly deep scanning\n if (depth > 5) {\n return [];\n }\n\n try {\n if (!await storage.exists(currentDir) || !await storage.isDirectory(currentDir)) {\n return [];\n }\n\n const items = await storage.listFiles(currentDir);\n const foundPaths: string[] = [];\n\n // Check for package.json in current directory\n if (items.includes('package.json')) {\n const packageJsonPath = path.join(currentDir, 'package.json');\n 
foundPaths.push(packageJsonPath);\n }\n\n // Process subdirectories in parallel\n const subdirPromises: Promise<string[]>[] = [];\n for (const item of items) {\n if (EXCLUDED_DIRECTORIES.includes(item)) {\n continue;\n }\n\n const itemPath = path.join(currentDir, item);\n subdirPromises.push(\n (async () => {\n try {\n if (await storage.isDirectory(itemPath)) {\n return await scanForPaths(itemPath, depth + 1);\n }\n } catch (error: any) {\n logger.debug(`Skipped directory ${itemPath}: ${error.message}`);\n }\n return [];\n })()\n );\n }\n\n if (subdirPromises.length > 0) {\n const subdirResults = await Promise.all(subdirPromises);\n for (const subdirPaths of subdirResults) {\n foundPaths.push(...subdirPaths);\n }\n }\n\n return foundPaths;\n } catch (error: any) {\n logger.debug(`Failed to scan directory ${currentDir}: ${error.message}`);\n return [];\n }\n };\n\n const pathsTimer = PerformanceTimer.start(logger, 'Finding all package.json paths');\n const allPaths = await scanForPaths(rootDir);\n pathsTimer.end(`Found ${allPaths.length} package.json file paths`);\n\n // Phase 2: Batch read all package.json files in parallel\n const packageJsonFiles = await batchReadPackageJsonFiles(allPaths, storage, rootDir);\n\n timer.end(`Found ${packageJsonFiles.length} valid package.json files`);\n return packageJsonFiles;\n};\n\n// Optimized package scanning with parallel processing\nexport const scanDirectoryForPackages = async (rootDir: string, storage: any): Promise<Map<string, string>> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, `Optimized package scanning: ${rootDir}`);\n const packageMap = new Map<string, string>(); // packageName -> relativePath\n\n const absoluteRootDir = path.resolve(process.cwd(), rootDir);\n logger.verbose(`Scanning directory for packages: ${absoluteRootDir}`);\n\n try {\n // Quick existence and directory check\n const existsTimer = PerformanceTimer.start(logger, `Checking directory: ${absoluteRootDir}`);\n if (!await storage.exists(absoluteRootDir) || !await storage.isDirectory(absoluteRootDir)) {\n existsTimer.end(`Directory not found or not a directory: ${absoluteRootDir}`);\n timer.end(`Directory invalid: ${rootDir}`);\n return packageMap;\n }\n existsTimer.end(`Directory verified: ${absoluteRootDir}`);\n\n // Get all items and process in parallel\n const listTimer = PerformanceTimer.start(logger, `Listing contents: ${absoluteRootDir}`);\n const items = await storage.listFiles(absoluteRootDir);\n listTimer.end(`Listed ${items.length} items`);\n\n // Create batched promises for better performance\n const BATCH_SIZE = 10; // Process directories in batches to avoid overwhelming filesystem\n const batches = [];\n\n for (let i = 0; i < items.length; i += BATCH_SIZE) {\n const batch = items.slice(i, i + BATCH_SIZE);\n batches.push(batch);\n }\n\n const processTimer = PerformanceTimer.start(logger, `Processing ${batches.length} batches of directories`);\n\n for (const batch of batches) {\n const batchPromises = batch.map(async (item: string) => {\n const itemPath = path.join(absoluteRootDir, item);\n try {\n if (await storage.isDirectory(itemPath)) {\n const packageJsonPath = path.join(itemPath, 'package.json');\n\n if (await storage.exists(packageJsonPath)) {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath);\n\n if (packageJson.name) {\n const relativePath = 
path.relative(process.cwd(), itemPath);\n return { name: packageJson.name, path: relativePath };\n }\n }\n }\n } catch (error: any) {\n logger.debug(`Skipped ${itemPath}: ${error.message || error}`);\n }\n return null;\n });\n\n const batchResults = await Promise.all(batchPromises);\n\n for (const result of batchResults) {\n if (result) {\n packageMap.set(result.name, result.path);\n logger.debug(`Found package: ${result.name} at ${result.path}`);\n }\n }\n }\n\n processTimer.end(`Processed ${items.length} directories in ${batches.length} batches`);\n logger.verbose(`Found ${packageMap.size} packages in ${items.length} subdirectories`);\n } catch (error) {\n logger.warn(`Failed to read directory ${absoluteRootDir}: ${error}`);\n }\n\n timer.end(`Found ${packageMap.size} packages in: ${rootDir}`);\n return packageMap;\n};\n\n// Parallel scope processing for better performance\nexport const findPackagesByScope = async (\n dependencies: Record<string, string>,\n scopeRoots: Record<string, string>,\n storage: any\n): Promise<Map<string, string>> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Finding packages by scope (optimized)');\n const workspacePackages = new Map<string, string>();\n\n logger.silly(`Checking dependencies against scope roots: ${JSON.stringify(scopeRoots)}`);\n\n // Process all scopes in parallel for maximum performance\n const scopeTimer = PerformanceTimer.start(logger, 'Parallel scope scanning');\n const scopePromises = Object.entries(scopeRoots).map(async ([scope, rootDir]) => {\n logger.verbose(`Scanning scope ${scope} at root directory: ${rootDir}`);\n const scopePackages = await scanDirectoryForPackages(rootDir, storage);\n\n // Filter packages that match the scope\n const matchingPackages: Array<[string, string]> = [];\n for (const [packageName, packagePath] of scopePackages) {\n if (packageName.startsWith(scope)) {\n matchingPackages.push([packageName, packagePath]);\n logger.debug(`Registered package: ${packageName} -> ${packagePath}`);\n }\n }\n return { scope, packages: matchingPackages };\n });\n\n const allScopeResults = await Promise.all(scopePromises);\n\n // Aggregate all packages from all scopes\n const allPackages = new Map<string, string>();\n for (const { scope, packages } of allScopeResults) {\n for (const [packageName, packagePath] of packages) {\n allPackages.set(packageName, packagePath);\n }\n }\n\n scopeTimer.end(`Scanned ${Object.keys(scopeRoots).length} scope roots, found ${allPackages.size} packages`);\n\n // Match dependencies to available packages\n const matchTimer = PerformanceTimer.start(logger, 'Matching dependencies to packages');\n for (const [depName, depVersion] of Object.entries(dependencies)) {\n logger.debug(`Processing dependency: ${depName}@${depVersion}`);\n\n if (allPackages.has(depName)) {\n const packagePath = allPackages.get(depName)!;\n workspacePackages.set(depName, packagePath);\n logger.verbose(`Found sibling package: ${depName} at ${packagePath}`);\n }\n }\n matchTimer.end(`Matched ${workspacePackages.size} dependencies to workspace packages`);\n\n timer.end(`Found ${workspacePackages.size} packages to link`);\n return workspacePackages;\n};\n\n// Utility to collect all dependencies from package.json files efficiently\nexport const collectAllDependencies = (packageJsonFiles: PackageJsonLocation[]): Record<string, string> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Collecting all dependencies');\n\n const allDependencies: Record<string, string> = 
{};\n for (const { packageJson } of packageJsonFiles) {\n Object.assign(allDependencies, packageJson.dependencies);\n Object.assign(allDependencies, packageJson.devDependencies);\n Object.assign(allDependencies, packageJson.peerDependencies);\n }\n\n timer.end(`Collected ${Object.keys(allDependencies).length} unique dependencies`);\n return allDependencies;\n};\n\n// Utility to check for file: dependencies\nexport const checkForFileDependencies = (packageJsonFiles: PackageJsonLocation[]): void => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Checking for file: dependencies');\n const filesWithFileDepedencies: Array<{path: string, dependencies: string[]}> = [];\n\n for (const { path: packagePath, packageJson, relativePath } of packageJsonFiles) {\n const fileDeps: string[] = [];\n\n // Check all dependency types for file: paths\n const allDeps = {\n ...packageJson.dependencies,\n ...packageJson.devDependencies,\n ...packageJson.peerDependencies\n };\n\n for (const [name, version] of Object.entries(allDeps)) {\n if (version.startsWith('file:')) {\n fileDeps.push(`${name}: ${version}`);\n }\n }\n\n if (fileDeps.length > 0) {\n filesWithFileDepedencies.push({\n path: relativePath,\n dependencies: fileDeps\n });\n }\n }\n\n if (filesWithFileDepedencies.length > 0) {\n logger.warn('⚠️ WARNING: Found file: dependencies that should not be committed:');\n for (const file of filesWithFileDepedencies) {\n logger.warn(` 📄 ${file.path}:`);\n for (const dep of file.dependencies) {\n logger.warn(` - ${dep}`);\n }\n }\n logger.warn('');\n logger.warn('💡 Remember to run \"kodrdriv unlink\" before committing to restore registry versions!');\n logger.warn(' Or add a pre-commit hook to prevent accidental commits of linked dependencies.');\n }\n\n timer.end(`Checked ${packageJsonFiles.length} files, found ${filesWithFileDepedencies.length} with file: 
dependencies`);\n};\n"],"names":["PerformanceTimer","start","logger","operation","verbose","end","duration","Date","now","startTime","EXCLUDED_DIRECTORIES","batchReadPackageJsonFiles","packageJsonPaths","storage","rootDir","getLogger","timer","length","readPromises","map","packageJsonPath","packageJsonContent","readFile","parsed","safeJsonParse","packageJson","validatePackageJson","relativePath","path","relative","dirname","error","debug","message","results","Promise","all","validResults","filter","result","findAllPackageJsonFiles","scanForPaths","currentDir","depth","exists","isDirectory","items","listFiles","foundPaths","includes","join","push","subdirPromises","item","itemPath","subdirResults","subdirPaths","pathsTimer","allPaths","packageJsonFiles"],"mappings":";;;;AAAA,uDAAoD,SAAA,gBAAA,CAAA,GAAA,EAAA,GAAA,EAAA,KAAA,EAAA;;;;;;;;;;;;;AAKpD;AACO,MAAMA,gBAAAA,CAAAA;AAST,IAAA,OAAOC,KAAAA,CAAMC,MAAW,EAAEC,SAAiB,EAAoB;AAC3DD,QAAAA,MAAAA,CAAOE,OAAO,CAAC,CAAC,cAAc,EAAED,SAAAA,CAAAA,CAAW,CAAA;AAC3C,QAAA,OAAO,IAAIH,gBAAAA,CAAiBE,MAAAA,CAAAA;AAChC,IAAA;AAEAG,IAAAA,GAAAA,CAAIF,SAAiB,EAAU;AAC3B,QAAA,MAAMG,WAAWC,IAAAA,CAAKC,GAAG,EAAA,GAAK,IAAI,CAACC,SAAS;AAC5C,QAAA,IAAI,CAACP,MAAM,CAACE,OAAO,CAAC,CAAC,eAAe,EAAED,SAAAA,CAAU,EAAE,EAAEG,QAAAA,CAAS,GAAG,CAAC,CAAA;QACjE,OAAOA,QAAAA;AACX,IAAA;AAdA,IAAA,WAAA,CAAYJ,MAAW,CAAE;AAHzB,QAAA,gBAAA,CAAA,IAAA,EAAQO,aAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQP,UAAR,MAAA,CAAA;QAGI,IAAI,CAACA,MAAM,GAAGA,MAAAA;AACd,QAAA,IAAI,CAACO,SAAS,GAAGF,IAAAA,CAAKC,GAAG,EAAA;AAC7B,IAAA;AAYJ;AAeA,MAAME,oBAAAA,GAAuB;AACzB,IAAA,cAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,UAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,OAAA;AACA,IAAA,KAAA;AACA,IAAA,QAAA;AACA,IAAA,QAAA;AACA,IAAA;AACH,CAAA;AAED;AACO,MAAMC,yBAAAA,GAA4B,OACrCC,gBAAAA,EACAC,OAAAA,EACAC,OAAAA,GAAAA;AAEA,IAAA,MAAMZ,MAAAA,GAASa,SAAAA,EAAAA;AACf,IAAA,MAAMC,KAAAA,GAAQhB,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,CAAC,cAAc,EAAEU,gBAAAA,CAAiBK,MAAM,CAAC,mBAAmB,CAAC,CAAA;AAE1G,IAAA,MAAMC,YAAAA,GAAeN,gBAAAA,CAAiBO,GAAG,CAAC,OAAOC,eAAAA,GAAAA;QAC7C,IAAI;AACA,YAAA,MAAMC,kBAAAA,GAAqB,MAAMR,OAAAA,CAAQS,QAAQ,CAACF,eAAAA,EAAiB,OAAA,CAAA;YACnE,MAAMG,MAAAA,GAASC,cAAcH,kBAAAA,EAAoBD,eAAAA,CAAAA;YACjD,MAAMK,WAAAA,GAAcC,mBAAAA,CAAoBH,MAAAA,EAAQH,eAAAA,EAAiB,KAAA,CAAA;AACjE,YAAA,MAAMO,eAAeC,aAAAA,CAAKC,QAAQ,CAACf,OAAAA,EAASc,aAAAA,CAAKE,OAAO,CAACV,eAAAA,CAAAA,CAAAA;YAEzD,OAAO;gBACHQ,IAAAA,EAAMR,eAAAA;AACNK,gBAAAA,WAAAA;AACAE,gBAAAA,YAAAA,EAAcA,YAAAA,IAAgB;AAClC,aAAA;AACJ,QAAA,CAAA,CAAE,OAAOI,KAAAA,EAAY;YACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,gCAAgC,EAAEZ,gBAAgB,EAAE,EAAEW,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;YACnF,OAAO,IAAA;AACX,QAAA;AACJ,IAAA,CAAA,CAAA;AAEA,IAAA,MAAMC,OAAAA,GAAU,MAAMC,OAAAA,CAAQC,GAAG,CAAClB,YAAAA,CAAAA;AAClC,IAAA,MAAMmB,eAAeH,OAAAA,CAAQI,MAAM,CAAC,CAACC,SAA0CA,MAAAA,KAAW,IAAA,CAAA;AAE1FvB,IAAAA,KAAAA,CAAMX,GAAG,CAAC,CAAC,kBAAkB,EAAEgC,YAAAA,CAAapB,MAAM,CAAC,CAAC,EAAEL,gBAAAA,CAAiBK,MAAM,CAAC,mBAAmB,CAAC,CAAA;IAClG,OAAOoB,YAAAA;AACX;AAEA;AACO,MAAMG,uBAAAA,GAA0B,OAAO1B,OAAAA,EAAiBD,OAAAA,GAAAA;AAC3D,IAAA,MAAMX,MAAAA,GAASa,SAAAA,EAAAA;AACf,IAAA,MAAMC,KAAAA,GAAQhB,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,2CAAA,CAAA;AAK7C,IAAA,MAAMuC,YAAAA,GAAe,OAAOC,UAAAA,EAAoBC,KAAAA,GAAgB,CAAC,GAAA;;AAE7D,QAAA,IAAIA,QAAQ,CAAA,EAAG;AACX,YAAA,OAAO,EAAE;AACb,QAAA;QAEA,IAAI;YACA,IAAI,CAAC,MAAM9B,OAAAA,CAAQ+B,MAAM,CAACF,UAAAA,CAAAA,IAAe,CAAC,MAAM7B,OAAAA,CAAQgC,WAAW,CAACH,UAAAA,CAAAA,EAAa;AAC7E,gBAAA,OAAO,EAAE;AACb,YAAA;AAEA,YAAA,MAAMI,KAAAA,GAAQ,MAAMjC,OAAAA,CAAQkC,SAAS,CAACL,UAAAA,CAAAA;AACtC,YAAA,MAAMM,aAAuB,EAAE;;YAG/B,IAAIF,KAAAA,CAAMG,QAAQ,CAAC,cAAA,CAAA,EAAiB;AA
ChC,gBAAA,MAAM7B,eAAAA,GAAkBQ,aAAAA,CAAKsB,IAAI,CAACR,UAAAA,EAAY,cAAA,CAAA;AAC9CM,gBAAAA,UAAAA,CAAWG,IAAI,CAAC/B,eAAAA,CAAAA;AACpB,YAAA;;AAGA,YAAA,MAAMgC,iBAAsC,EAAE;YAC9C,KAAK,MAAMC,QAAQP,KAAAA,CAAO;gBACtB,IAAIpC,oBAAAA,CAAqBuC,QAAQ,CAACI,IAAAA,CAAAA,EAAO;AACrC,oBAAA;AACJ,gBAAA;AAEA,gBAAA,MAAMC,QAAAA,GAAW1B,aAAAA,CAAKsB,IAAI,CAACR,UAAAA,EAAYW,IAAAA,CAAAA;gBACvCD,cAAAA,CAAeD,IAAI,CACd,CAAA,UAAA;oBACG,IAAI;AACA,wBAAA,IAAI,MAAMtC,OAAAA,CAAQgC,WAAW,CAACS,QAAAA,CAAAA,EAAW;4BACrC,OAAO,MAAMb,YAAAA,CAAaa,QAAAA,EAAUX,KAAAA,GAAQ,CAAA,CAAA;AAChD,wBAAA;AACJ,oBAAA,CAAA,CAAE,OAAOZ,KAAAA,EAAY;wBACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,kBAAkB,EAAEsB,SAAS,EAAE,EAAEvB,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;AAClE,oBAAA;AACA,oBAAA,OAAO,EAAE;gBACb,CAAA,GAAA,CAAA;AAER,YAAA;YAEA,IAAImB,cAAAA,CAAenC,MAAM,GAAG,CAAA,EAAG;AAC3B,gBAAA,MAAMsC,aAAAA,GAAgB,MAAMpB,OAAAA,CAAQC,GAAG,CAACgB,cAAAA,CAAAA;gBACxC,KAAK,MAAMI,eAAeD,aAAAA,CAAe;AACrCP,oBAAAA,UAAAA,CAAWG,IAAI,CAAA,GAAIK,WAAAA,CAAAA;AACvB,gBAAA;AACJ,YAAA;YAEA,OAAOR,UAAAA;AACX,QAAA,CAAA,CAAE,OAAOjB,KAAAA,EAAY;YACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,yBAAyB,EAAEU,WAAW,EAAE,EAAEX,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;AACvE,YAAA,OAAO,EAAE;AACb,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMwB,UAAAA,GAAazD,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,gCAAA,CAAA;IAClD,MAAMwD,QAAAA,GAAW,MAAMjB,YAAAA,CAAa3B,OAAAA,CAAAA;IACpC2C,UAAAA,CAAWpD,GAAG,CAAC,CAAC,MAAM,EAAEqD,QAAAA,CAASzC,MAAM,CAAC,wBAAwB,CAAC,CAAA;;AAGjE,IAAA,MAAM0C,gBAAAA,GAAmB,MAAMhD,yBAAAA,CAA0B+C,QAAAA,EAAU7C,OAAAA,EAASC,OAAAA,CAAAA;IAE5EE,KAAAA,CAAMX,GAAG,CAAC,CAAC,MAAM,EAAEsD,gBAAAA,CAAiB1C,MAAM,CAAC,yBAAyB,CAAC,CAAA;IACrE,OAAO0C,gBAAAA;AACX;;;;"}
+ {"version":3,"file":"performance.js","sources":["../../src/util/performance.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-unused-vars */\nimport path from 'path';\nimport { getLogger } from '../logging';\nimport { safeJsonParse, validatePackageJson } from '@eldrforge/git-tools';\n\n// Performance timing helper\nexport class PerformanceTimer {\n private startTime: number;\n private logger: any;\n\n constructor(logger: any) {\n this.logger = logger;\n this.startTime = Date.now();\n }\n\n static start(logger: any, operation: string): PerformanceTimer {\n logger.verbose(`⏱️ Starting: ${operation}`);\n return new PerformanceTimer(logger);\n }\n\n end(operation: string): number {\n const duration = Date.now() - this.startTime;\n this.logger.verbose(`⏱️ Completed: ${operation} (${duration}ms)`);\n return duration;\n }\n}\n\nexport interface PackageJson {\n name?: string;\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n peerDependencies?: Record<string, string>;\n}\n\nexport interface PackageJsonLocation {\n path: string;\n packageJson: PackageJson;\n relativePath: string;\n}\n\nconst EXCLUDED_DIRECTORIES = [\n 'node_modules',\n 'dist',\n 'build',\n 'coverage',\n '.git',\n '.next',\n '.nuxt',\n 'out',\n 'public',\n 'static',\n 'assets'\n];\n\n// Batch read multiple package.json files in parallel\nexport const batchReadPackageJsonFiles = async (\n packageJsonPaths: string[],\n storage: any,\n rootDir: string\n): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, `Batch reading ${packageJsonPaths.length} package.json files`);\n\n const readPromises = packageJsonPaths.map(async (packageJsonPath): Promise<PackageJsonLocation | null> => {\n try {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath, false);\n const relativePath = path.relative(rootDir, path.dirname(packageJsonPath));\n\n return {\n path: packageJsonPath,\n packageJson,\n relativePath: relativePath || '.'\n };\n } catch (error: any) {\n logger.debug(`Skipped invalid package.json at ${packageJsonPath}: ${error.message}`);\n return null;\n }\n });\n\n const results = await Promise.all(readPromises);\n const validResults = results.filter((result): result is PackageJsonLocation => result !== null);\n\n timer.end(`Successfully read ${validResults.length}/${packageJsonPaths.length} package.json files`);\n return validResults;\n};\n\n// Optimized recursive package.json finder with parallel processing\nexport const findAllPackageJsonFiles = async (rootDir: string, storage: any): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Optimized scanning for package.json files');\n\n // Phase 1: Find all package.json file paths in parallel\n const packageJsonPaths: string[] = [];\n\n const scanForPaths = async (currentDir: string, depth: number = 0): Promise<string[]> => {\n // Prevent infinite recursion and overly deep scanning\n if (depth > 5) {\n return [];\n }\n\n try {\n if (!await storage.exists(currentDir) || !await storage.isDirectory(currentDir)) {\n return [];\n }\n\n const items = await storage.listFiles(currentDir);\n const foundPaths: string[] = [];\n\n // Check for package.json in current directory\n if (items.includes('package.json')) {\n const packageJsonPath = path.join(currentDir, 
'package.json');\n foundPaths.push(packageJsonPath);\n }\n\n // Process subdirectories in parallel\n const subdirPromises: Promise<string[]>[] = [];\n for (const item of items) {\n if (EXCLUDED_DIRECTORIES.includes(item)) {\n continue;\n }\n\n const itemPath = path.join(currentDir, item);\n subdirPromises.push(\n (async () => {\n try {\n if (await storage.isDirectory(itemPath)) {\n return await scanForPaths(itemPath, depth + 1);\n }\n } catch (error: any) {\n logger.debug(`Skipped directory ${itemPath}: ${error.message}`);\n }\n return [];\n })()\n );\n }\n\n if (subdirPromises.length > 0) {\n const subdirResults = await Promise.all(subdirPromises);\n for (const subdirPaths of subdirResults) {\n foundPaths.push(...subdirPaths);\n }\n }\n\n return foundPaths;\n } catch (error: any) {\n logger.debug(`Failed to scan directory ${currentDir}: ${error.message}`);\n return [];\n }\n };\n\n const pathsTimer = PerformanceTimer.start(logger, 'Finding all package.json paths');\n const allPaths = await scanForPaths(rootDir);\n pathsTimer.end(`Found ${allPaths.length} package.json file paths`);\n\n // Phase 2: Batch read all package.json files in parallel\n const packageJsonFiles = await batchReadPackageJsonFiles(allPaths, storage, rootDir);\n\n timer.end(`Found ${packageJsonFiles.length} valid package.json files`);\n return packageJsonFiles;\n};\n\n// Optimized package scanning with parallel processing\nexport const scanDirectoryForPackages = async (rootDir: string, storage: any): Promise<Map<string, string>> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, `Optimized package scanning: ${rootDir}`);\n const packageMap = new Map<string, string>(); // packageName -> relativePath\n\n const absoluteRootDir = path.resolve(process.cwd(), rootDir);\n logger.verbose(`Scanning directory for packages: ${absoluteRootDir}`);\n\n try {\n // Quick existence and directory check\n const existsTimer = PerformanceTimer.start(logger, `Checking directory: ${absoluteRootDir}`);\n if (!await storage.exists(absoluteRootDir) || !await storage.isDirectory(absoluteRootDir)) {\n existsTimer.end(`Directory not found or not a directory: ${absoluteRootDir}`);\n timer.end(`Directory invalid: ${rootDir}`);\n return packageMap;\n }\n existsTimer.end(`Directory verified: ${absoluteRootDir}`);\n\n // Get all items and process in parallel\n const listTimer = PerformanceTimer.start(logger, `Listing contents: ${absoluteRootDir}`);\n const items = await storage.listFiles(absoluteRootDir);\n listTimer.end(`Listed ${items.length} items`);\n\n // Create batched promises for better performance\n const BATCH_SIZE = 10; // Process directories in batches to avoid overwhelming filesystem\n const batches = [];\n\n for (let i = 0; i < items.length; i += BATCH_SIZE) {\n const batch = items.slice(i, i + BATCH_SIZE);\n batches.push(batch);\n }\n\n const processTimer = PerformanceTimer.start(logger, `Processing ${batches.length} batches of directories`);\n\n for (const batch of batches) {\n const batchPromises = batch.map(async (item: string) => {\n const itemPath = path.join(absoluteRootDir, item);\n try {\n if (await storage.isDirectory(itemPath)) {\n const packageJsonPath = path.join(itemPath, 'package.json');\n\n if (await storage.exists(packageJsonPath)) {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath);\n\n if (packageJson.name) {\n const relativePath 
= path.relative(process.cwd(), itemPath);\n return { name: packageJson.name, path: relativePath };\n }\n }\n }\n } catch (error: any) {\n logger.debug(`Skipped ${itemPath}: ${error.message || error}`);\n }\n return null;\n });\n\n const batchResults = await Promise.all(batchPromises);\n\n for (const result of batchResults) {\n if (result) {\n packageMap.set(result.name, result.path);\n logger.debug(`Found package: ${result.name} at ${result.path}`);\n }\n }\n }\n\n processTimer.end(`Processed ${items.length} directories in ${batches.length} batches`);\n logger.verbose(`Found ${packageMap.size} packages in ${items.length} subdirectories`);\n } catch (error) {\n logger.warn(`Failed to read directory ${absoluteRootDir}: ${error}`);\n }\n\n timer.end(`Found ${packageMap.size} packages in: ${rootDir}`);\n return packageMap;\n};\n\n// Parallel scope processing for better performance\nexport const findPackagesByScope = async (\n dependencies: Record<string, string>,\n scopeRoots: Record<string, string>,\n storage: any\n): Promise<Map<string, string>> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Finding packages by scope (optimized)');\n const workspacePackages = new Map<string, string>();\n\n logger.silly(`Checking dependencies against scope roots: ${JSON.stringify(scopeRoots)}`);\n\n // Process all scopes in parallel for maximum performance\n const scopeTimer = PerformanceTimer.start(logger, 'Parallel scope scanning');\n const scopePromises = Object.entries(scopeRoots).map(async ([scope, rootDir]) => {\n logger.verbose(`Scanning scope ${scope} at root directory: ${rootDir}`);\n const scopePackages = await scanDirectoryForPackages(rootDir, storage);\n\n // Filter packages that match the scope\n const matchingPackages: Array<[string, string]> = [];\n for (const [packageName, packagePath] of scopePackages) {\n if (packageName.startsWith(scope)) {\n matchingPackages.push([packageName, packagePath]);\n logger.debug(`Registered package: ${packageName} -> ${packagePath}`);\n }\n }\n return { scope, packages: matchingPackages };\n });\n\n const allScopeResults = await Promise.all(scopePromises);\n\n // Aggregate all packages from all scopes\n const allPackages = new Map<string, string>();\n for (const { scope, packages } of allScopeResults) {\n for (const [packageName, packagePath] of packages) {\n allPackages.set(packageName, packagePath);\n }\n }\n\n scopeTimer.end(`Scanned ${Object.keys(scopeRoots).length} scope roots, found ${allPackages.size} packages`);\n\n // Match dependencies to available packages\n const matchTimer = PerformanceTimer.start(logger, 'Matching dependencies to packages');\n for (const [depName, depVersion] of Object.entries(dependencies)) {\n logger.debug(`Processing dependency: ${depName}@${depVersion}`);\n\n if (allPackages.has(depName)) {\n const packagePath = allPackages.get(depName)!;\n workspacePackages.set(depName, packagePath);\n logger.verbose(`Found sibling package: ${depName} at ${packagePath}`);\n }\n }\n matchTimer.end(`Matched ${workspacePackages.size} dependencies to workspace packages`);\n\n timer.end(`Found ${workspacePackages.size} packages to link`);\n return workspacePackages;\n};\n\n// Utility to collect all dependencies from package.json files efficiently\nexport const collectAllDependencies = (packageJsonFiles: PackageJsonLocation[]): Record<string, string> => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Collecting all dependencies');\n\n const allDependencies: Record<string, string> = 
{};\n for (const { packageJson } of packageJsonFiles) {\n Object.assign(allDependencies, packageJson.dependencies);\n Object.assign(allDependencies, packageJson.devDependencies);\n Object.assign(allDependencies, packageJson.peerDependencies);\n }\n\n timer.end(`Collected ${Object.keys(allDependencies).length} unique dependencies`);\n return allDependencies;\n};\n\n// Utility to check for file: dependencies\nexport const checkForFileDependencies = (packageJsonFiles: PackageJsonLocation[]): void => {\n const logger = getLogger();\n const timer = PerformanceTimer.start(logger, 'Checking for file: dependencies');\n const filesWithFileDepedencies: Array<{path: string, dependencies: string[]}> = [];\n\n for (const { path: packagePath, packageJson, relativePath } of packageJsonFiles) {\n const fileDeps: string[] = [];\n\n // Check all dependency types for file: paths\n const allDeps = {\n ...packageJson.dependencies,\n ...packageJson.devDependencies,\n ...packageJson.peerDependencies\n };\n\n for (const [name, version] of Object.entries(allDeps)) {\n if (version.startsWith('file:')) {\n fileDeps.push(`${name}: ${version}`);\n }\n }\n\n if (fileDeps.length > 0) {\n filesWithFileDepedencies.push({\n path: relativePath,\n dependencies: fileDeps\n });\n }\n }\n\n if (filesWithFileDepedencies.length > 0) {\n logger.warn('⚠️ WARNING: Found file: dependencies that should not be committed:');\n for (const file of filesWithFileDepedencies) {\n logger.warn(` 📄 ${file.path}:`);\n for (const dep of file.dependencies) {\n logger.warn(` - ${dep}`);\n }\n }\n logger.warn('');\n logger.warn('💡 Remember to run \"kodrdriv unlink\" before committing to restore registry versions!');\n logger.warn(' Or add a pre-commit hook to prevent accidental commits of linked dependencies.');\n }\n\n timer.end(`Checked ${packageJsonFiles.length} files, found ${filesWithFileDepedencies.length} with file: 
dependencies`);\n};\n"],"names":["PerformanceTimer","start","logger","operation","verbose","end","duration","Date","now","startTime","EXCLUDED_DIRECTORIES","batchReadPackageJsonFiles","packageJsonPaths","storage","rootDir","getLogger","timer","length","readPromises","map","packageJsonPath","packageJsonContent","readFile","parsed","safeJsonParse","packageJson","validatePackageJson","relativePath","path","relative","dirname","error","debug","message","results","Promise","all","validResults","filter","result","findAllPackageJsonFiles","scanForPaths","currentDir","depth","exists","isDirectory","items","listFiles","foundPaths","includes","join","push","subdirPromises","item","itemPath","subdirResults","subdirPaths","pathsTimer","allPaths","packageJsonFiles"],"mappings":";;;;AAAA,uDAAoD,SAAA,gBAAA,CAAA,GAAA,EAAA,GAAA,EAAA,KAAA,EAAA;;;;;;;;;;;;;AAKpD;AACO,MAAMA,gBAAAA,CAAAA;AAST,IAAA,OAAOC,KAAAA,CAAMC,MAAW,EAAEC,SAAiB,EAAoB;AAC3DD,QAAAA,MAAAA,CAAOE,OAAO,CAAC,CAAC,cAAc,EAAED,SAAAA,CAAAA,CAAW,CAAA;AAC3C,QAAA,OAAO,IAAIH,gBAAAA,CAAiBE,MAAAA,CAAAA;AAChC,IAAA;AAEAG,IAAAA,GAAAA,CAAIF,SAAiB,EAAU;AAC3B,QAAA,MAAMG,WAAWC,IAAAA,CAAKC,GAAG,EAAA,GAAK,IAAI,CAACC,SAAS;AAC5C,QAAA,IAAI,CAACP,MAAM,CAACE,OAAO,CAAC,CAAC,eAAe,EAAED,SAAAA,CAAU,EAAE,EAAEG,QAAAA,CAAS,GAAG,CAAC,CAAA;QACjE,OAAOA,QAAAA;AACX,IAAA;AAdA,IAAA,WAAA,CAAYJ,MAAW,CAAE;AAHzB,QAAA,gBAAA,CAAA,IAAA,EAAQO,aAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQP,UAAR,MAAA,CAAA;QAGI,IAAI,CAACA,MAAM,GAAGA,MAAAA;AACd,QAAA,IAAI,CAACO,SAAS,GAAGF,IAAAA,CAAKC,GAAG,EAAA;AAC7B,IAAA;AAYJ;AAeA,MAAME,oBAAAA,GAAuB;AACzB,IAAA,cAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,UAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,OAAA;AACA,IAAA,KAAA;AACA,IAAA,QAAA;AACA,IAAA,QAAA;AACA,IAAA;AACH,CAAA;AAED;AACO,MAAMC,yBAAAA,GAA4B,OACrCC,gBAAAA,EACAC,OAAAA,EACAC,OAAAA,GAAAA;AAEA,IAAA,MAAMZ,MAAAA,GAASa,SAAAA,EAAAA;AACf,IAAA,MAAMC,KAAAA,GAAQhB,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,CAAC,cAAc,EAAEU,gBAAAA,CAAiBK,MAAM,CAAC,mBAAmB,CAAC,CAAA;AAE1G,IAAA,MAAMC,YAAAA,GAAeN,gBAAAA,CAAiBO,GAAG,CAAC,OAAOC,eAAAA,GAAAA;QAC7C,IAAI;AACA,YAAA,MAAMC,kBAAAA,GAAqB,MAAMR,OAAAA,CAAQS,QAAQ,CAACF,eAAAA,EAAiB,OAAA,CAAA;YACnE,MAAMG,MAAAA,GAASC,cAAcH,kBAAAA,EAAoBD,eAAAA,CAAAA;YACjD,MAAMK,WAAAA,GAAcC,mBAAAA,CAAoBH,MAAAA,EAAQH,eAAAA,EAAiB,KAAA,CAAA;AACjE,YAAA,MAAMO,eAAeC,aAAAA,CAAKC,QAAQ,CAACf,OAAAA,EAASc,aAAAA,CAAKE,OAAO,CAACV,eAAAA,CAAAA,CAAAA;YAEzD,OAAO;gBACHQ,IAAAA,EAAMR,eAAAA;AACNK,gBAAAA,WAAAA;AACAE,gBAAAA,YAAAA,EAAcA,YAAAA,IAAgB;AAClC,aAAA;AACJ,QAAA,CAAA,CAAE,OAAOI,KAAAA,EAAY;YACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,gCAAgC,EAAEZ,gBAAgB,EAAE,EAAEW,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;YACnF,OAAO,IAAA;AACX,QAAA;AACJ,IAAA,CAAA,CAAA;AAEA,IAAA,MAAMC,OAAAA,GAAU,MAAMC,OAAAA,CAAQC,GAAG,CAAClB,YAAAA,CAAAA;AAClC,IAAA,MAAMmB,eAAeH,OAAAA,CAAQI,MAAM,CAAC,CAACC,SAA0CA,MAAAA,KAAW,IAAA,CAAA;AAE1FvB,IAAAA,KAAAA,CAAMX,GAAG,CAAC,CAAC,kBAAkB,EAAEgC,YAAAA,CAAapB,MAAM,CAAC,CAAC,EAAEL,gBAAAA,CAAiBK,MAAM,CAAC,mBAAmB,CAAC,CAAA;IAClG,OAAOoB,YAAAA;AACX;AAEA;AACO,MAAMG,uBAAAA,GAA0B,OAAO1B,OAAAA,EAAiBD,OAAAA,GAAAA;AAC3D,IAAA,MAAMX,MAAAA,GAASa,SAAAA,EAAAA;AACf,IAAA,MAAMC,KAAAA,GAAQhB,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,2CAAA,CAAA;AAK7C,IAAA,MAAMuC,YAAAA,GAAe,OAAOC,UAAAA,EAAoBC,KAAAA,GAAgB,CAAC,GAAA;;AAE7D,QAAA,IAAIA,QAAQ,CAAA,EAAG;AACX,YAAA,OAAO,EAAE;AACb,QAAA;QAEA,IAAI;YACA,IAAI,CAAC,MAAM9B,OAAAA,CAAQ+B,MAAM,CAACF,UAAAA,CAAAA,IAAe,CAAC,MAAM7B,OAAAA,CAAQgC,WAAW,CAACH,UAAAA,CAAAA,EAAa;AAC7E,gBAAA,OAAO,EAAE;AACb,YAAA;AAEA,YAAA,MAAMI,KAAAA,GAAQ,MAAMjC,OAAAA,CAAQkC,SAAS,CAACL,UAAAA,CAAAA;AACtC,YAAA,MAAMM,aAAuB,EAAE;;YAG/B,IAAIF,KAAAA,CAAMG,QAAQ,CAAC,cAAA,CAAA,EAAiB;AA
ChC,gBAAA,MAAM7B,eAAAA,GAAkBQ,aAAAA,CAAKsB,IAAI,CAACR,UAAAA,EAAY,cAAA,CAAA;AAC9CM,gBAAAA,UAAAA,CAAWG,IAAI,CAAC/B,eAAAA,CAAAA;AACpB,YAAA;;AAGA,YAAA,MAAMgC,iBAAsC,EAAE;YAC9C,KAAK,MAAMC,QAAQP,KAAAA,CAAO;gBACtB,IAAIpC,oBAAAA,CAAqBuC,QAAQ,CAACI,IAAAA,CAAAA,EAAO;AACrC,oBAAA;AACJ,gBAAA;AAEA,gBAAA,MAAMC,QAAAA,GAAW1B,aAAAA,CAAKsB,IAAI,CAACR,UAAAA,EAAYW,IAAAA,CAAAA;gBACvCD,cAAAA,CAAeD,IAAI,CACd,CAAA,UAAA;oBACG,IAAI;AACA,wBAAA,IAAI,MAAMtC,OAAAA,CAAQgC,WAAW,CAACS,QAAAA,CAAAA,EAAW;4BACrC,OAAO,MAAMb,YAAAA,CAAaa,QAAAA,EAAUX,KAAAA,GAAQ,CAAA,CAAA;AAChD,wBAAA;AACJ,oBAAA,CAAA,CAAE,OAAOZ,KAAAA,EAAY;wBACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,kBAAkB,EAAEsB,SAAS,EAAE,EAAEvB,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;AAClE,oBAAA;AACA,oBAAA,OAAO,EAAE;gBACb,CAAA,GAAA,CAAA;AAER,YAAA;YAEA,IAAImB,cAAAA,CAAenC,MAAM,GAAG,CAAA,EAAG;AAC3B,gBAAA,MAAMsC,aAAAA,GAAgB,MAAMpB,OAAAA,CAAQC,GAAG,CAACgB,cAAAA,CAAAA;gBACxC,KAAK,MAAMI,eAAeD,aAAAA,CAAe;AACrCP,oBAAAA,UAAAA,CAAWG,IAAI,CAAA,GAAIK,WAAAA,CAAAA;AACvB,gBAAA;AACJ,YAAA;YAEA,OAAOR,UAAAA;AACX,QAAA,CAAA,CAAE,OAAOjB,KAAAA,EAAY;YACjB7B,MAAAA,CAAO8B,KAAK,CAAC,CAAC,yBAAyB,EAAEU,WAAW,EAAE,EAAEX,KAAAA,CAAME,OAAO,CAAA,CAAE,CAAA;AACvE,YAAA,OAAO,EAAE;AACb,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMwB,UAAAA,GAAazD,gBAAAA,CAAiBC,KAAK,CAACC,MAAAA,EAAQ,gCAAA,CAAA;IAClD,MAAMwD,QAAAA,GAAW,MAAMjB,YAAAA,CAAa3B,OAAAA,CAAAA;IACpC2C,UAAAA,CAAWpD,GAAG,CAAC,CAAC,MAAM,EAAEqD,QAAAA,CAASzC,MAAM,CAAC,wBAAwB,CAAC,CAAA;;AAGjE,IAAA,MAAM0C,gBAAAA,GAAmB,MAAMhD,yBAAAA,CAA0B+C,QAAAA,EAAU7C,OAAAA,EAASC,OAAAA,CAAAA;IAE5EE,KAAAA,CAAMX,GAAG,CAAC,CAAC,MAAM,EAAEsD,gBAAAA,CAAiB1C,MAAM,CAAC,yBAAyB,CAAC,CAAA;IACrE,OAAO0C,gBAAAA;AACX;;;;"}
@@ -1,6 +1,6 @@
  import path__default from 'path';
  import { getLogger } from '../logging.js';
- import { safeJsonParse, validatePackageJson } from './validation.js';
+ import { safeJsonParse, validatePackageJson } from '@eldrforge/git-tools';
 
  const EXCLUDED_DIRECTORIES = [
  'node_modules',
@@ -1 +1 @@
- {"version":3,"file":"safety.js","sources":["../../src/util/safety.ts"],"sourcesContent":["import path from 'path';\nimport { getLogger } from '../logging';\nimport { safeJsonParse, validatePackageJson } from './validation';\n\ninterface PackageJson {\n name?: string;\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n peerDependencies?: Record<string, string>;\n}\n\ninterface PackageJsonLocation {\n path: string;\n packageJson: PackageJson;\n relativePath: string;\n}\n\ninterface FileDependencyIssue {\n packagePath: string;\n dependencies: Array<{\n name: string;\n version: string;\n dependencyType: 'dependencies' | 'devDependencies' | 'peerDependencies';\n }>;\n}\n\nconst EXCLUDED_DIRECTORIES = [\n 'node_modules',\n 'dist',\n 'build',\n 'coverage',\n '.git',\n '.next',\n '.nuxt',\n 'out',\n 'public',\n 'static',\n 'assets'\n];\n\nconst findAllPackageJsonFiles = async (rootDir: string, storage: any): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const packageJsonFiles: PackageJsonLocation[] = [];\n\n const scanDirectory = async (currentDir: string, depth: number = 0): Promise<void> => {\n // Prevent infinite recursion and overly deep scanning\n if (depth > 5) {\n return;\n }\n\n try {\n if (!await storage.exists(currentDir) || !await storage.isDirectory(currentDir)) {\n return;\n }\n\n const items = await storage.listFiles(currentDir);\n\n // Check for package.json in current directory\n if (items.includes('package.json')) {\n const packageJsonPath = path.join(currentDir, 'package.json');\n try {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath);\n const relativePath = path.relative(rootDir, currentDir);\n\n packageJsonFiles.push({\n path: packageJsonPath,\n packageJson,\n relativePath: relativePath || '.'\n });\n\n logger.debug(`Found package.json at: ${relativePath || '.'}`);\n } catch (error: any) {\n logger.debug(`Skipped invalid package.json at ${packageJsonPath}: ${error.message}`);\n }\n }\n\n // Scan subdirectories, excluding build/generated directories\n for (const item of items) {\n if (EXCLUDED_DIRECTORIES.includes(item)) {\n continue;\n }\n\n const itemPath = path.join(currentDir, item);\n try {\n if (await storage.isDirectory(itemPath)) {\n await scanDirectory(itemPath, depth + 1);\n }\n } catch (error: any) {\n // Skip directories that can't be accessed\n logger.debug(`Skipped directory ${itemPath}: ${error.message}`);\n continue;\n }\n }\n } catch (error: any) {\n logger.debug(`Failed to scan directory ${currentDir}: ${error.message}`);\n }\n };\n\n await scanDirectory(rootDir);\n\n logger.debug(`Found ${packageJsonFiles.length} package.json file(s) in directory tree`);\n return packageJsonFiles;\n};\n\n/**\n * Checks for file: dependencies in package.json files that should not be committed\n * @param storage Storage utility instance\n * @param rootDir Root directory to scan (defaults to current working directory)\n * @returns Array of issues found, empty array if no issues\n */\nexport const checkForFileDependencies = async (storage: any, rootDir: string = process.cwd()): Promise<FileDependencyIssue[]> => {\n const logger = getLogger();\n const issues: FileDependencyIssue[] = [];\n\n try {\n const packageJsonFiles = await findAllPackageJsonFiles(rootDir, storage);\n\n for (const { packageJson, relativePath } of packageJsonFiles) {\n const fileDeps: 
Array<{name: string, version: string, dependencyType: 'dependencies' | 'devDependencies' | 'peerDependencies'}> = [];\n\n // Check all dependency types for file: paths\n const dependencyChecks = [\n { deps: packageJson.dependencies, type: 'dependencies' as const },\n { deps: packageJson.devDependencies, type: 'devDependencies' as const },\n { deps: packageJson.peerDependencies, type: 'peerDependencies' as const }\n ];\n\n for (const { deps, type } of dependencyChecks) {\n if (deps) {\n for (const [name, version] of Object.entries(deps)) {\n if (version.startsWith('file:')) {\n fileDeps.push({ name, version, dependencyType: type });\n }\n }\n }\n }\n\n if (fileDeps.length > 0) {\n issues.push({\n packagePath: relativePath,\n dependencies: fileDeps\n });\n }\n }\n } catch (error: any) {\n logger.debug(`Failed to check for file dependencies: ${error.message}`);\n }\n\n return issues;\n};\n\n/**\n * Logs file dependency issues in a user-friendly format\n * @param issues Array of file dependency issues\n * @param context Context for the warning (e.g., 'commit', 'link check')\n */\nexport const logFileDependencyWarning = (issues: FileDependencyIssue[], context: string = 'operation'): void => {\n const logger = getLogger();\n\n if (issues.length === 0) {\n return;\n }\n\n logger.warn(`⚠️ WARNING: Found file: dependencies that should not be committed during ${context}:`);\n for (const issue of issues) {\n logger.warn(` 📄 ${issue.packagePath}:`);\n for (const dep of issue.dependencies) {\n logger.warn(` - ${dep.name}: ${dep.version} (${dep.dependencyType})`);\n }\n }\n logger.warn('');\n};\n\n/**\n * Provides suggestions for resolving file dependency issues\n * @param hasUnlinkCapability Whether the current context supports unlinking\n */\nexport const logFileDependencySuggestions = (hasUnlinkCapability: boolean = true): void => {\n const logger = getLogger();\n\n logger.warn('💡 To resolve this:');\n if (hasUnlinkCapability) {\n logger.warn(' 1. Run \"kodrdriv unlink\" to restore registry versions');\n logger.warn(' 2. Complete your commit');\n logger.warn(' 3. Run \"kodrdriv link\" again for local development');\n } else {\n logger.warn(' 1. Manually restore registry versions in package.json files');\n logger.warn(' 2. Complete your commit');\n logger.warn(' 3. 
Re-link your local dependencies');\n }\n logger.warn('');\n logger.warn(' Or to bypass this check:');\n logger.warn(' - Add --skip-file-check flag to your command');\n logger.warn(' - Or use git commit --no-verify to skip all hooks');\n logger.warn('');\n};\n"],"names":["EXCLUDED_DIRECTORIES","findAllPackageJsonFiles","rootDir","storage","logger","getLogger","packageJsonFiles","scanDirectory","currentDir","depth","exists","isDirectory","items","listFiles","includes","packageJsonPath","path","join","packageJsonContent","readFile","parsed","safeJsonParse","packageJson","validatePackageJson","relativePath","relative","push","debug","error","message","item","itemPath","length","checkForFileDependencies","process","cwd","issues","fileDeps","dependencyChecks","deps","dependencies","type","devDependencies","peerDependencies","name","version","Object","entries","startsWith","dependencyType","packagePath","logFileDependencyWarning","context","warn","issue","dep","logFileDependencySuggestions","hasUnlinkCapability"],"mappings":";;;;AA0BA,MAAMA,oBAAAA,GAAuB;AACzB,IAAA,cAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,UAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,OAAA;AACA,IAAA,KAAA;AACA,IAAA,QAAA;AACA,IAAA,QAAA;AACA,IAAA;AACH,CAAA;AAED,MAAMC,uBAAAA,GAA0B,OAAOC,OAAAA,EAAiBC,OAAAA,GAAAA;AACpD,IAAA,MAAMC,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAMC,mBAA0C,EAAE;AAElD,IAAA,MAAMC,aAAAA,GAAgB,OAAOC,UAAAA,EAAoBC,KAAAA,GAAgB,CAAC,GAAA;;AAE9D,QAAA,IAAIA,QAAQ,CAAA,EAAG;AACX,YAAA;AACJ,QAAA;QAEA,IAAI;YACA,IAAI,CAAC,MAAMN,OAAAA,CAAQO,MAAM,CAACF,UAAAA,CAAAA,IAAe,CAAC,MAAML,OAAAA,CAAQQ,WAAW,CAACH,UAAAA,CAAAA,EAAa;AAC7E,gBAAA;AACJ,YAAA;AAEA,YAAA,MAAMI,KAAAA,GAAQ,MAAMT,OAAAA,CAAQU,SAAS,CAACL,UAAAA,CAAAA;;YAGtC,IAAII,KAAAA,CAAME,QAAQ,CAAC,cAAA,CAAA,EAAiB;AAChC,gBAAA,MAAMC,eAAAA,GAAkBC,aAAAA,CAAKC,IAAI,CAACT,UAAAA,EAAY,cAAA,CAAA;gBAC9C,IAAI;AACA,oBAAA,MAAMU,kBAAAA,GAAqB,MAAMf,OAAAA,CAAQgB,QAAQ,CAACJ,eAAAA,EAAiB,OAAA,CAAA;oBACnE,MAAMK,MAAAA,GAASC,cAAcH,kBAAAA,EAAoBH,eAAAA,CAAAA;oBACjD,MAAMO,WAAAA,GAAcC,oBAAoBH,MAAAA,EAAQL,eAAAA,CAAAA;AAChD,oBAAA,MAAMS,YAAAA,GAAeR,aAAAA,CAAKS,QAAQ,CAACvB,OAAAA,EAASM,UAAAA,CAAAA;AAE5CF,oBAAAA,gBAAAA,CAAiBoB,IAAI,CAAC;wBAClBV,IAAAA,EAAMD,eAAAA;AACNO,wBAAAA,WAAAA;AACAE,wBAAAA,YAAAA,EAAcA,YAAAA,IAAgB;AAClC,qBAAA,CAAA;AAEApB,oBAAAA,MAAAA,CAAOuB,KAAK,CAAC,CAAC,uBAAuB,EAAEH,gBAAgB,GAAA,CAAA,CAAK,CAAA;AAChE,gBAAA,CAAA,CAAE,OAAOI,KAAAA,EAAY;oBACjBxB,MAAAA,CAAOuB,KAAK,CAAC,CAAC,gCAAgC,EAAEZ,gBAAgB,EAAE,EAAEa,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AACvF,gBAAA;AACJ,YAAA;;YAGA,KAAK,MAAMC,QAAQlB,KAAAA,CAAO;gBACtB,IAAIZ,oBAAAA,CAAqBc,QAAQ,CAACgB,IAAAA,CAAAA,EAAO;AACrC,oBAAA;AACJ,gBAAA;AAEA,gBAAA,MAAMC,QAAAA,GAAWf,aAAAA,CAAKC,IAAI,CAACT,UAAAA,EAAYsB,IAAAA,CAAAA;gBACvC,IAAI;AACA,oBAAA,IAAI,MAAM3B,OAAAA,CAAQQ,WAAW,CAACoB,QAAAA,CAAAA,EAAW;wBACrC,MAAMxB,aAAAA,CAAcwB,UAAUtB,KAAAA,GAAQ,CAAA,CAAA;AAC1C,oBAAA;AACJ,gBAAA,CAAA,CAAE,OAAOmB,KAAAA,EAAY;;oBAEjBxB,MAAAA,CAAOuB,KAAK,CAAC,CAAC,kBAAkB,EAAEI,SAAS,EAAE,EAAEH,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AAC9D,oBAAA;AACJ,gBAAA;AACJ,YAAA;AACJ,QAAA,CAAA,CAAE,OAAOD,KAAAA,EAAY;YACjBxB,MAAAA,CAAOuB,KAAK,CAAC,CAAC,yBAAyB,EAAEnB,WAAW,EAAE,EAAEoB,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AAC3E,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMtB,aAAAA,CAAcL,OAAAA,CAAAA;IAEpBE,MAAAA,CAAOuB,KAAK,CAAC,CAAC,MAAM,EAAErB,gBAAAA,CAAiB0B,MAAM,CAAC,uCAAuC,CAAC,CAAA;IACtF,OAAO1B,gBAAAA;AACX,CAAA;AAEA;;;;;UAMa2B,wBAAAA,GAA2B,OAAO9B,SAAcD,OAAAA,GAAkBgC,OAAAA,CAAQC,GAAG,EAAE,GAAA;AACxF,IAAA,MAAM/B,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAM+B,SAAgC,EAAE;IAExC,IAAI;QACA,MAAM9B,gBAAAA,GAAmB,MAAML,uBAAAA,CAAwBC,OAAAA,EAASC,OAAAA,CAAAA;AAEhE,QAA
A,KAAK,MAAM,EAAEmB,WAAW,EAAEE,YAAY,EAAE,IAAIlB,gBAAAA,CAAkB;AAC1D,YAAA,MAAM+B,WAA4H,EAAE;;AAGpI,YAAA,MAAMC,gBAAAA,GAAmB;AACrB,gBAAA;AAAEC,oBAAAA,IAAAA,EAAMjB,YAAYkB,YAAY;oBAAEC,IAAAA,EAAM;AAAwB,iBAAA;AAChE,gBAAA;AAAEF,oBAAAA,IAAAA,EAAMjB,YAAYoB,eAAe;oBAAED,IAAAA,EAAM;AAA2B,iBAAA;AACtE,gBAAA;AAAEF,oBAAAA,IAAAA,EAAMjB,YAAYqB,gBAAgB;oBAAEF,IAAAA,EAAM;AAA4B;AAC3E,aAAA;AAED,YAAA,KAAK,MAAM,EAAEF,IAAI,EAAEE,IAAI,EAAE,IAAIH,gBAAAA,CAAkB;AAC3C,gBAAA,IAAIC,IAAAA,EAAM;oBACN,KAAK,MAAM,CAACK,IAAAA,EAAMC,OAAAA,CAAQ,IAAIC,MAAAA,CAAOC,OAAO,CAACR,IAAAA,CAAAA,CAAO;wBAChD,IAAIM,OAAAA,CAAQG,UAAU,CAAC,OAAA,CAAA,EAAU;AAC7BX,4BAAAA,QAAAA,CAASX,IAAI,CAAC;AAAEkB,gCAAAA,IAAAA;AAAMC,gCAAAA,OAAAA;gCAASI,cAAAA,EAAgBR;AAAK,6BAAA,CAAA;AACxD,wBAAA;AACJ,oBAAA;AACJ,gBAAA;AACJ,YAAA;YAEA,IAAIJ,QAAAA,CAASL,MAAM,GAAG,CAAA,EAAG;AACrBI,gBAAAA,MAAAA,CAAOV,IAAI,CAAC;oBACRwB,WAAAA,EAAa1B,YAAAA;oBACbgB,YAAAA,EAAcH;AAClB,iBAAA,CAAA;AACJ,YAAA;AACJ,QAAA;AACJ,IAAA,CAAA,CAAE,OAAOT,KAAAA,EAAY;AACjBxB,QAAAA,MAAAA,CAAOuB,KAAK,CAAC,CAAC,uCAAuC,EAAEC,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AAC1E,IAAA;IAEA,OAAOO,MAAAA;AACX;AAEA;;;;AAIC,IACM,MAAMe,wBAAAA,GAA2B,CAACf,MAAAA,EAA+BgB,UAAkB,WAAW,GAAA;AACjG,IAAA,MAAMhD,MAAAA,GAASC,SAAAA,EAAAA;IAEf,IAAI+B,MAAAA,CAAOJ,MAAM,KAAK,CAAA,EAAG;AACrB,QAAA;AACJ,IAAA;AAEA5B,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,CAAC,0EAA0E,EAAED,OAAAA,CAAQ,CAAC,CAAC,CAAA;IACnG,KAAK,MAAME,SAASlB,MAAAA,CAAQ;QACxBhC,MAAAA,CAAOiD,IAAI,CAAC,CAAC,KAAK,EAAEC,KAAAA,CAAMJ,WAAW,CAAC,CAAC,CAAC,CAAA;AACxC,QAAA,KAAK,MAAMK,GAAAA,IAAOD,KAAAA,CAAMd,YAAY,CAAE;AAClCpC,YAAAA,MAAAA,CAAOiD,IAAI,CAAC,CAAC,MAAM,EAAEE,GAAAA,CAAIX,IAAI,CAAC,EAAE,EAAEW,GAAAA,CAAIV,OAAO,CAAC,EAAE,EAAEU,IAAIN,cAAc,CAAC,CAAC,CAAC,CAAA;AAC3E,QAAA;AACJ,IAAA;AACA7C,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,EAAA,CAAA;AAChB;AAEA;;;AAGC,IACM,MAAMG,4BAAAA,GAA+B,CAACC,sBAA+B,IAAI,GAAA;AAC5E,IAAA,MAAMrD,MAAAA,GAASC,SAAAA,EAAAA;AAEfD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,qBAAA,CAAA;AACZ,IAAA,IAAII,mBAAAA,EAAqB;AACrBrD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,0DAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,4BAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,uDAAA,CAAA;IAChB,CAAA,MAAO;AACHjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,gEAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,4BAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,uCAAA,CAAA;AAChB,IAAA;AACAjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,EAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,6BAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,iDAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,sDAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,EAAA,CAAA;AAChB;;;;"}
+ {"version":3,"file":"safety.js","sources":["../../src/util/safety.ts"],"sourcesContent":["import path from 'path';\nimport { getLogger } from '../logging';\nimport { safeJsonParse, validatePackageJson } from '@eldrforge/git-tools';\n\ninterface PackageJson {\n name?: string;\n dependencies?: Record<string, string>;\n devDependencies?: Record<string, string>;\n peerDependencies?: Record<string, string>;\n}\n\ninterface PackageJsonLocation {\n path: string;\n packageJson: PackageJson;\n relativePath: string;\n}\n\ninterface FileDependencyIssue {\n packagePath: string;\n dependencies: Array<{\n name: string;\n version: string;\n dependencyType: 'dependencies' | 'devDependencies' | 'peerDependencies';\n }>;\n}\n\nconst EXCLUDED_DIRECTORIES = [\n 'node_modules',\n 'dist',\n 'build',\n 'coverage',\n '.git',\n '.next',\n '.nuxt',\n 'out',\n 'public',\n 'static',\n 'assets'\n];\n\nconst findAllPackageJsonFiles = async (rootDir: string, storage: any): Promise<PackageJsonLocation[]> => {\n const logger = getLogger();\n const packageJsonFiles: PackageJsonLocation[] = [];\n\n const scanDirectory = async (currentDir: string, depth: number = 0): Promise<void> => {\n // Prevent infinite recursion and overly deep scanning\n if (depth > 5) {\n return;\n }\n\n try {\n if (!await storage.exists(currentDir) || !await storage.isDirectory(currentDir)) {\n return;\n }\n\n const items = await storage.listFiles(currentDir);\n\n // Check for package.json in current directory\n if (items.includes('package.json')) {\n const packageJsonPath = path.join(currentDir, 'package.json');\n try {\n const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');\n const parsed = safeJsonParse(packageJsonContent, packageJsonPath);\n const packageJson = validatePackageJson(parsed, packageJsonPath);\n const relativePath = path.relative(rootDir, currentDir);\n\n packageJsonFiles.push({\n path: packageJsonPath,\n packageJson,\n relativePath: relativePath || '.'\n });\n\n logger.debug(`Found package.json at: ${relativePath || '.'}`);\n } catch (error: any) {\n logger.debug(`Skipped invalid package.json at ${packageJsonPath}: ${error.message}`);\n }\n }\n\n // Scan subdirectories, excluding build/generated directories\n for (const item of items) {\n if (EXCLUDED_DIRECTORIES.includes(item)) {\n continue;\n }\n\n const itemPath = path.join(currentDir, item);\n try {\n if (await storage.isDirectory(itemPath)) {\n await scanDirectory(itemPath, depth + 1);\n }\n } catch (error: any) {\n // Skip directories that can't be accessed\n logger.debug(`Skipped directory ${itemPath}: ${error.message}`);\n continue;\n }\n }\n } catch (error: any) {\n logger.debug(`Failed to scan directory ${currentDir}: ${error.message}`);\n }\n };\n\n await scanDirectory(rootDir);\n\n logger.debug(`Found ${packageJsonFiles.length} package.json file(s) in directory tree`);\n return packageJsonFiles;\n};\n\n/**\n * Checks for file: dependencies in package.json files that should not be committed\n * @param storage Storage utility instance\n * @param rootDir Root directory to scan (defaults to current working directory)\n * @returns Array of issues found, empty array if no issues\n */\nexport const checkForFileDependencies = async (storage: any, rootDir: string = process.cwd()): Promise<FileDependencyIssue[]> => {\n const logger = getLogger();\n const issues: FileDependencyIssue[] = [];\n\n try {\n const packageJsonFiles = await findAllPackageJsonFiles(rootDir, storage);\n\n for (const { packageJson, relativePath } of packageJsonFiles) {\n const 
fileDeps: Array<{name: string, version: string, dependencyType: 'dependencies' | 'devDependencies' | 'peerDependencies'}> = [];\n\n // Check all dependency types for file: paths\n const dependencyChecks = [\n { deps: packageJson.dependencies, type: 'dependencies' as const },\n { deps: packageJson.devDependencies, type: 'devDependencies' as const },\n { deps: packageJson.peerDependencies, type: 'peerDependencies' as const }\n ];\n\n for (const { deps, type } of dependencyChecks) {\n if (deps) {\n for (const [name, version] of Object.entries(deps)) {\n if (version.startsWith('file:')) {\n fileDeps.push({ name, version, dependencyType: type });\n }\n }\n }\n }\n\n if (fileDeps.length > 0) {\n issues.push({\n packagePath: relativePath,\n dependencies: fileDeps\n });\n }\n }\n } catch (error: any) {\n logger.debug(`Failed to check for file dependencies: ${error.message}`);\n }\n\n return issues;\n};\n\n/**\n * Logs file dependency issues in a user-friendly format\n * @param issues Array of file dependency issues\n * @param context Context for the warning (e.g., 'commit', 'link check')\n */\nexport const logFileDependencyWarning = (issues: FileDependencyIssue[], context: string = 'operation'): void => {\n const logger = getLogger();\n\n if (issues.length === 0) {\n return;\n }\n\n logger.warn(`⚠️ WARNING: Found file: dependencies that should not be committed during ${context}:`);\n for (const issue of issues) {\n logger.warn(` 📄 ${issue.packagePath}:`);\n for (const dep of issue.dependencies) {\n logger.warn(` - ${dep.name}: ${dep.version} (${dep.dependencyType})`);\n }\n }\n logger.warn('');\n};\n\n/**\n * Provides suggestions for resolving file dependency issues\n * @param hasUnlinkCapability Whether the current context supports unlinking\n */\nexport const logFileDependencySuggestions = (hasUnlinkCapability: boolean = true): void => {\n const logger = getLogger();\n\n logger.warn('💡 To resolve this:');\n if (hasUnlinkCapability) {\n logger.warn(' 1. Run \"kodrdriv unlink\" to restore registry versions');\n logger.warn(' 2. Complete your commit');\n logger.warn(' 3. Run \"kodrdriv link\" again for local development');\n } else {\n logger.warn(' 1. Manually restore registry versions in package.json files');\n logger.warn(' 2. Complete your commit');\n logger.warn(' 3. 
Re-link your local dependencies');\n }\n logger.warn('');\n logger.warn(' Or to bypass this check:');\n logger.warn(' - Add --skip-file-check flag to your command');\n logger.warn(' - Or use git commit --no-verify to skip all hooks');\n logger.warn('');\n};\n"],"names":["EXCLUDED_DIRECTORIES","findAllPackageJsonFiles","rootDir","storage","logger","getLogger","packageJsonFiles","scanDirectory","currentDir","depth","exists","isDirectory","items","listFiles","includes","packageJsonPath","path","join","packageJsonContent","readFile","parsed","safeJsonParse","packageJson","validatePackageJson","relativePath","relative","push","debug","error","message","item","itemPath","length","checkForFileDependencies","process","cwd","issues","fileDeps","dependencyChecks","deps","dependencies","type","devDependencies","peerDependencies","name","version","Object","entries","startsWith","dependencyType","packagePath","logFileDependencyWarning","context","warn","issue","dep","logFileDependencySuggestions","hasUnlinkCapability"],"mappings":";;;;AA0BA,MAAMA,oBAAAA,GAAuB;AACzB,IAAA,cAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,UAAA;AACA,IAAA,MAAA;AACA,IAAA,OAAA;AACA,IAAA,OAAA;AACA,IAAA,KAAA;AACA,IAAA,QAAA;AACA,IAAA,QAAA;AACA,IAAA;AACH,CAAA;AAED,MAAMC,uBAAAA,GAA0B,OAAOC,OAAAA,EAAiBC,OAAAA,GAAAA;AACpD,IAAA,MAAMC,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAMC,mBAA0C,EAAE;AAElD,IAAA,MAAMC,aAAAA,GAAgB,OAAOC,UAAAA,EAAoBC,KAAAA,GAAgB,CAAC,GAAA;;AAE9D,QAAA,IAAIA,QAAQ,CAAA,EAAG;AACX,YAAA;AACJ,QAAA;QAEA,IAAI;YACA,IAAI,CAAC,MAAMN,OAAAA,CAAQO,MAAM,CAACF,UAAAA,CAAAA,IAAe,CAAC,MAAML,OAAAA,CAAQQ,WAAW,CAACH,UAAAA,CAAAA,EAAa;AAC7E,gBAAA;AACJ,YAAA;AAEA,YAAA,MAAMI,KAAAA,GAAQ,MAAMT,OAAAA,CAAQU,SAAS,CAACL,UAAAA,CAAAA;;YAGtC,IAAII,KAAAA,CAAME,QAAQ,CAAC,cAAA,CAAA,EAAiB;AAChC,gBAAA,MAAMC,eAAAA,GAAkBC,aAAAA,CAAKC,IAAI,CAACT,UAAAA,EAAY,cAAA,CAAA;gBAC9C,IAAI;AACA,oBAAA,MAAMU,kBAAAA,GAAqB,MAAMf,OAAAA,CAAQgB,QAAQ,CAACJ,eAAAA,EAAiB,OAAA,CAAA;oBACnE,MAAMK,MAAAA,GAASC,cAAcH,kBAAAA,EAAoBH,eAAAA,CAAAA;oBACjD,MAAMO,WAAAA,GAAcC,oBAAoBH,MAAAA,EAAQL,eAAAA,CAAAA;AAChD,oBAAA,MAAMS,YAAAA,GAAeR,aAAAA,CAAKS,QAAQ,CAACvB,OAAAA,EAASM,UAAAA,CAAAA;AAE5CF,oBAAAA,gBAAAA,CAAiBoB,IAAI,CAAC;wBAClBV,IAAAA,EAAMD,eAAAA;AACNO,wBAAAA,WAAAA;AACAE,wBAAAA,YAAAA,EAAcA,YAAAA,IAAgB;AAClC,qBAAA,CAAA;AAEApB,oBAAAA,MAAAA,CAAOuB,KAAK,CAAC,CAAC,uBAAuB,EAAEH,gBAAgB,GAAA,CAAA,CAAK,CAAA;AAChE,gBAAA,CAAA,CAAE,OAAOI,KAAAA,EAAY;oBACjBxB,MAAAA,CAAOuB,KAAK,CAAC,CAAC,gCAAgC,EAAEZ,gBAAgB,EAAE,EAAEa,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AACvF,gBAAA;AACJ,YAAA;;YAGA,KAAK,MAAMC,QAAQlB,KAAAA,CAAO;gBACtB,IAAIZ,oBAAAA,CAAqBc,QAAQ,CAACgB,IAAAA,CAAAA,EAAO;AACrC,oBAAA;AACJ,gBAAA;AAEA,gBAAA,MAAMC,QAAAA,GAAWf,aAAAA,CAAKC,IAAI,CAACT,UAAAA,EAAYsB,IAAAA,CAAAA;gBACvC,IAAI;AACA,oBAAA,IAAI,MAAM3B,OAAAA,CAAQQ,WAAW,CAACoB,QAAAA,CAAAA,EAAW;wBACrC,MAAMxB,aAAAA,CAAcwB,UAAUtB,KAAAA,GAAQ,CAAA,CAAA;AAC1C,oBAAA;AACJ,gBAAA,CAAA,CAAE,OAAOmB,KAAAA,EAAY;;oBAEjBxB,MAAAA,CAAOuB,KAAK,CAAC,CAAC,kBAAkB,EAAEI,SAAS,EAAE,EAAEH,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AAC9D,oBAAA;AACJ,gBAAA;AACJ,YAAA;AACJ,QAAA,CAAA,CAAE,OAAOD,KAAAA,EAAY;YACjBxB,MAAAA,CAAOuB,KAAK,CAAC,CAAC,yBAAyB,EAAEnB,WAAW,EAAE,EAAEoB,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AAC3E,QAAA;AACJ,IAAA,CAAA;AAEA,IAAA,MAAMtB,aAAAA,CAAcL,OAAAA,CAAAA;IAEpBE,MAAAA,CAAOuB,KAAK,CAAC,CAAC,MAAM,EAAErB,gBAAAA,CAAiB0B,MAAM,CAAC,uCAAuC,CAAC,CAAA;IACtF,OAAO1B,gBAAAA;AACX,CAAA;AAEA;;;;;UAMa2B,wBAAAA,GAA2B,OAAO9B,SAAcD,OAAAA,GAAkBgC,OAAAA,CAAQC,GAAG,EAAE,GAAA;AACxF,IAAA,MAAM/B,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAM+B,SAAgC,EAAE;IAExC,IAAI;QACA,MAAM9B,gBAAAA,GAAmB,MAAML,uBAAAA,CAAwBC,OAAAA,EAASC,OAAAA,CAAAA;AAEhE,QAA
A,KAAK,MAAM,EAAEmB,WAAW,EAAEE,YAAY,EAAE,IAAIlB,gBAAAA,CAAkB;AAC1D,YAAA,MAAM+B,WAA4H,EAAE;;AAGpI,YAAA,MAAMC,gBAAAA,GAAmB;AACrB,gBAAA;AAAEC,oBAAAA,IAAAA,EAAMjB,YAAYkB,YAAY;oBAAEC,IAAAA,EAAM;AAAwB,iBAAA;AAChE,gBAAA;AAAEF,oBAAAA,IAAAA,EAAMjB,YAAYoB,eAAe;oBAAED,IAAAA,EAAM;AAA2B,iBAAA;AACtE,gBAAA;AAAEF,oBAAAA,IAAAA,EAAMjB,YAAYqB,gBAAgB;oBAAEF,IAAAA,EAAM;AAA4B;AAC3E,aAAA;AAED,YAAA,KAAK,MAAM,EAAEF,IAAI,EAAEE,IAAI,EAAE,IAAIH,gBAAAA,CAAkB;AAC3C,gBAAA,IAAIC,IAAAA,EAAM;oBACN,KAAK,MAAM,CAACK,IAAAA,EAAMC,OAAAA,CAAQ,IAAIC,MAAAA,CAAOC,OAAO,CAACR,IAAAA,CAAAA,CAAO;wBAChD,IAAIM,OAAAA,CAAQG,UAAU,CAAC,OAAA,CAAA,EAAU;AAC7BX,4BAAAA,QAAAA,CAASX,IAAI,CAAC;AAAEkB,gCAAAA,IAAAA;AAAMC,gCAAAA,OAAAA;gCAASI,cAAAA,EAAgBR;AAAK,6BAAA,CAAA;AACxD,wBAAA;AACJ,oBAAA;AACJ,gBAAA;AACJ,YAAA;YAEA,IAAIJ,QAAAA,CAASL,MAAM,GAAG,CAAA,EAAG;AACrBI,gBAAAA,MAAAA,CAAOV,IAAI,CAAC;oBACRwB,WAAAA,EAAa1B,YAAAA;oBACbgB,YAAAA,EAAcH;AAClB,iBAAA,CAAA;AACJ,YAAA;AACJ,QAAA;AACJ,IAAA,CAAA,CAAE,OAAOT,KAAAA,EAAY;AACjBxB,QAAAA,MAAAA,CAAOuB,KAAK,CAAC,CAAC,uCAAuC,EAAEC,KAAAA,CAAMC,OAAO,CAAA,CAAE,CAAA;AAC1E,IAAA;IAEA,OAAOO,MAAAA;AACX;AAEA;;;;AAIC,IACM,MAAMe,wBAAAA,GAA2B,CAACf,MAAAA,EAA+BgB,UAAkB,WAAW,GAAA;AACjG,IAAA,MAAMhD,MAAAA,GAASC,SAAAA,EAAAA;IAEf,IAAI+B,MAAAA,CAAOJ,MAAM,KAAK,CAAA,EAAG;AACrB,QAAA;AACJ,IAAA;AAEA5B,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,CAAC,0EAA0E,EAAED,OAAAA,CAAQ,CAAC,CAAC,CAAA;IACnG,KAAK,MAAME,SAASlB,MAAAA,CAAQ;QACxBhC,MAAAA,CAAOiD,IAAI,CAAC,CAAC,KAAK,EAAEC,KAAAA,CAAMJ,WAAW,CAAC,CAAC,CAAC,CAAA;AACxC,QAAA,KAAK,MAAMK,GAAAA,IAAOD,KAAAA,CAAMd,YAAY,CAAE;AAClCpC,YAAAA,MAAAA,CAAOiD,IAAI,CAAC,CAAC,MAAM,EAAEE,GAAAA,CAAIX,IAAI,CAAC,EAAE,EAAEW,GAAAA,CAAIV,OAAO,CAAC,EAAE,EAAEU,IAAIN,cAAc,CAAC,CAAC,CAAC,CAAA;AAC3E,QAAA;AACJ,IAAA;AACA7C,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,EAAA,CAAA;AAChB;AAEA;;;AAGC,IACM,MAAMG,4BAAAA,GAA+B,CAACC,sBAA+B,IAAI,GAAA;AAC5E,IAAA,MAAMrD,MAAAA,GAASC,SAAAA,EAAAA;AAEfD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,qBAAA,CAAA;AACZ,IAAA,IAAII,mBAAAA,EAAqB;AACrBrD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,0DAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,4BAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,uDAAA,CAAA;IAChB,CAAA,MAAO;AACHjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,gEAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,4BAAA,CAAA;AACZjD,QAAAA,MAAAA,CAAOiD,IAAI,CAAC,uCAAA,CAAA;AAChB,IAAA;AACAjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,EAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,6BAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,iDAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,sDAAA,CAAA;AACZjD,IAAAA,MAAAA,CAAOiD,IAAI,CAAC,EAAA,CAAA;AAChB;;;;"}
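Note: the safety.ts source embedded in the map above defines three helpers for catching file: dependencies before they are committed. A minimal usage sketch based only on the signatures shown there; the storage object and the call site are assumptions, not code shipped in this package:

import { checkForFileDependencies, logFileDependencyWarning, logFileDependencySuggestions } from './util/safety';

// Hypothetical pre-commit guard. `storage` is assumed to provide the
// exists/isDirectory/listFiles/readFile methods the scanner above calls.
const issues = await checkForFileDependencies(storage, process.cwd());
if (issues.length > 0) {
    logFileDependencyWarning(issues, 'commit');   // lists the offending package.json paths
    logFileDependencySuggestions(true);           // suggests "kodrdriv unlink" -> commit -> "kodrdriv link"
    // the caller decides whether to abort or honor --skip-file-check
}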
package/dist/util/validation.js CHANGED
@@ -1,5 +1,8 @@
  /**
  * Runtime validation utilities for safe type handling
+ *
+ * Note: Generic validation functions (safeJsonParse, validateString, etc.)
+ * have been moved to @eldrforge/git-tools
  */ /**
  * Validates and safely casts data to ReleaseSummary type
  */ const validateReleaseSummary = (data)=>{
@@ -14,31 +17,6 @@
  }
  return data;
  };
- /**
- * Safely parses JSON with error handling
- */ const safeJsonParse = (jsonString, context)=>{
- try {
- const parsed = JSON.parse(jsonString);
- if (parsed === null || parsed === undefined) {
- throw new Error('Parsed JSON is null or undefined');
- }
- return parsed;
- } catch (error) {
- const contextStr = context ? ` (${context})` : '';
- throw new Error(`Failed to parse JSON${contextStr}: ${error instanceof Error ? error.message : 'Unknown error'}`);
- }
- };
- /**
- * Validates that a value is a non-empty string
- */ const validateString = (value, fieldName)=>{
- if (typeof value !== 'string') {
- throw new Error(`${fieldName} must be a string, got ${typeof value}`);
- }
- if (value.trim() === '') {
- throw new Error(`${fieldName} cannot be empty`);
- }
- return value;
- };
  /**
  * Sanitizes and truncates direction parameter for safe use in prompts
  * @param direction The direction string to sanitize
@@ -62,19 +40,6 @@
  }
  return sanitized;
  };
- /**
- * Validates package.json structure has basic required fields
- */ const validatePackageJson = (data, context, requireName = true)=>{
- if (!data || typeof data !== 'object') {
- const contextStr = context ? ` (${context})` : '';
- throw new Error(`Invalid package.json${contextStr}: not an object`);
- }
- if (requireName && typeof data.name !== 'string') {
- const contextStr = context ? ` (${context})` : '';
- throw new Error(`Invalid package.json${contextStr}: name must be a string`);
- }
- return data;
- };
 
- export { safeJsonParse, sanitizeDirection, validatePackageJson, validateReleaseSummary, validateString };
+ export { sanitizeDirection, validateReleaseSummary };
  //# sourceMappingURL=validation.js.map
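As the added header comment notes, the generic helpers deleted from validation.js now come from @eldrforge/git-tools (the same import already used by safety.ts earlier in this diff), and the compiled module now exports only sanitizeDirection and validateReleaseSummary. A hedged sketch of the corresponding caller-side change; the relative import path is illustrative:

// before (kodrdriv <= 1.2.19)
import { safeJsonParse, validatePackageJson, validateString } from '../util/validation';

// after (1.2.20): generic validators come from git-tools,
// only the kodrdriv-specific helpers stay local
import { safeJsonParse, validatePackageJson, validateString } from '@eldrforge/git-tools';
import { sanitizeDirection, validateReleaseSummary } from '../util/validation';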
package/dist/util/validation.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"validation.js","sources":["../../src/util/validation.ts"],"sourcesContent":["/**\n * Runtime validation utilities for safe type handling\n */\n\nexport interface ReleaseSummary {\n title: string;\n body: string;\n}\n\n\n\nexport interface TranscriptionResult {\n text: string;\n [key: string]: any;\n}\n\n/**\n * Validates and safely casts data to ReleaseSummary type\n */\nexport const validateReleaseSummary = (data: any): ReleaseSummary => {\n if (!data || typeof data !== 'object') {\n throw new Error('Invalid release summary: not an object');\n }\n if (typeof data.title !== 'string') {\n throw new Error('Invalid release summary: title must be a string');\n }\n if (typeof data.body !== 'string') {\n throw new Error('Invalid release summary: body must be a string');\n }\n return data as ReleaseSummary;\n};\n\n\n\n/**\n * Validates transcription result has required text property\n */\nexport const validateTranscriptionResult = (data: any): TranscriptionResult => {\n if (!data || typeof data !== 'object') {\n throw new Error('Invalid transcription result: not an object');\n }\n if (typeof data.text !== 'string') {\n throw new Error('Invalid transcription result: text property must be a string');\n }\n return data as TranscriptionResult;\n};\n\n/**\n * Safely parses JSON with error handling\n */\nexport const safeJsonParse = <T = any>(jsonString: string, context?: string): T => {\n try {\n const parsed = JSON.parse(jsonString);\n if (parsed === null || parsed === undefined) {\n throw new Error('Parsed JSON is null or undefined');\n }\n return parsed;\n } catch (error) {\n const contextStr = context ? ` (${context})` : '';\n throw new Error(`Failed to parse JSON${contextStr}: ${error instanceof Error ? error.message : 'Unknown error'}`);\n }\n};\n\n/**\n * Validates that a value is a non-empty string\n */\nexport const validateString = (value: any, fieldName: string): string => {\n if (typeof value !== 'string') {\n throw new Error(`${fieldName} must be a string, got ${typeof value}`);\n }\n if (value.trim() === '') {\n throw new Error(`${fieldName} cannot be empty`);\n }\n return value;\n};\n\n/**\n * Sanitizes and truncates direction parameter for safe use in prompts\n * @param direction The direction string to sanitize\n * @param maxLength Maximum length before truncation (default: 2000)\n * @returns Sanitized and truncated direction string\n */\nexport const sanitizeDirection = (direction: string | undefined, maxLength: number = 2000): string | undefined => {\n if (!direction) {\n return undefined;\n }\n\n // Remove newlines and excessive whitespace to prevent template breakage\n const sanitized = direction\n .replace(/\\r?\\n/g, ' ') // Replace newlines with spaces\n .replace(/\\s+/g, ' ') // Replace multiple whitespace with single space\n .trim();\n\n // Truncate if too long\n if (sanitized.length > maxLength) {\n const truncated = sanitized.substring(0, maxLength - 3) + '...';\n // Log truncation for debugging\n // eslint-disable-next-line no-console\n console.warn(`Direction truncated from ${sanitized.length} to ${truncated.length} characters`);\n return truncated;\n }\n\n return sanitized;\n};\n\n/**\n * Validates that a value exists and has a specific property\n */\nexport const validateHasProperty = (obj: any, property: string, context?: string): void => {\n if (!obj || typeof obj !== 'object') {\n const contextStr = context ? 
` in ${context}` : '';\n throw new Error(`Object is null or not an object${contextStr}`);\n }\n if (!(property in obj)) {\n const contextStr = context ? ` in ${context}` : '';\n throw new Error(`Missing required property '${property}'${contextStr}`);\n }\n};\n\n/**\n * Validates package.json structure has basic required fields\n */\nexport const validatePackageJson = (data: any, context?: string, requireName: boolean = true): any => {\n if (!data || typeof data !== 'object') {\n const contextStr = context ? ` (${context})` : '';\n throw new Error(`Invalid package.json${contextStr}: not an object`);\n }\n if (requireName && typeof data.name !== 'string') {\n const contextStr = context ? ` (${context})` : '';\n throw new Error(`Invalid package.json${contextStr}: name must be a string`);\n }\n return data;\n};\n"],"names":["validateReleaseSummary","data","Error","title","body","safeJsonParse","jsonString","context","parsed","JSON","parse","undefined","error","contextStr","message","validateString","value","fieldName","trim","sanitizeDirection","direction","maxLength","sanitized","replace","length","truncated","substring","console","warn","validatePackageJson","requireName","name"],"mappings":"AAAA;;;;IAmBO,MAAMA,sBAAAA,GAAyB,CAACC,IAAAA,GAAAA;AACnC,IAAA,IAAI,CAACA,IAAAA,IAAQ,OAAOA,IAAAA,KAAS,QAAA,EAAU;AACnC,QAAA,MAAM,IAAIC,KAAAA,CAAM,wCAAA,CAAA;AACpB,IAAA;AACA,IAAA,IAAI,OAAOD,IAAAA,CAAKE,KAAK,KAAK,QAAA,EAAU;AAChC,QAAA,MAAM,IAAID,KAAAA,CAAM,iDAAA,CAAA;AACpB,IAAA;AACA,IAAA,IAAI,OAAOD,IAAAA,CAAKG,IAAI,KAAK,QAAA,EAAU;AAC/B,QAAA,MAAM,IAAIF,KAAAA,CAAM,gDAAA,CAAA;AACpB,IAAA;IACA,OAAOD,IAAAA;AACX;AAiBA;;AAEC,IACM,MAAMI,aAAAA,GAAgB,CAAUC,UAAAA,EAAoBC,OAAAA,GAAAA;IACvD,IAAI;QACA,MAAMC,MAAAA,GAASC,IAAAA,CAAKC,KAAK,CAACJ,UAAAA,CAAAA;QAC1B,IAAIE,MAAAA,KAAW,IAAA,IAAQA,MAAAA,KAAWG,SAAAA,EAAW;AACzC,YAAA,MAAM,IAAIT,KAAAA,CAAM,kCAAA,CAAA;AACpB,QAAA;QACA,OAAOM,MAAAA;AACX,IAAA,CAAA,CAAE,OAAOI,KAAAA,EAAO;QACZ,MAAMC,UAAAA,GAAaN,UAAU,CAAC,EAAE,EAAEA,OAAAA,CAAQ,CAAC,CAAC,GAAG,EAAA;AAC/C,QAAA,MAAM,IAAIL,KAAAA,CAAM,CAAC,oBAAoB,EAAEW,UAAAA,CAAW,EAAE,EAAED,KAAAA,YAAiBV,KAAAA,GAAQU,KAAAA,CAAME,OAAO,GAAG,eAAA,CAAA,CAAiB,CAAA;AACpH,IAAA;AACJ;AAEA;;AAEC,IACM,MAAMC,cAAAA,GAAiB,CAACC,KAAAA,EAAYC,SAAAA,GAAAA;IACvC,IAAI,OAAOD,UAAU,QAAA,EAAU;AAC3B,QAAA,MAAM,IAAId,KAAAA,CAAM,CAAA,EAAGe,UAAU,uBAAuB,EAAE,OAAOD,KAAAA,CAAAA,CAAO,CAAA;AACxE,IAAA;IACA,IAAIA,KAAAA,CAAME,IAAI,EAAA,KAAO,EAAA,EAAI;AACrB,QAAA,MAAM,IAAIhB,KAAAA,CAAM,CAAA,EAAGe,SAAAA,CAAU,gBAAgB,CAAC,CAAA;AAClD,IAAA;IACA,OAAOD,KAAAA;AACX;AAEA;;;;;AAKC,IACM,MAAMG,iBAAAA,GAAoB,CAACC,SAAAA,EAA+BC,YAAoB,IAAI,GAAA;AACrF,IAAA,IAAI,CAACD,SAAAA,EAAW;QACZ,OAAOT,SAAAA;AACX,IAAA;;AAGA,IAAA,MAAMW,YAAYF,SAAAA,CACbG,OAAO,CAAC,QAAA,EAAU;KAClBA,OAAO,CAAC,MAAA,EAAQ,GAAA,CAAA;KAChBL,IAAI,EAAA;;IAGT,IAAII,SAAAA,CAAUE,MAAM,GAAGH,SAAAA,EAAW;AAC9B,QAAA,MAAMI,YAAYH,SAAAA,CAAUI,SAAS,CAAC,CAAA,EAAGL,YAAY,CAAA,CAAA,GAAK,KAAA;;;AAG1DM,QAAAA,OAAAA,CAAQC,IAAI,CAAC,CAAC,yBAAyB,EAAEN,SAAAA,CAAUE,MAAM,CAAC,IAAI,EAAEC,SAAAA,CAAUD,MAAM,CAAC,WAAW,CAAC,CAAA;QAC7F,OAAOC,SAAAA;AACX,IAAA;IAEA,OAAOH,SAAAA;AACX;AAgBA;;AAEC,IACM,MAAMO,mBAAAA,GAAsB,CAAC5B,IAAAA,EAAWM,OAAAA,EAAkBuB,cAAuB,IAAI,GAAA;AACxF,IAAA,IAAI,CAAC7B,IAAAA,IAAQ,OAAOA,IAAAA,KAAS,QAAA,EAAU;QACnC,MAAMY,UAAAA,GAAaN,UAAU,CAAC,EAAE,EAAEA,OAAAA,CAAQ,CAAC,CAAC,GAAG,EAAA;AAC/C,QAAA,MAAM,IAAIL,KAAAA,CAAM,CAAC,oBAAoB,EAAEW,UAAAA,CAAW,eAAe,CAAC,CAAA;AACtE,IAAA;AACA,IAAA,IAAIiB,WAAAA,IAAe,OAAO7B,IAAAA,CAAK8B,IAAI,KAAK,QAAA,EAAU;QAC9C,MAAMlB,UAAAA,GAAaN,UAAU,CAAC,EAAE,EAAEA,OAAAA,CAAQ,CAAC,CAAC,GAAG,EAAA;AAC/C,QAAA,MAAM,IAAIL,KAAAA,CAAM,CAAC,oBAAoB,EAAEW,UAAAA,CAA
W,uBAAuB,CAAC,CAAA;AAC9E,IAAA;IACA,OAAOZ,IAAAA;AACX;;;;"}
+ {"version":3,"file":"validation.js","sources":["../../src/util/validation.ts"],"sourcesContent":["/**\n * Runtime validation utilities for safe type handling\n *\n * Note: Generic validation functions (safeJsonParse, validateString, etc.)\n * have been moved to @eldrforge/git-tools\n */\n\nexport interface ReleaseSummary {\n title: string;\n body: string;\n}\n\nexport interface TranscriptionResult {\n text: string;\n [key: string]: any;\n}\n\n/**\n * Validates and safely casts data to ReleaseSummary type\n */\nexport const validateReleaseSummary = (data: any): ReleaseSummary => {\n if (!data || typeof data !== 'object') {\n throw new Error('Invalid release summary: not an object');\n }\n if (typeof data.title !== 'string') {\n throw new Error('Invalid release summary: title must be a string');\n }\n if (typeof data.body !== 'string') {\n throw new Error('Invalid release summary: body must be a string');\n }\n return data as ReleaseSummary;\n};\n\n/**\n * Validates transcription result has required text property\n */\nexport const validateTranscriptionResult = (data: any): TranscriptionResult => {\n if (!data || typeof data !== 'object') {\n throw new Error('Invalid transcription result: not an object');\n }\n if (typeof data.text !== 'string') {\n throw new Error('Invalid transcription result: text property must be a string');\n }\n return data as TranscriptionResult;\n};\n\n/**\n * Sanitizes and truncates direction parameter for safe use in prompts\n * @param direction The direction string to sanitize\n * @param maxLength Maximum length before truncation (default: 2000)\n * @returns Sanitized and truncated direction string\n */\nexport const sanitizeDirection = (direction: string | undefined, maxLength: number = 2000): string | undefined => {\n if (!direction) {\n return undefined;\n }\n\n // Remove newlines and excessive whitespace to prevent template breakage\n const sanitized = direction\n .replace(/\\r?\\n/g, ' ') // Replace newlines with spaces\n .replace(/\\s+/g, ' ') // Replace multiple whitespace with single space\n .trim();\n\n // Truncate if too long\n if (sanitized.length > maxLength) {\n const truncated = sanitized.substring(0, maxLength - 3) + '...';\n // Log truncation for debugging\n // eslint-disable-next-line no-console\n console.warn(`Direction truncated from ${sanitized.length} to ${truncated.length} characters`);\n return truncated;\n }\n\n return 
sanitized;\n};\n"],"names":["validateReleaseSummary","data","Error","title","body","sanitizeDirection","direction","maxLength","undefined","sanitized","replace","trim","length","truncated","substring","console","warn"],"mappings":"AAAA;;;;;;;IAoBO,MAAMA,sBAAAA,GAAyB,CAACC,IAAAA,GAAAA;AACnC,IAAA,IAAI,CAACA,IAAAA,IAAQ,OAAOA,IAAAA,KAAS,QAAA,EAAU;AACnC,QAAA,MAAM,IAAIC,KAAAA,CAAM,wCAAA,CAAA;AACpB,IAAA;AACA,IAAA,IAAI,OAAOD,IAAAA,CAAKE,KAAK,KAAK,QAAA,EAAU;AAChC,QAAA,MAAM,IAAID,KAAAA,CAAM,iDAAA,CAAA;AACpB,IAAA;AACA,IAAA,IAAI,OAAOD,IAAAA,CAAKG,IAAI,KAAK,QAAA,EAAU;AAC/B,QAAA,MAAM,IAAIF,KAAAA,CAAM,gDAAA,CAAA;AACpB,IAAA;IACA,OAAOD,IAAAA;AACX;AAeA;;;;;AAKC,IACM,MAAMI,iBAAAA,GAAoB,CAACC,SAAAA,EAA+BC,YAAoB,IAAI,GAAA;AACrF,IAAA,IAAI,CAACD,SAAAA,EAAW;QACZ,OAAOE,SAAAA;AACX,IAAA;;AAGA,IAAA,MAAMC,YAAYH,SAAAA,CACbI,OAAO,CAAC,QAAA,EAAU;KAClBA,OAAO,CAAC,MAAA,EAAQ,GAAA,CAAA;KAChBC,IAAI,EAAA;;IAGT,IAAIF,SAAAA,CAAUG,MAAM,GAAGL,SAAAA,EAAW;AAC9B,QAAA,MAAMM,YAAYJ,SAAAA,CAAUK,SAAS,CAAC,CAAA,EAAGP,YAAY,CAAA,CAAA,GAAK,KAAA;;;AAG1DQ,QAAAA,OAAAA,CAAQC,IAAI,CAAC,CAAC,yBAAyB,EAAEP,SAAAA,CAAUG,MAAM,CAAC,IAAI,EAAEC,SAAAA,CAAUD,MAAM,CAAC,WAAW,CAAC,CAAA;QAC7F,OAAOC,SAAAA;AACX,IAAA;IAEA,OAAOJ,SAAAA;AACX;;;;"}
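For the exports that remain in the compiled validation.js, the retained source above already describes the behavior; a small illustration of sanitizeDirection with made-up inputs:

import { sanitizeDirection } from './util/validation';

// newlines and repeated whitespace collapse to single spaces
sanitizeDirection('Focus on\nthe   release notes');   // => 'Focus on the release notes'

// inputs longer than maxLength are cut to maxLength - 3 characters plus '...'
// and the truncation is reported via console.warn
sanitizeDirection('x'.repeat(3000), 2000);            // => 1997 'x' characters followed by '...'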
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@eldrforge/kodrdriv",
- "version": "1.2.19",
+ "version": "1.2.20",
  "description": "Create Intelligent Release Notes or Change Logs from Git",
  "main": "dist/main.js",
  "type": "module",
@@ -38,6 +38,7 @@
  "author": "Calen Varek <calenvarek@gmail.com>",
  "license": "Apache-2.0",
  "dependencies": {
+ "@eldrforge/git-tools": "^0.1.1",
  "@octokit/rest": "^22.0.0",
  "@riotprompt/riotprompt": "^0.0.8",
  "@theunwalked/cardigantime": "^0.0.16",