@sylphx/flow 0.2.1 → 0.2.3

This diff shows the changes between publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
Files changed (102)
  1. package/assets/agents/coder.md +1 -1
  2. package/assets/agents/orchestrator.md +1 -1
  3. package/assets/agents/reviewer.md +1 -1
  4. package/assets/agents/writer.md +13 -13
  5. package/assets/slash-commands/context.md +112 -0
  6. package/dist/assets/agents/coder.md +32 -0
  7. package/dist/assets/agents/orchestrator.md +36 -0
  8. package/dist/assets/agents/reviewer.md +30 -0
  9. package/dist/assets/agents/writer.md +30 -0
  10. package/dist/assets/knowledge/data/sql.md +216 -0
  11. package/dist/assets/knowledge/guides/saas-template.md +85 -0
  12. package/dist/assets/knowledge/guides/system-prompt.md +344 -0
  13. package/dist/assets/knowledge/guides/tech-stack.md +92 -0
  14. package/dist/assets/knowledge/guides/ui-ux.md +44 -0
  15. package/dist/assets/knowledge/stacks/nextjs-app.md +165 -0
  16. package/dist/assets/knowledge/stacks/node-api.md +220 -0
  17. package/dist/assets/knowledge/stacks/react-app.md +232 -0
  18. package/dist/assets/knowledge/universal/deployment.md +109 -0
  19. package/dist/assets/knowledge/universal/performance.md +121 -0
  20. package/dist/assets/knowledge/universal/security.md +79 -0
  21. package/dist/assets/knowledge/universal/testing.md +111 -0
  22. package/dist/assets/output-styles/silent.md +23 -0
  23. package/dist/assets/rules/core.md +144 -0
  24. package/dist/assets/slash-commands/commit.md +23 -0
  25. package/dist/assets/slash-commands/context.md +112 -0
  26. package/dist/assets/slash-commands/explain.md +35 -0
  27. package/dist/assets/slash-commands/mep.md +63 -0
  28. package/dist/assets/slash-commands/review.md +39 -0
  29. package/dist/assets/slash-commands/test.md +30 -0
  30. package/dist/chunk-1rptg3yg.js +4 -0
  31. package/dist/chunk-1rptg3yg.js.map +10 -0
  32. package/dist/{chunk-124wqbdb.js → chunk-4fr8q0jy.js} +3 -3
  33. package/dist/{chunk-124wqbdb.js.map → chunk-4fr8q0jy.js.map} +1 -1
  34. package/dist/{chunk-f6y5vttn.js → chunk-5szm4n3x.js} +3 -3
  35. package/dist/{chunk-f6y5vttn.js.map → chunk-5szm4n3x.js.map} +1 -1
  36. package/dist/chunk-7nht27vs.js +4 -0
  37. package/dist/{chunk-g9t3me0w.js.map → chunk-7nht27vs.js.map} +2 -2
  38. package/dist/chunk-8krxe10w.js +3 -0
  39. package/dist/{chunk-e966bjm5.js.map → chunk-8krxe10w.js.map} +2 -2
  40. package/dist/{chunk-wpe7rw5c.js → chunk-8z1sf25t.js} +3 -3
  41. package/dist/{chunk-wpe7rw5c.js.map → chunk-8z1sf25t.js.map} +1 -1
  42. package/dist/chunk-9c2nr2fz.js +25 -0
  43. package/dist/chunk-9c2nr2fz.js.map +61 -0
  44. package/dist/{chunk-4p754rhd.js → chunk-asr22mbn.js} +2 -2
  45. package/dist/{chunk-4p754rhd.js.map → chunk-asr22mbn.js.map} +2 -2
  46. package/dist/chunk-bnxtqetr.js +23 -0
  47. package/dist/chunk-bnxtqetr.js.map +11 -0
  48. package/dist/chunk-cs1s5c3g.js +54 -0
  49. package/dist/chunk-cs1s5c3g.js.map +53 -0
  50. package/dist/chunk-cv1nhr27.js +2 -0
  51. package/dist/{chunk-hshjnpm0.js.map → chunk-cv1nhr27.js.map} +1 -1
  52. package/dist/chunk-d4hj6d4t.js +6 -0
  53. package/dist/chunk-d4hj6d4t.js.map +11 -0
  54. package/dist/chunk-f06ma45b.js +15 -0
  55. package/dist/chunk-f06ma45b.js.map +16 -0
  56. package/dist/chunk-fs3f7acb.js +4 -0
  57. package/dist/chunk-fs3f7acb.js.map +12 -0
  58. package/dist/{chunk-5r4afhzp.js → chunk-gh83x9ya.js} +3 -3
  59. package/dist/{chunk-5r4afhzp.js.map → chunk-gh83x9ya.js.map} +1 -1
  60. package/dist/{chunk-qa8b725g.js → chunk-gyq335sw.js} +6 -5
  61. package/dist/{chunk-qa8b725g.js.map → chunk-gyq335sw.js.map} +1 -1
  62. package/dist/{chunk-hs3nxzyz.js → chunk-hft1735c.js} +2 -2
  63. package/dist/{chunk-hs3nxzyz.js.map → chunk-hft1735c.js.map} +2 -2
  64. package/dist/chunk-hj6r7703.js +3 -0
  65. package/dist/{chunk-78bfvh46.js.map → chunk-hj6r7703.js.map} +2 -2
  66. package/dist/chunk-hxj4eapp.js +14 -0
  67. package/dist/chunk-hxj4eapp.js.map +20 -0
  68. package/dist/chunk-jgsq3xax.js +23 -0
  69. package/dist/chunk-jgsq3xax.js.map +132 -0
  70. package/dist/{chunk-646h52kd.js → chunk-m9nt0bj3.js} +3 -3
  71. package/dist/{chunk-646h52kd.js.map → chunk-m9nt0bj3.js.map} +1 -1
  72. package/dist/{chunk-bd11hvvz.js → chunk-ndah8mn9.js} +2 -2
  73. package/dist/{chunk-bd11hvvz.js.map → chunk-ndah8mn9.js.map} +1 -1
  74. package/dist/chunk-s6g21d1g.js +27 -0
  75. package/dist/{chunk-0h7sfwq3.js.map → chunk-s6g21d1g.js.map} +4 -5
  76. package/dist/{chunk-hshjnpm0.js → chunk-sxy6vp20.js} +2 -2
  77. package/dist/chunk-sxy6vp20.js.map +9 -0
  78. package/dist/chunk-vjf57v4h.js +4 -0
  79. package/dist/chunk-vjf57v4h.js.map +10 -0
  80. package/dist/{chunk-jxny6xft.js → chunk-w2vbmr93.js} +2 -2
  81. package/dist/{chunk-jxny6xft.js.map → chunk-w2vbmr93.js.map} +1 -1
  82. package/dist/chunk-wd9qbbe5.js +5 -0
  83. package/dist/chunk-wd9qbbe5.js.map +10 -0
  84. package/dist/chunk-wnaa55wn.js +108 -0
  85. package/dist/chunk-wnaa55wn.js.map +24 -0
  86. package/dist/chunk-wrx1n6q6.js +16 -0
  87. package/dist/chunk-wrx1n6q6.js.map +16 -0
  88. package/dist/chunk-xata5rw6.js +119 -0
  89. package/dist/{chunk-878q8xdr.js.map → chunk-xata5rw6.js.map} +7 -18
  90. package/dist/chunk-z2rtyk3d.js +7 -0
  91. package/dist/{chunk-ygdr4fw7.js.map → chunk-z2rtyk3d.js.map} +2 -2
  92. package/dist/index.js +446 -482
  93. package/dist/index.js.map +301 -202
  94. package/package.json +4 -1
  95. package/dist/chunk-0h7sfwq3.js +0 -27
  96. package/dist/chunk-78bfvh46.js +0 -3
  97. package/dist/chunk-878q8xdr.js +0 -86
  98. package/dist/chunk-e966bjm5.js +0 -3
  99. package/dist/chunk-fxwaa2mg.js +0 -4
  100. package/dist/chunk-fxwaa2mg.js.map +0 -10
  101. package/dist/chunk-g9t3me0w.js +0 -4
  102. package/dist/chunk-ygdr4fw7.js +0 -7
@@ -1,3 +0,0 @@
- import{b as v}from"./chunk-waemzsf4.js";import{J as q}from"./chunk-bd11hvvz.js";import"./chunk-hshjnpm0.js";import w from"node:fs/promises";import B from"node:path";var I=(...b)=>B.join(...b),J=(...b)=>B.resolve(...b),K=(b)=>B.dirname(b),L=(b,k)=>B.basename(b,k),M=(b)=>B.extname(b),N=async(b)=>{return q(async()=>{return await w.readFile(b,"utf-8")},(k)=>v(`Failed to read file: ${b}`,b,"read",{cause:k instanceof Error?k:void 0}))},O=async(b,k)=>{return q(async()=>{await w.writeFile(b,k,"utf-8")},(z)=>v(`Failed to write file: ${b}`,b,"write",{cause:z instanceof Error?z:void 0}))},Q=async(b)=>{return q(async()=>{await w.unlink(b)},(k)=>v(`Failed to delete file: ${b}`,b,"delete",{cause:k instanceof Error?k:void 0}))},R=async(b,k)=>{return q(async()=>{await w.mkdir(b,{recursive:k?.recursive??!0})},(z)=>v(`Failed to create directory: ${b}`,b,"create",{cause:z instanceof Error?z:void 0}))},T=async(b)=>{return q(async()=>{try{return await w.access(b),!0}catch{return!1}},(k)=>v(`Failed to check if path exists: ${b}`,b,"stat",{cause:k instanceof Error?k:void 0}))},U=async(b)=>{return q(async()=>{return await w.readdir(b)},(k)=>v(`Failed to read directory: ${b}`,b,"read",{cause:k instanceof Error?k:void 0}))},V=async(b)=>{return q(async()=>{let k=await w.stat(b);return{isFile:k.isFile(),isDirectory:k.isDirectory(),size:k.size}},(k)=>v(`Failed to get stats for: ${b}`,b,"stat",{cause:k instanceof Error?k:void 0}))};export{O as writeFile,J as resolvePath,N as readFile,U as readDirectory,T as pathExists,I as joinPath,V as getStats,M as extname,K as dirname,Q as deleteFile,R as createDirectory,L as basename};
-
- //# debugId=03FD5D36438725F164756E2164756E21
@@ -1,4 +0,0 @@
- import"./chunk-hshjnpm0.js";import{AutoTokenizer as v}from"@huggingface/transformers";var P=null,S=!1,U=!1,Q={"gpt-4":"Xenova/gpt-4","gpt-4-turbo":"Xenova/gpt-4","gpt-4o":"Xenova/gpt-4","gpt-3.5-turbo":"Xenova/gpt-3.5-turbo","gpt-3.5":"Xenova/gpt-3.5-turbo","claude-3-opus":"Xenova/claude-tokenizer","claude-3-sonnet":"Xenova/claude-tokenizer","claude-3-haiku":"Xenova/claude-tokenizer","claude-3.5-sonnet":"Xenova/claude-tokenizer","claude-3.5-haiku":"Xenova/claude-tokenizer",starcoder:"bigcode/starcoder",starcoder2:"bigcode/starcoder2-3b",codellama:"codellama/CodeLlama-7b-hf",gemini:"Xenova/gpt-4",default:"Xenova/gpt-4"};function X(j){if(!j)return Q.default;if(Q[j])return Q[j];let q=j.toLowerCase();for(let[H,J]of Object.entries(Q))if(q.includes(H))return J;return Q.default}function V(j){if(!j)return 0;let q=j.split(/\s+/).filter(Boolean).length,H=j.length,J=Math.ceil(H/3.5),A=Math.ceil(q*1.3);return Math.round((J+A)/2)}async function Y(j){if(P&&!j)return P;if(U)return null;while(S)await new Promise((H)=>setTimeout(H,100));if(P&&!j)return P;if(U)return null;let q=X(j);try{return S=!0,console.log(`[TokenCounter] Loading tokenizer: ${q} (for model: ${j||"default"})`),P=await v.from_pretrained(q,{cache_dir:"./models/.cache",local_files_only:!1}),console.log("[TokenCounter] Tokenizer loaded successfully"),S=!1,P}catch(H){return console.warn("[TokenCounter] BPE tokenizer initialization failed, using fallback estimation:",H),U=!0,S=!1,null}}async function W(j,q){if(!j)return 0;let H=await Y(q);if(!H)return V(j);try{let J=await H(j);if(J.input_ids&&J.input_ids.size)return J.input_ids.size;if(Array.isArray(J.input_ids))return J.input_ids.length;if(J.input_ids.data)return J.input_ids.data.length;return V(j)}catch(J){return console.warn("[TokenCounter] Token counting failed, using fallback:",J),V(j)}}function $(j){return V(j)}function y(j){if(j<1000)return j.toString();if(j<1e6)return`${(j/1000).toFixed(1)}K`;return`${(j/1e6).toFixed(1)}M`}async function C(j,q){return W(j,q)}async function G(j,q){let H=await W(j,q);return`${y(H)} Tokens`}function h(j){let q=$(j);return`${y(q)} Tokens`}async function w(j,q){return Promise.all(j.map((H)=>W(H,q)))}function f(j){return j.map($)}async function D(j){let q=X(j),H=await Y(j);return{modelName:j||"default",tokenizerName:q,loaded:H!==null,failed:U}}function K(){return Object.keys(Q).filter((j)=>j!=="default")}export{D as getTokenizerInfo,K as getSupportedModels,y as formatTokenCount,f as estimateTokensBatch,$ as estimateTokens,C as countTokensForModel,w as countTokensBatch,W as countTokens,h as countAndFormatSync,G as countAndFormat};
- export{y as c};
-
- //# debugId=D0C5BD1F7BE055F064756E2164756E21
@@ -1,10 +0,0 @@
- {
- "version": 3,
- "sources": ["../src/utils/token-counter.ts"],
- "sourcesContent": [
- "/**\n * Token Counter Utility\n * BPE-based token counting using Hugging Face AutoTokenizer\n *\n * Primary method: BPE tokenizer (auto-selected by Hugging Face)\n * Fallback: Fast estimation when tokenizer unavailable\n */\n\nimport { AutoTokenizer } from '@huggingface/transformers';\n\n// Singleton instance for BPE tokenizer (lazy-loaded)\nlet bpeTokenizer: any | null = null;\nlet tokenizerInitializing = false;\nlet initializationFailed = false;\n\n/**\n * Map provider model names to tokenizer names\n * AutoTokenizer will automatically find the right tokenizer for each model\n */\nconst MODEL_TO_TOKENIZER: Record<string, string> = {\n // OpenAI models\n 'gpt-4': 'Xenova/gpt-4',\n 'gpt-4-turbo': 'Xenova/gpt-4',\n 'gpt-4o': 'Xenova/gpt-4',\n 'gpt-3.5-turbo': 'Xenova/gpt-3.5-turbo',\n 'gpt-3.5': 'Xenova/gpt-3.5-turbo',\n\n // Anthropic Claude models\n 'claude-3-opus': 'Xenova/claude-tokenizer',\n 'claude-3-sonnet': 'Xenova/claude-tokenizer',\n 'claude-3-haiku': 'Xenova/claude-tokenizer',\n 'claude-3.5-sonnet': 'Xenova/claude-tokenizer',\n 'claude-3.5-haiku': 'Xenova/claude-tokenizer',\n\n // Code models\n 'starcoder': 'bigcode/starcoder',\n 'starcoder2': 'bigcode/starcoder2-3b',\n 'codellama': 'codellama/CodeLlama-7b-hf',\n\n // Google models\n 'gemini': 'Xenova/gpt-4', // Fallback to GPT-4 (no official Gemini tokenizer)\n\n // Fallback\n 'default': 'Xenova/gpt-4',\n};\n\n/**\n * Get tokenizer name for a model\n * AutoTokenizer will find the right tokenizer automatically\n */\nfunction getTokenizerForModel(modelName?: string): string {\n if (!modelName) return MODEL_TO_TOKENIZER['default'];\n\n // Direct match\n if (MODEL_TO_TOKENIZER[modelName]) {\n return MODEL_TO_TOKENIZER[modelName];\n }\n\n // Fuzzy match (e.g., \"gpt-4-turbo-preview\" → \"gpt-4\")\n const modelLower = modelName.toLowerCase();\n for (const [key, tokenizer] of Object.entries(MODEL_TO_TOKENIZER)) {\n if (modelLower.includes(key)) {\n return tokenizer;\n }\n }\n\n // Default fallback\n return MODEL_TO_TOKENIZER['default'];\n}\n\n/**\n * Fast fallback estimation (only when BPE tokenizer unavailable)\n * Based on ~3.5 chars per token for code\n */\nfunction estimateFallback(text: string): number {\n if (!text) return 0;\n\n const words = text.split(/\\s+/).filter(Boolean).length;\n const chars = text.length;\n\n const charBasedEstimate = Math.ceil(chars / 3.5);\n const wordBasedEstimate = Math.ceil(words * 1.3);\n\n return Math.round((charBasedEstimate + wordBasedEstimate) / 2);\n}\n\n/**\n * Initialize BPE tokenizer (lazy, singleton)\n * Uses Hugging Face AutoTokenizer to automatically select best tokenizer\n */\nasync function ensureTokenizer(modelName?: string): Promise<any | null> {\n // Already initialized - check if we need to reinitialize for different model\n if (bpeTokenizer && !modelName) return bpeTokenizer;\n\n // Previous initialization failed\n if (initializationFailed) return null;\n\n // Wait if initialization in progress\n while (tokenizerInitializing) {\n await new Promise(resolve => setTimeout(resolve, 100));\n }\n\n // Check again after waiting\n if (bpeTokenizer && !modelName) return bpeTokenizer;\n if (initializationFailed) return null;\n\n // Get tokenizer name for this model\n const tokenizerName = getTokenizerForModel(modelName);\n\n // Initialize with Hugging Face AutoTokenizer\n try {\n tokenizerInitializing = true;\n console.log(`[TokenCounter] Loading tokenizer: ${tokenizerName} (for model: ${modelName || 'default'})`);\n\n // Let Hugging Face auto-select and load the best tokenizer\n 
bpeTokenizer = await AutoTokenizer.from_pretrained(tokenizerName, {\n // Cache models locally for faster subsequent loads\n cache_dir: './models/.cache',\n // Use local files if available, otherwise download\n local_files_only: false,\n });\n\n console.log(`[TokenCounter] Tokenizer loaded successfully`);\n tokenizerInitializing = false;\n return bpeTokenizer;\n } catch (error) {\n console.warn('[TokenCounter] BPE tokenizer initialization failed, using fallback estimation:', error);\n initializationFailed = true;\n tokenizerInitializing = false;\n return null;\n }\n}\n\n/**\n * Count tokens using BPE tokenizer (Hugging Face AutoTokenizer)\n * Falls back to estimation if tokenizer unavailable\n *\n * @param text Text to count tokens for\n * @param modelName Optional model name to use specific tokenizer\n * @returns Token count\n */\nexport async function countTokens(text: string, modelName?: string): Promise<number> {\n if (!text) return 0;\n\n const tokenizer = await ensureTokenizer(modelName);\n\n if (!tokenizer) {\n // Tokenizer unavailable, use fallback\n return estimateFallback(text);\n }\n\n try {\n // Use Hugging Face tokenizer API\n const encoded = await tokenizer(text);\n\n // Get token count from encoded result\n if (encoded.input_ids && encoded.input_ids.size) {\n return encoded.input_ids.size;\n }\n\n // Fallback: count array length\n if (Array.isArray(encoded.input_ids)) {\n return encoded.input_ids.length;\n }\n\n // Fallback: if it's a tensor, get its length\n if (encoded.input_ids.data) {\n return encoded.input_ids.data.length;\n }\n\n // Last resort fallback\n return estimateFallback(text);\n } catch (error) {\n console.warn('[TokenCounter] Token counting failed, using fallback:', error);\n return estimateFallback(text);\n }\n}\n\n/**\n * Synchronous token estimation (for cases where async is not possible)\n * Uses fallback estimation only\n */\nexport function estimateTokens(text: string): number {\n return estimateFallback(text);\n}\n\n/**\n * Format token count for display\n * Examples: 150 -> \"150\", 1500 -> \"1.5K\", 1500000 -> \"1.5M\"\n */\nexport function formatTokenCount(count: number): string {\n if (count < 1000) {\n return count.toString();\n }\n\n if (count < 1000000) {\n const k = count / 1000;\n return `${k.toFixed(1)}K`;\n }\n\n const m = count / 1000000;\n return `${m.toFixed(1)}M`;\n}\n\n/**\n * Count tokens for specific model\n * Uses the correct tokenizer for that model\n */\nexport async function countTokensForModel(text: string, modelName: string): Promise<number> {\n return countTokens(text, modelName);\n}\n\n/**\n * Count tokens with display formatting\n * Uses BPE tokenizer (async)\n */\nexport async function countAndFormat(text: string, modelName?: string): Promise<string> {\n const count = await countTokens(text, modelName);\n return `${formatTokenCount(count)} Tokens`;\n}\n\n/**\n * Count tokens with display formatting (sync, estimation only)\n * Use this only when async is not possible\n */\nexport function countAndFormatSync(text: string): string {\n const count = estimateTokens(text);\n return `${formatTokenCount(count)} Tokens`;\n}\n\n/**\n * Batch count tokens for multiple texts\n * Uses BPE tokenizer\n */\nexport async function countTokensBatch(texts: string[], modelName?: string): Promise<number[]> {\n return Promise.all(texts.map(text => countTokens(text, modelName)));\n}\n\n/**\n * Batch count tokens (sync estimation fallback)\n */\nexport function estimateTokensBatch(texts: string[]): number[] {\n return 
texts.map(estimateTokens);\n}\n\n/**\n * Get tokenizer info (for debugging)\n */\nexport async function getTokenizerInfo(modelName?: string): Promise<{\n modelName: string;\n tokenizerName: string;\n loaded: boolean;\n failed: boolean;\n} | null> {\n const tokenizerName = getTokenizerForModel(modelName);\n const tokenizer = await ensureTokenizer(modelName);\n\n return {\n modelName: modelName || 'default',\n tokenizerName,\n loaded: tokenizer !== null,\n failed: initializationFailed,\n };\n}\n\n/**\n * Get supported models\n */\nexport function getSupportedModels(): string[] {\n return Object.keys(MODEL_TO_TOKENIZER).filter(k => k !== 'default');\n}\n"
- ],
- "mappings": "4BAQA,wBAAS,kCAGT,IAAI,EAA2B,KAC3B,EAAwB,GACxB,EAAuB,GAMrB,EAA6C,CAEjD,QAAS,eACT,cAAe,eACf,SAAU,eACV,gBAAiB,uBACjB,UAAW,uBAGX,gBAAiB,0BACjB,kBAAmB,0BACnB,iBAAkB,0BAClB,oBAAqB,0BACrB,mBAAoB,0BAGpB,UAAa,oBACb,WAAc,wBACd,UAAa,4BAGb,OAAU,eAGV,QAAW,cACb,EAMA,SAAS,CAAoB,CAAC,EAA4B,CACxD,GAAI,CAAC,EAAW,OAAO,EAAmB,QAG1C,GAAI,EAAmB,GACrB,OAAO,EAAmB,GAI5B,IAAM,EAAa,EAAU,YAAY,EACzC,QAAY,EAAK,KAAc,OAAO,QAAQ,CAAkB,EAC9D,GAAI,EAAW,SAAS,CAAG,EACzB,OAAO,EAKX,OAAO,EAAmB,QAO5B,SAAS,CAAgB,CAAC,EAAsB,CAC9C,GAAI,CAAC,EAAM,MAAO,GAElB,IAAM,EAAQ,EAAK,MAAM,KAAK,EAAE,OAAO,OAAO,EAAE,OAC1C,EAAQ,EAAK,OAEb,EAAoB,KAAK,KAAK,EAAQ,GAAG,EACzC,EAAoB,KAAK,KAAK,EAAQ,GAAG,EAE/C,OAAO,KAAK,OAAO,EAAoB,GAAqB,CAAC,EAO/D,eAAe,CAAe,CAAC,EAAyC,CAEtE,GAAI,GAAgB,CAAC,EAAW,OAAO,EAGvC,GAAI,EAAsB,OAAO,KAGjC,MAAO,EACL,MAAM,IAAI,QAAQ,KAAW,WAAW,EAAS,GAAG,CAAC,EAIvD,GAAI,GAAgB,CAAC,EAAW,OAAO,EACvC,GAAI,EAAsB,OAAO,KAGjC,IAAM,EAAgB,EAAqB,CAAS,EAGpD,GAAI,CAcF,OAbA,EAAwB,GACxB,QAAQ,IAAI,qCAAqC,iBAA6B,GAAa,YAAY,EAGvG,EAAe,MAAM,EAAc,gBAAgB,EAAe,CAEhE,UAAW,kBAEX,iBAAkB,EACpB,CAAC,EAED,QAAQ,IAAI,8CAA8C,EAC1D,EAAwB,GACjB,EACP,MAAO,EAAO,CAId,OAHA,QAAQ,KAAK,iFAAkF,CAAK,EACpG,EAAuB,GACvB,EAAwB,GACjB,MAYX,eAAsB,CAAW,CAAC,EAAc,EAAqC,CACnF,GAAI,CAAC,EAAM,MAAO,GAElB,IAAM,EAAY,MAAM,EAAgB,CAAS,EAEjD,GAAI,CAAC,EAEH,OAAO,EAAiB,CAAI,EAG9B,GAAI,CAEF,IAAM,EAAU,MAAM,EAAU,CAAI,EAGpC,GAAI,EAAQ,WAAa,EAAQ,UAAU,KACzC,OAAO,EAAQ,UAAU,KAI3B,GAAI,MAAM,QAAQ,EAAQ,SAAS,EACjC,OAAO,EAAQ,UAAU,OAI3B,GAAI,EAAQ,UAAU,KACpB,OAAO,EAAQ,UAAU,KAAK,OAIhC,OAAO,EAAiB,CAAI,EAC5B,MAAO,EAAO,CAEd,OADA,QAAQ,KAAK,wDAAyD,CAAK,EACpE,EAAiB,CAAI,GAQzB,SAAS,CAAc,CAAC,EAAsB,CACnD,OAAO,EAAiB,CAAI,EAOvB,SAAS,CAAgB,CAAC,EAAuB,CACtD,GAAI,EAAQ,KACV,OAAO,EAAM,SAAS,EAGxB,GAAI,EAAQ,IAEV,MAAO,IADG,EAAQ,MACN,QAAQ,CAAC,KAIvB,MAAO,IADG,EAAQ,KACN,QAAQ,CAAC,KAOvB,eAAsB,CAAmB,CAAC,EAAc,EAAoC,CAC1F,OAAO,EAAY,EAAM,CAAS,EAOpC,eAAsB,CAAc,CAAC,EAAc,EAAqC,CACtF,IAAM,EAAQ,MAAM,EAAY,EAAM,CAAS,EAC/C,MAAO,GAAG,EAAiB,CAAK,WAO3B,SAAS,CAAkB,CAAC,EAAsB,CACvD,IAAM,EAAQ,EAAe,CAAI,EACjC,MAAO,GAAG,EAAiB,CAAK,WAOlC,eAAsB,CAAgB,CAAC,EAAiB,EAAuC,CAC7F,OAAO,QAAQ,IAAI,EAAM,IAAI,KAAQ,EAAY,EAAM,CAAS,CAAC,CAAC,EAM7D,SAAS,CAAmB,CAAC,EAA2B,CAC7D,OAAO,EAAM,IAAI,CAAc,EAMjC,eAAsB,CAAgB,CAAC,EAK7B,CACR,IAAM,EAAgB,EAAqB,CAAS,EAC9C,EAAY,MAAM,EAAgB,CAAS,EAEjD,MAAO,CACL,UAAW,GAAa,UACxB,gBACA,OAAQ,IAAc,KACtB,OAAQ,CACV,EAMK,SAAS,CAAkB,EAAa,CAC7C,OAAO,OAAO,KAAK,CAAkB,EAAE,OAAO,KAAK,IAAM,SAAS",
- "debugId": "D0C5BD1F7BE055F064756E2164756E21",
- "names": []
- }
@@ -1,4 +0,0 @@
- import{pa as b}from"./chunk-878q8xdr.js";import"./chunk-qa8b725g.js";import"./chunk-hshjnpm0.js";async function A(k,q={}){return b(k).fetchModels(q)}export{A as fetchModels};
- export{A as K};
-
- //# debugId=2F2EFAA3FC4BAD5A64756E2164756E21
@@ -1,7 +0,0 @@
- import{J as M}from"./chunk-bd11hvvz.js";import{qa as E}from"./chunk-878q8xdr.js";import{ua as Z}from"./chunk-qa8b725g.js";import{va as N,ya as R}from"./chunk-hshjnpm0.js";import V from"node:fs/promises";import J from"node:path";import j from"node:os";var v=E(),x=Z.object({defaultProvider:Z.enum(["anthropic","openai","google","openrouter","claude-code","zai"]).optional(),defaultModel:Z.string().optional(),providers:Z.record(Z.string(),Z.object({defaultModel:Z.string().optional()}).passthrough()).optional()}),z=J.join(j.homedir(),".sylphx-flow","settings.json"),S=".sylphx-flow/settings.json",_=".sylphx-flow/settings.local.json",G=".sylphx-flow/ai-config.json",Y=(q=process.cwd())=>({global:z,project:J.join(q,S),local:J.join(q,_),legacy:J.join(q,G)}),B=async(q)=>{try{let k=await V.readFile(q,"utf8"),D=JSON.parse(k);return x.parse(D)}catch(k){if(k.code==="ENOENT")return null;throw k}},$=(q,k)=>{let D=new Set([...Object.keys(q.providers||{}),...Object.keys(k.providers||{})]),K={};for(let H of D)K[H]={...q.providers?.[H],...k.providers?.[H]};return{defaultProvider:k.defaultProvider??q.defaultProvider,defaultModel:k.defaultModel??q.defaultModel,providers:K}},h=async(q=process.cwd())=>{let k=Y(q);try{return await V.access(k.global).catch(()=>{}),!0}catch{}try{return await V.access(k.project),!0}catch{}try{return await V.access(k.local),!0}catch{}try{return await V.access(k.legacy),!0}catch{}return!1},T=async(q=process.cwd())=>{return M(async()=>{let k=Y(q),[D,K,H,U]=await Promise.all([B(k.global),B(k.project),B(k.local),B(k.legacy)]);if(U&&!D){await y(q);let X=await B(k.global);if(X){let W={};if(W=$(W,X),K)W=$(W,K);if(H)W=$(W,H);return W}}let Q={};if(D)Q=$(Q,D);if(K)Q=$(Q,K);if(H)Q=$(Q,H);if(U)Q=$(Q,U);return Q},(k)=>Error(`Failed to load AI config: ${k.message}`))},L=async(q,k=process.cwd())=>{let K=Y(k).global;return M(async()=>{await V.mkdir(J.dirname(K),{recursive:!0});let H={...q};if(!H.defaultProvider&&H.providers){let{getProvider:Q}=await import("./chunk-878q8xdr.js"),X=[];for(let[W,O]of Object.entries(H.providers))try{if(Q(W).isConfigured(O))X.push(W)}catch{}if(X.length>0)H.defaultProvider=X[X.length-1]}let U=x.parse(H);await V.writeFile(K,JSON.stringify(U,null,2)+`
- `,"utf8")},(H)=>Error(`Failed to save AI config: ${H.message}`))},p=async(q,k,D=process.cwd())=>{let H=Y(D)[k];return M(async()=>{await V.mkdir(J.dirname(H),{recursive:!0});let U=x.parse(q);await V.writeFile(H,JSON.stringify(U,null,2)+`
- `,"utf8")},(U)=>Error(`Failed to save AI config to ${k}: ${U.message}`))},C=async(q,k=process.cwd())=>{let D=await T(k);if(D._tag==="Failure")return D;let K={...D.value,...q,providers:{...D.value.providers,...q.providers}};return L(K,k)},l=async(q=process.cwd())=>{let k=await T(q);if(k._tag==="Failure")return[];let D=[],K=k.value;if(!K.providers)return[];let{getProvider:H}=await import("./chunk-878q8xdr.js");for(let[U,Q]of Object.entries(K.providers))try{if(H(U).isConfigured(Q))D.push(U)}catch{}return D},y=async(q=process.cwd())=>{return M(async()=>{let k=Y(q),D=await B(k.legacy);if(!D)return;if(await B(k.global)){console.log("Legacy config found but global config already exists. Skipping migration."),console.log(`You can manually delete ${k.legacy} if migration is complete.`);return}await V.mkdir(J.dirname(k.global),{recursive:!0}),await V.writeFile(k.global,JSON.stringify(D,null,2)+`
- `,"utf8"),console.log(`✓ Migrated configuration from ${k.legacy} to ${k.global}`),console.log(` You can now safely delete the legacy file: ${k.legacy}`)},(k)=>Error(`Failed to migrate legacy config: ${k.message}`))};export{C as updateAIConfig,p as saveAIConfigTo,L as saveAIConfig,y as migrateLegacyConfig,T as loadAIConfig,l as getConfiguredProviders,Y as getAIConfigPaths,h as aiConfigExists,v as AI_PROVIDERS};
- export{v as C,T as D,L as E,l as F};
-
- //# debugId=CEA259537CFD4AAE64756E2164756E21