@workglow/ai-provider 0.0.121 → 0.0.123

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (556)
  1. package/README.md +30 -67
  2. package/dist/common/HfModelSearch.d.ts +32 -0
  3. package/dist/common/HfModelSearch.d.ts.map +1 -0
  4. package/dist/common/PipelineTaskMapping.d.ts +12 -0
  5. package/dist/common/PipelineTaskMapping.d.ts.map +1 -0
  6. package/dist/{anthropic → provider-anthropic}/AnthropicProvider.d.ts +2 -14
  7. package/dist/provider-anthropic/AnthropicProvider.d.ts.map +1 -0
  8. package/dist/provider-anthropic/AnthropicQueuedProvider.d.ts +16 -0
  9. package/dist/provider-anthropic/AnthropicQueuedProvider.d.ts.map +1 -0
  10. package/dist/provider-anthropic/common/Anthropic_Client.d.ts +13 -0
  11. package/dist/provider-anthropic/common/Anthropic_Client.d.ts.map +1 -0
  12. package/dist/provider-anthropic/common/Anthropic_Constants.d.ts.map +1 -0
  13. package/dist/provider-anthropic/common/Anthropic_CountTokens.d.ts +10 -0
  14. package/dist/provider-anthropic/common/Anthropic_CountTokens.d.ts.map +1 -0
  15. package/dist/provider-anthropic/common/Anthropic_JobRunFns.d.ts +12 -0
  16. package/dist/provider-anthropic/common/Anthropic_JobRunFns.d.ts.map +1 -0
  17. package/dist/provider-anthropic/common/Anthropic_ModelInfo.d.ts +9 -0
  18. package/dist/provider-anthropic/common/Anthropic_ModelInfo.d.ts.map +1 -0
  19. package/dist/{anthropic → provider-anthropic}/common/Anthropic_ModelSchema.d.ts +31 -31
  20. package/dist/provider-anthropic/common/Anthropic_ModelSchema.d.ts.map +1 -0
  21. package/dist/provider-anthropic/common/Anthropic_ModelSearch.d.ts +8 -0
  22. package/dist/provider-anthropic/common/Anthropic_ModelSearch.d.ts.map +1 -0
  23. package/dist/provider-anthropic/common/Anthropic_StructuredGeneration.d.ts +10 -0
  24. package/dist/provider-anthropic/common/Anthropic_StructuredGeneration.d.ts.map +1 -0
  25. package/dist/provider-anthropic/common/Anthropic_TextGeneration.d.ts +10 -0
  26. package/dist/provider-anthropic/common/Anthropic_TextGeneration.d.ts.map +1 -0
  27. package/dist/provider-anthropic/common/Anthropic_TextRewriter.d.ts +10 -0
  28. package/dist/provider-anthropic/common/Anthropic_TextRewriter.d.ts.map +1 -0
  29. package/dist/provider-anthropic/common/Anthropic_TextSummary.d.ts +10 -0
  30. package/dist/provider-anthropic/common/Anthropic_TextSummary.d.ts.map +1 -0
  31. package/dist/provider-anthropic/common/Anthropic_ToolCalling.d.ts +10 -0
  32. package/dist/provider-anthropic/common/Anthropic_ToolCalling.d.ts.map +1 -0
  33. package/dist/{anthropic → provider-anthropic}/index.d.ts +1 -3
  34. package/dist/provider-anthropic/index.d.ts.map +1 -0
  35. package/dist/{index-60ev6k93.js → provider-anthropic/index.js} +43 -11
  36. package/dist/provider-anthropic/index.js.map +13 -0
  37. package/dist/provider-anthropic/registerAnthropic.d.ts +10 -0
  38. package/dist/provider-anthropic/registerAnthropic.d.ts.map +1 -0
  39. package/dist/provider-anthropic/registerAnthropicInline.d.ts +8 -0
  40. package/dist/provider-anthropic/registerAnthropicInline.d.ts.map +1 -0
  41. package/dist/provider-anthropic/registerAnthropicWorker.d.ts +7 -0
  42. package/dist/provider-anthropic/registerAnthropicWorker.d.ts.map +1 -0
  43. package/dist/provider-anthropic/runtime.d.ts +16 -0
  44. package/dist/provider-anthropic/runtime.d.ts.map +1 -0
  45. package/dist/{anthropic/index.js → provider-anthropic/runtime.js} +291 -177
  46. package/dist/provider-anthropic/runtime.js.map +24 -0
  47. package/dist/{web-browser → provider-chrome}/WebBrowserProvider.d.ts +2 -15
  48. package/dist/provider-chrome/WebBrowserProvider.d.ts.map +1 -0
  49. package/dist/provider-chrome/WebBrowserQueuedProvider.d.ts +16 -0
  50. package/dist/provider-chrome/WebBrowserQueuedProvider.d.ts.map +1 -0
  51. package/dist/provider-chrome/common/WebBrowser_ChromeHelpers.d.ts +31 -0
  52. package/dist/provider-chrome/common/WebBrowser_ChromeHelpers.d.ts.map +1 -0
  53. package/dist/provider-chrome/common/WebBrowser_Constants.d.ts.map +1 -0
  54. package/dist/provider-chrome/common/WebBrowser_JobRunFns.d.ts +10 -0
  55. package/dist/provider-chrome/common/WebBrowser_JobRunFns.d.ts.map +1 -0
  56. package/dist/provider-chrome/common/WebBrowser_ModelInfo.d.ts +9 -0
  57. package/dist/provider-chrome/common/WebBrowser_ModelInfo.d.ts.map +1 -0
  58. package/dist/{web-browser → provider-chrome}/common/WebBrowser_ModelSchema.d.ts +31 -31
  59. package/dist/provider-chrome/common/WebBrowser_ModelSchema.d.ts.map +1 -0
  60. package/dist/provider-chrome/common/WebBrowser_ModelSearch.d.ts +8 -0
  61. package/dist/provider-chrome/common/WebBrowser_ModelSearch.d.ts.map +1 -0
  62. package/dist/provider-chrome/common/WebBrowser_TextGeneration.d.ts +10 -0
  63. package/dist/provider-chrome/common/WebBrowser_TextGeneration.d.ts.map +1 -0
  64. package/dist/provider-chrome/common/WebBrowser_TextLanguageDetection.d.ts +9 -0
  65. package/dist/provider-chrome/common/WebBrowser_TextLanguageDetection.d.ts.map +1 -0
  66. package/dist/provider-chrome/common/WebBrowser_TextRewriter.d.ts +10 -0
  67. package/dist/provider-chrome/common/WebBrowser_TextRewriter.d.ts.map +1 -0
  68. package/dist/provider-chrome/common/WebBrowser_TextSummary.d.ts +10 -0
  69. package/dist/provider-chrome/common/WebBrowser_TextSummary.d.ts.map +1 -0
  70. package/dist/provider-chrome/common/WebBrowser_TextTranslation.d.ts +10 -0
  71. package/dist/provider-chrome/common/WebBrowser_TextTranslation.d.ts.map +1 -0
  72. package/dist/{web-browser → provider-chrome}/index.d.ts +1 -3
  73. package/dist/provider-chrome/index.d.ts.map +1 -0
  74. package/dist/provider-chrome/index.js +132 -0
  75. package/dist/provider-chrome/index.js.map +13 -0
  76. package/dist/provider-chrome/registerWebBrowser.d.ts +10 -0
  77. package/dist/provider-chrome/registerWebBrowser.d.ts.map +1 -0
  78. package/dist/provider-chrome/registerWebBrowserInline.d.ts +8 -0
  79. package/dist/provider-chrome/registerWebBrowserInline.d.ts.map +1 -0
  80. package/dist/provider-chrome/registerWebBrowserWorker.d.ts +7 -0
  81. package/dist/provider-chrome/registerWebBrowserWorker.d.ts.map +1 -0
  82. package/dist/provider-chrome/runtime.d.ts +14 -0
  83. package/dist/provider-chrome/runtime.d.ts.map +1 -0
  84. package/dist/{web-browser/index.js → provider-chrome/runtime.js} +260 -235
  85. package/dist/provider-chrome/runtime.js.map +23 -0
  86. package/dist/{google-gemini → provider-gemini}/GoogleGeminiProvider.d.ts +2 -15
  87. package/dist/provider-gemini/GoogleGeminiProvider.d.ts.map +1 -0
  88. package/dist/provider-gemini/GoogleGeminiQueuedProvider.d.ts +16 -0
  89. package/dist/provider-gemini/GoogleGeminiQueuedProvider.d.ts.map +1 -0
  90. package/dist/provider-gemini/common/Gemini_Client.d.ts +10 -0
  91. package/dist/provider-gemini/common/Gemini_Client.d.ts.map +1 -0
  92. package/dist/provider-gemini/common/Gemini_Constants.d.ts.map +1 -0
  93. package/dist/provider-gemini/common/Gemini_CountTokens.d.ts +10 -0
  94. package/dist/provider-gemini/common/Gemini_CountTokens.d.ts.map +1 -0
  95. package/dist/provider-gemini/common/Gemini_JobRunFns.d.ts +13 -0
  96. package/dist/provider-gemini/common/Gemini_JobRunFns.d.ts.map +1 -0
  97. package/dist/provider-gemini/common/Gemini_ModelInfo.d.ts +9 -0
  98. package/dist/provider-gemini/common/Gemini_ModelInfo.d.ts.map +1 -0
  99. package/dist/{google-gemini → provider-gemini}/common/Gemini_ModelSchema.d.ts +31 -31
  100. package/dist/provider-gemini/common/Gemini_ModelSchema.d.ts.map +1 -0
  101. package/dist/provider-gemini/common/Gemini_ModelSearch.d.ts +8 -0
  102. package/dist/provider-gemini/common/Gemini_ModelSearch.d.ts.map +1 -0
  103. package/dist/provider-gemini/common/Gemini_Schema.d.ts +11 -0
  104. package/dist/provider-gemini/common/Gemini_Schema.d.ts.map +1 -0
  105. package/dist/provider-gemini/common/Gemini_StructuredGeneration.d.ts +10 -0
  106. package/dist/provider-gemini/common/Gemini_StructuredGeneration.d.ts.map +1 -0
  107. package/dist/provider-gemini/common/Gemini_TextEmbedding.d.ts +9 -0
  108. package/dist/provider-gemini/common/Gemini_TextEmbedding.d.ts.map +1 -0
  109. package/dist/provider-gemini/common/Gemini_TextGeneration.d.ts +10 -0
  110. package/dist/provider-gemini/common/Gemini_TextGeneration.d.ts.map +1 -0
  111. package/dist/provider-gemini/common/Gemini_TextRewriter.d.ts +10 -0
  112. package/dist/provider-gemini/common/Gemini_TextRewriter.d.ts.map +1 -0
  113. package/dist/provider-gemini/common/Gemini_TextSummary.d.ts +10 -0
  114. package/dist/provider-gemini/common/Gemini_TextSummary.d.ts.map +1 -0
  115. package/dist/provider-gemini/common/Gemini_ToolCalling.d.ts +10 -0
  116. package/dist/provider-gemini/common/Gemini_ToolCalling.d.ts.map +1 -0
  117. package/dist/{google-gemini → provider-gemini}/index.d.ts +1 -3
  118. package/dist/provider-gemini/index.d.ts.map +1 -0
  119. package/dist/{index-8651nz8y.js → provider-gemini/index.js} +43 -11
  120. package/dist/provider-gemini/index.js.map +13 -0
  121. package/dist/provider-gemini/registerGemini.d.ts +10 -0
  122. package/dist/provider-gemini/registerGemini.d.ts.map +1 -0
  123. package/dist/provider-gemini/registerGeminiInline.d.ts +8 -0
  124. package/dist/provider-gemini/registerGeminiInline.d.ts.map +1 -0
  125. package/dist/{anthropic/Anthropic_Worker.d.ts → provider-gemini/registerGeminiWorker.d.ts} +2 -2
  126. package/dist/provider-gemini/registerGeminiWorker.d.ts.map +1 -0
  127. package/dist/provider-gemini/runtime.d.ts +16 -0
  128. package/dist/provider-gemini/runtime.d.ts.map +1 -0
  129. package/dist/{google-gemini/index.js → provider-gemini/runtime.js} +281 -173
  130. package/dist/provider-gemini/runtime.js.map +26 -0
  131. package/dist/provider-hf-inference/HfInferenceProvider.d.ts +2 -19
  132. package/dist/provider-hf-inference/HfInferenceProvider.d.ts.map +1 -1
  133. package/dist/provider-hf-inference/HfInferenceQueuedProvider.d.ts +16 -0
  134. package/dist/provider-hf-inference/HfInferenceQueuedProvider.d.ts.map +1 -0
  135. package/dist/provider-hf-inference/common/HFI_Client.d.ts +12 -0
  136. package/dist/provider-hf-inference/common/HFI_Client.d.ts.map +1 -0
  137. package/dist/provider-hf-inference/common/HFI_JobRunFns.d.ts +2 -11
  138. package/dist/provider-hf-inference/common/HFI_JobRunFns.d.ts.map +1 -1
  139. package/dist/provider-hf-inference/common/HFI_ModelInfo.d.ts +9 -0
  140. package/dist/provider-hf-inference/common/HFI_ModelInfo.d.ts.map +1 -0
  141. package/dist/provider-hf-inference/common/HFI_ModelSchema.d.ts +31 -31
  142. package/dist/provider-hf-inference/common/HFI_ModelSchema.d.ts.map +1 -1
  143. package/dist/provider-hf-inference/common/HFI_ModelSearch.d.ts +8 -0
  144. package/dist/provider-hf-inference/common/HFI_ModelSearch.d.ts.map +1 -0
  145. package/dist/provider-hf-inference/common/HFI_TextEmbedding.d.ts +9 -0
  146. package/dist/provider-hf-inference/common/HFI_TextEmbedding.d.ts.map +1 -0
  147. package/dist/provider-hf-inference/common/HFI_TextGeneration.d.ts +10 -0
  148. package/dist/provider-hf-inference/common/HFI_TextGeneration.d.ts.map +1 -0
  149. package/dist/provider-hf-inference/common/HFI_TextRewriter.d.ts +10 -0
  150. package/dist/provider-hf-inference/common/HFI_TextRewriter.d.ts.map +1 -0
  151. package/dist/provider-hf-inference/common/HFI_TextSummary.d.ts +10 -0
  152. package/dist/provider-hf-inference/common/HFI_TextSummary.d.ts.map +1 -0
  153. package/dist/provider-hf-inference/common/HFI_ToolCalling.d.ts +10 -0
  154. package/dist/provider-hf-inference/common/HFI_ToolCalling.d.ts.map +1 -0
  155. package/dist/provider-hf-inference/index.d.ts +1 -3
  156. package/dist/provider-hf-inference/index.d.ts.map +1 -1
  157. package/dist/provider-hf-inference/index.js +98 -411
  158. package/dist/provider-hf-inference/index.js.map +7 -5
  159. package/dist/provider-hf-inference/registerHfInference.d.ts +10 -0
  160. package/dist/provider-hf-inference/registerHfInference.d.ts.map +1 -0
  161. package/dist/provider-hf-inference/registerHfInferenceInline.d.ts +8 -0
  162. package/dist/provider-hf-inference/registerHfInferenceInline.d.ts.map +1 -0
  163. package/dist/provider-hf-inference/registerHfInferenceWorker.d.ts +7 -0
  164. package/dist/provider-hf-inference/registerHfInferenceWorker.d.ts.map +1 -0
  165. package/dist/provider-hf-inference/runtime.d.ts +16 -0
  166. package/dist/provider-hf-inference/runtime.d.ts.map +1 -0
  167. package/dist/provider-hf-inference/runtime.js +592 -0
  168. package/dist/provider-hf-inference/runtime.js.map +25 -0
  169. package/dist/{hf-transformers → provider-hf-transformers}/HuggingFaceTransformersProvider.d.ts +2 -21
  170. package/dist/provider-hf-transformers/HuggingFaceTransformersProvider.d.ts.map +1 -0
  171. package/dist/provider-hf-transformers/HuggingFaceTransformersQueuedProvider.d.ts +16 -0
  172. package/dist/provider-hf-transformers/HuggingFaceTransformersQueuedProvider.d.ts.map +1 -0
  173. package/dist/provider-hf-transformers/common/HFT_BackgroundRemoval.d.ts +12 -0
  174. package/dist/provider-hf-transformers/common/HFT_BackgroundRemoval.d.ts.map +1 -0
  175. package/dist/{hf-transformers → provider-hf-transformers}/common/HFT_Constants.d.ts +25 -23
  176. package/dist/provider-hf-transformers/common/HFT_Constants.d.ts.map +1 -0
  177. package/dist/provider-hf-transformers/common/HFT_CountTokens.d.ts +10 -0
  178. package/dist/provider-hf-transformers/common/HFT_CountTokens.d.ts.map +1 -0
  179. package/dist/provider-hf-transformers/common/HFT_Download.d.ts +13 -0
  180. package/dist/provider-hf-transformers/common/HFT_Download.d.ts.map +1 -0
  181. package/dist/provider-hf-transformers/common/HFT_ImageClassification.d.ts +13 -0
  182. package/dist/provider-hf-transformers/common/HFT_ImageClassification.d.ts.map +1 -0
  183. package/dist/provider-hf-transformers/common/HFT_ImageEmbedding.d.ts +12 -0
  184. package/dist/provider-hf-transformers/common/HFT_ImageEmbedding.d.ts.map +1 -0
  185. package/dist/provider-hf-transformers/common/HFT_ImageHelpers.d.ts +11 -0
  186. package/dist/provider-hf-transformers/common/HFT_ImageHelpers.d.ts.map +1 -0
  187. package/dist/provider-hf-transformers/common/HFT_ImageSegmentation.d.ts +12 -0
  188. package/dist/provider-hf-transformers/common/HFT_ImageSegmentation.d.ts.map +1 -0
  189. package/dist/provider-hf-transformers/common/HFT_ImageToText.d.ts +12 -0
  190. package/dist/provider-hf-transformers/common/HFT_ImageToText.d.ts.map +1 -0
  191. package/dist/provider-hf-transformers/common/HFT_InlineLifecycle.d.ts +7 -0
  192. package/dist/provider-hf-transformers/common/HFT_InlineLifecycle.d.ts.map +1 -0
  193. package/dist/{hf-transformers → provider-hf-transformers}/common/HFT_JobRunFns.d.ts +673 -789
  194. package/dist/provider-hf-transformers/common/HFT_JobRunFns.d.ts.map +1 -0
  195. package/dist/provider-hf-transformers/common/HFT_ModelInfo.d.ts +9 -0
  196. package/dist/provider-hf-transformers/common/HFT_ModelInfo.d.ts.map +1 -0
  197. package/dist/{hf-transformers → provider-hf-transformers}/common/HFT_ModelSchema.d.ts +37 -37
  198. package/dist/provider-hf-transformers/common/HFT_ModelSchema.d.ts.map +1 -0
  199. package/dist/provider-hf-transformers/common/HFT_ModelSearch.d.ts +8 -0
  200. package/dist/provider-hf-transformers/common/HFT_ModelSearch.d.ts.map +1 -0
  201. package/dist/provider-hf-transformers/common/HFT_ObjectDetection.d.ts +13 -0
  202. package/dist/provider-hf-transformers/common/HFT_ObjectDetection.d.ts.map +1 -0
  203. package/dist/provider-hf-transformers/common/HFT_OnnxDtypes.d.ts +23 -0
  204. package/dist/provider-hf-transformers/common/HFT_OnnxDtypes.d.ts.map +1 -0
  205. package/dist/provider-hf-transformers/common/HFT_Pipeline.d.ts +32 -0
  206. package/dist/provider-hf-transformers/common/HFT_Pipeline.d.ts.map +1 -0
  207. package/dist/provider-hf-transformers/common/HFT_Streaming.d.ts +25 -0
  208. package/dist/provider-hf-transformers/common/HFT_Streaming.d.ts.map +1 -0
  209. package/dist/provider-hf-transformers/common/HFT_StructuredGeneration.d.ts +10 -0
  210. package/dist/provider-hf-transformers/common/HFT_StructuredGeneration.d.ts.map +1 -0
  211. package/dist/provider-hf-transformers/common/HFT_TextClassification.d.ts +9 -0
  212. package/dist/provider-hf-transformers/common/HFT_TextClassification.d.ts.map +1 -0
  213. package/dist/provider-hf-transformers/common/HFT_TextEmbedding.d.ts +13 -0
  214. package/dist/provider-hf-transformers/common/HFT_TextEmbedding.d.ts.map +1 -0
  215. package/dist/provider-hf-transformers/common/HFT_TextFillMask.d.ts +9 -0
  216. package/dist/provider-hf-transformers/common/HFT_TextFillMask.d.ts.map +1 -0
  217. package/dist/provider-hf-transformers/common/HFT_TextGeneration.d.ts +14 -0
  218. package/dist/provider-hf-transformers/common/HFT_TextGeneration.d.ts.map +1 -0
  219. package/dist/provider-hf-transformers/common/HFT_TextLanguageDetection.d.ts +9 -0
  220. package/dist/provider-hf-transformers/common/HFT_TextLanguageDetection.d.ts.map +1 -0
  221. package/dist/provider-hf-transformers/common/HFT_TextNamedEntityRecognition.d.ts +9 -0
  222. package/dist/provider-hf-transformers/common/HFT_TextNamedEntityRecognition.d.ts.map +1 -0
  223. package/dist/provider-hf-transformers/common/HFT_TextOutput.d.ts +8 -0
  224. package/dist/provider-hf-transformers/common/HFT_TextOutput.d.ts.map +1 -0
  225. package/dist/provider-hf-transformers/common/HFT_TextQuestionAnswer.d.ts +14 -0
  226. package/dist/provider-hf-transformers/common/HFT_TextQuestionAnswer.d.ts.map +1 -0
  227. package/dist/provider-hf-transformers/common/HFT_TextRewriter.d.ts +14 -0
  228. package/dist/provider-hf-transformers/common/HFT_TextRewriter.d.ts.map +1 -0
  229. package/dist/provider-hf-transformers/common/HFT_TextSummary.d.ts +14 -0
  230. package/dist/provider-hf-transformers/common/HFT_TextSummary.d.ts.map +1 -0
  231. package/dist/provider-hf-transformers/common/HFT_TextTranslation.d.ts +14 -0
  232. package/dist/provider-hf-transformers/common/HFT_TextTranslation.d.ts.map +1 -0
  233. package/dist/provider-hf-transformers/common/HFT_ToolCalling.d.ts +10 -0
  234. package/dist/provider-hf-transformers/common/HFT_ToolCalling.d.ts.map +1 -0
  235. package/dist/provider-hf-transformers/common/HFT_ToolMarkup.d.ts +40 -0
  236. package/dist/provider-hf-transformers/common/HFT_ToolMarkup.d.ts.map +1 -0
  237. package/dist/provider-hf-transformers/common/HFT_Unload.d.ts +13 -0
  238. package/dist/provider-hf-transformers/common/HFT_Unload.d.ts.map +1 -0
  239. package/dist/{hf-transformers → provider-hf-transformers}/index.d.ts +4 -2
  240. package/dist/provider-hf-transformers/index.d.ts.map +1 -0
  241. package/dist/provider-hf-transformers/index.js +513 -0
  242. package/dist/provider-hf-transformers/index.js.map +16 -0
  243. package/dist/provider-hf-transformers/registerHuggingFaceTransformers.d.ts +14 -0
  244. package/dist/provider-hf-transformers/registerHuggingFaceTransformers.d.ts.map +1 -0
  245. package/dist/provider-hf-transformers/registerHuggingFaceTransformersInline.d.ts +15 -0
  246. package/dist/provider-hf-transformers/registerHuggingFaceTransformersInline.d.ts.map +1 -0
  247. package/dist/provider-hf-transformers/registerHuggingFaceTransformersWorker.d.ts +7 -0
  248. package/dist/provider-hf-transformers/registerHuggingFaceTransformersWorker.d.ts.map +1 -0
  249. package/dist/provider-hf-transformers/runtime.d.ts +21 -0
  250. package/dist/provider-hf-transformers/runtime.d.ts.map +1 -0
  251. package/dist/{index-j4g81r4k.js → provider-hf-transformers/runtime.js} +1564 -928
  252. package/dist/provider-hf-transformers/runtime.js.map +49 -0
  253. package/dist/provider-llamacpp/LlamaCppProvider.d.ts +2 -15
  254. package/dist/provider-llamacpp/LlamaCppProvider.d.ts.map +1 -1
  255. package/dist/provider-llamacpp/LlamaCppQueuedProvider.d.ts +16 -0
  256. package/dist/provider-llamacpp/LlamaCppQueuedProvider.d.ts.map +1 -0
  257. package/dist/provider-llamacpp/common/LlamaCpp_CountTokens.d.ts +10 -0
  258. package/dist/provider-llamacpp/common/LlamaCpp_CountTokens.d.ts.map +1 -0
  259. package/dist/provider-llamacpp/common/LlamaCpp_Download.d.ts +9 -0
  260. package/dist/provider-llamacpp/common/LlamaCpp_Download.d.ts.map +1 -0
  261. package/dist/provider-llamacpp/common/LlamaCpp_JobRunFns.d.ts +2 -18
  262. package/dist/provider-llamacpp/common/LlamaCpp_JobRunFns.d.ts.map +1 -1
  263. package/dist/provider-llamacpp/common/LlamaCpp_ModelInfo.d.ts +9 -0
  264. package/dist/provider-llamacpp/common/LlamaCpp_ModelInfo.d.ts.map +1 -0
  265. package/dist/provider-llamacpp/common/LlamaCpp_ModelSchema.d.ts +31 -31
  266. package/dist/provider-llamacpp/common/LlamaCpp_ModelSchema.d.ts.map +1 -1
  267. package/dist/provider-llamacpp/common/LlamaCpp_ModelSearch.d.ts +8 -0
  268. package/dist/provider-llamacpp/common/LlamaCpp_ModelSearch.d.ts.map +1 -0
  269. package/dist/provider-llamacpp/common/LlamaCpp_Runtime.d.ts +28 -0
  270. package/dist/provider-llamacpp/common/LlamaCpp_Runtime.d.ts.map +1 -0
  271. package/dist/provider-llamacpp/common/LlamaCpp_StructuredGeneration.d.ts +10 -0
  272. package/dist/provider-llamacpp/common/LlamaCpp_StructuredGeneration.d.ts.map +1 -0
  273. package/dist/provider-llamacpp/common/LlamaCpp_TextEmbedding.d.ts +9 -0
  274. package/dist/provider-llamacpp/common/LlamaCpp_TextEmbedding.d.ts.map +1 -0
  275. package/dist/provider-llamacpp/common/LlamaCpp_TextGeneration.d.ts +10 -0
  276. package/dist/provider-llamacpp/common/LlamaCpp_TextGeneration.d.ts.map +1 -0
  277. package/dist/provider-llamacpp/common/LlamaCpp_TextRewriter.d.ts +10 -0
  278. package/dist/provider-llamacpp/common/LlamaCpp_TextRewriter.d.ts.map +1 -0
  279. package/dist/provider-llamacpp/common/LlamaCpp_TextSummary.d.ts +10 -0
  280. package/dist/provider-llamacpp/common/LlamaCpp_TextSummary.d.ts.map +1 -0
  281. package/dist/provider-llamacpp/common/LlamaCpp_ToolCalling.d.ts +10 -0
  282. package/dist/provider-llamacpp/common/LlamaCpp_ToolCalling.d.ts.map +1 -0
  283. package/dist/provider-llamacpp/common/LlamaCpp_Unload.d.ts +9 -0
  284. package/dist/provider-llamacpp/common/LlamaCpp_Unload.d.ts.map +1 -0
  285. package/dist/provider-llamacpp/index.d.ts +1 -3
  286. package/dist/provider-llamacpp/index.d.ts.map +1 -1
  287. package/dist/provider-llamacpp/index.js +121 -725
  288. package/dist/provider-llamacpp/index.js.map +7 -5
  289. package/dist/provider-llamacpp/registerLlamaCpp.d.ts +10 -0
  290. package/dist/provider-llamacpp/registerLlamaCpp.d.ts.map +1 -0
  291. package/dist/provider-llamacpp/registerLlamaCppInline.d.ts +8 -0
  292. package/dist/provider-llamacpp/registerLlamaCppInline.d.ts.map +1 -0
  293. package/dist/provider-llamacpp/registerLlamaCppWorker.d.ts +7 -0
  294. package/dist/provider-llamacpp/registerLlamaCppWorker.d.ts.map +1 -0
  295. package/dist/provider-llamacpp/runtime.d.ts +16 -0
  296. package/dist/provider-llamacpp/runtime.d.ts.map +1 -0
  297. package/dist/provider-llamacpp/runtime.js +929 -0
  298. package/dist/provider-llamacpp/runtime.js.map +29 -0
  299. package/dist/provider-ollama/OllamaProvider.d.ts +2 -15
  300. package/dist/provider-ollama/OllamaProvider.d.ts.map +1 -1
  301. package/dist/provider-ollama/OllamaQueuedProvider.d.ts +16 -0
  302. package/dist/provider-ollama/OllamaQueuedProvider.d.ts.map +1 -0
  303. package/dist/provider-ollama/common/Ollama_Client.browser.d.ts +13 -0
  304. package/dist/provider-ollama/common/Ollama_Client.browser.d.ts.map +1 -0
  305. package/dist/provider-ollama/common/Ollama_Client.d.ts +13 -0
  306. package/dist/provider-ollama/common/Ollama_Client.d.ts.map +1 -0
  307. package/dist/provider-ollama/common/Ollama_JobRunFns.browser.d.ts +362 -11
  308. package/dist/provider-ollama/common/Ollama_JobRunFns.browser.d.ts.map +1 -1
  309. package/dist/provider-ollama/common/Ollama_JobRunFns.d.ts +361 -11
  310. package/dist/provider-ollama/common/Ollama_JobRunFns.d.ts.map +1 -1
  311. package/dist/provider-ollama/common/Ollama_ModelInfo.d.ts +11 -0
  312. package/dist/provider-ollama/common/Ollama_ModelInfo.d.ts.map +1 -0
  313. package/dist/provider-ollama/common/Ollama_ModelSchema.d.ts +30 -30
  314. package/dist/provider-ollama/common/Ollama_ModelSchema.d.ts.map +1 -1
  315. package/dist/provider-ollama/common/Ollama_ModelSearch.d.ts +11 -0
  316. package/dist/provider-ollama/common/Ollama_ModelSearch.d.ts.map +1 -0
  317. package/dist/provider-ollama/common/Ollama_ModelUtil.d.ts +8 -0
  318. package/dist/provider-ollama/common/Ollama_ModelUtil.d.ts.map +1 -0
  319. package/dist/provider-ollama/common/Ollama_TextEmbedding.d.ts +11 -0
  320. package/dist/provider-ollama/common/Ollama_TextEmbedding.d.ts.map +1 -0
  321. package/dist/provider-ollama/common/Ollama_TextGeneration.d.ts +12 -0
  322. package/dist/provider-ollama/common/Ollama_TextGeneration.d.ts.map +1 -0
  323. package/dist/provider-ollama/common/Ollama_TextRewriter.d.ts +12 -0
  324. package/dist/provider-ollama/common/Ollama_TextRewriter.d.ts.map +1 -0
  325. package/dist/provider-ollama/common/Ollama_TextSummary.d.ts +12 -0
  326. package/dist/provider-ollama/common/Ollama_TextSummary.d.ts.map +1 -0
  327. package/dist/provider-ollama/common/Ollama_ToolCalling.d.ts +16 -0
  328. package/dist/provider-ollama/common/Ollama_ToolCalling.d.ts.map +1 -0
  329. package/dist/provider-ollama/index.browser.d.ts +1 -3
  330. package/dist/provider-ollama/index.browser.d.ts.map +1 -1
  331. package/dist/provider-ollama/index.browser.js +18 -396
  332. package/dist/provider-ollama/index.browser.js.map +6 -7
  333. package/dist/provider-ollama/index.d.ts +1 -3
  334. package/dist/provider-ollama/index.d.ts.map +1 -1
  335. package/dist/provider-ollama/index.js +93 -382
  336. package/dist/provider-ollama/index.js.map +7 -5
  337. package/dist/provider-ollama/registerOllama.d.ts +10 -0
  338. package/dist/provider-ollama/registerOllama.d.ts.map +1 -0
  339. package/dist/provider-ollama/registerOllamaInline.browser.d.ts +8 -0
  340. package/dist/provider-ollama/registerOllamaInline.browser.d.ts.map +1 -0
  341. package/dist/provider-ollama/registerOllamaInline.d.ts +8 -0
  342. package/dist/provider-ollama/registerOllamaInline.d.ts.map +1 -0
  343. package/dist/provider-ollama/registerOllamaWorker.browser.d.ts +7 -0
  344. package/dist/provider-ollama/registerOllamaWorker.browser.d.ts.map +1 -0
  345. package/dist/{google-gemini/Gemini_Worker.d.ts → provider-ollama/registerOllamaWorker.d.ts} +2 -2
  346. package/dist/provider-ollama/registerOllamaWorker.d.ts.map +1 -0
  347. package/dist/provider-ollama/runtime.browser.d.ts +16 -0
  348. package/dist/provider-ollama/runtime.browser.d.ts.map +1 -0
  349. package/dist/provider-ollama/runtime.browser.js +528 -0
  350. package/dist/provider-ollama/runtime.browser.js.map +24 -0
  351. package/dist/provider-ollama/runtime.d.ts +16 -0
  352. package/dist/provider-ollama/runtime.d.ts.map +1 -0
  353. package/dist/provider-ollama/runtime.js +538 -0
  354. package/dist/provider-ollama/runtime.js.map +24 -0
  355. package/dist/provider-openai/OpenAiProvider.d.ts +2 -19
  356. package/dist/provider-openai/OpenAiProvider.d.ts.map +1 -1
  357. package/dist/provider-openai/OpenAiQueuedProvider.d.ts +16 -0
  358. package/dist/provider-openai/OpenAiQueuedProvider.d.ts.map +1 -0
  359. package/dist/provider-openai/common/OpenAI_Client.d.ts +10 -0
  360. package/dist/provider-openai/common/OpenAI_Client.d.ts.map +1 -0
  361. package/dist/provider-openai/common/OpenAI_CountTokens.browser.d.ts +10 -0
  362. package/dist/provider-openai/common/OpenAI_CountTokens.browser.d.ts.map +1 -0
  363. package/dist/provider-openai/common/OpenAI_CountTokens.d.ts +10 -0
  364. package/dist/provider-openai/common/OpenAI_CountTokens.d.ts.map +1 -0
  365. package/dist/provider-openai/common/OpenAI_JobRunFns.browser.d.ts +12 -0
  366. package/dist/provider-openai/common/OpenAI_JobRunFns.browser.d.ts.map +1 -0
  367. package/dist/provider-openai/common/OpenAI_JobRunFns.d.ts +2 -15
  368. package/dist/provider-openai/common/OpenAI_JobRunFns.d.ts.map +1 -1
  369. package/dist/provider-openai/common/OpenAI_ModelInfo.d.ts +9 -0
  370. package/dist/provider-openai/common/OpenAI_ModelInfo.d.ts.map +1 -0
  371. package/dist/provider-openai/common/OpenAI_ModelSchema.d.ts +31 -31
  372. package/dist/provider-openai/common/OpenAI_ModelSchema.d.ts.map +1 -1
  373. package/dist/provider-openai/common/OpenAI_ModelSearch.d.ts +8 -0
  374. package/dist/provider-openai/common/OpenAI_ModelSearch.d.ts.map +1 -0
  375. package/dist/provider-openai/common/OpenAI_StructuredGeneration.d.ts +10 -0
  376. package/dist/provider-openai/common/OpenAI_StructuredGeneration.d.ts.map +1 -0
  377. package/dist/provider-openai/common/OpenAI_TextEmbedding.d.ts +9 -0
  378. package/dist/provider-openai/common/OpenAI_TextEmbedding.d.ts.map +1 -0
  379. package/dist/provider-openai/common/OpenAI_TextGeneration.d.ts +10 -0
  380. package/dist/provider-openai/common/OpenAI_TextGeneration.d.ts.map +1 -0
  381. package/dist/provider-openai/common/OpenAI_TextRewriter.d.ts +10 -0
  382. package/dist/provider-openai/common/OpenAI_TextRewriter.d.ts.map +1 -0
  383. package/dist/provider-openai/common/OpenAI_TextSummary.d.ts +10 -0
  384. package/dist/provider-openai/common/OpenAI_TextSummary.d.ts.map +1 -0
  385. package/dist/provider-openai/common/OpenAI_ToolCalling.d.ts +10 -0
  386. package/dist/provider-openai/common/OpenAI_ToolCalling.d.ts.map +1 -0
  387. package/dist/provider-openai/index.browser.d.ts +9 -0
  388. package/dist/provider-openai/index.browser.d.ts.map +1 -0
  389. package/dist/{index-q2t627d5.js → provider-openai/index.browser.js} +26 -9
  390. package/dist/provider-openai/index.browser.js.map +13 -0
  391. package/dist/provider-openai/index.d.ts +1 -3
  392. package/dist/provider-openai/index.d.ts.map +1 -1
  393. package/dist/provider-openai/index.js +108 -519
  394. package/dist/provider-openai/index.js.map +7 -5
  395. package/dist/provider-openai/registerOpenAi.d.ts +10 -0
  396. package/dist/provider-openai/registerOpenAi.d.ts.map +1 -0
  397. package/dist/provider-openai/registerOpenAiInline.browser.d.ts +8 -0
  398. package/dist/provider-openai/registerOpenAiInline.browser.d.ts.map +1 -0
  399. package/dist/provider-openai/registerOpenAiInline.d.ts +8 -0
  400. package/dist/provider-openai/registerOpenAiInline.d.ts.map +1 -0
  401. package/dist/provider-openai/registerOpenAiWorker.browser.d.ts +7 -0
  402. package/dist/provider-openai/registerOpenAiWorker.browser.d.ts.map +1 -0
  403. package/dist/{ggml/model/GgmlLocalModel.d.ts → provider-openai/registerOpenAiWorker.d.ts} +2 -2
  404. package/dist/provider-openai/registerOpenAiWorker.d.ts.map +1 -0
  405. package/dist/provider-openai/runtime.browser.d.ts +15 -0
  406. package/dist/provider-openai/runtime.browser.d.ts.map +1 -0
  407. package/dist/provider-openai/runtime.browser.js +647 -0
  408. package/dist/provider-openai/runtime.browser.js.map +25 -0
  409. package/dist/provider-openai/runtime.d.ts +16 -0
  410. package/dist/provider-openai/runtime.d.ts.map +1 -0
  411. package/dist/provider-openai/runtime.js +662 -0
  412. package/dist/provider-openai/runtime.js.map +25 -0
  413. package/dist/provider-tf-mediapipe/TensorFlowMediaPipeProvider.d.ts +24 -0
  414. package/dist/provider-tf-mediapipe/TensorFlowMediaPipeProvider.d.ts.map +1 -0
  415. package/dist/provider-tf-mediapipe/TensorFlowMediaPipeQueuedProvider.d.ts +16 -0
  416. package/dist/provider-tf-mediapipe/TensorFlowMediaPipeQueuedProvider.d.ts.map +1 -0
  417. package/dist/provider-tf-mediapipe/common/TFMP_Client.d.ts +8 -0
  418. package/dist/provider-tf-mediapipe/common/TFMP_Client.d.ts.map +1 -0
  419. package/dist/{tf-mediapipe → provider-tf-mediapipe}/common/TFMP_Constants.d.ts +1 -0
  420. package/dist/provider-tf-mediapipe/common/TFMP_Constants.d.ts.map +1 -0
  421. package/dist/provider-tf-mediapipe/common/TFMP_Download.d.ts +9 -0
  422. package/dist/provider-tf-mediapipe/common/TFMP_Download.d.ts.map +1 -0
  423. package/dist/provider-tf-mediapipe/common/TFMP_FaceDetector.d.ts +9 -0
  424. package/dist/provider-tf-mediapipe/common/TFMP_FaceDetector.d.ts.map +1 -0
  425. package/dist/provider-tf-mediapipe/common/TFMP_FaceLandmarker.d.ts +9 -0
  426. package/dist/provider-tf-mediapipe/common/TFMP_FaceLandmarker.d.ts.map +1 -0
  427. package/dist/provider-tf-mediapipe/common/TFMP_GestureRecognizer.d.ts +9 -0
  428. package/dist/provider-tf-mediapipe/common/TFMP_GestureRecognizer.d.ts.map +1 -0
  429. package/dist/provider-tf-mediapipe/common/TFMP_HandLandmarker.d.ts +9 -0
  430. package/dist/provider-tf-mediapipe/common/TFMP_HandLandmarker.d.ts.map +1 -0
  431. package/dist/provider-tf-mediapipe/common/TFMP_ImageClassification.d.ts +9 -0
  432. package/dist/provider-tf-mediapipe/common/TFMP_ImageClassification.d.ts.map +1 -0
  433. package/dist/provider-tf-mediapipe/common/TFMP_ImageEmbedding.d.ts +9 -0
  434. package/dist/provider-tf-mediapipe/common/TFMP_ImageEmbedding.d.ts.map +1 -0
  435. package/dist/provider-tf-mediapipe/common/TFMP_ImageSegmentation.d.ts +9 -0
  436. package/dist/provider-tf-mediapipe/common/TFMP_ImageSegmentation.d.ts.map +1 -0
  437. package/dist/{tf-mediapipe → provider-tf-mediapipe}/common/TFMP_JobRunFns.d.ts +341 -409
  438. package/dist/provider-tf-mediapipe/common/TFMP_JobRunFns.d.ts.map +1 -0
  439. package/dist/provider-tf-mediapipe/common/TFMP_ModelInfo.d.ts +9 -0
  440. package/dist/provider-tf-mediapipe/common/TFMP_ModelInfo.d.ts.map +1 -0
  441. package/dist/{tf-mediapipe → provider-tf-mediapipe}/common/TFMP_ModelSchema.d.ts +40 -37
  442. package/dist/provider-tf-mediapipe/common/TFMP_ModelSchema.d.ts.map +1 -0
  443. package/dist/provider-tf-mediapipe/common/TFMP_ModelSearch.d.ts +9 -0
  444. package/dist/provider-tf-mediapipe/common/TFMP_ModelSearch.d.ts.map +1 -0
  445. package/dist/provider-tf-mediapipe/common/TFMP_ObjectDetection.d.ts +9 -0
  446. package/dist/provider-tf-mediapipe/common/TFMP_ObjectDetection.d.ts.map +1 -0
  447. package/dist/provider-tf-mediapipe/common/TFMP_PoseLandmarker.d.ts +9 -0
  448. package/dist/provider-tf-mediapipe/common/TFMP_PoseLandmarker.d.ts.map +1 -0
  449. package/dist/provider-tf-mediapipe/common/TFMP_Runtime.d.ts +43 -0
  450. package/dist/provider-tf-mediapipe/common/TFMP_Runtime.d.ts.map +1 -0
  451. package/dist/provider-tf-mediapipe/common/TFMP_TextClassification.d.ts +9 -0
  452. package/dist/provider-tf-mediapipe/common/TFMP_TextClassification.d.ts.map +1 -0
  453. package/dist/provider-tf-mediapipe/common/TFMP_TextEmbedding.d.ts +9 -0
  454. package/dist/provider-tf-mediapipe/common/TFMP_TextEmbedding.d.ts.map +1 -0
  455. package/dist/provider-tf-mediapipe/common/TFMP_TextLanguageDetection.d.ts +9 -0
  456. package/dist/provider-tf-mediapipe/common/TFMP_TextLanguageDetection.d.ts.map +1 -0
  457. package/dist/provider-tf-mediapipe/common/TFMP_Unload.d.ts +9 -0
  458. package/dist/provider-tf-mediapipe/common/TFMP_Unload.d.ts.map +1 -0
  459. package/dist/{tf-mediapipe → provider-tf-mediapipe}/index.d.ts +1 -3
  460. package/dist/provider-tf-mediapipe/index.d.ts.map +1 -0
  461. package/dist/provider-tf-mediapipe/index.js +129 -0
  462. package/dist/provider-tf-mediapipe/index.js.map +13 -0
  463. package/dist/provider-tf-mediapipe/registerTensorFlowMediaPipe.d.ts +10 -0
  464. package/dist/provider-tf-mediapipe/registerTensorFlowMediaPipe.d.ts.map +1 -0
  465. package/dist/provider-tf-mediapipe/registerTensorFlowMediaPipeInline.d.ts +8 -0
  466. package/dist/provider-tf-mediapipe/registerTensorFlowMediaPipeInline.d.ts.map +1 -0
  467. package/dist/provider-tf-mediapipe/registerTensorFlowMediaPipeWorker.d.ts +7 -0
  468. package/dist/provider-tf-mediapipe/registerTensorFlowMediaPipeWorker.d.ts.map +1 -0
  469. package/dist/provider-tf-mediapipe/runtime.d.ts +16 -0
  470. package/dist/provider-tf-mediapipe/runtime.d.ts.map +1 -0
  471. package/dist/{tf-mediapipe/index.js → provider-tf-mediapipe/runtime.js} +417 -380
  472. package/dist/provider-tf-mediapipe/runtime.js.map +33 -0
  473. package/package.json +74 -41
  474. package/dist/HFT_JobRunFns-8hcpea4c.js +0 -80
  475. package/dist/HFT_JobRunFns-8hcpea4c.js.map +0 -9
  476. package/dist/anthropic/AnthropicProvider.d.ts.map +0 -1
  477. package/dist/anthropic/Anthropic_Worker.d.ts.map +0 -1
  478. package/dist/anthropic/common/Anthropic_Constants.d.ts.map +0 -1
  479. package/dist/anthropic/common/Anthropic_JobRunFns.d.ts +0 -24
  480. package/dist/anthropic/common/Anthropic_JobRunFns.d.ts.map +0 -1
  481. package/dist/anthropic/common/Anthropic_ModelSchema.d.ts.map +0 -1
  482. package/dist/anthropic/index.d.ts.map +0 -1
  483. package/dist/anthropic/index.js.map +0 -11
  484. package/dist/ggml/model/GgmlLocalModel.d.ts.map +0 -1
  485. package/dist/google-gemini/Gemini_Worker.d.ts.map +0 -1
  486. package/dist/google-gemini/GoogleGeminiProvider.d.ts.map +0 -1
  487. package/dist/google-gemini/common/Gemini_Constants.d.ts.map +0 -1
  488. package/dist/google-gemini/common/Gemini_JobRunFns.d.ts +0 -25
  489. package/dist/google-gemini/common/Gemini_JobRunFns.d.ts.map +0 -1
  490. package/dist/google-gemini/common/Gemini_ModelSchema.d.ts.map +0 -1
  491. package/dist/google-gemini/index.d.ts.map +0 -1
  492. package/dist/google-gemini/index.js.map +0 -11
  493. package/dist/hf-transformers/HFT_Worker.d.ts +0 -7
  494. package/dist/hf-transformers/HFT_Worker.d.ts.map +0 -1
  495. package/dist/hf-transformers/HuggingFaceTransformersProvider.d.ts.map +0 -1
  496. package/dist/hf-transformers/common/HFT_Constants.d.ts.map +0 -1
  497. package/dist/hf-transformers/common/HFT_JobRunFns.d.ts.map +0 -1
  498. package/dist/hf-transformers/common/HFT_ModelSchema.d.ts.map +0 -1
  499. package/dist/hf-transformers/index.d.ts.map +0 -1
  500. package/dist/hf-transformers/index.js +0 -116
  501. package/dist/hf-transformers/index.js.map +0 -10
  502. package/dist/index-60ev6k93.js.map +0 -12
  503. package/dist/index-6j5pq722.js +0 -11
  504. package/dist/index-6j5pq722.js.map +0 -9
  505. package/dist/index-8651nz8y.js.map +0 -12
  506. package/dist/index-j4g81r4k.js.map +0 -10
  507. package/dist/index-pkd79j8b.js +0 -58
  508. package/dist/index-pkd79j8b.js.map +0 -10
  509. package/dist/index-q2t627d5.js.map +0 -12
  510. package/dist/index-tp5s7355.js +0 -77
  511. package/dist/index-tp5s7355.js.map +0 -12
  512. package/dist/index-v72vr07f.js +0 -81
  513. package/dist/index-v72vr07f.js.map +0 -12
  514. package/dist/index-wr57rwyx.js +0 -104
  515. package/dist/index-wr57rwyx.js.map +0 -12
  516. package/dist/index-zqq3kw0n.js +0 -171
  517. package/dist/index-zqq3kw0n.js.map +0 -11
  518. package/dist/index.browser-6j5pq722.js +0 -11
  519. package/dist/index.browser-6j5pq722.js.map +0 -9
  520. package/dist/index.d.ts +0 -33
  521. package/dist/index.d.ts.map +0 -1
  522. package/dist/index.js +0 -316
  523. package/dist/index.js.map +0 -15
  524. package/dist/provider-hf-inference/HFI_Worker.d.ts +0 -7
  525. package/dist/provider-hf-inference/HFI_Worker.d.ts.map +0 -1
  526. package/dist/provider-llamacpp/LlamaCpp_Worker.d.ts +0 -7
  527. package/dist/provider-llamacpp/LlamaCpp_Worker.d.ts.map +0 -1
  528. package/dist/provider-ollama/Ollama_Worker.browser.d.ts +0 -7
  529. package/dist/provider-ollama/Ollama_Worker.browser.d.ts.map +0 -1
  530. package/dist/provider-ollama/Ollama_Worker.d.ts +0 -7
  531. package/dist/provider-ollama/Ollama_Worker.d.ts.map +0 -1
  532. package/dist/provider-openai/OpenAI_Worker.d.ts +0 -7
  533. package/dist/provider-openai/OpenAI_Worker.d.ts.map +0 -1
  534. package/dist/tf-mediapipe/TFMP_Worker.d.ts +0 -7
  535. package/dist/tf-mediapipe/TFMP_Worker.d.ts.map +0 -1
  536. package/dist/tf-mediapipe/TensorFlowMediaPipeProvider.d.ts +0 -41
  537. package/dist/tf-mediapipe/TensorFlowMediaPipeProvider.d.ts.map +0 -1
  538. package/dist/tf-mediapipe/common/TFMP_Constants.d.ts.map +0 -1
  539. package/dist/tf-mediapipe/common/TFMP_JobRunFns.d.ts.map +0 -1
  540. package/dist/tf-mediapipe/common/TFMP_ModelSchema.d.ts.map +0 -1
  541. package/dist/tf-mediapipe/index.d.ts.map +0 -1
  542. package/dist/tf-mediapipe/index.js.map +0 -14
  543. package/dist/types.d.ts +0 -7
  544. package/dist/types.d.ts.map +0 -1
  545. package/dist/web-browser/WebBrowserProvider.d.ts.map +0 -1
  546. package/dist/web-browser/WebBrowser_Worker.d.ts +0 -7
  547. package/dist/web-browser/WebBrowser_Worker.d.ts.map +0 -1
  548. package/dist/web-browser/common/WebBrowser_Constants.d.ts.map +0 -1
  549. package/dist/web-browser/common/WebBrowser_JobRunFns.d.ts +0 -20
  550. package/dist/web-browser/common/WebBrowser_JobRunFns.d.ts.map +0 -1
  551. package/dist/web-browser/common/WebBrowser_ModelSchema.d.ts.map +0 -1
  552. package/dist/web-browser/index.d.ts.map +0 -1
  553. package/dist/web-browser/index.js.map +0 -14
  554. /package/dist/{anthropic → provider-anthropic}/common/Anthropic_Constants.d.ts +0 -0
  555. /package/dist/{web-browser → provider-chrome}/common/WebBrowser_Constants.d.ts +0 -0
  556. /package/dist/{google-gemini → provider-gemini}/common/Gemini_Constants.d.ts +0 -0
@@ -0,0 +1,49 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../src/provider-hf-transformers/common/HFT_Pipeline.ts", "../src/provider-hf-transformers/common/HFT_Constants.ts", "../src/provider-hf-transformers/common/HFT_ModelSchema.ts", "../src/provider-hf-transformers/common/HFT_OnnxDtypes.ts", "../src/provider-hf-transformers/common/HFT_ToolMarkup.ts", "../src/provider-hf-transformers/common/HFT_InlineLifecycle.ts", "../src/common/PipelineTaskMapping.ts", "../src/common/HfModelSearch.ts", "../src/provider-hf-transformers/common/HFT_ModelSearch.ts", "../src/provider-hf-transformers/common/HFT_ImageHelpers.ts", "../src/provider-hf-transformers/common/HFT_BackgroundRemoval.ts", "../src/provider-hf-transformers/common/HFT_CountTokens.ts", "../src/provider-hf-transformers/common/HFT_Download.ts", "../src/provider-hf-transformers/common/HFT_ImageClassification.ts", "../src/provider-hf-transformers/common/HFT_ImageEmbedding.ts", "../src/provider-hf-transformers/common/HFT_ImageSegmentation.ts", "../src/provider-hf-transformers/common/HFT_ImageToText.ts", "../src/provider-hf-transformers/common/HFT_ModelInfo.ts", "../src/provider-hf-transformers/common/HFT_ObjectDetection.ts", "../src/provider-hf-transformers/common/HFT_StructuredGeneration.ts", "../src/provider-hf-transformers/common/HFT_Streaming.ts", "../src/provider-hf-transformers/common/HFT_TextOutput.ts", "../src/provider-hf-transformers/common/HFT_TextClassification.ts", "../src/provider-hf-transformers/common/HFT_TextEmbedding.ts", "../src/provider-hf-transformers/common/HFT_TextFillMask.ts", "../src/provider-hf-transformers/common/HFT_TextGeneration.ts", "../src/provider-hf-transformers/common/HFT_TextLanguageDetection.ts", "../src/provider-hf-transformers/common/HFT_TextNamedEntityRecognition.ts", "../src/provider-hf-transformers/common/HFT_TextQuestionAnswer.ts", "../src/provider-hf-transformers/common/HFT_TextRewriter.ts", "../src/provider-hf-transformers/common/HFT_TextSummary.ts", "../src/provider-hf-transformers/common/HFT_TextTranslation.ts", 
"../src/provider-hf-transformers/common/HFT_ToolCalling.ts", "../src/provider-hf-transformers/common/HFT_Unload.ts", "../src/provider-hf-transformers/common/HFT_JobRunFns.ts", "../src/provider-hf-transformers/HuggingFaceTransformersQueuedProvider.ts", "../src/provider-hf-transformers/registerHuggingFaceTransformersInline.ts", "../src/provider-hf-transformers/registerHuggingFaceTransformersWorker.ts", "../src/provider-hf-transformers/HuggingFaceTransformersProvider.ts", "../src/provider-hf-transformers/runtime.ts"],
4
+ "sourcesContent": [
5
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { PretrainedModelOptions, ProgressInfo } from \"@huggingface/transformers\";\nimport { getLogger } from \"@workglow/util/worker\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\n\nlet _transformersSdk: typeof import(\"@huggingface/transformers\") | undefined;\nlet _cacheDir: string | undefined;\n\n/**\n * Set the filesystem cache directory for downloaded transformers.js models.\n * Must be called before any model is loaded. Works in both Node and browser.\n */\nexport function setHftCacheDir(dir: string): void {\n _cacheDir = dir;\n if (_transformersSdk) {\n _transformersSdk.env.cacheDir = dir;\n }\n}\n\nexport async function loadTransformersSDK() {\n if (!_transformersSdk) {\n try {\n _transformersSdk = await import(\"@huggingface/transformers\");\n _transformersSdk.env.fetch = abortableFetch as typeof fetch;\n if (_cacheDir) {\n _transformersSdk.env.cacheDir = _cacheDir;\n }\n } catch {\n throw new Error(\n \"@huggingface/transformers is required for HuggingFace Transformers tasks. Install it with: bun add @huggingface/transformers\"\n );\n }\n }\n return _transformersSdk;\n}\n\n/** Per-model AbortControllers used by abortableFetch; keyed by model_path. */\nconst modelAbortControllers = new Map<string, AbortController>();\n\nfunction abortableFetch(url: string, options: RequestInit): Promise<Response> {\n let signal: AbortSignal | undefined;\n try {\n const pathname = new URL(url).pathname;\n for (const [modelPath, controller] of modelAbortControllers) {\n if (pathname.includes(`/${modelPath}/`)) {\n signal = controller.signal;\n break;\n }\n }\n } catch {\n /* not a parseable URL, proceed without abort */\n }\n return fetch(url, { ...options, ...(signal ? { signal } : {}) });\n}\n\nconst pipelines = new Map<string, any>();\n\n/** In-flight pipeline loads by cache key. 
Ensures only one load per model at a time to avoid corrupt ONNX files (Protobuf parsing failed). */\nconst pipelineLoadPromises = new Map<string, Promise<any>>();\n\n/**\n * Clear all cached pipelines\n */\nexport function clearPipelineCache(): void {\n pipelines.clear();\n}\n\nexport function hasCachedPipeline(cacheKey: string): boolean {\n return pipelines.has(cacheKey);\n}\n\nexport function removeCachedPipeline(cacheKey: string): boolean {\n return pipelines.delete(cacheKey);\n}\n\n/** True when running in a browser. Transformers.js only accepts device \"wasm\" in the browser build. */\nfunction isBrowserEnv(): boolean {\n return typeof globalThis !== \"undefined\" && typeof (globalThis as any).window !== \"undefined\";\n}\n\n/**\n * Generate a cache key for a pipeline that includes all configuration options\n * that affect pipeline creation (model_path, pipeline, dtype, device)\n */\nexport function getPipelineCacheKey(model: HfTransformersOnnxModelConfig): string {\n const dtype = model.provider_config.dtype || \"q8\";\n const device = model.provider_config.device || \"\";\n return `${model.provider_config.model_path}:${model.provider_config.pipeline}:${dtype}:${device}`;\n}\n\n/**\n * Helper function to get a pipeline for a model\n * @param progressScaleMax - Maximum progress value for download phase (100 for download-only, 10 for download+run)\n *\n * Explicit `Promise<any>` return avoids TS2883 (inferred type not portable across package boundaries).\n */\nexport async function getPipeline(\n model: HfTransformersOnnxModelConfig,\n onProgress: (progress: number, message?: string, details?: any) => void,\n options: PretrainedModelOptions = {},\n signal?: AbortSignal,\n progressScaleMax: number = 10\n): Promise<any> {\n const cacheKey = getPipelineCacheKey(model);\n if (pipelines.has(cacheKey)) {\n getLogger().debug(\"HFT pipeline cache hit\", { cacheKey });\n return pipelines.get(cacheKey);\n }\n\n // Output[number]-flight: only one load per model at a time 
to avoid concurrent writes to the same\n // ONNX cache path (which can yield \"Protobuf parsing failed\" when one process reads while another writes).\n const inFlight = pipelineLoadPromises.get(cacheKey);\n if (inFlight) {\n await inFlight;\n const cached = pipelines.get(cacheKey);\n if (cached) return cached;\n // Load failed for the other caller; fall through to retry (we remove from map in finally).\n }\n\n const loadPromise = doGetPipeline(\n model,\n onProgress,\n options,\n progressScaleMax,\n cacheKey,\n signal\n ).finally(() => {\n pipelineLoadPromises.delete(cacheKey);\n });\n pipelineLoadPromises.set(cacheKey, loadPromise);\n return loadPromise;\n}\n\nconst doGetPipeline = async (\n model: HfTransformersOnnxModelConfig,\n onProgress: (progress: number, message?: string, details?: any) => void,\n options: PretrainedModelOptions,\n progressScaleMax: number,\n cacheKey: string,\n signal?: AbortSignal\n) => {\n // Throttle state for progress events\n let lastProgressTime = 0;\n type FilesByteMap = Record<string, { loaded: number; total: number }>;\n let pendingProgress: {\n progress: number;\n file: string;\n fileProgress: number;\n filesMap?: FilesByteMap;\n } | null = null;\n let throttleTimer: ReturnType<typeof setTimeout> | null = null;\n const THROTTLE_MS = 160;\n\n const buildProgressDetails = (\n file: string,\n fileProgress: number,\n filesMap?: FilesByteMap\n ): { file: string; progress: number; files?: FilesByteMap } => {\n const details: { file: string; progress: number; files?: FilesByteMap } = {\n file,\n progress: fileProgress,\n };\n if (filesMap && Object.keys(filesMap).length > 0) {\n details.files = filesMap;\n }\n return details;\n };\n\n /**\n * Sends a progress event, throttled to avoid flooding the worker channel.\n * Always sends first event and final (>=progressScaleMax) immediately.\n */\n const sendProgress = (\n progress: number,\n file: string,\n fileProgress: number,\n filesMap?: FilesByteMap\n ): void => {\n const now = 
Date.now();\n const timeSinceLastEvent = now - lastProgressTime;\n const isFirst = lastProgressTime === 0;\n const isFinal = progress >= progressScaleMax;\n\n if (isFirst || isFinal) {\n if (throttleTimer) {\n clearTimeout(throttleTimer);\n throttleTimer = null;\n }\n pendingProgress = null;\n onProgress(\n Math.round(progress),\n \"Downloading model\",\n buildProgressDetails(file, fileProgress, filesMap)\n );\n lastProgressTime = now;\n return;\n }\n\n if (timeSinceLastEvent < THROTTLE_MS) {\n pendingProgress = { progress, file, fileProgress, filesMap };\n if (!throttleTimer) {\n const timeRemaining = Math.max(1, THROTTLE_MS - timeSinceLastEvent);\n throttleTimer = setTimeout(() => {\n throttleTimer = null;\n if (pendingProgress) {\n const p = pendingProgress;\n onProgress(\n Math.round(p.progress),\n \"Downloading model\",\n buildProgressDetails(p.file, p.fileProgress, p.filesMap)\n );\n lastProgressTime = Date.now();\n pendingProgress = null;\n }\n }, timeRemaining);\n }\n return;\n }\n\n onProgress(\n Math.round(progress),\n \"Downloading model\",\n buildProgressDetails(file, fileProgress, filesMap)\n );\n lastProgressTime = now;\n pendingProgress = null;\n };\n\n // Get the abort signal from the signal parameter\n const abortSignal = signal;\n\n // Register a per-model AbortController so abortableFetch can cancel in-flight fetches\n const modelPath = model.provider_config.model_path;\n const modelController = new AbortController();\n modelAbortControllers.set(modelPath, modelController);\n if (abortSignal) {\n if (abortSignal.aborted) {\n modelController.abort();\n } else {\n abortSignal.addEventListener(\"abort\", () => modelController.abort(), { once: true });\n }\n }\n\n // Use aggregate progress_total event from @huggingface/transformers v4 pipeline()\n const progressCallback = (status: ProgressInfo) => {\n if (abortSignal?.aborted) return;\n\n if ((status as any).status === \"progress_total\") {\n const totalStatus = status as any;\n const scaledProgress 
= (totalStatus.progress * progressScaleMax) / 100;\n\n // Find the currently active file (one still downloading)\n let activeFile = \"\";\n let activeFileProgress = 0;\n const files: Record<string, { loaded: number; total: number }> | undefined =\n totalStatus.files;\n if (files) {\n for (const [file, info] of Object.entries(files)) {\n if (info.loaded < info.total) {\n activeFile = file;\n activeFileProgress = info.total > 0 ? (info.loaded / info.total) * 100 : 0;\n break;\n }\n }\n if (!activeFile) {\n const fileNames = Object.keys(files);\n if (fileNames.length > 0) {\n activeFile = fileNames[fileNames.length - 1];\n activeFileProgress = 100;\n }\n }\n }\n\n sendProgress(scaledProgress, activeFile, activeFileProgress, files);\n }\n };\n\n let device = model.provider_config.device as string | undefined;\n if (!isBrowserEnv()) {\n if (device === \"wasm\" || device === \"webgpu\") {\n device = undefined;\n }\n }\n\n const pipelineOptions: PretrainedModelOptions = {\n dtype: model.provider_config.dtype || \"q8\",\n ...(model.provider_config.use_external_data_format\n ? { useExternalDataFormat: model.provider_config.use_external_data_format }\n : {}),\n ...(device ? 
{ device: device as any } : {}),\n ...options,\n progress_callback: progressCallback,\n };\n\n // Check if already aborted before starting\n if (abortSignal?.aborted) {\n modelAbortControllers.delete(modelPath);\n throw new Error(\"Operation aborted before pipeline creation\");\n }\n\n const pipelineType = model.provider_config.pipeline;\n\n const { pipeline } = await loadTransformersSDK();\n\n const logger = getLogger();\n const pipelineTimerLabel = `hft:pipeline:${cacheKey}`;\n logger.time(pipelineTimerLabel, { pipelineType, modelPath });\n\n try {\n const result = await pipeline(pipelineType, model.provider_config.model_path, pipelineOptions);\n\n // Flush pending throttled progress and clean up timer\n if (throttleTimer) {\n clearTimeout(throttleTimer);\n throttleTimer = null;\n }\n // pendingProgress may have been set by progressCallback during the pipeline() await\n const finalPending = pendingProgress as {\n progress: number;\n file: string;\n fileProgress: number;\n filesMap?: FilesByteMap;\n } | null;\n if (finalPending) {\n onProgress(\n Math.round(finalPending.progress),\n \"Downloading model\",\n buildProgressDetails(finalPending.file, finalPending.fileProgress, finalPending.filesMap)\n );\n pendingProgress = null;\n }\n\n // Check if aborted after pipeline creation\n if (abortSignal?.aborted) {\n logger.timeEnd(pipelineTimerLabel, { status: \"aborted\" });\n throw new Error(\"Operation aborted after pipeline creation\");\n }\n\n logger.timeEnd(pipelineTimerLabel, { status: \"loaded\" });\n pipelines.set(cacheKey, result);\n return result;\n } catch (error: any) {\n logger.timeEnd(pipelineTimerLabel, { status: \"error\", error: String(error) });\n // If aborted, throw a clean abort error rather than internal stream errors\n if (abortSignal?.aborted || modelController.signal.aborted) {\n throw new Error(\"Pipeline download aborted\");\n }\n throw error;\n } finally {\n modelAbortControllers.delete(modelPath);\n }\n};\n",
6
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nexport const HF_TRANSFORMERS_ONNX = \"HF_TRANSFORMERS_ONNX\";\nexport const HTF_CACHE_NAME = \"transformers-cache\";\n\nexport type QuantizationDataType =\n | \"auto\" // Auto-detect based on environment\n | \"fp32\"\n | \"fp16\"\n | \"q8\"\n | \"int8\"\n | \"uint8\"\n | \"q4\"\n | \"bnb4\"\n | \"q4f16\"; // fp16 model with int4 block weight quantization\n\nexport const QuantizationDataType = {\n auto: \"auto\",\n fp32: \"fp32\",\n fp16: \"fp16\",\n q8: \"q8\",\n int8: \"int8\",\n uint8: \"uint8\",\n q4: \"q4\",\n bnb4: \"bnb4\",\n q4f16: \"q4f16\",\n} as const satisfies Record<QuantizationDataType, QuantizationDataType>;\n\ntype TextPipelineUseCase =\n | \"fill-mask\" // https://huggingface.co/tasks/fill-mask\n | \"token-classification\" // https://huggingface.co/tasks/token-classification\n | \"text-generation\" // https://huggingface.co/tasks/text-generation#completion-generation-models\n | \"text2text-generation\" // https://huggingface.co/tasks/text-generation#text-to-text-generation-models\n | \"text-classification\" // https://huggingface.co/tasks/text-classification\n | \"summarization\" // https://huggingface.co/tasks/sentence-similarity\n | \"translation\" // https://huggingface.co/tasks/translation\n | \"feature-extraction\" // https://huggingface.co/tasks/feature-extraction\n | \"zero-shot-classification\" // https://huggingface.co/tasks/zero-shot-classification\n | \"question-answering\"; // https://huggingface.co/tasks/question-answering\n\nconst TextPipelineUseCase = {\n \"fill-mask\": \"fill-mask\",\n \"token-classification\": \"token-classification\",\n \"text-generation\": \"text-generation\",\n \"text2text-generation\": \"text2text-generation\",\n \"text-classification\": \"text-classification\",\n summarization: \"summarization\",\n translation: \"translation\",\n \"feature-extraction\": \"feature-extraction\",\n 
\"zero-shot-classification\": \"zero-shot-classification\",\n \"question-answering\": \"question-answering\",\n} as const satisfies Record<TextPipelineUseCase, TextPipelineUseCase>;\n\ntype VisionPipelineUseCase =\n | \"background-removal\" // https://huggingface.co/tasks/image-segmentation#background-removal\n | \"image-segmentation\" // https://huggingface.co/tasks/image-segmentation\n | \"depth-estimation\" // https://huggingface.co/tasks/depth-estimation\n | \"image-classification\" // https://huggingface.co/tasks/image-classification\n | \"image-to-image\" // https://huggingface.co/tasks/image-to-image\n // | \"text-to-image\" // https://huggingface.co/tasks/text-to-image\n | \"image-to-text\" // https://huggingface.co/tasks/image-to-text\n // | \"image-text-to-text\" // https://huggingface.co/tasks/image-text-to-text\n | \"object-detection\" // https://huggingface.co/tasks/object-detection\n | \"image-feature-extraction\"; // https://huggingface.co/tasks/image-feature-extraction\n\nconst VisionPipelineUseCase = {\n \"background-removal\": \"background-removal\",\n \"image-segmentation\": \"image-segmentation\",\n \"depth-estimation\": \"depth-estimation\",\n \"image-classification\": \"image-classification\",\n \"image-to-image\": \"image-to-image\",\n // \"text-to-image\": \"text-to-image\",\n \"image-to-text\": \"image-to-text\",\n // \"image-text-to-text\": \"image-text-to-text\",\n \"object-detection\": \"object-detection\",\n \"image-feature-extraction\": \"image-feature-extraction\",\n} as const satisfies Record<VisionPipelineUseCase, VisionPipelineUseCase>;\n\ntype AudioPipelineUseCase =\n | \"audio-classification\" // https://huggingface.co/tasks/audio-classification\n | \"automatic-speech-recognition\" // https://huggingface.co/tasks/automatic-speech-recognition\n | \"text-to-speech\"; // https://huggingface.co/tasks/text-to-speech\n\nconst AudioPipelineUseCase = {\n \"audio-classification\": \"audio-classification\",\n 
\"automatic-speech-recognition\": \"automatic-speech-recognition\",\n \"text-to-speech\": \"text-to-speech\",\n} as const satisfies Record<AudioPipelineUseCase, AudioPipelineUseCase>;\n\ntype MultimodalPipelineUseCase =\n | \"document-question-answering\" // https://huggingface.co/tasks/document-question-answering\n | \"image-to-text\" // https://huggingface.co/tasks/image-to-text\n | \"zero-shot-audio-classification\" // https://huggingface.co/tasks/zero-shot-audio-classification\n | \"zero-shot-image-classification\" // https://huggingface.co/tasks/zero-shot-image-classification\n | \"zero-shot-object-detection\"; // https://huggingface.co/tasks/zero-shot-object-detection\n\nconst MultimodalPipelineUseCase = {\n \"document-question-answering\": \"document-question-answering\",\n \"image-to-text\": \"image-to-text\",\n \"zero-shot-audio-classification\": \"zero-shot-audio-classification\",\n \"zero-shot-image-classification\": \"zero-shot-image-classification\",\n \"zero-shot-object-detection\": \"zero-shot-object-detection\",\n} as const satisfies Record<MultimodalPipelineUseCase, MultimodalPipelineUseCase>;\n\nexport type PipelineUseCase =\n | TextPipelineUseCase\n | VisionPipelineUseCase\n | AudioPipelineUseCase\n | MultimodalPipelineUseCase;\n\nexport const PipelineUseCase = {\n ...TextPipelineUseCase,\n ...VisionPipelineUseCase,\n ...AudioPipelineUseCase,\n ...MultimodalPipelineUseCase,\n} as const satisfies Record<PipelineUseCase, PipelineUseCase>;\n",
7
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { ModelConfigSchema, ModelRecordSchema } from \"@workglow/ai/worker\";\nimport { DataPortSchemaObject, FromSchema } from \"@workglow/util/worker\";\nimport { HF_TRANSFORMERS_ONNX, PipelineUseCase, QuantizationDataType } from \"./HFT_Constants\";\n\nexport const HfTransformersOnnxModelSchema = {\n type: \"object\",\n properties: {\n provider: {\n const: HF_TRANSFORMERS_ONNX,\n description: \"Discriminator: ONNX runtime backend.\",\n },\n provider_config: {\n type: \"object\",\n description: \"ONNX runtime-specific options.\",\n properties: {\n pipeline: {\n type: \"string\",\n enum: Object.values(PipelineUseCase),\n description: \"Pipeline type for the ONNX model.\",\n default: \"text-generation\",\n },\n model_path: {\n type: \"string\",\n description: \"Filesystem path or URI for the ONNX model.\",\n },\n dtype: {\n type: \"string\",\n enum: Object.values(QuantizationDataType),\n description: \"Data type for the ONNX model.\",\n default: \"auto\",\n },\n device: {\n type: \"string\",\n enum: [\"cpu\", \"gpu\", \"webgpu\", \"wasm\", \"metal\"],\n description: \"High-level device selection.\",\n default: \"webgpu\",\n },\n execution_providers: {\n type: \"array\",\n items: { type: \"string\" },\n description: \"Raw ONNX Runtime execution provider identifiers.\",\n \"x-ui-hidden\": true,\n },\n intra_op_num_threads: {\n type: \"integer\",\n minimum: 1,\n },\n inter_op_num_threads: {\n type: \"integer\",\n minimum: 1,\n },\n use_external_data_format: {\n type: \"boolean\",\n description: \"Whether the model uses external data format.\",\n },\n native_dimensions: {\n type: \"integer\",\n description: \"The native dimensions of the model.\",\n },\n pooling: {\n type: \"string\",\n enum: [\"mean\", \"last_token\", \"cls\"],\n description: \"The pooling strategy to use for the model.\",\n default: \"mean\",\n },\n normalize: {\n type: 
\"boolean\",\n description: \"Whether the model uses normalization.\",\n default: true,\n },\n language_style: {\n type: \"string\",\n description: \"The language style of the model.\",\n },\n mrl: {\n type: \"boolean\",\n description: \"Whether the model uses matryoshka.\",\n default: false,\n },\n },\n required: [\"model_path\", \"pipeline\"],\n additionalProperties: false,\n if: {\n properties: {\n pipeline: {\n const: \"feature-extraction\",\n },\n },\n },\n then: {\n required: [\"native_dimensions\"],\n },\n },\n },\n required: [\"provider\", \"provider_config\"],\n additionalProperties: true,\n} as const satisfies DataPortSchemaObject;\n\nexport const HfTransformersOnnxModelRecordSchema = {\n type: \"object\",\n properties: {\n ...ModelRecordSchema.properties,\n ...HfTransformersOnnxModelSchema.properties,\n },\n required: [...ModelRecordSchema.required, ...HfTransformersOnnxModelSchema.required],\n additionalProperties: false,\n} as const satisfies DataPortSchemaObject;\n\nexport type HfTransformersOnnxModelRecord = FromSchema<typeof HfTransformersOnnxModelRecordSchema>;\n\nexport const HfTransformersOnnxModelConfigSchema = {\n type: \"object\",\n properties: {\n ...ModelConfigSchema.properties,\n ...HfTransformersOnnxModelSchema.properties,\n },\n required: [...ModelConfigSchema.required, ...HfTransformersOnnxModelSchema.required],\n additionalProperties: false,\n} as const satisfies DataPortSchemaObject;\n\nexport type HfTransformersOnnxModelConfig = FromSchema<typeof HfTransformersOnnxModelConfigSchema>;\n",
8
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nexport const ONNX_QUANTIZATION_SUFFIX_MAPPING = {\n fp32: \"\",\n fp16: \"_fp16\",\n int8: \"_int8\",\n uint8: \"_uint8\",\n q8: \"_quantized\",\n q4: \"_q4\",\n q4f16: \"_q4f16\",\n bnb4: \"_bnb4\",\n} as const;\n\nexport type OnnxQuantization = keyof typeof ONNX_QUANTIZATION_SUFFIX_MAPPING;\n\nconst SUFFIXES_LONGEST_FIRST = (Object.entries(ONNX_QUANTIZATION_SUFFIX_MAPPING) as [OnnxQuantization, string][])\n .filter(([, suffix]) => suffix !== \"\")\n .sort((a, b) => b[1].length - a[1].length);\n\nexport function parseOnnxQuantizations(params: {\n /** File paths, e.g. from listModels/modelInfo with additionalFields: [\"filePaths\"] */\n filePaths: string[];\n /** Subdirectory containing ONNX files. @default \"onnx\" */\n subfolder?: string;\n}): OnnxQuantization[] {\n const subfolder = params.subfolder ?? \"onnx\";\n const prefix = subfolder + \"/\";\n\n const stems: string[] = [];\n for (const fp of params.filePaths) {\n if (!fp.startsWith(prefix)) continue;\n if (!fp.endsWith(\".onnx\")) continue;\n if (fp.endsWith(\".onnx_data\")) continue;\n stems.push(fp.slice(prefix.length, -\".onnx\".length));\n }\n\n if (stems.length === 0) return [];\n\n const parsed: Array<{ baseName: string; dtype: OnnxQuantization }> = [];\n for (const stem of stems) {\n let matched = false;\n for (const [dtype, suffix] of SUFFIXES_LONGEST_FIRST) {\n if (stem.endsWith(suffix)) {\n parsed.push({ baseName: stem.slice(0, -suffix.length), dtype });\n matched = true;\n break;\n }\n }\n if (!matched) {\n parsed.push({ baseName: stem, dtype: \"fp32\" });\n }\n }\n\n const allBaseNames = new Set(parsed.map((p) => p.baseName));\n const byDtype = new Map<OnnxQuantization, Set<string>>();\n for (const { baseName, dtype } of parsed) {\n let set = byDtype.get(dtype);\n if (!set) {\n set = new Set();\n byDtype.set(dtype, set);\n }\n set.add(baseName);\n }\n\n const allDtypes = 
Object.keys(ONNX_QUANTIZATION_SUFFIX_MAPPING) as OnnxQuantization[];\n return allDtypes.filter((dtype) => {\n const set = byDtype.get(dtype);\n return set !== undefined && set.size === allBaseNames.size;\n });\n}\n",
9
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { ToolCalls } from \"@workglow/ai\";\n\n/**\n * Parse tool calls from model-generated text.\n *\n * Many instruct models (Qwen, Llama, Hermes, etc.) emit tool calls in one of\n * these formats:\n *\n * 1. `<tool_call>{\"name\":\"fn\",\"arguments\":{...}}</tool_call>` (Qwen/Hermes)\n * 2. Plain JSON objects with a \"name\" + \"arguments\" key\n * 3. `{\"function\":{\"name\":\"fn\",\"arguments\":{...}}}`\n *\n * This function extracts all such tool calls from the raw response text\n * and returns both the cleaned text (with tool-call markup removed) and\n * the parsed ToolCall array.\n */\nexport function parseToolCallsFromText(responseText: string): {\n text: string;\n toolCalls: ToolCalls;\n} {\n const toolCalls: ToolCalls = [];\n let callIndex = 0;\n let cleanedText = responseText;\n\n // Pattern 1: <tool_call>...</tool_call> blocks (Qwen, Hermes, etc.)\n const toolCallTagRegex = /<tool_call>([\\s\\S]*?)<\\/tool_call>/g;\n let tagMatch;\n while ((tagMatch = toolCallTagRegex.exec(responseText)) !== null) {\n try {\n const parsed = JSON.parse(tagMatch[1].trim());\n const id = `call_${callIndex++}`;\n toolCalls.push({\n id,\n name: parsed.name ?? parsed.function?.name ?? 
\"\",\n input: (parsed.arguments ??\n parsed.function?.arguments ??\n parsed.parameters ??\n {}) as Record<string, unknown>,\n });\n } catch {\n // Not valid JSON inside the tag, skip\n }\n }\n\n if (toolCalls.length > 0) {\n // Remove tool_call tags from the text output\n cleanedText = responseText.replace(/<tool_call>[\\s\\S]*?<\\/tool_call>/g, \"\").trim();\n return { text: cleanedText, toolCalls };\n }\n\n // Pattern 2: Use a brace-balanced scanner to correctly handle nested JSON objects.\n const jsonCandidates: Array<{ text: string; start: number; end: number }> = [];\n (function collectBalancedJsonBlocks(source: string) {\n const length = source.length;\n let i = 0;\n while (i < length) {\n if (source[i] !== \"{\") {\n i++;\n continue;\n }\n let depth = 1;\n let j = i + 1;\n let inString = false;\n let escape = false;\n while (j < length && depth > 0) {\n const ch = source[j];\n if (inString) {\n if (escape) {\n escape = false;\n } else if (ch === \"\\\\\") {\n escape = true;\n } else if (ch === '\"') {\n inString = false;\n }\n } else {\n if (ch === '\"') {\n inString = true;\n } else if (ch === \"{\") {\n depth++;\n } else if (ch === \"}\") {\n depth--;\n }\n }\n j++;\n }\n if (depth === 0) {\n jsonCandidates.push({ text: source.slice(i, j), start: i, end: j });\n i = j;\n } else {\n break;\n }\n }\n })(responseText);\n\n const matchedRanges: Array<{ start: number; end: number }> = [];\n for (const candidate of jsonCandidates) {\n try {\n const parsed = JSON.parse(candidate.text);\n if (parsed.name && (parsed.arguments !== undefined || parsed.parameters !== undefined)) {\n const id = `call_${callIndex++}`;\n toolCalls.push({\n id,\n name: parsed.name as string,\n input: (parsed.arguments ?? parsed.parameters ?? {}) as Record<string, unknown>,\n });\n matchedRanges.push({ start: candidate.start, end: candidate.end });\n } else if (parsed.function?.name) {\n let functionArgs: unknown = parsed.function.arguments ?? 
{};\n if (typeof functionArgs === \"string\") {\n try {\n functionArgs = JSON.parse(functionArgs);\n } catch (innerError) {\n console.warn(\"Failed to parse tool call function.arguments as JSON\", innerError);\n functionArgs = {};\n }\n }\n const id = `call_${callIndex++}`;\n toolCalls.push({\n id,\n name: parsed.function.name as string,\n input: (functionArgs ?? {}) as Record<string, unknown>,\n });\n matchedRanges.push({ start: candidate.start, end: candidate.end });\n }\n } catch {\n // Not valid JSON, skip\n }\n }\n\n if (toolCalls.length > 0) {\n // Remove only the matched JSON portions, preserving surrounding text\n let result = \"\";\n let lastIndex = 0;\n for (const range of matchedRanges) {\n result += responseText.slice(lastIndex, range.start);\n lastIndex = range.end;\n }\n result += responseText.slice(lastIndex);\n cleanedText = result.trim();\n }\n\n return { text: cleanedText, toolCalls };\n}\n\n/**\n * State machine that filters `<tool_call>…</tool_call>` markup out of a\n * stream of text-delta tokens. 
Tokens that are clearly outside markup are\n * flushed immediately; tokens that *might* be the start of a tag are held\n * in a lookahead buffer until they can be disambiguated.\n *\n * This only handles the XML-tag pattern (Pattern 1 in parseToolCallsFromText).\n * Bare-JSON tool calls (Pattern 2) cannot be reliably detected token-by-token\n * and are still cleaned up via the post-hoc `parseToolCallsFromText` pass on\n * the finish event.\n */\nexport function createToolCallMarkupFilter(emit: (text: string) => void) {\n const OPEN_TAG = \"<tool_call>\";\n const CLOSE_TAG = \"</tool_call>\";\n\n /** \"text\" = normal output, \"tag\" = inside a tool_call block */\n let state: \"text\" | \"tag\" = \"text\";\n /** Buffered text that might be a partial tag prefix */\n let pending = \"\";\n\n function feed(token: string) {\n if (state === \"tag\") {\n // Inside a tool_call block — suppress everything until we see the close tag\n pending += token;\n const closeIdx = pending.indexOf(CLOSE_TAG);\n if (closeIdx !== -1) {\n // End of the tool_call block; resume normal output after the close tag\n const afterClose = pending.slice(closeIdx + CLOSE_TAG.length);\n pending = \"\";\n state = \"text\";\n if (afterClose.length > 0) {\n feed(afterClose);\n }\n }\n // else: still inside the tag block, keep suppressing\n return;\n }\n\n // state === \"text\"\n const combined = pending + token;\n\n // Check for a complete open tag\n const openIdx = combined.indexOf(OPEN_TAG);\n if (openIdx !== -1) {\n // Emit everything before the tag\n const before = combined.slice(0, openIdx);\n if (before.length > 0) {\n emit(before);\n }\n // Switch to tag state; feed the remainder (after the open tag) back through\n pending = \"\";\n state = \"tag\";\n const afterOpen = combined.slice(openIdx + OPEN_TAG.length);\n if (afterOpen.length > 0) {\n feed(afterOpen);\n }\n return;\n }\n\n // Check if the tail of `combined` could be the start of \"<tool_call>\"\n // e.g. 
combined ends with \"<\", \"<t\", \"<to\", ..., \"<tool_call\"\n let prefixLen = 0;\n for (let len = Math.min(combined.length, OPEN_TAG.length - 1); len >= 1; len--) {\n if (combined.endsWith(OPEN_TAG.slice(0, len))) {\n prefixLen = len;\n break;\n }\n }\n\n if (prefixLen > 0) {\n // The tail is ambiguous — hold it back, flush the rest\n const safe = combined.slice(0, combined.length - prefixLen);\n if (safe.length > 0) {\n emit(safe);\n }\n pending = combined.slice(combined.length - prefixLen);\n } else {\n // No ambiguity — flush everything\n if (combined.length > 0) {\n emit(combined);\n }\n pending = \"\";\n }\n }\n\n /** Flush any remaining buffered text (called when the stream ends). */\n function flush() {\n if (pending.length > 0 && state === \"text\") {\n emit(pending);\n pending = \"\";\n }\n // If state === \"tag\", the pending content is suppressed tool-call markup\n pending = \"\";\n state = \"text\";\n }\n\n return { feed, flush };\n}\n",
10
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nexport async function clearHftInlinePipelineCache(): Promise<void> {\n const { clearPipelineCache } = await import(\"./HFT_Pipeline\");\n clearPipelineCache();\n}\n",
11
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\n/**\n * Mapping from app task types to HuggingFace pipeline names.\n * Each task type maps to one or more pipelines (first is primary).\n */\nconst TASK_TO_PIPELINES: Record<string, string[]> = {\n TextEmbeddingTask: [\"feature-extraction\"],\n TextGenerationTask: [\"text-generation\"],\n TextSummaryTask: [\"sentence-similarity\", \"summarization\"],\n TextTranslationTask: [\"translation\"],\n TextClassificationTask: [\"text-classification\", \"zero-shot-classification\"],\n TextQuestionAnswerTask: [\"question-answering\"],\n TextFillMaskTask: [\"fill-mask\"],\n TextLanguageDetectionTask: [\"text-classification\"],\n TextNamedEntityRecognitionTask: [\"token-classification\"],\n TokenClassificationTask: [\"token-classification\"],\n ImageClassificationTask: [\"image-classification\", \"zero-shot-image-classification\"],\n ImageEmbeddingTask: [\"image-feature-extraction\"],\n ImageSegmentationTask: [\"image-segmentation\"],\n ImageToImageTask: [\"image-to-image\"],\n ImageToTextTask: [\"image-to-text\"],\n ObjectDetectionTask: [\"object-detection\", \"zero-shot-object-detection\"],\n DepthEstimationTask: [\"depth-estimation\"],\n AudioClassificationTask: [\"audio-classification\"],\n SpeechRecognitionTask: [\"automatic-speech-recognition\"],\n};\n\n/** Convert an app task type to its primary HuggingFace pipeline name. */\nexport function taskTypeToPipeline(taskType: string): string | undefined {\n return TASK_TO_PIPELINES[taskType]?.[0];\n}\n\n/** Convert an app task type to all matching HuggingFace pipeline names. */\nexport function taskTypeToPipelines(taskType: string): string[] {\n return TASK_TO_PIPELINES[taskType] ?? [];\n}\n\n/** Reverse lookup: given a HuggingFace pipeline name, return all matching app task types. 
*/\nexport function pipelineToTaskTypes(pipeline: string): string[] {\n return Object.entries(TASK_TO_PIPELINES)\n .filter(([, pipelines]) => pipelines.includes(pipeline))\n .map(([task]) => task);\n}\n",
12
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { ModelSearchResultItem } from \"@workglow/ai\";\nimport { pipelineToTaskTypes } from \"./PipelineTaskMapping\";\n\nexport interface HfModelEntry {\n id: string;\n modelId: string;\n pipeline_tag?: string;\n library_name?: string;\n likes: number;\n downloads: number;\n tags?: string[];\n siblings?: Array<{ rfilename: string }>;\n}\n\nconst HF_API_BASE = \"https://huggingface.co/api\";\n\nexport function formatDownloads(n: number): string {\n if (n >= 1_000_000) return `${(n / 1_000_000).toFixed(1)}M`;\n if (n >= 1_000) return `${(n / 1_000).toFixed(1)}k`;\n return String(n);\n}\n\n/**\n * Map an HF model entry to a provider-specific config object.\n */\nexport function mapHfProviderConfig(\n entry: HfModelEntry,\n provider: string\n): Record<string, unknown> {\n switch (provider) {\n case \"HF_TRANSFORMERS_ONNX\":\n return {\n model_path: entry.id,\n ...(entry.pipeline_tag ? { pipeline: entry.pipeline_tag } : {}),\n };\n case \"LOCAL_LLAMACPP\":\n return { model_path: entry.id };\n default:\n return { model_name: entry.id };\n }\n}\n\n/**\n * Map an HF model entry to a ModelSearchResultItem.\n */\nexport function mapHfModelResult(entry: HfModelEntry, provider: string): ModelSearchResultItem {\n const badges = [entry.pipeline_tag, entry.library_name].filter(Boolean).join(\" | \");\n return {\n id: entry.id,\n label: `${entry.id}${badges ? ` ${badges}` : \"\"}`,\n description: `${formatDownloads(entry.downloads)} downloads`,\n record: {\n model_id: entry.id,\n provider,\n title: entry.id.split(\"/\").pop() ?? entry.id,\n description: [entry.pipeline_tag, `${formatDownloads(entry.downloads)} downloads`]\n .filter(Boolean)\n .join(\" \\u2014 \"),\n tasks: entry.pipeline_tag ? 
pipelineToTaskTypes(entry.pipeline_tag) : [],\n provider_config: mapHfProviderConfig(entry, provider),\n metadata: {},\n },\n raw: entry,\n };\n}\n\n/**\n * Search HuggingFace models API. Returns all results (limit=500, no pagination).\n */\nexport async function searchHfModels(\n query: string,\n extraParams?: Record<string, string>,\n expandFields?: string[],\n signal?: AbortSignal\n): Promise<HfModelEntry[]> {\n const params = new URLSearchParams({\n search: query,\n limit: \"500\",\n sort: \"downloads\",\n direction: \"-1\",\n ...extraParams,\n });\n params.append(\"expand[]\", \"pipeline_tag\");\n if (expandFields) {\n for (const field of expandFields) {\n params.append(\"expand[]\", field);\n }\n }\n const res = await fetch(`${HF_API_BASE}/models?${params}`, { signal });\n if (!res.ok) throw new Error(`HuggingFace API returned ${res.status}`);\n return res.json();\n}\n",
13
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { AiProviderRunFn, ModelSearchTaskInput, ModelSearchTaskOutput } from \"@workglow/ai\";\nimport { searchHfModels, mapHfModelResult } from \"../../common/HfModelSearch\";\nimport { HF_TRANSFORMERS_ONNX } from \"./HFT_Constants\";\nimport { parseOnnxQuantizations } from \"./HFT_OnnxDtypes\";\n\nexport const HFT_ModelSearch: AiProviderRunFn<\n ModelSearchTaskInput,\n ModelSearchTaskOutput\n> = async (input, _model, _onProgress, signal) => {\n const entries = await searchHfModels(\n input.query,\n { filter: \"onnx\" },\n [\"siblings\"],\n signal\n );\n const results = entries.map((entry) => {\n const item = mapHfModelResult(entry, HF_TRANSFORMERS_ONNX);\n\n // Parse ONNX quantizations from siblings and include in record\n if (entry.siblings && entry.siblings.length > 0) {\n const filePaths = entry.siblings.map((s) => s.rfilename);\n const quantizations = parseOnnxQuantizations({ filePaths });\n if (quantizations.length > 0) {\n const record = item.record as Record<string, unknown>;\n const providerConfig = (record.provider_config ?? {}) as Record<string, unknown>;\n providerConfig.quantizations = quantizations;\n record.provider_config = providerConfig;\n }\n }\n\n // Strip raw siblings data — consumers get pre-parsed quantizations\n const raw = item.raw as Record<string, unknown>;\n delete raw.siblings;\n\n return item;\n });\n return { results };\n};\n",
14
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { RawImage } from \"@huggingface/transformers\";\n\n/**\n * Helper function to convert RawImage to base64 PNG\n */\nexport function imageToBase64(image: RawImage): string {\n return (image as any).toBase64?.() || \"\";\n}\n",
15
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { BackgroundRemovalPipeline } from \"@huggingface/transformers\";\nimport type {\n AiProviderRunFn,\n BackgroundRemovalTaskInput,\n BackgroundRemovalTaskOutput,\n} from \"@workglow/ai\";\nimport { imageToBase64 } from \"./HFT_ImageHelpers\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipeline } from \"./HFT_Pipeline\";\n\n/**\n * Core implementation for background removal using Hugging Face Transformers.\n */\nexport const HFT_BackgroundRemoval: AiProviderRunFn<\n BackgroundRemovalTaskInput,\n BackgroundRemovalTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, signal) => {\n const remover: BackgroundRemovalPipeline = await getPipeline(model!, onProgress, {}, signal);\n\n const result = await remover(input.image as string);\n\n const resultImage = Array.isArray(result) ? result[0] : result;\n\n return {\n image: imageToBase64(resultImage),\n };\n};\n",
16
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type {\n AiProviderReactiveRunFn,\n AiProviderRunFn,\n CountTokensTaskInput,\n CountTokensTaskOutput,\n} from \"@workglow/ai\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { loadTransformersSDK } from \"./HFT_Pipeline\";\n\nexport const HFT_CountTokens: AiProviderRunFn<\n CountTokensTaskInput,\n CountTokensTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, _signal) => {\n const isArrayInput = Array.isArray(input.text);\n\n const { AutoTokenizer } = await loadTransformersSDK();\n const tokenizer = await AutoTokenizer.from_pretrained(model!.provider_config.model_path, {\n progress_callback: (progress: any) => onProgress(progress?.progress ?? 0),\n });\n\n if (isArrayInput) {\n const texts = input.text as string[];\n const counts = texts.map((t) => tokenizer.encode(t).length);\n return { count: counts };\n }\n\n // encode() returns number[] of token IDs for a single input string\n const tokenIds = tokenizer.encode(input.text as string);\n return { count: tokenIds.length };\n};\n\nexport const HFT_CountTokens_Reactive: AiProviderReactiveRunFn<\n CountTokensTaskInput,\n CountTokensTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, _output, model) => {\n return HFT_CountTokens(input, model, () => {}, new AbortController().signal);\n};\n",
17
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { AiProviderRunFn, DownloadModelTaskRunInput, DownloadModelTaskRunOutput } from \"@workglow/ai\";\nimport { getLogger } from \"@workglow/util/worker\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipeline } from \"./HFT_Pipeline\";\n\n/**\n * Core implementation for downloading and caching a Hugging Face Transformers model.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_Download: AiProviderRunFn<\n DownloadModelTaskRunInput,\n DownloadModelTaskRunOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, signal) => {\n const logger = getLogger();\n const timerLabel = `hft:Download:${model?.provider_config.model_path}`;\n logger.time(timerLabel, { model: model?.provider_config.model_path });\n\n // Download the model by creating a pipeline\n // Use 100 as progressScaleMax since this is download-only (0-100%)\n await getPipeline(model!, onProgress, {}, signal, 100);\n\n logger.timeEnd(timerLabel, { model: model?.provider_config.model_path });\n return {\n model: input.model!,\n };\n};\n",
18
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type {\n ImageClassificationPipeline,\n ZeroShotImageClassificationPipeline,\n} from \"@huggingface/transformers\";\nimport type {\n AiProviderRunFn,\n ImageClassificationTaskInput,\n ImageClassificationTaskOutput,\n} from \"@workglow/ai\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipeline } from \"./HFT_Pipeline\";\n\n/**\n * Core implementation for image classification using Hugging Face Transformers.\n * Auto-selects between regular and zero-shot classification.\n */\nexport const HFT_ImageClassification: AiProviderRunFn<\n ImageClassificationTaskInput,\n ImageClassificationTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, signal) => {\n if (model?.provider_config?.pipeline === \"zero-shot-image-classification\") {\n if (!input.categories || !Array.isArray(input.categories) || input.categories.length === 0) {\n console.warn(\"Zero-shot image classification requires categories\", input);\n throw new Error(\"Zero-shot image classification requires categories\");\n }\n const zeroShotClassifier: ZeroShotImageClassificationPipeline = await getPipeline(\n model!,\n onProgress,\n {},\n signal\n );\n const result: any = await zeroShotClassifier(\n input.image as string,\n input.categories! as string[],\n {}\n );\n\n const results = Array.isArray(result) ? result : [result];\n\n return {\n categories: results.map((r: any) => ({\n label: r.label,\n score: r.score,\n })),\n };\n }\n\n const classifier: ImageClassificationPipeline = await getPipeline(model!, onProgress, {}, signal);\n const result: any = await classifier(input.image as string, {\n top_k: (input as any).maxCategories,\n });\n\n const results = Array.isArray(result) ? result : [result];\n\n return {\n categories: results.map((r: any) => ({\n label: r.label,\n score: r.score,\n })),\n };\n};\n",
19
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { ImageFeatureExtractionPipeline } from \"@huggingface/transformers\";\nimport type {\n AiProviderRunFn,\n ImageEmbeddingTaskInput,\n ImageEmbeddingTaskOutput,\n} from \"@workglow/ai\";\nimport { getLogger, TypedArray } from \"@workglow/util/worker\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipeline } from \"./HFT_Pipeline\";\n\n/**\n * Core implementation for image embedding using Hugging Face Transformers.\n */\nexport const HFT_ImageEmbedding: AiProviderRunFn<\n ImageEmbeddingTaskInput,\n ImageEmbeddingTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, signal) => {\n const logger = getLogger();\n const timerLabel = `hft:ImageEmbedding:${model?.provider_config.model_path}`;\n logger.time(timerLabel, { model: model?.provider_config.model_path });\n\n const embedder: ImageFeatureExtractionPipeline = await getPipeline(\n model!,\n onProgress,\n {},\n signal\n );\n\n logger.debug(\"HFT ImageEmbedding: pipeline ready, generating embedding\", {\n model: model?.provider_config.model_path,\n });\n\n const result: any = await embedder(input.image as string);\n\n logger.timeEnd(timerLabel, { dimensions: result?.data?.length });\n return {\n vector: result.data as TypedArray,\n } as ImageEmbeddingTaskOutput;\n};\n",
20
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { ImageSegmentationPipeline } from \"@huggingface/transformers\";\nimport type {\n AiProviderRunFn,\n ImageSegmentationTaskInput,\n ImageSegmentationTaskOutput,\n} from \"@workglow/ai\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipeline } from \"./HFT_Pipeline\";\n\n/**\n * Core implementation for image segmentation using Hugging Face Transformers.\n */\nexport const HFT_ImageSegmentation: AiProviderRunFn<\n ImageSegmentationTaskInput,\n ImageSegmentationTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, signal) => {\n const segmenter: ImageSegmentationPipeline = await getPipeline(model!, onProgress, {}, signal);\n\n const result = await segmenter(input.image as any, {\n threshold: input.threshold,\n mask_threshold: input.maskThreshold,\n });\n\n const masks = Array.isArray(result) ? result : [result];\n\n const processedMasks = await Promise.all(\n masks.map(async (mask) => ({\n label: mask.label || \"\",\n score: mask.score || 0,\n mask: {} as { [x: string]: unknown },\n }))\n );\n\n return {\n masks: processedMasks,\n };\n};\n",
21
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { ImageToTextPipeline } from \"@huggingface/transformers\";\nimport type { AiProviderRunFn, ImageToTextTaskInput, ImageToTextTaskOutput } from \"@workglow/ai\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipeline } from \"./HFT_Pipeline\";\n\n/**\n * Core implementation for image to text using Hugging Face Transformers.\n */\nexport const HFT_ImageToText: AiProviderRunFn<\n ImageToTextTaskInput,\n ImageToTextTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, signal) => {\n const captioner: ImageToTextPipeline = await getPipeline(model!, onProgress, {}, signal);\n\n const result: any = await captioner(input.image as string, {\n max_new_tokens: input.maxTokens,\n });\n\n const text = Array.isArray(result) ? result[0]?.generated_text : result?.generated_text;\n\n return {\n text: text || \"\",\n };\n};\n",
22
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { AiProviderRunFn, ModelInfoTaskInput, ModelInfoTaskOutput } from \"@workglow/ai\";\nimport { getLogger } from \"@workglow/util/worker\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { parseOnnxQuantizations } from \"./HFT_OnnxDtypes\";\nimport { getPipelineCacheKey, hasCachedPipeline, loadTransformersSDK } from \"./HFT_Pipeline\";\n\nexport const HFT_ModelInfo: AiProviderRunFn<\n ModelInfoTaskInput,\n ModelInfoTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model) => {\n const logger = getLogger();\n const { ModelRegistry } = await loadTransformersSDK();\n const timerLabel = `hft:ModelInfo:${model?.provider_config.model_path}`;\n logger.time(timerLabel, { model: model?.provider_config.model_path });\n\n const detail = input.detail;\n const is_loaded = hasCachedPipeline(getPipelineCacheKey(model!));\n\n const { pipeline: pipelineType, model_path, dtype } = model!.provider_config;\n\n const cacheStatus = await ModelRegistry.is_pipeline_cached_files(pipelineType, model_path, {\n ...(dtype ? { dtype } : {}),\n });\n logger.debug(\"is_pipeline_cached\", {\n input: [\n pipelineType,\n model_path,\n {\n ...(dtype ? 
{ dtype } : {}),\n },\n ],\n result: cacheStatus,\n });\n const is_cached = is_loaded || cacheStatus.allCached;\n\n // Build file_sizes based on requested detail level\n let file_sizes: Record<string, number> | null = null;\n if (detail === \"files\" && cacheStatus.files.length > 0) {\n // Return file names with zero sizes (no network calls)\n const sizes: Record<string, number> = {};\n for (const { file } of cacheStatus.files) {\n sizes[file] = 0;\n }\n file_sizes = sizes;\n } else if (detail === \"files_with_metadata\" && cacheStatus.files.length > 0) {\n // Full metadata fetch per file (N network calls)\n const sizes: Record<string, number> = {};\n await Promise.all(\n cacheStatus.files.map(async ({ file }) => {\n const metadata = await ModelRegistry.get_file_metadata(model_path, file);\n if (metadata.exists && metadata.size !== undefined) {\n sizes[file] = metadata.size;\n }\n })\n );\n if (Object.keys(sizes).length > 0) {\n file_sizes = sizes;\n }\n }\n\n // Parse ONNX quantizations from file list\n let quantizations: string[] | undefined;\n if (cacheStatus.files.length > 0) {\n const filePaths = cacheStatus.files.map((f) => f.file);\n const quantizations_parsed = parseOnnxQuantizations({ filePaths });\n if (quantizations_parsed.length > 0) {\n quantizations = quantizations_parsed;\n }\n }\n\n logger.timeEnd(timerLabel, { model: model?.provider_config.model_path });\n\n return {\n model: input.model,\n is_local: true,\n is_remote: false,\n supports_browser: true,\n supports_node: true,\n is_cached,\n is_loaded,\n file_sizes,\n ...(quantizations ? { quantizations } : {}),\n };\n};\n",
23
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type {\n ObjectDetectionPipeline,\n ZeroShotObjectDetectionPipeline,\n} from \"@huggingface/transformers\";\nimport type {\n AiProviderRunFn,\n ObjectDetectionTaskInput,\n ObjectDetectionTaskOutput,\n} from \"@workglow/ai\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipeline } from \"./HFT_Pipeline\";\n\n/**\n * Core implementation for object detection using Hugging Face Transformers.\n * Auto-selects between regular and zero-shot detection.\n */\nexport const HFT_ObjectDetection: AiProviderRunFn<\n ObjectDetectionTaskInput,\n ObjectDetectionTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, signal) => {\n if (model?.provider_config?.pipeline === \"zero-shot-object-detection\") {\n if (!input.labels || !Array.isArray(input.labels) || input.labels.length === 0) {\n throw new Error(\"Zero-shot object detection requires labels\");\n }\n const zeroShotDetector: ZeroShotObjectDetectionPipeline = await getPipeline(\n model!,\n onProgress,\n {},\n signal\n );\n const result: any = await zeroShotDetector(input.image as string, Array.from(input.labels!), {\n threshold: (input as any).threshold,\n });\n\n const detections = Array.isArray(result) ? result : [result];\n\n return {\n detections: detections.map((d: any) => ({\n label: d.label,\n score: d.score,\n box: d.box,\n })),\n };\n }\n\n const detector: ObjectDetectionPipeline = await getPipeline(model!, onProgress, {}, signal);\n const result: any = await detector(input.image as string, {\n threshold: (input as any).threshold,\n });\n\n const detections = Array.isArray(result) ? result : [result];\n\n return {\n detections: detections.map((d: any) => ({\n label: d.label,\n score: d.score,\n box: d.box,\n })),\n };\n};\n",
24
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type {\n Message,\n TextGenerationOutput,\n TextGenerationPipeline,\n} from \"@huggingface/transformers\";\nimport type {\n AiProviderRunFn,\n AiProviderStreamFn,\n StructuredGenerationTaskInput,\n StructuredGenerationTaskOutput,\n} from \"@workglow/ai\";\nimport type { StreamEvent } from \"@workglow/task-graph\";\nimport { parsePartialJson } from \"@workglow/util/worker\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipeline, loadTransformersSDK } from \"./HFT_Pipeline\";\nimport {\n createStreamEventQueue,\n createStreamingTextStreamer,\n createTextStreamer,\n} from \"./HFT_Streaming\";\nimport { extractGeneratedText } from \"./HFT_TextOutput\";\n\nfunction buildStructuredGenerationPrompt(input: StructuredGenerationTaskInput): string {\n const schemaStr = JSON.stringify(input.outputSchema, null, 2);\n return (\n `${input.prompt}\\n\\n` +\n `You MUST respond with ONLY a valid JSON object conforming to this JSON schema:\\n${schemaStr}\\n\\n` +\n `Output ONLY the JSON object, no other text.`\n );\n}\n\nfunction extractJsonFromText(text: string): Record<string, unknown> {\n // Try parsing directly first\n try {\n return JSON.parse(text);\n } catch {\n // Try to extract JSON object from the text\n const match = text.match(/\\{[\\s\\S]*\\}/);\n if (match) {\n try {\n return JSON.parse(match[0]);\n } catch {\n return (parsePartialJson(match[0]) as Record<string, unknown>) ?? 
{};\n }\n }\n return {};\n }\n}\n\nexport const HFT_StructuredGeneration: AiProviderRunFn<\n StructuredGenerationTaskInput,\n StructuredGenerationTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, signal) => {\n const generateText: TextGenerationPipeline = await getPipeline(model!, onProgress, {}, signal);\n const { TextStreamer } = await loadTransformersSDK();\n\n const prompt = buildStructuredGenerationPrompt(input);\n\n const messages: Message[] = [{ role: \"user\", content: prompt }];\n\n const formattedPrompt = (generateText.tokenizer as any).apply_chat_template(messages, {\n tokenize: false,\n add_generation_prompt: true,\n }) as string;\n\n const streamer = createTextStreamer(generateText.tokenizer, onProgress, TextStreamer);\n\n let results = await generateText(formattedPrompt, {\n max_new_tokens: input.maxTokens ?? 1024,\n temperature: input.temperature ?? undefined,\n return_full_text: false,\n streamer,\n });\n\n if (!Array.isArray(results)) {\n results = [results];\n }\n\n const responseText = extractGeneratedText(\n (results[0] as TextGenerationOutput[number])?.generated_text\n ).trim();\n\n const object = extractJsonFromText(responseText);\n return { object };\n};\n\nexport const HFT_StructuredGeneration_Stream: AiProviderStreamFn<\n StructuredGenerationTaskInput,\n StructuredGenerationTaskOutput,\n HfTransformersOnnxModelConfig\n> = async function* (\n input,\n model,\n signal\n): AsyncIterable<StreamEvent<StructuredGenerationTaskOutput>> {\n const noopProgress = () => {};\n const generateText: TextGenerationPipeline = await getPipeline(model!, noopProgress, {}, signal);\n const { TextStreamer } = await loadTransformersSDK();\n\n const prompt = buildStructuredGenerationPrompt(input);\n\n const messages: Message[] = [{ role: \"user\", content: prompt }];\n\n const formattedPrompt = (generateText.tokenizer as any).apply_chat_template(messages, {\n tokenize: false,\n add_generation_prompt: true,\n }) as string;\n\n const 
queue = createStreamEventQueue<StreamEvent<StructuredGenerationTaskOutput>>();\n const streamer = createStreamingTextStreamer(generateText.tokenizer, queue, TextStreamer);\n\n let fullText = \"\";\n\n const originalPush = queue.push;\n queue.push = (event: StreamEvent<StructuredGenerationTaskOutput>) => {\n if (event.type === \"text-delta\" && \"textDelta\" in event) {\n fullText += (event as any).textDelta;\n // Try to parse partial JSON and emit object-delta\n const match = fullText.match(/\\{[\\s\\S]*/);\n if (match) {\n const partial = parsePartialJson(match[0]);\n if (partial !== undefined) {\n originalPush({\n type: \"object-delta\",\n port: \"object\",\n objectDelta: partial,\n } as StreamEvent<StructuredGenerationTaskOutput>);\n return;\n }\n }\n }\n originalPush(event);\n };\n\n const pipelinePromise = generateText(formattedPrompt, {\n max_new_tokens: input.maxTokens ?? 1024,\n temperature: input.temperature ?? undefined,\n return_full_text: false,\n streamer,\n }).then(\n () => queue.done(),\n (err: Error) => queue.error(err)\n );\n\n yield* queue.iterable;\n await pipelinePromise;\n\n const object = extractJsonFromText(fullText);\n yield { type: \"finish\", data: { object } as StructuredGenerationTaskOutput };\n};\n",
25
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { TextStreamer } from \"@huggingface/transformers\";\nimport type { StreamEvent } from \"@workglow/task-graph\";\n\nexport type StreamEventQueue<T> = {\n push: (event: T) => void;\n done: () => void;\n error: (err: Error) => void;\n iterable: AsyncIterable<T>;\n};\n\nexport function createStreamEventQueue<T>(): StreamEventQueue<T> {\n const buffer: T[] = [];\n let resolve: ((value: IteratorResult<T>) => void) | null = null;\n let finished = false;\n let err: Error | null = null;\n\n const push = (event: T) => {\n if (resolve) {\n const r = resolve;\n resolve = null;\n r({ value: event, done: false });\n } else {\n buffer.push(event);\n }\n };\n\n const done = () => {\n finished = true;\n if (resolve) {\n const r = resolve;\n resolve = null;\n r({ value: undefined as any, done: true });\n }\n };\n\n const error = (e: Error) => {\n err = e;\n if (resolve) {\n const r = resolve;\n resolve = null;\n r({ value: undefined as any, done: true });\n }\n };\n\n const iterable: AsyncIterable<T> = {\n [Symbol.asyncIterator]() {\n return {\n next(): Promise<IteratorResult<T>> {\n if (err) return Promise.reject(err);\n if (buffer.length > 0) {\n return Promise.resolve({ value: buffer.shift()!, done: false });\n }\n if (finished) {\n return Promise.resolve({ value: undefined as any, done: true });\n }\n return new Promise<IteratorResult<T>>((r) => {\n resolve = r;\n });\n },\n };\n },\n };\n\n return { push, done, error, iterable };\n}\n\n/**\n * Creates a TextStreamer that pushes StreamEvents into an async queue.\n * The pipeline runs to completion and updates the queue; the caller\n * consumes the queue as an AsyncIterable<StreamEvent>.\n */\nexport function createStreamingTextStreamer(\n tokenizer: any,\n queue: StreamEventQueue<StreamEvent<any>>,\n textStreamer: typeof TextStreamer\n) {\n return new textStreamer(tokenizer, {\n skip_prompt: 
true,\n decode_kwargs: { skip_special_tokens: true },\n callback_function: (text: string) => {\n queue.push({ type: \"text-delta\", port: \"text\", textDelta: text });\n },\n });\n}\n\n/**\n * Create a text streamer for a given tokenizer and update progress function\n */\nexport function createTextStreamer(\n tokenizer: any,\n updateProgress: (progress: number, message?: string, details?: any) => void,\n textStreamer: typeof TextStreamer\n) {\n let count = 0;\n return new textStreamer(tokenizer, {\n skip_prompt: true,\n decode_kwargs: { skip_special_tokens: true },\n callback_function: (text: string) => {\n count++;\n const result = 100 * (1 - Math.exp(-0.05 * count));\n const progress = Math.round(Math.min(result, 100));\n updateProgress(progress, \"Generating\", { text, progress });\n },\n });\n}\n",
26
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { Message } from \"@huggingface/transformers\";\n\nexport function extractGeneratedText(generatedText: string | Message[] | undefined): string {\n if (generatedText == null) return \"\";\n if (typeof generatedText === \"string\") return generatedText;\n const lastMessage = generatedText[generatedText.length - 1];\n if (!lastMessage) return \"\";\n const content = lastMessage.content;\n if (typeof content === \"string\") return content;\n for (const part of content) {\n if (part.type === \"text\" && \"text\" in part) {\n return (part as { type: \"text\"; text: string }).text;\n }\n }\n return \"\";\n}\n",
27
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type {\n TextClassificationOutput,\n TextClassificationPipeline,\n ZeroShotClassificationPipeline,\n} from \"@huggingface/transformers\";\nimport type {\n AiProviderRunFn,\n TextClassificationTaskInput,\n TextClassificationTaskOutput,\n} from \"@workglow/ai\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipeline } from \"./HFT_Pipeline\";\n\nexport const HFT_TextClassification: AiProviderRunFn<\n TextClassificationTaskInput,\n TextClassificationTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, signal) => {\n const isArrayInput = Array.isArray(input.text);\n\n if (model?.provider_config?.pipeline === \"zero-shot-classification\") {\n if (\n !input.candidateLabels ||\n !Array.isArray(input.candidateLabels) ||\n input.candidateLabels.length === 0\n ) {\n throw new Error(\"Zero-shot text classification requires candidate labels\");\n }\n\n const zeroShotClassifier: ZeroShotClassificationPipeline = await getPipeline(\n model!,\n onProgress,\n {},\n signal\n );\n const result: any = await zeroShotClassifier(\n input.text as any,\n input.candidateLabels as string[],\n {}\n );\n\n if (isArrayInput) {\n // Batch result: result is an array of { labels, scores } per input\n const results = Array.isArray(result) && Array.isArray(result[0]?.labels) ? 
result : [result];\n return {\n categories: results.map((r: any) =>\n r.labels.map((label: string, idx: number) => ({\n label,\n score: r.scores[idx],\n }))\n ),\n };\n }\n\n return {\n categories: result.labels.map((label: string, idx: number) => ({\n label,\n score: result.scores[idx],\n })),\n };\n }\n\n const TextClassification: TextClassificationPipeline = await getPipeline(\n model!,\n onProgress,\n {},\n signal\n );\n const result = await TextClassification(input.text as any, {\n top_k: input.maxCategories || undefined,\n });\n\n if (isArrayInput) {\n // Batch result: outer array per input, inner array of categories\n return {\n categories: (result as any[]).map((perInput: any) => {\n const items = Array.isArray(perInput) ? perInput : [perInput];\n return items.map((category: any) => ({\n label: category.label as string,\n score: category.score as number,\n }));\n }),\n };\n }\n\n if (Array.isArray(result[0])) {\n return {\n categories: result[0].map((category) => ({\n label: category.label,\n score: category.score,\n })),\n };\n }\n\n return {\n categories: (result as TextClassificationOutput).map((category) => ({\n label: category.label,\n score: category.score,\n })),\n };\n};\n",
28
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { FeatureExtractionPipeline } from \"@huggingface/transformers\";\nimport type {\n AiProviderRunFn,\n TextEmbeddingTaskInput,\n TextEmbeddingTaskOutput,\n} from \"@workglow/ai\";\nimport { getLogger, TypedArray } from \"@workglow/util/worker\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipeline } from \"./HFT_Pipeline\";\n\n/**\n * Core implementation for text embedding using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextEmbedding: AiProviderRunFn<\n TextEmbeddingTaskInput,\n TextEmbeddingTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, signal) => {\n const logger = getLogger();\n const uuid = crypto.randomUUID();\n const timerLabel = `hft:TextEmbedding:${model?.provider_config.model_path}:${uuid}`;\n logger.time(timerLabel, { model: model?.provider_config.model_path });\n\n const generateEmbedding: FeatureExtractionPipeline = await getPipeline(\n model!,\n onProgress,\n {},\n signal\n );\n\n logger.debug(\"HFT TextEmbedding: pipeline ready, generating embedding\", {\n model: model?.provider_config.model_path,\n inputLength: Array.isArray(input.text) ? 
input.text.length : input.text?.length,\n });\n\n // Generate the embedding\n const hfVector = await generateEmbedding(input.text, {\n pooling: model?.provider_config.pooling || \"mean\",\n normalize: model?.provider_config.normalize,\n });\n\n const isArrayInput = Array.isArray(input.text);\n const embeddingDim = model?.provider_config.native_dimensions;\n\n // If the input is an array, the tensor will have multiple dimensions (e.g., [10, 384])\n // We need to split it into separate vectors for each input text\n if (isArrayInput && hfVector.dims.length > 1) {\n const [numTexts, vectorDim] = hfVector.dims;\n\n // Validate that the number of texts matches\n if (numTexts !== input.text.length) {\n throw new Error(\n `HuggingFace Embedding tensor batch size does not match input array length: ${numTexts} != ${input.text.length}`\n );\n }\n\n // Validate dimensions\n if (vectorDim !== embeddingDim) {\n throw new Error(\n `HuggingFace Embedding vector dimension does not match model dimensions: ${vectorDim} != ${embeddingDim}`\n );\n }\n\n // Extract each embedding vector using tensor indexing\n // hfVector[i] returns a sub-tensor for the i-th text\n // .slice() is required to create independent TypedArrays with their own ArrayBuffers,\n // because sub-tensor views all share the same backing buffer, which causes DataCloneError\n // when postMessage tries to transfer the same ArrayBuffer multiple times.\n const vectors: TypedArray[] = Array.from({ length: numTexts }, (_, i) =>\n ((hfVector as any)[i].data as TypedArray).slice()\n );\n\n logger.timeEnd(timerLabel, { batchSize: numTexts, dimensions: vectorDim });\n return { vector: vectors };\n }\n\n // Output[number] text input - validate dimensions\n if (hfVector.size !== embeddingDim) {\n logger.timeEnd(timerLabel, { status: \"error\", reason: \"dimension mismatch\" });\n console.warn(\n `HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${embeddingDim}`,\n input,\n hfVector\n );\n 
throw new Error(\n `HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${embeddingDim}`\n );\n }\n\n logger.timeEnd(timerLabel, { dimensions: hfVector.size });\n return { vector: hfVector.data as TypedArray };\n};\n",
29
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { FillMaskOutput, FillMaskPipeline } from \"@huggingface/transformers\";\nimport type { AiProviderRunFn, TextFillMaskTaskInput, TextFillMaskTaskOutput } from \"@workglow/ai\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipeline } from \"./HFT_Pipeline\";\n\nexport const HFT_TextFillMask: AiProviderRunFn<\n TextFillMaskTaskInput,\n TextFillMaskTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, signal) => {\n const isArrayInput = Array.isArray(input.text);\n\n const unmasker: FillMaskPipeline = await getPipeline(model!, onProgress, {}, signal);\n const results = await unmasker(input.text as any);\n\n if (isArrayInput) {\n return {\n predictions: (results as unknown as FillMaskOutput[]).map((perInput) => {\n const items = Array.isArray(perInput) ? perInput : [perInput];\n return items.map((prediction) => ({\n entity: prediction.token_str,\n score: prediction.score,\n sequence: prediction.sequence,\n }));\n }),\n };\n }\n\n let predictions: FillMaskOutput = [];\n if (!Array.isArray(results)) {\n predictions = [results];\n } else {\n predictions = results as FillMaskOutput;\n }\n return {\n predictions: predictions.map((prediction) => ({\n entity: prediction.token_str,\n score: prediction.score,\n sequence: prediction.sequence,\n })),\n };\n};\n",
30
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { TextGenerationOutput, TextGenerationPipeline } from \"@huggingface/transformers\";\nimport type {\n AiProviderRunFn,\n AiProviderStreamFn,\n TextGenerationTaskInput,\n TextGenerationTaskOutput,\n} from \"@workglow/ai\";\nimport type { StreamEvent } from \"@workglow/task-graph\";\nimport { getLogger } from \"@workglow/util/worker\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipeline, loadTransformersSDK } from \"./HFT_Pipeline\";\nimport {\n createStreamEventQueue,\n createStreamingTextStreamer,\n createTextStreamer,\n} from \"./HFT_Streaming\";\nimport { extractGeneratedText } from \"./HFT_TextOutput\";\n\n/**\n * Core implementation for text generation using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextGeneration: AiProviderRunFn<\n TextGenerationTaskInput,\n TextGenerationTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, signal) => {\n const logger = getLogger();\n const timerLabel = `hft:TextGeneration:${model?.provider_config.model_path}`;\n logger.time(timerLabel, { model: model?.provider_config.model_path });\n\n const isArrayInput = Array.isArray(input.prompt);\n\n const generateText: TextGenerationPipeline = await getPipeline(model!, onProgress, {}, signal);\n const { TextStreamer } = await loadTransformersSDK();\n\n logger.debug(\"HFT TextGeneration: pipeline ready, generating text\", {\n model: model?.provider_config.model_path,\n promptLength: isArrayInput ? (input.prompt as string[]).length : input.prompt?.length,\n });\n\n const streamer = isArrayInput\n ? undefined\n : createTextStreamer(generateText.tokenizer, onProgress, TextStreamer);\n\n let results = await generateText(input.prompt as any, {\n ...(streamer ? 
{ streamer } : {}),\n });\n\n if (isArrayInput) {\n // Batch result: results is an array, one entry per prompt\n const batchResults = Array.isArray(results) ? results : [results];\n const texts = batchResults.map((r) => {\n const seqs = Array.isArray(r) ? r : [r];\n return extractGeneratedText((seqs[0] as TextGenerationOutput[number])?.generated_text);\n });\n logger.timeEnd(timerLabel, { batchSize: texts.length });\n return { text: texts };\n }\n\n if (!Array.isArray(results)) {\n results = [results];\n }\n const text = extractGeneratedText((results[0] as TextGenerationOutput[number])?.generated_text);\n logger.timeEnd(timerLabel, { outputLength: text?.length });\n return {\n text,\n };\n};\n\nexport const HFT_TextGeneration_Stream: AiProviderStreamFn<\n TextGenerationTaskInput,\n TextGenerationTaskOutput,\n HfTransformersOnnxModelConfig\n> = async function* (input, model, signal): AsyncIterable<StreamEvent<TextGenerationTaskOutput>> {\n const noopProgress = () => {};\n const generateText: TextGenerationPipeline = await getPipeline(model!, noopProgress, {}, signal);\n const { TextStreamer } = await loadTransformersSDK();\n\n const queue = createStreamEventQueue<StreamEvent<TextGenerationTaskOutput>>();\n const streamer = createStreamingTextStreamer(generateText.tokenizer, queue, TextStreamer);\n\n const pipelinePromise = generateText(input.prompt as string, {\n streamer,\n }).then(\n () => queue.done(),\n (err: Error) => queue.error(err)\n );\n\n yield* queue.iterable;\n await pipelinePromise;\n yield { type: \"finish\", data: {} as TextGenerationTaskOutput };\n};\n",
31
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type {\n TextClassificationOutput,\n TextClassificationPipeline,\n} from \"@huggingface/transformers\";\nimport type {\n AiProviderRunFn,\n TextLanguageDetectionTaskInput,\n TextLanguageDetectionTaskOutput,\n} from \"@workglow/ai\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipeline } from \"./HFT_Pipeline\";\n\nexport const HFT_TextLanguageDetection: AiProviderRunFn<\n TextLanguageDetectionTaskInput,\n TextLanguageDetectionTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, signal) => {\n const isArrayInput = Array.isArray(input.text);\n\n const TextClassification: TextClassificationPipeline = await getPipeline(\n model!,\n onProgress,\n {},\n signal\n );\n const result = await TextClassification(input.text as any, {\n top_k: input.maxLanguages || undefined,\n });\n\n if (isArrayInput) {\n return {\n languages: (result as any[]).map((perInput: any) => {\n const items = Array.isArray(perInput) ? perInput : [perInput];\n return items.map((category: any) => ({\n language: category.label as string,\n score: category.score as number,\n }));\n }),\n };\n }\n\n if (Array.isArray(result[0])) {\n return {\n languages: result[0].map((category) => ({\n language: category.label,\n score: category.score,\n })),\n };\n }\n\n return {\n languages: (result as TextClassificationOutput).map((category) => ({\n language: category.label,\n score: category.score,\n })),\n };\n};\n",
32
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type {\n TokenClassificationOutput,\n TokenClassificationPipeline,\n} from \"@huggingface/transformers\";\nimport type {\n AiProviderRunFn,\n TextNamedEntityRecognitionTaskInput,\n TextNamedEntityRecognitionTaskOutput,\n} from \"@workglow/ai\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipeline } from \"./HFT_Pipeline\";\n\nexport const HFT_TextNamedEntityRecognition: AiProviderRunFn<\n TextNamedEntityRecognitionTaskInput,\n TextNamedEntityRecognitionTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, signal) => {\n const isArrayInput = Array.isArray(input.text);\n\n const textNamedEntityRecognition: TokenClassificationPipeline = await getPipeline(\n model!,\n onProgress,\n {},\n signal\n );\n const results = await textNamedEntityRecognition(input.text as any, {\n ignore_labels: input.blockList as string[] | undefined,\n });\n\n if (isArrayInput) {\n return {\n entities: (results as unknown as TokenClassificationOutput[]).map((perInput) => {\n const items = Array.isArray(perInput) ? perInput : [perInput];\n return items.map((entity) => ({\n entity: entity.entity,\n score: entity.score,\n word: entity.word,\n }));\n }),\n };\n }\n\n let entities: TokenClassificationOutput = [];\n if (!Array.isArray(results)) {\n entities = [results];\n } else {\n entities = results as TokenClassificationOutput;\n }\n return {\n entities: entities.map((entity) => ({\n entity: entity.entity,\n score: entity.score,\n word: entity.word,\n })),\n };\n};\n",
33
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type {\n DocumentQuestionAnsweringOutput,\n QuestionAnsweringPipeline,\n} from \"@huggingface/transformers\";\nimport type {\n AiProviderRunFn,\n AiProviderStreamFn,\n TextQuestionAnswerTaskInput,\n TextQuestionAnswerTaskOutput,\n} from \"@workglow/ai\";\nimport type { StreamEvent } from \"@workglow/task-graph\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipeline, loadTransformersSDK } from \"./HFT_Pipeline\";\nimport {\n createStreamEventQueue,\n createStreamingTextStreamer,\n createTextStreamer,\n} from \"./HFT_Streaming\";\n\n/**\n * Core implementation for question answering using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextQuestionAnswer: AiProviderRunFn<\n TextQuestionAnswerTaskInput,\n TextQuestionAnswerTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, signal) => {\n const isArrayInput = Array.isArray(input.question);\n\n // Get the question answering pipeline\n const generateAnswer: QuestionAnsweringPipeline = await getPipeline(\n model!,\n onProgress,\n {},\n signal\n );\n\n if (isArrayInput) {\n const questions = input.question as string[];\n const contexts = input.context as string[];\n if (questions.length !== contexts.length) {\n throw new Error(\n `question[] and context[] must have the same length: ${questions.length} != ${contexts.length}`\n );\n }\n\n const answers: string[] = [];\n for (let i = 0; i < questions.length; i++) {\n const result = await generateAnswer(questions[i], contexts[i], {} as any);\n let answerText = \"\";\n if (Array.isArray(result)) {\n answerText = (result[0] as DocumentQuestionAnsweringOutput[number])?.answer || \"\";\n } else {\n answerText = (result as DocumentQuestionAnsweringOutput[number])?.answer || \"\";\n }\n answers.push(answerText);\n 
}\n\n return { text: answers };\n }\n\n const { TextStreamer } = await loadTransformersSDK();\n const streamer = createTextStreamer(generateAnswer.tokenizer, onProgress, TextStreamer);\n\n const result = await generateAnswer(\n input.question as string,\n input.context as string,\n {\n streamer,\n } as any\n );\n\n let answerText = \"\";\n if (Array.isArray(result)) {\n answerText = (result[0] as DocumentQuestionAnsweringOutput[number])?.answer || \"\";\n } else {\n answerText = (result as DocumentQuestionAnsweringOutput[number])?.answer || \"\";\n }\n\n return {\n text: answerText,\n };\n};\n\nexport const HFT_TextQuestionAnswer_Stream: AiProviderStreamFn<\n TextQuestionAnswerTaskInput,\n TextQuestionAnswerTaskOutput,\n HfTransformersOnnxModelConfig\n> = async function* (\n input,\n model,\n signal\n): AsyncIterable<StreamEvent<TextQuestionAnswerTaskOutput>> {\n const noopProgress = () => {};\n const generateAnswer: QuestionAnsweringPipeline = await getPipeline(\n model!,\n noopProgress,\n {},\n signal\n );\n const { TextStreamer } = await loadTransformersSDK();\n\n const queue = createStreamEventQueue<StreamEvent<TextQuestionAnswerTaskOutput>>();\n const streamer = createStreamingTextStreamer(generateAnswer.tokenizer, queue, TextStreamer);\n\n let pipelineResult:\n | DocumentQuestionAnsweringOutput[number]\n | DocumentQuestionAnsweringOutput\n | undefined;\n const pipelinePromise = generateAnswer(\n input.question as string,\n input.context as string,\n {\n streamer,\n } as any\n ).then(\n (result) => {\n pipelineResult = result;\n queue.done();\n },\n (err: Error) => queue.error(err)\n );\n\n yield* queue.iterable;\n await pipelinePromise;\n\n let answerText = \"\";\n if (pipelineResult !== undefined) {\n if (Array.isArray(pipelineResult)) {\n answerText = (pipelineResult[0] as DocumentQuestionAnsweringOutput[number])?.answer ?? \"\";\n } else {\n answerText = (pipelineResult as DocumentQuestionAnsweringOutput[number])?.answer ?? 
\"\";\n }\n }\n yield { type: \"finish\", data: { text: answerText } as TextQuestionAnswerTaskOutput };\n};\n",
34
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { TextGenerationOutput, TextGenerationPipeline } from \"@huggingface/transformers\";\nimport type {\n AiProviderRunFn,\n AiProviderStreamFn,\n TextRewriterTaskInput,\n TextRewriterTaskOutput,\n} from \"@workglow/ai\";\nimport type { StreamEvent } from \"@workglow/task-graph\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipeline, loadTransformersSDK } from \"./HFT_Pipeline\";\nimport {\n createStreamEventQueue,\n createStreamingTextStreamer,\n createTextStreamer,\n} from \"./HFT_Streaming\";\nimport { extractGeneratedText } from \"./HFT_TextOutput\";\n\n/**\n * Core implementation for text rewriting using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextRewriter: AiProviderRunFn<\n TextRewriterTaskInput,\n TextRewriterTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, signal) => {\n const isArrayInput = Array.isArray(input.text);\n\n const generateText: TextGenerationPipeline = await getPipeline(model!, onProgress, {}, signal);\n const { TextStreamer } = await loadTransformersSDK();\n const streamer = isArrayInput\n ? undefined\n : createTextStreamer(generateText.tokenizer, onProgress, TextStreamer);\n\n if (isArrayInput) {\n const texts = input.text as string[];\n const promptedTexts = texts.map((t) => (input.prompt ? input.prompt + \"\\n\" : \"\") + t);\n\n let results = await generateText(promptedTexts, {});\n\n const batchResults = Array.isArray(results) ? results : [results];\n const outputTexts = batchResults.map((r, i) => {\n const seqs = Array.isArray(r) ? 
r : [r];\n const text = extractGeneratedText((seqs[0] as TextGenerationOutput[number])?.generated_text);\n if (text === promptedTexts[i]) {\n throw new Error(\"Rewriter failed to generate new text\");\n }\n return text;\n });\n\n return { text: outputTexts };\n }\n\n // This lib doesn't support this kind of rewriting with a separate prompt vs text\n const promptedText = (input.prompt ? input.prompt + \"\\n\" : \"\") + input.text;\n\n let results = await generateText(promptedText, {\n ...(streamer ? { streamer } : {}),\n });\n\n if (!Array.isArray(results)) {\n results = [results];\n }\n\n const text = extractGeneratedText((results[0] as TextGenerationOutput[number])?.generated_text);\n\n if (text === promptedText) {\n throw new Error(\"Rewriter failed to generate new text\");\n }\n\n return {\n text,\n };\n};\n\nexport const HFT_TextRewriter_Stream: AiProviderStreamFn<\n TextRewriterTaskInput,\n TextRewriterTaskOutput,\n HfTransformersOnnxModelConfig\n> = async function* (input, model, signal): AsyncIterable<StreamEvent<TextRewriterTaskOutput>> {\n const noopProgress = () => {};\n const generateText: TextGenerationPipeline = await getPipeline(model!, noopProgress, {}, signal);\n const { TextStreamer } = await loadTransformersSDK();\n\n const queue = createStreamEventQueue<StreamEvent<TextRewriterTaskOutput>>();\n const streamer = createStreamingTextStreamer(generateText.tokenizer, queue, TextStreamer);\n\n const promptedText = (input.prompt ? input.prompt + \"\\n\" : \"\") + (input.text as string);\n\n const pipelinePromise = generateText(promptedText, {\n streamer,\n }).then(\n () => queue.done(),\n (err: Error) => queue.error(err)\n );\n\n yield* queue.iterable;\n await pipelinePromise;\n yield { type: \"finish\", data: {} as TextRewriterTaskOutput };\n};\n",
35
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { SummarizationOutput, SummarizationPipeline } from \"@huggingface/transformers\";\nimport type {\n AiProviderRunFn,\n AiProviderStreamFn,\n TextSummaryTaskInput,\n TextSummaryTaskOutput,\n} from \"@workglow/ai\";\nimport type { StreamEvent } from \"@workglow/task-graph\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipeline, loadTransformersSDK } from \"./HFT_Pipeline\";\nimport {\n createStreamEventQueue,\n createStreamingTextStreamer,\n createTextStreamer,\n} from \"./HFT_Streaming\";\n\n/**\n * Core implementation for text summarization using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextSummary: AiProviderRunFn<\n TextSummaryTaskInput,\n TextSummaryTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, signal) => {\n const isArrayInput = Array.isArray(input.text);\n\n const generateSummary: SummarizationPipeline = await getPipeline(model!, onProgress, {}, signal);\n const { TextStreamer } = await loadTransformersSDK();\n const streamer = isArrayInput\n ? undefined\n : createTextStreamer(generateSummary.tokenizer, onProgress, TextStreamer);\n\n const result = await generateSummary(\n input.text as any,\n {\n ...(streamer ? { streamer } : {}),\n } as any\n );\n\n if (isArrayInput) {\n const batchResults = Array.isArray(result) ? 
result : [result];\n return {\n text: batchResults.map((r) => (r as SummarizationOutput[number])?.summary_text || \"\"),\n };\n }\n\n let summaryText = \"\";\n if (Array.isArray(result)) {\n summaryText = (result[0] as SummarizationOutput[number])?.summary_text || \"\";\n } else {\n summaryText = (result as SummarizationOutput[number])?.summary_text || \"\";\n }\n\n return {\n text: summaryText,\n };\n};\n\nexport const HFT_TextSummary_Stream: AiProviderStreamFn<\n TextSummaryTaskInput,\n TextSummaryTaskOutput,\n HfTransformersOnnxModelConfig\n> = async function* (input, model, signal): AsyncIterable<StreamEvent<TextSummaryTaskOutput>> {\n const noopProgress = () => {};\n const generateSummary: SummarizationPipeline = await getPipeline(\n model!,\n noopProgress,\n {},\n signal\n );\n const { TextStreamer } = await loadTransformersSDK();\n\n const queue = createStreamEventQueue<StreamEvent<TextSummaryTaskOutput>>();\n const streamer = createStreamingTextStreamer(generateSummary.tokenizer, queue, TextStreamer);\n\n const pipelinePromise = generateSummary(\n input.text as string,\n {\n streamer,\n } as any\n ).then(\n () => queue.done(),\n (err: Error) => queue.error(err)\n );\n\n yield* queue.iterable;\n await pipelinePromise;\n yield { type: \"finish\", data: {} as TextSummaryTaskOutput };\n};\n",
36
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { TranslationOutput, TranslationPipeline } from \"@huggingface/transformers\";\nimport type {\n AiProviderRunFn,\n AiProviderStreamFn,\n TextTranslationTaskInput,\n TextTranslationTaskOutput,\n} from \"@workglow/ai\";\nimport type { StreamEvent } from \"@workglow/task-graph\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipeline, loadTransformersSDK } from \"./HFT_Pipeline\";\nimport {\n createStreamEventQueue,\n createStreamingTextStreamer,\n createTextStreamer,\n} from \"./HFT_Streaming\";\n\n/**\n * Core implementation for text translation using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextTranslation: AiProviderRunFn<\n TextTranslationTaskInput,\n TextTranslationTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, signal) => {\n const isArrayInput = Array.isArray(input.text);\n\n const translate: TranslationPipeline = await getPipeline(model!, onProgress, {}, signal);\n const { TextStreamer } = await loadTransformersSDK();\n const streamer = isArrayInput\n ? undefined\n : createTextStreamer(translate.tokenizer, onProgress, TextStreamer);\n\n const result = await translate(\n input.text as any,\n {\n src_lang: input.source_lang,\n tgt_lang: input.target_lang,\n ...(streamer ? { streamer } : {}),\n } as any\n );\n\n if (isArrayInput) {\n const batchResults = Array.isArray(result) ? result : [result];\n return {\n text: batchResults.map((r) => (r as TranslationOutput[number])?.translation_text || \"\"),\n target_lang: input.target_lang,\n };\n }\n\n const translatedText = Array.isArray(result)\n ? 
(result[0] as TranslationOutput[number])?.translation_text || \"\"\n : (result as TranslationOutput[number])?.translation_text || \"\";\n\n return {\n text: translatedText,\n target_lang: input.target_lang,\n };\n};\n\nexport const HFT_TextTranslation_Stream: AiProviderStreamFn<\n TextTranslationTaskInput,\n TextTranslationTaskOutput,\n HfTransformersOnnxModelConfig\n> = async function* (input, model, signal): AsyncIterable<StreamEvent<TextTranslationTaskOutput>> {\n const noopProgress = () => {};\n const translate: TranslationPipeline = await getPipeline(model!, noopProgress, {}, signal);\n const { TextStreamer } = await loadTransformersSDK();\n\n const queue = createStreamEventQueue<StreamEvent<TextTranslationTaskOutput>>();\n const streamer = createStreamingTextStreamer(translate.tokenizer, queue, TextStreamer);\n\n const pipelinePromise = translate(\n input.text as string,\n {\n src_lang: input.source_lang,\n tgt_lang: input.target_lang,\n streamer,\n } as any\n ).then(\n () => queue.done(),\n (err: Error) => queue.error(err)\n );\n\n yield* queue.iterable;\n await pipelinePromise;\n yield { type: \"finish\", data: { target_lang: input.target_lang } as TextTranslationTaskOutput };\n};\n",
37
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { TextGenerationOutput, TextGenerationPipeline } from \"@huggingface/transformers\";\nimport {\n buildToolDescription,\n filterValidToolCalls,\n toTextFlatMessages,\n} from \"@workglow/ai/worker\";\nimport type {\n AiProviderRunFn,\n AiProviderStreamFn,\n ToolCallingTaskInput,\n ToolCallingTaskOutput,\n ToolDefinition,\n} from \"@workglow/ai\";\nimport type { StreamEvent } from \"@workglow/task-graph\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipeline, loadTransformersSDK } from \"./HFT_Pipeline\";\nimport {\n createStreamEventQueue,\n createStreamingTextStreamer,\n createTextStreamer,\n} from \"./HFT_Streaming\";\nimport { extractGeneratedText } from \"./HFT_TextOutput\";\nimport { createToolCallMarkupFilter, parseToolCallsFromText } from \"./HFT_ToolMarkup\";\n\nfunction mapHFTTools(tools: ReadonlyArray<ToolDefinition>) {\n return tools.map((t) => ({\n type: \"function\" as const,\n function: {\n name: t.name,\n description: buildToolDescription(t),\n parameters: t.inputSchema as any,\n },\n }));\n}\n\n/**\n * Resolve the tools list and optionally mutate the messages array based on the toolChoice option.\n * - \"none\": no tools\n * - \"required\": all tools + adds a system instruction so the model must call a tool\n * - specific name: filter to that tool (falls back to all tools if not found)\n * - \"auto\" / undefined: all tools\n */\nfunction resolveHFTToolsAndMessages(\n input: ToolCallingTaskInput,\n messages: Array<{ role: string; content: string }>\n): ReturnType<typeof mapHFTTools> | undefined {\n if (input.toolChoice === \"none\") {\n return undefined;\n }\n\n if (input.toolChoice === \"required\") {\n const requiredInstruction =\n \"You must call at least one tool from the provided tool list when answering.\";\n if (messages.length > 0 && messages[0].role === \"system\") 
{\n messages[0] = { ...messages[0], content: `${messages[0].content}\\n\\n${requiredInstruction}` };\n } else {\n messages.unshift({ role: \"system\", content: requiredInstruction });\n }\n return mapHFTTools(input.tools);\n }\n\n if (typeof input.toolChoice === \"string\" && input.toolChoice !== \"auto\") {\n // Specific tool name: filter to that tool if it exists\n const selectedTools = input.tools?.filter(\n (tool: ToolDefinition) => tool.name === input.toolChoice\n );\n const toolsToMap = selectedTools && selectedTools.length > 0 ? selectedTools : input.tools;\n return mapHFTTools(toolsToMap);\n }\n\n return mapHFTTools(input.tools);\n}\n\nexport const HFT_ToolCalling: AiProviderRunFn<\n ToolCallingTaskInput,\n ToolCallingTaskOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, signal) => {\n const isArrayInput = Array.isArray(input.prompt);\n\n const generateText: TextGenerationPipeline = await getPipeline(model!, onProgress, {}, signal);\n const { TextStreamer } = await loadTransformersSDK();\n\n if (isArrayInput) {\n const prompts = input.prompt as Array<\n (typeof input)[\"prompt\"] extends Array<infer T> ? T : unknown\n >;\n const texts: string[] = [];\n const toolCallsList: Array<{ id: string; input: { [x: string]: unknown }; name: string }[]> =\n [];\n\n for (const singlePrompt of prompts) {\n const singleInput = { ...input, prompt: singlePrompt } as ToolCallingTaskInput;\n const messages = toTextFlatMessages(singleInput);\n\n const tools = resolveHFTToolsAndMessages(singleInput, messages);\n\n // Use the tokenizer's chat template to format the prompt with tool definitions\n const prompt = (generateText.tokenizer as any).apply_chat_template(messages, {\n tools,\n tokenize: false,\n add_generation_prompt: true,\n }) as string;\n\n const streamer = createTextStreamer(generateText.tokenizer, onProgress, TextStreamer);\n\n let results = await generateText(prompt, {\n max_new_tokens: input.maxTokens ?? 
1024,\n temperature: input.temperature ?? undefined,\n return_full_text: false,\n streamer,\n });\n\n if (!Array.isArray(results)) {\n results = [results];\n }\n\n const responseText = extractGeneratedText(\n (results[0] as TextGenerationOutput[number])?.generated_text\n ).trim();\n\n const { text, toolCalls } = parseToolCallsFromText(responseText);\n texts.push(text);\n toolCallsList.push(filterValidToolCalls(toolCalls, singleInput.tools));\n }\n\n // When input.prompt is an array, return a single ToolCallingTaskOutput whose\n // `text` and `toolCalls` fields are arrays aligned by index (TypeSingleOrArray behavior).\n // FromSchema does not express array-of-arrays for toolCalls, so we assert the batch shape.\n return { text: texts, toolCalls: toolCallsList } as unknown as ToolCallingTaskOutput;\n }\n const messages = toTextFlatMessages(input);\n\n const tools = resolveHFTToolsAndMessages(input, messages);\n\n // Use the tokenizer's chat template to format the prompt with tool definitions\n const prompt = (generateText.tokenizer as any).apply_chat_template(messages, {\n tools,\n tokenize: false,\n add_generation_prompt: true,\n }) as string;\n\n const streamer = createTextStreamer(generateText.tokenizer, onProgress, TextStreamer);\n\n let results = await generateText(prompt, {\n max_new_tokens: input.maxTokens ?? 1024,\n temperature: input.temperature ?? 
undefined,\n return_full_text: false,\n streamer,\n });\n\n if (!Array.isArray(results)) {\n results = [results];\n }\n\n const responseText = extractGeneratedText(\n (results[0] as TextGenerationOutput[number])?.generated_text\n ).trim();\n\n const { text, toolCalls } = parseToolCallsFromText(responseText);\n return {\n text,\n toolCalls: filterValidToolCalls(toolCalls, input.tools),\n };\n};\n\nexport const HFT_ToolCalling_Stream: AiProviderStreamFn<\n ToolCallingTaskInput,\n ToolCallingTaskOutput,\n HfTransformersOnnxModelConfig\n> = async function* (input, model, signal): AsyncIterable<StreamEvent<ToolCallingTaskOutput>> {\n const noopProgress = () => {};\n const generateText: TextGenerationPipeline = await getPipeline(model!, noopProgress, {}, signal);\n const { TextStreamer } = await loadTransformersSDK();\n\n const messages = toTextFlatMessages(input);\n\n const tools = resolveHFTToolsAndMessages(input, messages);\n\n const prompt = (generateText.tokenizer as any).apply_chat_template(messages, {\n tools,\n tokenize: false,\n add_generation_prompt: true,\n }) as string;\n\n // Two queues: the inner queue receives raw tokens from the TextStreamer,\n // the outer queue receives filtered text-delta events (markup stripped).\n const innerQueue = createStreamEventQueue<StreamEvent<ToolCallingTaskOutput>>();\n const outerQueue = createStreamEventQueue<StreamEvent<ToolCallingTaskOutput>>();\n const streamer = createStreamingTextStreamer(generateText.tokenizer, innerQueue, TextStreamer);\n\n let fullText = \"\";\n const filter = createToolCallMarkupFilter((text) => {\n outerQueue.push({ type: \"text-delta\", port: \"text\", textDelta: text });\n });\n\n // Intercept raw text-delta events: accumulate the full text for post-hoc\n // parsing and feed tokens through the markup filter before forwarding.\n const originalPush = innerQueue.push;\n innerQueue.push = (event: StreamEvent<ToolCallingTaskOutput>) => {\n if (event.type === \"text-delta\" && \"textDelta\" in event) 
{\n fullText += event.textDelta;\n filter.feed(event.textDelta);\n } else {\n outerQueue.push(event);\n }\n // Still call originalPush so the inner queue's done/error mechanics work\n originalPush(event);\n };\n\n const originalDone = innerQueue.done;\n innerQueue.done = () => {\n filter.flush();\n outerQueue.done();\n originalDone();\n };\n\n const originalError = innerQueue.error;\n innerQueue.error = (e: Error) => {\n filter.flush();\n outerQueue.error(e);\n originalError(e);\n };\n\n const pipelinePromise = generateText(prompt, {\n max_new_tokens: input.maxTokens ?? 1024,\n temperature: input.temperature ?? undefined,\n return_full_text: false,\n streamer,\n }).then(\n () => innerQueue.done(),\n (err: Error) => innerQueue.error(err)\n );\n\n yield* outerQueue.iterable;\n await pipelinePromise;\n\n // Parse the accumulated (unfiltered) text for tool calls. The filter already\n // stripped tag-based markup from text-delta events; this pass also handles\n // bare-JSON tool calls and produces the canonical cleanedText for the finish event.\n const { text: cleanedText, toolCalls } = parseToolCallsFromText(fullText);\n const validToolCalls = filterValidToolCalls(toolCalls, input.tools);\n\n if (validToolCalls.length > 0) {\n yield { type: \"object-delta\", port: \"toolCalls\", objectDelta: [...validToolCalls] };\n }\n\n yield {\n type: \"finish\",\n data: { text: cleanedText, toolCalls: validToolCalls } as ToolCallingTaskOutput,\n };\n};\n",
38
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type {\n AiProviderRunFn,\n UnloadModelTaskRunInput,\n UnloadModelTaskRunOutput,\n} from \"@workglow/ai\";\nimport { HTF_CACHE_NAME } from \"./HFT_Constants\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { getPipelineCacheKey, loadTransformersSDK, removeCachedPipeline } from \"./HFT_Pipeline\";\n\nfunction hasBrowserCacheStorage(): boolean {\n return (\n typeof globalThis !== \"undefined\" &&\n \"caches\" in globalThis &&\n typeof (globalThis as unknown as { caches?: CacheStorage }).caches?.open === \"function\"\n );\n}\n\n/**\n * Deletes all Cache Storage entries for a given model path (browser / Service Worker).\n */\nasync function deleteModelCacheFromBrowser(model_path: string): Promise<void> {\n const cachesApi = (globalThis as unknown as { caches: CacheStorage }).caches;\n const cache = await cachesApi.open(HTF_CACHE_NAME);\n const keys = await cache.keys();\n const prefix = `/${model_path}/`;\n\n const requestsToDelete: Request[] = [];\n for (const request of keys) {\n const url = new URL(request.url);\n if (url.pathname.startsWith(prefix)) {\n requestsToDelete.push(request);\n }\n }\n\n for (const request of requestsToDelete) {\n try {\n const deleted = await cache.delete(request);\n if (!deleted) {\n const deletedByUrl = await cache.delete(request.url);\n if (!deletedByUrl) {\n /* ignore */\n }\n }\n } catch (error) {\n console.error(`Failed to delete cache entry: ${request.url}`, error);\n }\n }\n}\n\n/**\n * Removes cached ONNX/tokenizer files from the filesystem (Node/Bun / worker).\n */\nasync function deleteModelCacheFromFilesystem(model: HfTransformersOnnxModelConfig): Promise<void> {\n const { ModelRegistry } = await loadTransformersSDK();\n const { pipeline: pipelineType, model_path, dtype } = model.provider_config;\n await ModelRegistry.clear_pipeline_cache(pipelineType, 
model_path, {\n ...(dtype ? { dtype } : {}),\n });\n}\n\n/**\n * Core implementation for unloading a Hugging Face Transformers model.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_Unload: AiProviderRunFn<\n UnloadModelTaskRunInput,\n UnloadModelTaskRunOutput,\n HfTransformersOnnxModelConfig\n> = async (input, model, onProgress, _signal) => {\n // Delete the pipeline from the in-memory map\n const cacheKey = getPipelineCacheKey(model!);\n if (removeCachedPipeline(cacheKey)) {\n onProgress(50, \"Pipeline removed from memory\");\n }\n\n const model_path = model!.provider_config.model_path;\n if (hasBrowserCacheStorage()) {\n await deleteModelCacheFromBrowser(model_path);\n } else {\n await deleteModelCacheFromFilesystem(model!);\n }\n onProgress(100, \"Model cache deleted\");\n\n return {\n model: input.model!,\n };\n};\n",
39
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { AiProviderReactiveRunFn, AiProviderStreamFn } from \"@workglow/ai\";\nimport type { HfTransformersOnnxModelConfig } from \"./HFT_ModelSchema\";\nimport { HFT_ModelSearch } from \"./HFT_ModelSearch\";\n\nimport { HFT_BackgroundRemoval } from \"./HFT_BackgroundRemoval\";\nimport { HFT_CountTokens, HFT_CountTokens_Reactive } from \"./HFT_CountTokens\";\nimport { HFT_Download } from \"./HFT_Download\";\nimport { HFT_ImageClassification } from \"./HFT_ImageClassification\";\nimport { HFT_ImageEmbedding } from \"./HFT_ImageEmbedding\";\nimport { HFT_ImageSegmentation } from \"./HFT_ImageSegmentation\";\nimport { HFT_ImageToText } from \"./HFT_ImageToText\";\nimport { HFT_ModelInfo } from \"./HFT_ModelInfo\";\nimport { HFT_ObjectDetection } from \"./HFT_ObjectDetection\";\nimport {\n HFT_StructuredGeneration,\n HFT_StructuredGeneration_Stream,\n} from \"./HFT_StructuredGeneration\";\nimport { HFT_TextClassification } from \"./HFT_TextClassification\";\nimport { HFT_TextEmbedding } from \"./HFT_TextEmbedding\";\nimport { HFT_TextFillMask } from \"./HFT_TextFillMask\";\nimport { HFT_TextGeneration, HFT_TextGeneration_Stream } from \"./HFT_TextGeneration\";\nimport { HFT_TextLanguageDetection } from \"./HFT_TextLanguageDetection\";\nimport { HFT_TextNamedEntityRecognition } from \"./HFT_TextNamedEntityRecognition\";\nimport { HFT_TextQuestionAnswer, HFT_TextQuestionAnswer_Stream } from \"./HFT_TextQuestionAnswer\";\nimport { HFT_TextRewriter, HFT_TextRewriter_Stream } from \"./HFT_TextRewriter\";\nimport { HFT_TextSummary, HFT_TextSummary_Stream } from \"./HFT_TextSummary\";\nimport { HFT_TextTranslation, HFT_TextTranslation_Stream } from \"./HFT_TextTranslation\";\nimport { HFT_ToolCalling, HFT_ToolCalling_Stream } from \"./HFT_ToolCalling\";\nimport { HFT_Unload } from \"./HFT_Unload\";\n\n/**\n * All HuggingFace Transformers task run 
functions, keyed by task type name.\n * Used by `@workglow/ai-provider/hf-transformers/runtime` (inline + worker registration) and custom worker scripts when the\n * actual run function implementations are needed (inline mode, worker server).\n */\nexport const HFT_TASKS = {\n DownloadModelTask: HFT_Download,\n UnloadModelTask: HFT_Unload,\n ModelInfoTask: HFT_ModelInfo,\n CountTokensTask: HFT_CountTokens,\n TextEmbeddingTask: HFT_TextEmbedding,\n TextGenerationTask: HFT_TextGeneration,\n TextQuestionAnswerTask: HFT_TextQuestionAnswer,\n TextLanguageDetectionTask: HFT_TextLanguageDetection,\n TextClassificationTask: HFT_TextClassification,\n TextFillMaskTask: HFT_TextFillMask,\n TextNamedEntityRecognitionTask: HFT_TextNamedEntityRecognition,\n TextRewriterTask: HFT_TextRewriter,\n TextSummaryTask: HFT_TextSummary,\n TextTranslationTask: HFT_TextTranslation,\n ImageSegmentationTask: HFT_ImageSegmentation,\n ImageToTextTask: HFT_ImageToText,\n BackgroundRemovalTask: HFT_BackgroundRemoval,\n ImageEmbeddingTask: HFT_ImageEmbedding,\n ImageClassificationTask: HFT_ImageClassification,\n ObjectDetectionTask: HFT_ObjectDetection,\n ToolCallingTask: HFT_ToolCalling,\n StructuredGenerationTask: HFT_StructuredGeneration,\n ModelSearchTask: HFT_ModelSearch,\n} as const;\n\n/**\n * Streaming variants of HuggingFace Transformers task run functions.\n */\nexport const HFT_STREAM_TASKS: Record<\n string,\n AiProviderStreamFn<any, any, HfTransformersOnnxModelConfig>\n> = {\n TextGenerationTask: HFT_TextGeneration_Stream,\n TextRewriterTask: HFT_TextRewriter_Stream,\n TextSummaryTask: HFT_TextSummary_Stream,\n TextQuestionAnswerTask: HFT_TextQuestionAnswer_Stream,\n TextTranslationTask: HFT_TextTranslation_Stream,\n ToolCallingTask: HFT_ToolCalling_Stream,\n StructuredGenerationTask: HFT_StructuredGeneration_Stream,\n};\n\nexport const HFT_REACTIVE_TASKS: Record<\n string,\n AiProviderReactiveRunFn<any, any, HfTransformersOnnxModelConfig>\n> = {\n CountTokensTask: 
HFT_CountTokens_Reactive,\n};\n",
40
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n QueuedAiProvider,\n type AiProviderReactiveRunFn,\n type AiProviderRunFn,\n type AiProviderStreamFn,\n} from \"@workglow/ai\";\nimport { HF_TRANSFORMERS_ONNX } from \"./common/HFT_Constants\";\nimport type { HfTransformersOnnxModelConfig } from \"./common/HFT_ModelSchema\";\n\n/** Main-thread registration (inline or worker-backed); creates the default job queue. */\nexport class HuggingFaceTransformersQueuedProvider extends QueuedAiProvider<HfTransformersOnnxModelConfig> {\n readonly name = HF_TRANSFORMERS_ONNX;\n readonly isLocal = true;\n readonly supportsBrowser = true;\n\n readonly taskTypes = [\n \"DownloadModelTask\",\n \"UnloadModelTask\",\n \"ModelInfoTask\",\n \"CountTokensTask\",\n \"TextEmbeddingTask\",\n \"TextGenerationTask\",\n \"TextQuestionAnswerTask\",\n \"TextLanguageDetectionTask\",\n \"TextClassificationTask\",\n \"TextFillMaskTask\",\n \"TextNamedEntityRecognitionTask\",\n \"TextRewriterTask\",\n \"TextSummaryTask\",\n \"TextTranslationTask\",\n \"ImageSegmentationTask\",\n \"ImageToTextTask\",\n \"BackgroundRemovalTask\",\n \"ImageEmbeddingTask\",\n \"ImageClassificationTask\",\n \"ObjectDetectionTask\",\n \"ToolCallingTask\",\n \"ModelSearchTask\",\n ] as const;\n\n constructor(\n tasks?: Record<string, AiProviderRunFn<any, any, HfTransformersOnnxModelConfig>>,\n streamTasks?: Record<string, AiProviderStreamFn<any, any, HfTransformersOnnxModelConfig>>,\n reactiveTasks?: Record<string, AiProviderReactiveRunFn<any, any, HfTransformersOnnxModelConfig>>\n ) {\n super(tasks, streamTasks, reactiveTasks);\n }\n}\n",
41
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { AiProviderRegisterOptions } from \"@workglow/ai\";\nimport { clearHftInlinePipelineCache } from \"./common/HFT_InlineLifecycle\";\nimport { HFT_REACTIVE_TASKS, HFT_STREAM_TASKS, HFT_TASKS } from \"./common/HFT_JobRunFns\";\nimport { HuggingFaceTransformersQueuedProvider } from \"./HuggingFaceTransformersQueuedProvider\";\nimport { loadTransformersSDK } from \"./common/HFT_Pipeline\";\n\n/**\n * Register HuggingFace Transformers ONNX on the **main thread** with inline execution\n * (full `@huggingface/transformers` and all task run functions in this bundle).\n *\n * **Re-exported from `@workglow/ai-provider/hf-transformers/runtime`** — not from\n * `@workglow/ai-provider/hf-transformers` — so worker-only apps do not pull this graph.\n */\nexport async function registerHuggingFaceTransformersInline(\n options?: AiProviderRegisterOptions\n): Promise<void> {\n const { env } = await loadTransformersSDK();\n env.backends!.onnx!.wasm!.proxy = true;\n const provider = new HuggingFaceTransformersQueuedProvider(\n HFT_TASKS,\n HFT_STREAM_TASKS,\n HFT_REACTIVE_TASKS\n );\n const baseDispose = provider.dispose.bind(provider);\n provider.dispose = async () => {\n await clearHftInlinePipelineCache();\n await baseDispose();\n };\n await provider.register(options ?? {});\n}\n",
42
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\n/**\n * Worker-only entry: registers HuggingFace Transformers task run functions on the worker server.\n * Re-exported from `@workglow/ai-provider/hf-transformers/runtime` so the main bundle does not resolve\n * `HFT_JobRunFns` / full task implementations.\n */\n\nimport { getLogger, globalServiceRegistry, WORKER_SERVER } from \"@workglow/util/worker\";\nimport { HFT_REACTIVE_TASKS, HFT_STREAM_TASKS, HFT_TASKS } from \"./common/HFT_JobRunFns\";\nimport { HuggingFaceTransformersProvider } from \"./HuggingFaceTransformersProvider\";\nimport { loadTransformersSDK } from \"./common/HFT_Pipeline\";\n\nexport async function registerHuggingFaceTransformersWorker(): Promise<void> {\n const { env } = await loadTransformersSDK();\n env.backends!.onnx!.wasm!.proxy = true;\n const workerServer = globalServiceRegistry.get(WORKER_SERVER);\n new HuggingFaceTransformersProvider(\n HFT_TASKS,\n HFT_STREAM_TASKS,\n HFT_REACTIVE_TASKS\n ).registerOnWorkerServer(workerServer);\n workerServer.sendReady();\n getLogger().info(\"HuggingFaceTransformers worker job run functions registered\");\n}\n",
43
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n AiProvider,\n type AiProviderReactiveRunFn,\n type AiProviderRunFn,\n type AiProviderStreamFn,\n} from \"@workglow/ai/worker\";\nimport { HF_TRANSFORMERS_ONNX } from \"./common/HFT_Constants\";\nimport type { HfTransformersOnnxModelConfig } from \"./common/HFT_ModelSchema\";\n\n/**\n * AI provider for HuggingFace Transformers ONNX models.\n *\n * Supports text, vision, and multimodal tasks via the @huggingface/transformers library.\n *\n * Task run functions are injected via the constructor so that the heavy\n * `@huggingface/transformers` library is only imported where actually needed\n * (inline mode, worker server), not on the main thread in worker mode.\n */\nexport class HuggingFaceTransformersProvider extends AiProvider<HfTransformersOnnxModelConfig> {\n readonly name = HF_TRANSFORMERS_ONNX;\n readonly isLocal = true;\n readonly supportsBrowser = true;\n\n readonly taskTypes = [\n \"DownloadModelTask\",\n \"UnloadModelTask\",\n \"ModelInfoTask\",\n \"CountTokensTask\",\n \"TextEmbeddingTask\",\n \"TextGenerationTask\",\n \"TextQuestionAnswerTask\",\n \"TextLanguageDetectionTask\",\n \"TextClassificationTask\",\n \"TextFillMaskTask\",\n \"TextNamedEntityRecognitionTask\",\n \"TextRewriterTask\",\n \"TextSummaryTask\",\n \"TextTranslationTask\",\n \"ImageSegmentationTask\",\n \"ImageToTextTask\",\n \"BackgroundRemovalTask\",\n \"ImageEmbeddingTask\",\n \"ImageClassificationTask\",\n \"ObjectDetectionTask\",\n \"ToolCallingTask\",\n \"ModelSearchTask\",\n ] as const;\n\n constructor(\n tasks?: Record<string, AiProviderRunFn<any, any, HfTransformersOnnxModelConfig>>,\n streamTasks?: Record<string, AiProviderStreamFn<any, any, HfTransformersOnnxModelConfig>>,\n reactiveTasks?: Record<string, AiProviderReactiveRunFn<any, any, HfTransformersOnnxModelConfig>>\n ) {\n super(tasks, streamTasks, reactiveTasks);\n }\n}\n",
44
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\n/**\n * Heavyweight HuggingFace Transformers registration: worker server (`registerHuggingFaceTransformersWorker`)\n * and main-thread inline (`registerHuggingFaceTransformersInline`). Import from\n * `@workglow/ai-provider/hf-transformers/runtime` only — not from the main `hf-transformers` barrel.\n *\n * Use `export *` (not `export { … } from \"…\"`) so the Bun bundler keeps the module graph; the latter\n * was emitted as bare re-exports with no bindings.\n */\n\nexport * from \"./common/HFT_Constants\";\nexport * from \"./common/HFT_ModelSchema\";\nexport * from \"./common/HFT_OnnxDtypes\";\nexport * from \"./common/HFT_ToolMarkup\";\nexport * from \"./registerHuggingFaceTransformersInline\";\nexport * from \"./registerHuggingFaceTransformersWorker\";\nexport * from \"./common/HFT_Pipeline\";\n"
45
+ ],
46
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAOA;AAUO,SAAS,cAAc,CAAC,KAAmB;AAAA,EAChD,YAAY;AAAA,EACZ,IAAI,kBAAkB;AAAA,IACpB,iBAAiB,IAAI,WAAW;AAAA,EAClC;AAAA;AAGF,eAAsB,mBAAmB,GAAG;AAAA,EAC1C,IAAI,CAAC,kBAAkB;AAAA,IACrB,IAAI;AAAA,MACF,mBAAmB,MAAa;AAAA,MAChC,iBAAiB,IAAI,QAAQ;AAAA,MAC7B,IAAI,WAAW;AAAA,QACb,iBAAiB,IAAI,WAAW;AAAA,MAClC;AAAA,MACA,MAAM;AAAA,MACN,MAAM,IAAI,MACR,8HACF;AAAA;AAAA,EAEJ;AAAA,EACA,OAAO;AAAA;AAMT,SAAS,cAAc,CAAC,KAAa,SAAyC;AAAA,EAC5E,IAAI;AAAA,EACJ,IAAI;AAAA,IACF,MAAM,WAAW,IAAI,IAAI,GAAG,EAAE;AAAA,IAC9B,YAAY,WAAW,eAAe,uBAAuB;AAAA,MAC3D,IAAI,SAAS,SAAS,IAAI,YAAY,GAAG;AAAA,QACvC,SAAS,WAAW;AAAA,QACpB;AAAA,MACF;AAAA,IACF;AAAA,IACA,MAAM;AAAA,EAGR,OAAO,MAAM,KAAK,KAAK,YAAa,SAAS,EAAE,OAAO,IAAI,CAAC,EAAG,CAAC;AAAA;AAW1D,SAAS,kBAAkB,GAAS;AAAA,EACzC,UAAU,MAAM;AAAA;AAGX,SAAS,iBAAiB,CAAC,UAA2B;AAAA,EAC3D,OAAO,UAAU,IAAI,QAAQ;AAAA;AAGxB,SAAS,oBAAoB,CAAC,UAA2B;AAAA,EAC9D,OAAO,UAAU,OAAO,QAAQ;AAAA;AAIlC,SAAS,YAAY,GAAY;AAAA,EAC/B,OAAO,OAAO,eAAe,eAAe,OAAQ,WAAmB,WAAW;AAAA;AAO7E,SAAS,mBAAmB,CAAC,OAA8C;AAAA,EAChF,MAAM,QAAQ,MAAM,gBAAgB,SAAS;AAAA,EAC7C,MAAM,SAAS,MAAM,gBAAgB,UAAU;AAAA,EAC/C,OAAO,GAAG,MAAM,gBAAgB,cAAc,MAAM,gBAAgB,YAAY,SAAS;AAAA;AAS3F,eAAsB,WAAW,CAC/B,OACA,YACA,UAAkC,CAAC,GACnC,QACA,mBAA2B,IACb;AAAA,EACd,MAAM,WAAW,oBAAoB,KAAK;AAAA,EAC1C,IAAI,UAAU,IAAI,QAAQ,GAAG;AAAA,IAC3B,UAAU,EAAE,MAAM,0BAA0B,EAAE,SAAS,CAAC;AAAA,IACxD,OAAO,UAAU,IAAI,QAAQ;AAAA,EAC/B;AAAA,EAIA,MAAM,WAAW,qBAAqB,IAAI,QAAQ;AAAA,EAClD,IAAI,UAAU;AAAA,IACZ,MAAM;AAAA,IACN,MAAM,SAAS,UAAU,IAAI,QAAQ;AAAA,IACrC,IAAI;AAAA,MAAQ,OAAO;AAAA,EAErB;AAAA,EAEA,MAAM,cAAc,cAClB,OACA,YACA,SACA,kBACA,UACA,MACF,EAAE,QAAQ,MAAM;AAAA,IACd,qBAAqB,OAAO,QAAQ;AAAA,GACrC;AAAA,EACD,qBAAqB,IAAI,UAAU,WAAW;AAAA,EAC9C,OAAO;AAAA;AAAA,IA7HL,kBACA,WA+BE,uBAkBA,WAGA,sBA2EA,gBAAgB,OACpB,OACA,YACA,SACA,kBACA,UACA,WACG;AAAA,EAEH,IAAI,mBAAmB;AAAA,EAEvB,IAAI,kBAKO;AAAA,EACX,IAAI,gBAAsD;AAAA,EAC1D,MAAM,cAAc;AAAA,EAEpB,MAAM,uBAAuB,CAC3B,MACA,cACA,aAC6D;AAAA,IAC7D,MAAM,UAAoE;AAAA,MACxE;AAAA,MACA,UAAU;AAAA,IACZ;AAAA,IACA,IAAI,YAAY,OAAO,KAA
K,QAAQ,EAAE,SAAS,GAAG;AAAA,MAChD,QAAQ,QAAQ;AAAA,IAClB;AAAA,IACA,OAAO;AAAA;AAAA,EAOT,MAAM,eAAe,CACnB,UACA,MACA,cACA,aACS;AAAA,IACT,MAAM,MAAM,KAAK,IAAI;AAAA,IACrB,MAAM,qBAAqB,MAAM;AAAA,IACjC,MAAM,UAAU,qBAAqB;AAAA,IACrC,MAAM,UAAU,YAAY;AAAA,IAE5B,IAAI,WAAW,SAAS;AAAA,MACtB,IAAI,eAAe;AAAA,QACjB,aAAa,aAAa;AAAA,QAC1B,gBAAgB;AAAA,MAClB;AAAA,MACA,kBAAkB;AAAA,MAClB,WACE,KAAK,MAAM,QAAQ,GACnB,qBACA,qBAAqB,MAAM,cAAc,QAAQ,CACnD;AAAA,MACA,mBAAmB;AAAA,MACnB;AAAA,IACF;AAAA,IAEA,IAAI,qBAAqB,aAAa;AAAA,MACpC,kBAAkB,EAAE,UAAU,MAAM,cAAc,SAAS;AAAA,MAC3D,IAAI,CAAC,eAAe;AAAA,QAClB,MAAM,gBAAgB,KAAK,IAAI,GAAG,cAAc,kBAAkB;AAAA,QAClE,gBAAgB,WAAW,MAAM;AAAA,UAC/B,gBAAgB;AAAA,UAChB,IAAI,iBAAiB;AAAA,YACnB,MAAM,IAAI;AAAA,YACV,WACE,KAAK,MAAM,EAAE,QAAQ,GACrB,qBACA,qBAAqB,EAAE,MAAM,EAAE,cAAc,EAAE,QAAQ,CACzD;AAAA,YACA,mBAAmB,KAAK,IAAI;AAAA,YAC5B,kBAAkB;AAAA,UACpB;AAAA,WACC,aAAa;AAAA,MAClB;AAAA,MACA;AAAA,IACF;AAAA,IAEA,WACE,KAAK,MAAM,QAAQ,GACnB,qBACA,qBAAqB,MAAM,cAAc,QAAQ,CACnD;AAAA,IACA,mBAAmB;AAAA,IACnB,kBAAkB;AAAA;AAAA,EAIpB,MAAM,cAAc;AAAA,EAGpB,MAAM,YAAY,MAAM,gBAAgB;AAAA,EACxC,MAAM,kBAAkB,IAAI;AAAA,EAC5B,sBAAsB,IAAI,WAAW,eAAe;AAAA,EACpD,IAAI,aAAa;AAAA,IACf,IAAI,YAAY,SAAS;AAAA,MACvB,gBAAgB,MAAM;AAAA,IACxB,EAAO;AAAA,MACL,YAAY,iBAAiB,SAAS,MAAM,gBAAgB,MAAM,GAAG,EAAE,MAAM,KAAK,CAAC;AAAA;AAAA,EAEvF;AAAA,EAGA,MAAM,mBAAmB,CAAC,WAAyB;AAAA,IACjD,IAAI,aAAa;AAAA,MAAS;AAAA,IAE1B,IAAK,OAAe,WAAW,kBAAkB;AAAA,MAC/C,MAAM,cAAc;AAAA,MACpB,MAAM,iBAAkB,YAAY,WAAW,mBAAoB;AAAA,MAGnE,IAAI,aAAa;AAAA,MACjB,IAAI,qBAAqB;AAAA,MACzB,MAAM,QACJ,YAAY;AAAA,MACd,IAAI,OAAO;AAAA,QACT,YAAY,MAAM,SAAS,OAAO,QAAQ,KAAK,GAAG;AAAA,UAChD,IAAI,KAAK,SAAS,KAAK,OAAO;AAAA,YAC5B,aAAa;AAAA,YACb,qBAAqB,KAAK,QAAQ,IAAK,KAAK,SAAS,KAAK,QAAS,MAAM;AAAA,YACzE;AAAA,UACF;AAAA,QACF;AAAA,QACA,IAAI,CAAC,YAAY;AAAA,UACf,MAAM,YAAY,OAAO,KAAK,KAAK;AAAA,UACnC,IAAI,UAAU,SAAS,GAAG;AAAA,YACxB,aAAa,UAAU,UAAU,SAAS;AAAA,YAC1C,qBAAqB;AAAA,UACvB;AAAA,QACF;AAAA,MACF;AAAA,MAEA,aAAa,gBAAgB,YAAY,oBAAoB,KAAK;AAAA,IACpE;AAAA;AAAA,EAGF,IAAI,SAAS,MAAM,gBAAgB;AAAA,EACnC,IAAI,CAAC,aAAa,GAAG;
AAAA,IACnB,IAAI,WAAW,UAAU,WAAW,UAAU;AAAA,MAC5C,SAAS;AAAA,IACX;AAAA,EACF;AAAA,EAEA,MAAM,kBAA0C;AAAA,IAC9C,OAAO,MAAM,gBAAgB,SAAS;AAAA,OAClC,MAAM,gBAAgB,2BACtB,EAAE,uBAAuB,MAAM,gBAAgB,yBAAyB,IACxE,CAAC;AAAA,OACD,SAAS,EAAE,OAAsB,IAAI,CAAC;AAAA,OACvC;AAAA,IACH,mBAAmB;AAAA,EACrB;AAAA,EAGA,IAAI,aAAa,SAAS;AAAA,IACxB,sBAAsB,OAAO,SAAS;AAAA,IACtC,MAAM,IAAI,MAAM,4CAA4C;AAAA,EAC9D;AAAA,EAEA,MAAM,eAAe,MAAM,gBAAgB;AAAA,EAE3C,QAAQ,aAAa,MAAM,oBAAoB;AAAA,EAE/C,MAAM,SAAS,UAAU;AAAA,EACzB,MAAM,qBAAqB,gBAAgB;AAAA,EAC3C,OAAO,KAAK,oBAAoB,EAAE,cAAc,UAAU,CAAC;AAAA,EAE3D,IAAI;AAAA,IACF,MAAM,SAAS,MAAM,SAAS,cAAc,MAAM,gBAAgB,YAAY,eAAe;AAAA,IAG7F,IAAI,eAAe;AAAA,MACjB,aAAa,aAAa;AAAA,MAC1B,gBAAgB;AAAA,IAClB;AAAA,IAEA,MAAM,eAAe;AAAA,IAMrB,IAAI,cAAc;AAAA,MAChB,WACE,KAAK,MAAM,aAAa,QAAQ,GAChC,qBACA,qBAAqB,aAAa,MAAM,aAAa,cAAc,aAAa,QAAQ,CAC1F;AAAA,MACA,kBAAkB;AAAA,IACpB;AAAA,IAGA,IAAI,aAAa,SAAS;AAAA,MACxB,OAAO,QAAQ,oBAAoB,EAAE,QAAQ,UAAU,CAAC;AAAA,MACxD,MAAM,IAAI,MAAM,2CAA2C;AAAA,IAC7D;AAAA,IAEA,OAAO,QAAQ,oBAAoB,EAAE,QAAQ,SAAS,CAAC;AAAA,IACvD,UAAU,IAAI,UAAU,MAAM;AAAA,IAC9B,OAAO;AAAA,IACP,OAAO,OAAY;AAAA,IACnB,OAAO,QAAQ,oBAAoB,EAAE,QAAQ,SAAS,OAAO,OAAO,KAAK,EAAE,CAAC;AAAA,IAE5E,IAAI,aAAa,WAAW,gBAAgB,OAAO,SAAS;AAAA,MAC1D,MAAM,IAAI,MAAM,2BAA2B;AAAA,IAC7C;AAAA,IACA,MAAM;AAAA,YACN;AAAA,IACA,sBAAsB,OAAO,SAAS;AAAA;AAAA;AAAA;AAAA,EAxTpC,wBAAwB,IAAI;AAAA,EAkB5B,YAAY,IAAI;AAAA,EAGhB,uBAAuB,IAAI;AAAA;;;ACzD1B,IAAM,uBAAuB;AAC7B,IAAM,iBAAiB;AAavB,IAAM,uBAAuB;AAAA,EAClC,MAAM;AAAA,EACN,MAAM;AAAA,EACN,MAAM;AAAA,EACN,IAAI;AAAA,EACJ,MAAM;AAAA,EACN,OAAO;AAAA,EACP,IAAI;AAAA,EACJ,MAAM;AAAA,EACN,OAAO;AACT;AAcA,IAAM,sBAAsB;AAAA,EAC1B,aAAa;AAAA,EACb,wBAAwB;AAAA,EACxB,mBAAmB;AAAA,EACnB,wBAAwB;AAAA,EACxB,uBAAuB;AAAA,EACvB,eAAe;AAAA,EACf,aAAa;AAAA,EACb,sBAAsB;AAAA,EACtB,4BAA4B;AAAA,EAC5B,sBAAsB;AACxB;AAcA,IAAM,wBAAwB;AAAA,EAC5B,sBAAsB;AAAA,EACtB,sBAAsB;AAAA,EACtB,oBAAoB;AAAA,EACpB,wBAAwB;AAAA,EACxB,kBAAkB;AAAA,EAElB,iBAAiB;AAAA,EAEjB,oBAAoB;AAAA,EACpB,4BAA4B;AAC9B;AAOA,IAAM,uBAAuB;AAAA,EAC3B,wBAAwB;AAAA,EACxB,gCAAgC;AAAA,EAChC,kBAAkB
;AACpB;AASA,IAAM,4BAA4B;AAAA,EAChC,+BAA+B;AAAA,EAC/B,iBAAiB;AAAA,EACjB,kCAAkC;AAAA,EAClC,kCAAkC;AAAA,EAClC,8BAA8B;AAChC;AAQO,IAAM,kBAAkB;AAAA,KAC1B;AAAA,KACA;AAAA,KACA;AAAA,KACA;AACL;;ACjHA;AAIO,IAAM,gCAAgC;AAAA,EAC3C,MAAM;AAAA,EACN,YAAY;AAAA,IACV,UAAU;AAAA,MACR,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,iBAAiB;AAAA,MACf,MAAM;AAAA,MACN,aAAa;AAAA,MACb,YAAY;AAAA,QACV,UAAU;AAAA,UACR,MAAM;AAAA,UACN,MAAM,OAAO,OAAO,eAAe;AAAA,UACnC,aAAa;AAAA,UACb,SAAS;AAAA,QACX;AAAA,QACA,YAAY;AAAA,UACV,MAAM;AAAA,UACN,aAAa;AAAA,QACf;AAAA,QACA,OAAO;AAAA,UACL,MAAM;AAAA,UACN,MAAM,OAAO,OAAO,oBAAoB;AAAA,UACxC,aAAa;AAAA,UACb,SAAS;AAAA,QACX;AAAA,QACA,QAAQ;AAAA,UACN,MAAM;AAAA,UACN,MAAM,CAAC,OAAO,OAAO,UAAU,QAAQ,OAAO;AAAA,UAC9C,aAAa;AAAA,UACb,SAAS;AAAA,QACX;AAAA,QACA,qBAAqB;AAAA,UACnB,MAAM;AAAA,UACN,OAAO,EAAE,MAAM,SAAS;AAAA,UACxB,aAAa;AAAA,UACb,eAAe;AAAA,QACjB;AAAA,QACA,sBAAsB;AAAA,UACpB,MAAM;AAAA,UACN,SAAS;AAAA,QACX;AAAA,QACA,sBAAsB;AAAA,UACpB,MAAM;AAAA,UACN,SAAS;AAAA,QACX;AAAA,QACA,0BAA0B;AAAA,UACxB,MAAM;AAAA,UACN,aAAa;AAAA,QACf;AAAA,QACA,mBAAmB;AAAA,UACjB,MAAM;AAAA,UACN,aAAa;AAAA,QACf;AAAA,QACA,SAAS;AAAA,UACP,MAAM;AAAA,UACN,MAAM,CAAC,QAAQ,cAAc,KAAK;AAAA,UAClC,aAAa;AAAA,UACb,SAAS;AAAA,QACX;AAAA,QACA,WAAW;AAAA,UACT,MAAM;AAAA,UACN,aAAa;AAAA,UACb,SAAS;AAAA,QACX;AAAA,QACA,gBAAgB;AAAA,UACd,MAAM;AAAA,UACN,aAAa;AAAA,QACf;AAAA,QACA,KAAK;AAAA,UACH,MAAM;AAAA,UACN,aAAa;AAAA,UACb,SAAS;AAAA,QACX;AAAA,MACF;AAAA,MACA,UAAU,CAAC,cAAc,UAAU;AAAA,MACnC,sBAAsB;AAAA,MACtB,IAAI;AAAA,QACF,YAAY;AAAA,UACV,UAAU;AAAA,YACR,OAAO;AAAA,UACT;AAAA,QACF;AAAA,MACF;AAAA,MACA,MAAM;AAAA,QACJ,UAAU,CAAC,mBAAmB;AAAA,MAChC;AAAA,IACF;AAAA,EACF;AAAA,EACA,UAAU,CAAC,YAAY,iBAAiB;AAAA,EACxC,sBAAsB;AACxB;AAEO,IAAM,sCAAsC;AAAA,EACjD,MAAM;AAAA,EACN,YAAY;AAAA,OACP,kBAAkB;AAAA,OAClB,8BAA8B;AAAA,EACnC;AAAA,EACA,UAAU,CAAC,GAAG,kBAAkB,UAAU,GAAG,8BAA8B,QAAQ;AAAA,EACnF,sBAAsB;AACxB;AAIO,IAAM,sCAAsC;AAAA,EACjD,MAAM;AAAA,EACN,YAAY;AAAA,OACP,kBAAkB;AAAA,OAClB,8BAA8B;AAAA,EACnC;AAAA,EACA,UAAU,CAAC,GAAG,kBAAkB,UAAU,GAAG,8BAA8B,QAAQ;AAAA,EACnF,sBAAsB;AACxB;;ACtHO,I
AAM,mCAAmC;AAAA,EAC9C,MAAM;AAAA,EACN,MAAM;AAAA,EACN,MAAM;AAAA,EACN,OAAO;AAAA,EACP,IAAI;AAAA,EACJ,IAAI;AAAA,EACJ,OAAO;AAAA,EACP,MAAM;AACR;AAIA,IAAM,yBAA0B,OAAO,QAAQ,gCAAgC,EAC5E,OAAO,IAAI,YAAY,WAAW,EAAE,EACpC,KAAK,CAAC,GAAG,MAAM,EAAE,GAAG,SAAS,EAAE,GAAG,MAAM;AAEpC,SAAS,sBAAsB,CAAC,QAKhB;AAAA,EACrB,MAAM,YAAY,OAAO,aAAa;AAAA,EACtC,MAAM,SAAS,YAAY;AAAA,EAE3B,MAAM,QAAkB,CAAC;AAAA,EACzB,WAAW,MAAM,OAAO,WAAW;AAAA,IACjC,IAAI,CAAC,GAAG,WAAW,MAAM;AAAA,MAAG;AAAA,IAC5B,IAAI,CAAC,GAAG,SAAS,OAAO;AAAA,MAAG;AAAA,IAC3B,IAAI,GAAG,SAAS,YAAY;AAAA,MAAG;AAAA,IAC/B,MAAM,KAAK,GAAG,MAAM,OAAO,QAAQ,CAAC,QAAQ,MAAM,CAAC;AAAA,EACrD;AAAA,EAEA,IAAI,MAAM,WAAW;AAAA,IAAG,OAAO,CAAC;AAAA,EAEhC,MAAM,SAA+D,CAAC;AAAA,EACtE,WAAW,QAAQ,OAAO;AAAA,IACxB,IAAI,UAAU;AAAA,IACd,YAAY,OAAO,WAAW,wBAAwB;AAAA,MACpD,IAAI,KAAK,SAAS,MAAM,GAAG;AAAA,QACzB,OAAO,KAAK,EAAE,UAAU,KAAK,MAAM,GAAG,CAAC,OAAO,MAAM,GAAG,MAAM,CAAC;AAAA,QAC9D,UAAU;AAAA,QACV;AAAA,MACF;AAAA,IACF;AAAA,IACA,IAAI,CAAC,SAAS;AAAA,MACZ,OAAO,KAAK,EAAE,UAAU,MAAM,OAAO,OAAO,CAAC;AAAA,IAC/C;AAAA,EACF;AAAA,EAEA,MAAM,eAAe,IAAI,IAAI,OAAO,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC;AAAA,EAC1D,MAAM,UAAU,IAAI;AAAA,EACpB,aAAa,UAAU,WAAW,QAAQ;AAAA,IACxC,IAAI,MAAM,QAAQ,IAAI,KAAK;AAAA,IAC3B,IAAI,CAAC,KAAK;AAAA,MACR,MAAM,IAAI;AAAA,MACV,QAAQ,IAAI,OAAO,GAAG;AAAA,IACxB;AAAA,IACA,IAAI,IAAI,QAAQ;AAAA,EAClB;AAAA,EAEA,MAAM,YAAY,OAAO,KAAK,gCAAgC;AAAA,EAC9D,OAAO,UAAU,OAAO,CAAC,UAAU;AAAA,IACjC,MAAM,MAAM,QAAQ,IAAI,KAAK;AAAA,IAC7B,OAAO,QAAQ,aAAa,IAAI,SAAS,aAAa;AAAA,GACvD;AAAA;;AClDI,SAAS,sBAAsB,CAAC,cAGrC;AAAA,EACA,MAAM,YAAuB,CAAC;AAAA,EAC9B,IAAI,YAAY;AAAA,EAChB,IAAI,cAAc;AAAA,EAGlB,MAAM,mBAAmB;AAAA,EACzB,IAAI;AAAA,EACJ,QAAQ,WAAW,iBAAiB,KAAK,YAAY,OAAO,MAAM;AAAA,IAChE,IAAI;AAAA,MACF,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG,KAAK,CAAC;AAAA,MAC5C,MAAM,KAAK,QAAQ;AAAA,MACnB,UAAU,KAAK;AAAA,QACb;AAAA,QACA,MAAM,OAAO,QAAQ,OAAO,UAAU,QAAQ;AAAA,QAC9C,OAAQ,OAAO,aACb,OAAO,UAAU,aACjB,OAAO,cACP,CAAC;AAAA,MACL,CAAC;AAAA,MACD,MAAM;AAAA,EAGV;AAAA,EAEA,IAAI,UAAU,SAAS,GAAG;AAAA,IAExB,cAAc,aAAa,QAAQ,qCAAqC,EAAE,EAAE,KAAK;AAAA,IACjF,OAAO,EAA
E,MAAM,aAAa,UAAU;AAAA,EACxC;AAAA,EAGA,MAAM,iBAAsE,CAAC;AAAA,GAC5E,SAAS,yBAAyB,CAAC,QAAgB;AAAA,IAClD,MAAM,SAAS,OAAO;AAAA,IACtB,IAAI,IAAI;AAAA,IACR,OAAO,IAAI,QAAQ;AAAA,MACjB,IAAI,OAAO,OAAO,KAAK;AAAA,QACrB;AAAA,QACA;AAAA,MACF;AAAA,MACA,IAAI,QAAQ;AAAA,MACZ,IAAI,IAAI,IAAI;AAAA,MACZ,IAAI,WAAW;AAAA,MACf,IAAI,SAAS;AAAA,MACb,OAAO,IAAI,UAAU,QAAQ,GAAG;AAAA,QAC9B,MAAM,KAAK,OAAO;AAAA,QAClB,IAAI,UAAU;AAAA,UACZ,IAAI,QAAQ;AAAA,YACV,SAAS;AAAA,UACX,EAAO,SAAI,OAAO,MAAM;AAAA,YACtB,SAAS;AAAA,UACX,EAAO,SAAI,OAAO,KAAK;AAAA,YACrB,WAAW;AAAA,UACb;AAAA,QACF,EAAO;AAAA,UACL,IAAI,OAAO,KAAK;AAAA,YACd,WAAW;AAAA,UACb,EAAO,SAAI,OAAO,KAAK;AAAA,YACrB;AAAA,UACF,EAAO,SAAI,OAAO,KAAK;AAAA,YACrB;AAAA,UACF;AAAA;AAAA,QAEF;AAAA,MACF;AAAA,MACA,IAAI,UAAU,GAAG;AAAA,QACf,eAAe,KAAK,EAAE,MAAM,OAAO,MAAM,GAAG,CAAC,GAAG,OAAO,GAAG,KAAK,EAAE,CAAC;AAAA,QAClE,IAAI;AAAA,MACN,EAAO;AAAA,QACL;AAAA;AAAA,IAEJ;AAAA,KACC,YAAY;AAAA,EAEf,MAAM,gBAAuD,CAAC;AAAA,EAC9D,WAAW,aAAa,gBAAgB;AAAA,IACtC,IAAI;AAAA,MACF,MAAM,SAAS,KAAK,MAAM,UAAU,IAAI;AAAA,MACxC,IAAI,OAAO,SAAS,OAAO,cAAc,aAAa,OAAO,eAAe,YAAY;AAAA,QACtF,MAAM,KAAK,QAAQ;AAAA,QACnB,UAAU,KAAK;AAAA,UACb;AAAA,UACA,MAAM,OAAO;AAAA,UACb,OAAQ,OAAO,aAAa,OAAO,cAAc,CAAC;AAAA,QACpD,CAAC;AAAA,QACD,cAAc,KAAK,EAAE,OAAO,UAAU,OAAO,KAAK,UAAU,IAAI,CAAC;AAAA,MACnE,EAAO,SAAI,OAAO,UAAU,MAAM;AAAA,QAChC,IAAI,eAAwB,OAAO,SAAS,aAAa,CAAC;AAAA,QAC1D,IAAI,OAAO,iBAAiB,UAAU;AAAA,UACpC,IAAI;AAAA,YACF,eAAe,KAAK,MAAM,YAAY;AAAA,YACtC,OAAO,YAAY;AAAA,YACnB,QAAQ,KAAK,wDAAwD,UAAU;AAAA,YAC/E,eAAe,CAAC;AAAA;AAAA,QAEpB;AAAA,QACA,MAAM,KAAK,QAAQ;AAAA,QACnB,UAAU,KAAK;AAAA,UACb;AAAA,UACA,MAAM,OAAO,SAAS;AAAA,UACtB,OAAQ,gBAAgB,CAAC;AAAA,QAC3B,CAAC;AAAA,QACD,cAAc,KAAK,EAAE,OAAO,UAAU,OAAO,KAAK,UAAU,IAAI,CAAC;AAAA,MACnE;AAAA,MACA,MAAM;AAAA,EAGV;AAAA,EAEA,IAAI,UAAU,SAAS,GAAG;AAAA,IAExB,IAAI,SAAS;AAAA,IACb,IAAI,YAAY;AAAA,IAChB,WAAW,SAAS,eAAe;AAAA,MACjC,UAAU,aAAa,MAAM,WAAW,MAAM,KAAK;AAAA,MACnD,YAAY,MAAM;AAAA,IACpB;AAAA,IACA,UAAU,aAAa,MAAM,SAAS;AAAA,IACtC,cAAc,OAAO,KAAK;AAAA,EAC5B;AAAA,EAEA,OAAO,EAAE,MAAM,aAAa,UAAU;AAAA;AAcjC,SAAS,0BAA0B,C
AAC,MAA8B;AAAA,EACvE,MAAM,WAAW;AAAA,EACjB,MAAM,YAAY;AAAA,EAGlB,IAAI,QAAwB;AAAA,EAE5B,IAAI,UAAU;AAAA,EAEd,SAAS,IAAI,CAAC,OAAe;AAAA,IAC3B,IAAI,UAAU,OAAO;AAAA,MAEnB,WAAW;AAAA,MACX,MAAM,WAAW,QAAQ,QAAQ,SAAS;AAAA,MAC1C,IAAI,aAAa,IAAI;AAAA,QAEnB,MAAM,aAAa,QAAQ,MAAM,WAAW,UAAU,MAAM;AAAA,QAC5D,UAAU;AAAA,QACV,QAAQ;AAAA,QACR,IAAI,WAAW,SAAS,GAAG;AAAA,UACzB,KAAK,UAAU;AAAA,QACjB;AAAA,MACF;AAAA,MAEA;AAAA,IACF;AAAA,IAGA,MAAM,WAAW,UAAU;AAAA,IAG3B,MAAM,UAAU,SAAS,QAAQ,QAAQ;AAAA,IACzC,IAAI,YAAY,IAAI;AAAA,MAElB,MAAM,SAAS,SAAS,MAAM,GAAG,OAAO;AAAA,MACxC,IAAI,OAAO,SAAS,GAAG;AAAA,QACrB,KAAK,MAAM;AAAA,MACb;AAAA,MAEA,UAAU;AAAA,MACV,QAAQ;AAAA,MACR,MAAM,YAAY,SAAS,MAAM,UAAU,SAAS,MAAM;AAAA,MAC1D,IAAI,UAAU,SAAS,GAAG;AAAA,QACxB,KAAK,SAAS;AAAA,MAChB;AAAA,MACA;AAAA,IACF;AAAA,IAIA,IAAI,YAAY;AAAA,IAChB,SAAS,MAAM,KAAK,IAAI,SAAS,QAAQ,SAAS,SAAS,CAAC,EAAG,OAAO,GAAG,OAAO;AAAA,MAC9E,IAAI,SAAS,SAAS,SAAS,MAAM,GAAG,GAAG,CAAC,GAAG;AAAA,QAC7C,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,IACF;AAAA,IAEA,IAAI,YAAY,GAAG;AAAA,MAEjB,MAAM,OAAO,SAAS,MAAM,GAAG,SAAS,SAAS,SAAS;AAAA,MAC1D,IAAI,KAAK,SAAS,GAAG;AAAA,QACnB,KAAK,IAAI;AAAA,MACX;AAAA,MACA,UAAU,SAAS,MAAM,SAAS,SAAS,SAAS;AAAA,IACtD,EAAO;AAAA,MAEL,IAAI,SAAS,SAAS,GAAG;AAAA,QACvB,KAAK,QAAQ;AAAA,MACf;AAAA,MACA,UAAU;AAAA;AAAA;AAAA,EAKd,SAAS,KAAK,GAAG;AAAA,IACf,IAAI,QAAQ,SAAS,KAAK,UAAU,QAAQ;AAAA,MAC1C,KAAK,OAAO;AAAA,MACZ,UAAU;AAAA,IACZ;AAAA,IAEA,UAAU;AAAA,IACV,QAAQ;AAAA;AAAA,EAGV,OAAO,EAAE,MAAM,MAAM;AAAA;;AChPvB,eAAsB,2BAA2B,GAAkB;AAAA,EACjE,QAAQ,4CAAuB;AAAA,EAC/B,oBAAmB;AAAA;;;ACErB,IAAM,oBAA8C;AAAA,EAClD,mBAAmB,CAAC,oBAAoB;AAAA,EACxC,oBAAoB,CAAC,iBAAiB;AAAA,EACtC,iBAAiB,CAAC,uBAAuB,eAAe;AAAA,EACxD,qBAAqB,CAAC,aAAa;AAAA,EACnC,wBAAwB,CAAC,uBAAuB,0BAA0B;AAAA,EAC1E,wBAAwB,CAAC,oBAAoB;AAAA,EAC7C,kBAAkB,CAAC,WAAW;AAAA,EAC9B,2BAA2B,CAAC,qBAAqB;AAAA,EACjD,gCAAgC,CAAC,sBAAsB;AAAA,EACvD,yBAAyB,CAAC,sBAAsB;AAAA,EAChD,yBAAyB,CAAC,wBAAwB,gCAAgC;AAAA,EAClF,oBAAoB,CAAC,0BAA0B;AAAA,EAC/C,uBAAuB,CAAC,oBAAoB;AAAA,EAC5C,kBAAkB,CAAC,gBAAgB;AAAA,EACnC,iBAAiB,CAAC,eAAe;AAAA,EACjC,qBAAqB,CAAC,oBAAoB,4BAA4B;AAA
A,EACtE,qBAAqB,CAAC,kBAAkB;AAAA,EACxC,yBAAyB,CAAC,sBAAsB;AAAA,EAChD,uBAAuB,CAAC,8BAA8B;AACxD;AAaO,SAAS,mBAAmB,CAAC,UAA4B;AAAA,EAC9D,OAAO,OAAO,QAAQ,iBAAiB,EACpC,OAAO,IAAI,gBAAe,WAAU,SAAS,QAAQ,CAAC,EACtD,IAAI,EAAE,UAAU,IAAI;AAAA;;;AC1BzB,IAAM,cAAc;AAEb,SAAS,eAAe,CAAC,GAAmB;AAAA,EACjD,IAAI,KAAK;AAAA,IAAW,OAAO,IAAI,IAAI,KAAW,QAAQ,CAAC;AAAA,EACvD,IAAI,KAAK;AAAA,IAAO,OAAO,IAAI,IAAI,MAAO,QAAQ,CAAC;AAAA,EAC/C,OAAO,OAAO,CAAC;AAAA;AAMV,SAAS,mBAAmB,CACjC,OACA,UACyB;AAAA,EACzB,QAAQ;AAAA,SACD;AAAA,MACH,OAAO;AAAA,QACL,YAAY,MAAM;AAAA,WACd,MAAM,eAAe,EAAE,UAAU,MAAM,aAAa,IAAI,CAAC;AAAA,MAC/D;AAAA,SACG;AAAA,MACH,OAAO,EAAE,YAAY,MAAM,GAAG;AAAA;AAAA,MAE9B,OAAO,EAAE,YAAY,MAAM,GAAG;AAAA;AAAA;AAO7B,SAAS,gBAAgB,CAAC,OAAqB,UAAyC;AAAA,EAC7F,MAAM,SAAS,CAAC,MAAM,cAAc,MAAM,YAAY,EAAE,OAAO,OAAO,EAAE,KAAK,KAAK;AAAA,EAClF,OAAO;AAAA,IACL,IAAI,MAAM;AAAA,IACV,OAAO,GAAG,MAAM,KAAK,SAAS,KAAK,WAAW;AAAA,IAC9C,aAAa,GAAG,gBAAgB,MAAM,SAAS;AAAA,IAC/C,QAAQ;AAAA,MACN,UAAU,MAAM;AAAA,MAChB;AAAA,MACA,OAAO,MAAM,GAAG,MAAM,GAAG,EAAE,IAAI,KAAK,MAAM;AAAA,MAC1C,aAAa,CAAC,MAAM,cAAc,GAAG,gBAAgB,MAAM,SAAS,aAAa,EAC9E,OAAO,OAAO,EACd,KAAK,KAAU;AAAA,MAClB,OAAO,MAAM,eAAe,oBAAoB,MAAM,YAAY,IAAI,CAAC;AAAA,MACvE,iBAAiB,oBAAoB,OAAO,QAAQ;AAAA,MACpD,UAAU,CAAC;AAAA,IACb;AAAA,IACA,KAAK;AAAA,EACP;AAAA;AAMF,eAAsB,cAAc,CAClC,OACA,aACA,cACA,QACyB;AAAA,EACzB,MAAM,SAAS,IAAI,gBAAgB;AAAA,IACjC,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,MAAM;AAAA,IACN,WAAW;AAAA,OACR;AAAA,EACL,CAAC;AAAA,EACD,OAAO,OAAO,YAAY,cAAc;AAAA,EACxC,IAAI,cAAc;AAAA,IAChB,WAAW,SAAS,cAAc;AAAA,MAChC,OAAO,OAAO,YAAY,KAAK;AAAA,IACjC;AAAA,EACF;AAAA,EACA,MAAM,MAAM,MAAM,MAAM,GAAG,sBAAsB,UAAU,EAAE,OAAO,CAAC;AAAA,EACrE,IAAI,CAAC,IAAI;AAAA,IAAI,MAAM,IAAI,MAAM,4BAA4B,IAAI,QAAQ;AAAA,EACrE,OAAO,IAAI,KAAK;AAAA;;;ACrFX,IAAM,kBAGT,OAAO,OAAO,QAAQ,aAAa,WAAW;AAAA,EAChD,MAAM,UAAU,MAAM,eACpB,MAAM,OACN,EAAE,QAAQ,OAAO,GACjB,CAAC,UAAU,GACX,MACF;AAAA,EACA,MAAM,UAAU,QAAQ,IAAI,CAAC,UAAU;AAAA,IACrC,MAAM,OAAO,iBAAiB,OAAO,oBAAoB;AAAA,IAGzD,IAAI,MAAM,YAAY,MAAM,SAAS,SAAS,GAAG;AAAA,MAC/C,MAAM,YAAY,MAAM,SAAS,IAAI,CAAC,MAAM,EA
AE,SAAS;AAAA,MACvD,MAAM,gBAAgB,uBAAuB,EAAE,UAAU,CAAC;AAAA,MAC1D,IAAI,cAAc,SAAS,GAAG;AAAA,QAC5B,MAAM,SAAS,KAAK;AAAA,QACpB,MAAM,iBAAkB,OAAO,mBAAmB,CAAC;AAAA,QACnD,eAAe,gBAAgB;AAAA,QAC/B,OAAO,kBAAkB;AAAA,MAC3B;AAAA,IACF;AAAA,IAGA,MAAM,MAAM,KAAK;AAAA,IACjB,OAAO,IAAI;AAAA,IAEX,OAAO;AAAA,GACR;AAAA,EACD,OAAO,EAAE,QAAQ;AAAA;;;AC/BZ,SAAS,aAAa,CAAC,OAAyB;AAAA,EACrD,OAAQ,MAAc,WAAW,KAAK;AAAA;;;ACExC;AAKO,IAAM,wBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,UAAqC,MAAM,YAAY,OAAQ,YAAY,CAAC,GAAG,MAAM;AAAA,EAE3F,MAAM,SAAS,MAAM,QAAQ,MAAM,KAAe;AAAA,EAElD,MAAM,cAAc,MAAM,QAAQ,MAAM,IAAI,OAAO,KAAK;AAAA,EAExD,OAAO;AAAA,IACL,OAAO,cAAc,WAAW;AAAA,EAClC;AAAA;;;ACnBF;AAEO,IAAM,kBAIT,OAAO,OAAO,OAAO,YAAY,YAAY;AAAA,EAC/C,MAAM,eAAe,MAAM,QAAQ,MAAM,IAAI;AAAA,EAE7C,QAAQ,kBAAkB,MAAM,oBAAoB;AAAA,EACpD,MAAM,YAAY,MAAM,cAAc,gBAAgB,MAAO,gBAAgB,YAAY;AAAA,IACvF,mBAAmB,CAAC,aAAkB,WAAW,UAAU,YAAY,CAAC;AAAA,EAC1E,CAAC;AAAA,EAED,IAAI,cAAc;AAAA,IAChB,MAAM,QAAQ,MAAM;AAAA,IACpB,MAAM,SAAS,MAAM,IAAI,CAAC,MAAM,UAAU,OAAO,CAAC,EAAE,MAAM;AAAA,IAC1D,OAAO,EAAE,OAAO,OAAO;AAAA,EACzB;AAAA,EAGA,MAAM,WAAW,UAAU,OAAO,MAAM,IAAc;AAAA,EACtD,OAAO,EAAE,OAAO,SAAS,OAAO;AAAA;AAG3B,IAAM,2BAIT,OAAO,OAAO,SAAS,UAAU;AAAA,EACnC,OAAO,gBAAgB,OAAO,OAAO,MAAM,IAAI,IAAI,gBAAgB,EAAE,MAAM;AAAA;;;AClC7E;AAFA,sBAAS;AAQF,IAAM,eAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,SAAS,WAAU;AAAA,EACzB,MAAM,aAAa,gBAAgB,OAAO,gBAAgB;AAAA,EAC1D,OAAO,KAAK,YAAY,EAAE,OAAO,OAAO,gBAAgB,WAAW,CAAC;AAAA,EAIpE,MAAM,YAAY,OAAQ,YAAY,CAAC,GAAG,QAAQ,GAAG;AAAA,EAErD,OAAO,QAAQ,YAAY,EAAE,OAAO,OAAO,gBAAgB,WAAW,CAAC;AAAA,EACvE,OAAO;AAAA,IACL,OAAO,MAAM;AAAA,EACf;AAAA;;;ACfF;AAMO,IAAM,0BAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,IAAI,OAAO,iBAAiB,aAAa,kCAAkC;AAAA,IACzE,IAAI,CAAC,MAAM,cAAc,CAAC,MAAM,QAAQ,MAAM,UAAU,KAAK,MAAM,WAAW,WAAW,GAAG;AAAA,MAC1F,QAAQ,KAAK,sDAAsD,KAAK;AAAA,MACxE,MAAM,IAAI,MAAM,oDAAoD;AAAA,IACtE;AAAA,IACA,MAAM,qBAA0D,MAAM,YACpE,OACA,YACA,CAAC,GACD,MACF;AAAA,IACA,MAAM,UAAc,MAAM,mBACxB,MAAM,OACN,MAAM,YACN,CAAC,CACH;AAAA,IAEA,MAAM,WAAU,MAAM,QAAQ,OAAM,IAAI,UAAS,CAAC,OAAM;AAAA,IAExD
,OAAO;AAAA,MACL,YAAY,SAAQ,IAAI,CAAC,OAAY;AAAA,QACnC,OAAO,EAAE;AAAA,QACT,OAAO,EAAE;AAAA,MACX,EAAE;AAAA,IACJ;AAAA,EACF;AAAA,EAEA,MAAM,aAA0C,MAAM,YAAY,OAAQ,YAAY,CAAC,GAAG,MAAM;AAAA,EAChG,MAAM,SAAc,MAAM,WAAW,MAAM,OAAiB;AAAA,IAC1D,OAAQ,MAAc;AAAA,EACxB,CAAC;AAAA,EAED,MAAM,UAAU,MAAM,QAAQ,MAAM,IAAI,SAAS,CAAC,MAAM;AAAA,EAExD,OAAO;AAAA,IACL,YAAY,QAAQ,IAAI,CAAC,OAAY;AAAA,MACnC,OAAO,EAAE;AAAA,MACT,OAAO,EAAE;AAAA,IACX,EAAE;AAAA,EACJ;AAAA;;;ACpDF;AAFA,sBAAS;AAOF,IAAM,qBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,SAAS,WAAU;AAAA,EACzB,MAAM,aAAa,sBAAsB,OAAO,gBAAgB;AAAA,EAChE,OAAO,KAAK,YAAY,EAAE,OAAO,OAAO,gBAAgB,WAAW,CAAC;AAAA,EAEpE,MAAM,WAA2C,MAAM,YACrD,OACA,YACA,CAAC,GACD,MACF;AAAA,EAEA,OAAO,MAAM,4DAA4D;AAAA,IACvE,OAAO,OAAO,gBAAgB;AAAA,EAChC,CAAC;AAAA,EAED,MAAM,SAAc,MAAM,SAAS,MAAM,KAAe;AAAA,EAExD,OAAO,QAAQ,YAAY,EAAE,YAAY,QAAQ,MAAM,OAAO,CAAC;AAAA,EAC/D,OAAO;AAAA,IACL,QAAQ,OAAO;AAAA,EACjB;AAAA;;;AC/BF;AAKO,IAAM,wBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,YAAuC,MAAM,YAAY,OAAQ,YAAY,CAAC,GAAG,MAAM;AAAA,EAE7F,MAAM,SAAS,MAAM,UAAU,MAAM,OAAc;AAAA,IACjD,WAAW,MAAM;AAAA,IACjB,gBAAgB,MAAM;AAAA,EACxB,CAAC;AAAA,EAED,MAAM,QAAQ,MAAM,QAAQ,MAAM,IAAI,SAAS,CAAC,MAAM;AAAA,EAEtD,MAAM,iBAAiB,MAAM,QAAQ,IACnC,MAAM,IAAI,OAAO,UAAU;AAAA,IACzB,OAAO,KAAK,SAAS;AAAA,IACrB,OAAO,KAAK,SAAS;AAAA,IACrB,MAAM,CAAC;AAAA,EACT,EAAE,CACJ;AAAA,EAEA,OAAO;AAAA,IACL,OAAO;AAAA,EACT;AAAA;;;ACjCF;AAKO,IAAM,kBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,YAAiC,MAAM,YAAY,OAAQ,YAAY,CAAC,GAAG,MAAM;AAAA,EAEvF,MAAM,SAAc,MAAM,UAAU,MAAM,OAAiB;AAAA,IACzD,gBAAgB,MAAM;AAAA,EACxB,CAAC;AAAA,EAED,MAAM,OAAO,MAAM,QAAQ,MAAM,IAAI,OAAO,IAAI,iBAAiB,QAAQ;AAAA,EAEzE,OAAO;AAAA,IACL,MAAM,QAAQ;AAAA,EAChB;AAAA;;;ACtBF,sBAAS;AAGT;AAEO,IAAM,gBAIT,OAAO,OAAO,UAAU;AAAA,EAC1B,MAAM,SAAS,WAAU;AAAA,EACzB,QAAQ,kBAAkB,MAAM,oBAAoB;AAAA,EACpD,MAAM,aAAa,iBAAiB,OAAO,gBAAgB;AAAA,EAC3D,OAAO,KAAK,YAAY,EAAE,OAAO,OAAO,gBAAgB,WAAW,CAAC;AAAA,EAEpE,MAAM,SAAS,MAAM;AAAA,EACrB,MAAM,YAAY,kBAAkB,oBAAoB,KAAM,CAAC;AAAA,EAE/D,QAAQ,UAAU,cAAc,YAAY,UAAU,MAAO;AAAA,EAE7D,MAAM,cAAc,MAAM,cAA
c,yBAAyB,cAAc,YAAY;AAAA,OACrF,QAAQ,EAAE,MAAM,IAAI,CAAC;AAAA,EAC3B,CAAC;AAAA,EACD,OAAO,MAAM,sBAAsB;AAAA,IACjC,OAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,WACM,QAAQ,EAAE,MAAM,IAAI,CAAC;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,EACV,CAAC;AAAA,EACD,MAAM,YAAY,aAAa,YAAY;AAAA,EAG3C,IAAI,aAA4C;AAAA,EAChD,IAAI,WAAW,WAAW,YAAY,MAAM,SAAS,GAAG;AAAA,IAEtD,MAAM,QAAgC,CAAC;AAAA,IACvC,aAAa,UAAU,YAAY,OAAO;AAAA,MACxC,MAAM,QAAQ;AAAA,IAChB;AAAA,IACA,aAAa;AAAA,EACf,EAAO,SAAI,WAAW,yBAAyB,YAAY,MAAM,SAAS,GAAG;AAAA,IAE3E,MAAM,QAAgC,CAAC;AAAA,IACvC,MAAM,QAAQ,IACZ,YAAY,MAAM,IAAI,SAAS,WAAW;AAAA,MACxC,MAAM,WAAW,MAAM,cAAc,kBAAkB,YAAY,IAAI;AAAA,MACvE,IAAI,SAAS,UAAU,SAAS,SAAS,WAAW;AAAA,QAClD,MAAM,QAAQ,SAAS;AAAA,MACzB;AAAA,KACD,CACH;AAAA,IACA,IAAI,OAAO,KAAK,KAAK,EAAE,SAAS,GAAG;AAAA,MACjC,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EAGA,IAAI;AAAA,EACJ,IAAI,YAAY,MAAM,SAAS,GAAG;AAAA,IAChC,MAAM,YAAY,YAAY,MAAM,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,IACrD,MAAM,uBAAuB,uBAAuB,EAAE,UAAU,CAAC;AAAA,IACjE,IAAI,qBAAqB,SAAS,GAAG;AAAA,MACnC,gBAAgB;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,OAAO,QAAQ,YAAY,EAAE,OAAO,OAAO,gBAAgB,WAAW,CAAC;AAAA,EAEvE,OAAO;AAAA,IACL,OAAO,MAAM;AAAA,IACb,UAAU;AAAA,IACV,WAAW;AAAA,IACX,kBAAkB;AAAA,IAClB,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,OACI,gBAAgB,EAAE,cAAc,IAAI,CAAC;AAAA,EAC3C;AAAA;;;ACzEF;AAMO,IAAM,sBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,IAAI,OAAO,iBAAiB,aAAa,8BAA8B;AAAA,IACrE,IAAI,CAAC,MAAM,UAAU,CAAC,MAAM,QAAQ,MAAM,MAAM,KAAK,MAAM,OAAO,WAAW,GAAG;AAAA,MAC9E,MAAM,IAAI,MAAM,4CAA4C;AAAA,IAC9D;AAAA,IACA,MAAM,mBAAoD,MAAM,YAC9D,OACA,YACA,CAAC,GACD,MACF;AAAA,IACA,MAAM,UAAc,MAAM,iBAAiB,MAAM,OAAiB,MAAM,KAAK,MAAM,MAAO,GAAG;AAAA,MAC3F,WAAY,MAAc;AAAA,IAC5B,CAAC;AAAA,IAED,MAAM,cAAa,MAAM,QAAQ,OAAM,IAAI,UAAS,CAAC,OAAM;AAAA,IAE3D,OAAO;AAAA,MACL,YAAY,YAAW,IAAI,CAAC,OAAY;AAAA,QACtC,OAAO,EAAE;AAAA,QACT,OAAO,EAAE;AAAA,QACT,KAAK,EAAE;AAAA,MACT,EAAE;AAAA,IACJ;AAAA,EACF;AAAA,EAEA,MAAM,WAAoC,MAAM,YAAY,OAAQ,YAAY,CAAC,GAAG,MAAM;AAAA,EAC1F,MAAM,SAAc,MAAM,SAAS,MAAM,OAAiB;AAAA,IACxD,WAAY,MAAc;AAAA,EAC5B,CAAC;AAAA,EAED,MAAM,aAAa,MAAM,QAAQ,MAAM,IAAI
,SAAS,CAAC,MAAM;AAAA,EAE3D,OAAO;AAAA,IACL,YAAY,WAAW,IAAI,CAAC,OAAY;AAAA,MACtC,OAAO,EAAE;AAAA,MACT,OAAO,EAAE;AAAA,MACT,KAAK,EAAE;AAAA,IACT,EAAE;AAAA,EACJ;AAAA;;;AC7CF;AAFA;;;ACFO,SAAS,sBAAyB,GAAwB;AAAA,EAC/D,MAAM,SAAc,CAAC;AAAA,EACrB,IAAI,UAAuD;AAAA,EAC3D,IAAI,WAAW;AAAA,EACf,IAAI,MAAoB;AAAA,EAExB,MAAM,OAAO,CAAC,UAAa;AAAA,IACzB,IAAI,SAAS;AAAA,MACX,MAAM,IAAI;AAAA,MACV,UAAU;AAAA,MACV,EAAE,EAAE,OAAO,OAAO,MAAM,MAAM,CAAC;AAAA,IACjC,EAAO;AAAA,MACL,OAAO,KAAK,KAAK;AAAA;AAAA;AAAA,EAIrB,MAAM,OAAO,MAAM;AAAA,IACjB,WAAW;AAAA,IACX,IAAI,SAAS;AAAA,MACX,MAAM,IAAI;AAAA,MACV,UAAU;AAAA,MACV,EAAE,EAAE,OAAO,WAAkB,MAAM,KAAK,CAAC;AAAA,IAC3C;AAAA;AAAA,EAGF,MAAM,QAAQ,CAAC,MAAa;AAAA,IAC1B,MAAM;AAAA,IACN,IAAI,SAAS;AAAA,MACX,MAAM,IAAI;AAAA,MACV,UAAU;AAAA,MACV,EAAE,EAAE,OAAO,WAAkB,MAAM,KAAK,CAAC;AAAA,IAC3C;AAAA;AAAA,EAGF,MAAM,WAA6B;AAAA,KAChC,OAAO,cAAc,GAAG;AAAA,MACvB,OAAO;AAAA,QACL,IAAI,GAA+B;AAAA,UACjC,IAAI;AAAA,YAAK,OAAO,QAAQ,OAAO,GAAG;AAAA,UAClC,IAAI,OAAO,SAAS,GAAG;AAAA,YACrB,OAAO,QAAQ,QAAQ,EAAE,OAAO,OAAO,MAAM,GAAI,MAAM,MAAM,CAAC;AAAA,UAChE;AAAA,UACA,IAAI,UAAU;AAAA,YACZ,OAAO,QAAQ,QAAQ,EAAE,OAAO,WAAkB,MAAM,KAAK,CAAC;AAAA,UAChE;AAAA,UACA,OAAO,IAAI,QAA2B,CAAC,MAAM;AAAA,YAC3C,UAAU;AAAA,WACX;AAAA;AAAA,MAEL;AAAA;AAAA,EAEJ;AAAA,EAEA,OAAO,EAAE,MAAM,MAAM,OAAO,SAAS;AAAA;AAQhC,SAAS,2BAA2B,CACzC,WACA,OACA,cACA;AAAA,EACA,OAAO,IAAI,aAAa,WAAW;AAAA,IACjC,aAAa;AAAA,IACb,eAAe,EAAE,qBAAqB,KAAK;AAAA,IAC3C,mBAAmB,CAAC,SAAiB;AAAA,MACnC,MAAM,KAAK,EAAE,MAAM,cAAc,MAAM,QAAQ,WAAW,KAAK,CAAC;AAAA;AAAA,EAEpE,CAAC;AAAA;AAMI,SAAS,kBAAkB,CAChC,WACA,gBACA,cACA;AAAA,EACA,IAAI,QAAQ;AAAA,EACZ,OAAO,IAAI,aAAa,WAAW;AAAA,IACjC,aAAa;AAAA,IACb,eAAe,EAAE,qBAAqB,KAAK;AAAA,IAC3C,mBAAmB,CAAC,SAAiB;AAAA,MACnC;AAAA,MACA,MAAM,SAAS,OAAO,IAAI,KAAK,IAAI,QAAQ,KAAK;AAAA,MAChD,MAAM,WAAW,KAAK,MAAM,KAAK,IAAI,QAAQ,GAAG,CAAC;AAAA,MACjD,eAAe,UAAU,cAAc,EAAE,MAAM,SAAS,CAAC;AAAA;AAAA,EAE7D,CAAC;AAAA;;;ACrGI,SAAS,oBAAoB,CAAC,eAAuD;AAAA,EAC1F,IAAI,iBAAiB;AAAA,IAAM,OAAO;AAAA,EAClC,IAAI,OAAO,kBAAkB;AAAA,IAAU,OAAO;AAAA,EAC9C,MAAM,cAAc,cAAc,cAAc,SAAS;AAAA,EACzD,
IAAI,CAAC;AAAA,IAAa,OAAO;AAAA,EACzB,MAAM,UAAU,YAAY;AAAA,EAC5B,IAAI,OAAO,YAAY;AAAA,IAAU,OAAO;AAAA,EACxC,WAAW,QAAQ,SAAS;AAAA,IAC1B,IAAI,KAAK,SAAS,UAAU,UAAU,MAAM;AAAA,MAC1C,OAAQ,KAAwC;AAAA,IAClD;AAAA,EACF;AAAA,EACA,OAAO;AAAA;;;AFQT,SAAS,+BAA+B,CAAC,OAA8C;AAAA,EACrF,MAAM,YAAY,KAAK,UAAU,MAAM,cAAc,MAAM,CAAC;AAAA,EAC5D,OACE,GAAG,MAAM;AAAA;AAAA,IACT;AAAA,EAAmF;AAAA;AAAA,IACnF;AAAA;AAIJ,SAAS,mBAAmB,CAAC,MAAuC;AAAA,EAElE,IAAI;AAAA,IACF,OAAO,KAAK,MAAM,IAAI;AAAA,IACtB,MAAM;AAAA,IAEN,MAAM,QAAQ,KAAK,MAAM,aAAa;AAAA,IACtC,IAAI,OAAO;AAAA,MACT,IAAI;AAAA,QACF,OAAO,KAAK,MAAM,MAAM,EAAE;AAAA,QAC1B,MAAM;AAAA,QACN,OAAQ,iBAAiB,MAAM,EAAE,KAAiC,CAAC;AAAA;AAAA,IAEvE;AAAA,IACA,OAAO,CAAC;AAAA;AAAA;AAIL,IAAM,2BAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,eAAuC,MAAM,YAAY,OAAQ,YAAY,CAAC,GAAG,MAAM;AAAA,EAC7F,QAAQ,iBAAiB,MAAM,oBAAoB;AAAA,EAEnD,MAAM,SAAS,gCAAgC,KAAK;AAAA,EAEpD,MAAM,WAAsB,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,EAE9D,MAAM,kBAAmB,aAAa,UAAkB,oBAAoB,UAAU;AAAA,IACpF,UAAU;AAAA,IACV,uBAAuB;AAAA,EACzB,CAAC;AAAA,EAED,MAAM,WAAW,mBAAmB,aAAa,WAAW,YAAY,YAAY;AAAA,EAEpF,IAAI,UAAU,MAAM,aAAa,iBAAiB;AAAA,IAChD,gBAAgB,MAAM,aAAa;AAAA,IACnC,aAAa,MAAM,eAAe;AAAA,IAClC,kBAAkB;AAAA,IAClB;AAAA,EACF,CAAC;AAAA,EAED,IAAI,CAAC,MAAM,QAAQ,OAAO,GAAG;AAAA,IAC3B,UAAU,CAAC,OAAO;AAAA,EACpB;AAAA,EAEA,MAAM,eAAe,qBAClB,QAAQ,IAAqC,cAChD,EAAE,KAAK;AAAA,EAEP,MAAM,SAAS,oBAAoB,YAAY;AAAA,EAC/C,OAAO,EAAE,OAAO;AAAA;AAGX,IAAM,kCAIT,gBAAgB,CAClB,OACA,OACA,QAC4D;AAAA,EAC5D,MAAM,eAAe,MAAM;AAAA,EAC3B,MAAM,eAAuC,MAAM,YAAY,OAAQ,cAAc,CAAC,GAAG,MAAM;AAAA,EAC/F,QAAQ,iBAAiB,MAAM,oBAAoB;AAAA,EAEnD,MAAM,SAAS,gCAAgC,KAAK;AAAA,EAEpD,MAAM,WAAsB,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,EAE9D,MAAM,kBAAmB,aAAa,UAAkB,oBAAoB,UAAU;AAAA,IACpF,UAAU;AAAA,IACV,uBAAuB;AAAA,EACzB,CAAC;AAAA,EAED,MAAM,QAAQ,uBAAoE;AAAA,EAClF,MAAM,WAAW,4BAA4B,aAAa,WAAW,OAAO,YAAY;AAAA,EAExF,IAAI,WAAW;AAAA,EAEf,MAAM,eAAe,MAAM;AAAA,EAC3B,MAAM,OAAO,CAAC,UAAuD;AAAA,IACnE,IAAI,MAAM,SAAS,gBAAgB,eAAe,OAAO;AAAA,MACvD,YAAa,MAAc;AAAA,MAE3B,MAAM,QAAQ,SAAS,MAAM,WAAW;AAAA,MACxC,IAAI,OAAO;AAAA,Q
ACT,MAAM,UAAU,iBAAiB,MAAM,EAAE;AAAA,QACzC,IAAI,YAAY,WAAW;AAAA,UACzB,aAAa;AAAA,YACX,MAAM;AAAA,YACN,MAAM;AAAA,YACN,aAAa;AAAA,UACf,CAAgD;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,IACA,aAAa,KAAK;AAAA;AAAA,EAGpB,MAAM,kBAAkB,aAAa,iBAAiB;AAAA,IACpD,gBAAgB,MAAM,aAAa;AAAA,IACnC,aAAa,MAAM,eAAe;AAAA,IAClC,kBAAkB;AAAA,IAClB;AAAA,EACF,CAAC,EAAE,KACD,MAAM,MAAM,KAAK,GACjB,CAAC,QAAe,MAAM,MAAM,GAAG,CACjC;AAAA,EAEA,OAAO,MAAM;AAAA,EACb,MAAM;AAAA,EAEN,MAAM,SAAS,oBAAoB,QAAQ;AAAA,EAC3C,MAAM,EAAE,MAAM,UAAU,MAAM,EAAE,OAAO,EAAoC;AAAA;;;AG1I7E;AAEO,IAAM,yBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,eAAe,MAAM,QAAQ,MAAM,IAAI;AAAA,EAE7C,IAAI,OAAO,iBAAiB,aAAa,4BAA4B;AAAA,IACnE,IACE,CAAC,MAAM,mBACP,CAAC,MAAM,QAAQ,MAAM,eAAe,KACpC,MAAM,gBAAgB,WAAW,GACjC;AAAA,MACA,MAAM,IAAI,MAAM,yDAAyD;AAAA,IAC3E;AAAA,IAEA,MAAM,qBAAqD,MAAM,YAC/D,OACA,YACA,CAAC,GACD,MACF;AAAA,IACA,MAAM,UAAc,MAAM,mBACxB,MAAM,MACN,MAAM,iBACN,CAAC,CACH;AAAA,IAEA,IAAI,cAAc;AAAA,MAEhB,MAAM,UAAU,MAAM,QAAQ,OAAM,KAAK,MAAM,QAAQ,QAAO,IAAI,MAAM,IAAI,UAAS,CAAC,OAAM;AAAA,MAC5F,OAAO;AAAA,QACL,YAAY,QAAQ,IAAI,CAAC,MACvB,EAAE,OAAO,IAAI,CAAC,OAAe,SAAiB;AAAA,UAC5C;AAAA,UACA,OAAO,EAAE,OAAO;AAAA,QAClB,EAAE,CACJ;AAAA,MACF;AAAA,IACF;AAAA,IAEA,OAAO;AAAA,MACL,YAAY,QAAO,OAAO,IAAI,CAAC,OAAe,SAAiB;AAAA,QAC7D;AAAA,QACA,OAAO,QAAO,OAAO;AAAA,MACvB,EAAE;AAAA,IACJ;AAAA,EACF;AAAA,EAEA,MAAM,qBAAiD,MAAM,YAC3D,OACA,YACA,CAAC,GACD,MACF;AAAA,EACA,MAAM,SAAS,MAAM,mBAAmB,MAAM,MAAa;AAAA,IACzD,OAAO,MAAM,iBAAiB;AAAA,EAChC,CAAC;AAAA,EAED,IAAI,cAAc;AAAA,IAEhB,OAAO;AAAA,MACL,YAAa,OAAiB,IAAI,CAAC,aAAkB;AAAA,QACnD,MAAM,QAAQ,MAAM,QAAQ,QAAQ,IAAI,WAAW,CAAC,QAAQ;AAAA,QAC5D,OAAO,MAAM,IAAI,CAAC,cAAmB;AAAA,UACnC,OAAO,SAAS;AAAA,UAChB,OAAO,SAAS;AAAA,QAClB,EAAE;AAAA,OACH;AAAA,IACH;AAAA,EACF;AAAA,EAEA,IAAI,MAAM,QAAQ,OAAO,EAAE,GAAG;AAAA,IAC5B,OAAO;AAAA,MACL,YAAY,OAAO,GAAG,IAAI,CAAC,cAAc;AAAA,QACvC,OAAO,SAAS;AAAA,QAChB,OAAO,SAAS;AAAA,MAClB,EAAE;AAAA,IACJ;AAAA,EACF;AAAA,EAEA,OAAO;AAAA,IACL,YAAa,OAAoC,IAAI,CAAC,cAAc;AAAA,MAClE,OAAO,SAAS;AAAA,MAChB,OAAO,SAAS;AAAA,IAClB,EAAE;AAAA,EACJ;AAAA;;;AC3
FF;AAFA,sBAAS;AAQF,IAAM,oBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,SAAS,WAAU;AAAA,EACzB,MAAM,OAAO,OAAO,WAAW;AAAA,EAC/B,MAAM,aAAa,qBAAqB,OAAO,gBAAgB,cAAc;AAAA,EAC7E,OAAO,KAAK,YAAY,EAAE,OAAO,OAAO,gBAAgB,WAAW,CAAC;AAAA,EAEpE,MAAM,oBAA+C,MAAM,YACzD,OACA,YACA,CAAC,GACD,MACF;AAAA,EAEA,OAAO,MAAM,2DAA2D;AAAA,IACtE,OAAO,OAAO,gBAAgB;AAAA,IAC9B,aAAa,MAAM,QAAQ,MAAM,IAAI,IAAI,MAAM,KAAK,SAAS,MAAM,MAAM;AAAA,EAC3E,CAAC;AAAA,EAGD,MAAM,WAAW,MAAM,kBAAkB,MAAM,MAAM;AAAA,IACnD,SAAS,OAAO,gBAAgB,WAAW;AAAA,IAC3C,WAAW,OAAO,gBAAgB;AAAA,EACpC,CAAC;AAAA,EAED,MAAM,eAAe,MAAM,QAAQ,MAAM,IAAI;AAAA,EAC7C,MAAM,eAAe,OAAO,gBAAgB;AAAA,EAI5C,IAAI,gBAAgB,SAAS,KAAK,SAAS,GAAG;AAAA,IAC5C,OAAO,UAAU,aAAa,SAAS;AAAA,IAGvC,IAAI,aAAa,MAAM,KAAK,QAAQ;AAAA,MAClC,MAAM,IAAI,MACR,8EAA8E,eAAe,MAAM,KAAK,QAC1G;AAAA,IACF;AAAA,IAGA,IAAI,cAAc,cAAc;AAAA,MAC9B,MAAM,IAAI,MACR,2EAA2E,gBAAgB,cAC7F;AAAA,IACF;AAAA,IAOA,MAAM,UAAwB,MAAM,KAAK,EAAE,QAAQ,SAAS,GAAG,CAAC,GAAG,MAC/D,SAAiB,GAAG,KAAoB,MAAM,CAClD;AAAA,IAEA,OAAO,QAAQ,YAAY,EAAE,WAAW,UAAU,YAAY,UAAU,CAAC;AAAA,IACzE,OAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAAA,EAGA,IAAI,SAAS,SAAS,cAAc;AAAA,IAClC,OAAO,QAAQ,YAAY,EAAE,QAAQ,SAAS,QAAQ,qBAAqB,CAAC;AAAA,IAC5E,QAAQ,KACN,wEAAwE,SAAS,YAAY,gBAC7F,OACA,QACF;AAAA,IACA,MAAM,IAAI,MACR,wEAAwE,SAAS,YAAY,cAC/F;AAAA,EACF;AAAA,EAEA,OAAO,QAAQ,YAAY,EAAE,YAAY,SAAS,KAAK,CAAC;AAAA,EACxD,OAAO,EAAE,QAAQ,SAAS,KAAmB;AAAA;;;ACxF/C;AAEO,IAAM,mBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,eAAe,MAAM,QAAQ,MAAM,IAAI;AAAA,EAE7C,MAAM,WAA6B,MAAM,YAAY,OAAQ,YAAY,CAAC,GAAG,MAAM;AAAA,EACnF,MAAM,UAAU,MAAM,SAAS,MAAM,IAAW;AAAA,EAEhD,IAAI,cAAc;AAAA,IAChB,OAAO;AAAA,MACL,aAAc,QAAwC,IAAI,CAAC,aAAa;AAAA,QACtE,MAAM,QAAQ,MAAM,QAAQ,QAAQ,IAAI,WAAW,CAAC,QAAQ;AAAA,QAC5D,OAAO,MAAM,IAAI,CAAC,gBAAgB;AAAA,UAChC,QAAQ,WAAW;AAAA,UACnB,OAAO,WAAW;AAAA,UAClB,UAAU,WAAW;AAAA,QACvB,EAAE;AAAA,OACH;AAAA,IACH;AAAA,EACF;AAAA,EAEA,IAAI,cAA8B,CAAC;AAAA,EACnC,IAAI,CAAC,MAAM,QAAQ,OAAO,GAAG;AAAA,IAC3B,cAAc,CAAC,OAAO;AAAA,EACxB,EAAO;AAAA,IACL,cAAc;AAAA;AAAA,EAEhB,OAAO;AAAA,IACL,aAAa,YAAY,IAAI,CAAC,gBAAgB;AAAA,MAC5C
,QAAQ,WAAW;AAAA,MACnB,OAAO,WAAW;AAAA,MAClB,UAAU,WAAW;AAAA,IACvB,EAAE;AAAA,EACJ;AAAA;;;AC9BF;AAFA,sBAAS;AAcF,IAAM,qBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,SAAS,WAAU;AAAA,EACzB,MAAM,aAAa,sBAAsB,OAAO,gBAAgB;AAAA,EAChE,OAAO,KAAK,YAAY,EAAE,OAAO,OAAO,gBAAgB,WAAW,CAAC;AAAA,EAEpE,MAAM,eAAe,MAAM,QAAQ,MAAM,MAAM;AAAA,EAE/C,MAAM,eAAuC,MAAM,YAAY,OAAQ,YAAY,CAAC,GAAG,MAAM;AAAA,EAC7F,QAAQ,iBAAiB,MAAM,oBAAoB;AAAA,EAEnD,OAAO,MAAM,uDAAuD;AAAA,IAClE,OAAO,OAAO,gBAAgB;AAAA,IAC9B,cAAc,eAAgB,MAAM,OAAoB,SAAS,MAAM,QAAQ;AAAA,EACjF,CAAC;AAAA,EAED,MAAM,WAAW,eACb,YACA,mBAAmB,aAAa,WAAW,YAAY,YAAY;AAAA,EAEvE,IAAI,UAAU,MAAM,aAAa,MAAM,QAAe;AAAA,OAChD,WAAW,EAAE,SAAS,IAAI,CAAC;AAAA,EACjC,CAAC;AAAA,EAED,IAAI,cAAc;AAAA,IAEhB,MAAM,eAAe,MAAM,QAAQ,OAAO,IAAI,UAAU,CAAC,OAAO;AAAA,IAChE,MAAM,QAAQ,aAAa,IAAI,CAAC,MAAM;AAAA,MACpC,MAAM,OAAO,MAAM,QAAQ,CAAC,IAAI,IAAI,CAAC,CAAC;AAAA,MACtC,OAAO,qBAAsB,KAAK,IAAqC,cAAc;AAAA,KACtF;AAAA,IACD,OAAO,QAAQ,YAAY,EAAE,WAAW,MAAM,OAAO,CAAC;AAAA,IACtD,OAAO,EAAE,MAAM,MAAM;AAAA,EACvB;AAAA,EAEA,IAAI,CAAC,MAAM,QAAQ,OAAO,GAAG;AAAA,IAC3B,UAAU,CAAC,OAAO;AAAA,EACpB;AAAA,EACA,MAAM,OAAO,qBAAsB,QAAQ,IAAqC,cAAc;AAAA,EAC9F,OAAO,QAAQ,YAAY,EAAE,cAAc,MAAM,OAAO,CAAC;AAAA,EACzD,OAAO;AAAA,IACL;AAAA,EACF;AAAA;AAGK,IAAM,4BAIT,gBAAgB,CAAC,OAAO,OAAO,QAA8D;AAAA,EAC/F,MAAM,eAAe,MAAM;AAAA,EAC3B,MAAM,eAAuC,MAAM,YAAY,OAAQ,cAAc,CAAC,GAAG,MAAM;AAAA,EAC/F,QAAQ,iBAAiB,MAAM,oBAAoB;AAAA,EAEnD,MAAM,QAAQ,uBAA8D;AAAA,EAC5E,MAAM,WAAW,4BAA4B,aAAa,WAAW,OAAO,YAAY;AAAA,EAExF,MAAM,kBAAkB,aAAa,MAAM,QAAkB;AAAA,IAC3D;AAAA,EACF,CAAC,EAAE,KACD,MAAM,MAAM,KAAK,GACjB,CAAC,QAAe,MAAM,MAAM,GAAG,CACjC;AAAA,EAEA,OAAO,MAAM;AAAA,EACb,MAAM;AAAA,EACN,MAAM,EAAE,MAAM,UAAU,MAAM,CAAC,EAA8B;AAAA;;;ACjF/D;AAEO,IAAM,4BAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,eAAe,MAAM,QAAQ,MAAM,IAAI;AAAA,EAE7C,MAAM,qBAAiD,MAAM,YAC3D,OACA,YACA,CAAC,GACD,MACF;AAAA,EACA,MAAM,SAAS,MAAM,mBAAmB,MAAM,MAAa;AAAA,IACzD,OAAO,MAAM,gBAAgB;AAAA,EAC/B,CAAC;AAAA,EAED,IAAI,cAAc;AAAA,IAChB,OAAO;AAAA,MACL,WAAY,OAAiB,IAAI,CAAC,aAAkB;AAAA,QAClD,MAAM,QAAQ,MAAM,QAAQ,QAAQ,IAAI,W
AAW,CAAC,QAAQ;AAAA,QAC5D,OAAO,MAAM,IAAI,CAAC,cAAmB;AAAA,UACnC,UAAU,SAAS;AAAA,UACnB,OAAO,SAAS;AAAA,QAClB,EAAE;AAAA,OACH;AAAA,IACH;AAAA,EACF;AAAA,EAEA,IAAI,MAAM,QAAQ,OAAO,EAAE,GAAG;AAAA,IAC5B,OAAO;AAAA,MACL,WAAW,OAAO,GAAG,IAAI,CAAC,cAAc;AAAA,QACtC,UAAU,SAAS;AAAA,QACnB,OAAO,SAAS;AAAA,MAClB,EAAE;AAAA,IACJ;AAAA,EACF;AAAA,EAEA,OAAO;AAAA,IACL,WAAY,OAAoC,IAAI,CAAC,cAAc;AAAA,MACjE,UAAU,SAAS;AAAA,MACnB,OAAO,SAAS;AAAA,IAClB,EAAE;AAAA,EACJ;AAAA;;;AC7CF;AAEO,IAAM,iCAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,eAAe,MAAM,QAAQ,MAAM,IAAI;AAAA,EAE7C,MAAM,6BAA0D,MAAM,YACpE,OACA,YACA,CAAC,GACD,MACF;AAAA,EACA,MAAM,UAAU,MAAM,2BAA2B,MAAM,MAAa;AAAA,IAClE,eAAe,MAAM;AAAA,EACvB,CAAC;AAAA,EAED,IAAI,cAAc;AAAA,IAChB,OAAO;AAAA,MACL,UAAW,QAAmD,IAAI,CAAC,aAAa;AAAA,QAC9E,MAAM,QAAQ,MAAM,QAAQ,QAAQ,IAAI,WAAW,CAAC,QAAQ;AAAA,QAC5D,OAAO,MAAM,IAAI,CAAC,YAAY;AAAA,UAC5B,QAAQ,OAAO;AAAA,UACf,OAAO,OAAO;AAAA,UACd,MAAM,OAAO;AAAA,QACf,EAAE;AAAA,OACH;AAAA,IACH;AAAA,EACF;AAAA,EAEA,IAAI,WAAsC,CAAC;AAAA,EAC3C,IAAI,CAAC,MAAM,QAAQ,OAAO,GAAG;AAAA,IAC3B,WAAW,CAAC,OAAO;AAAA,EACrB,EAAO;AAAA,IACL,WAAW;AAAA;AAAA,EAEb,OAAO;AAAA,IACL,UAAU,SAAS,IAAI,CAAC,YAAY;AAAA,MAClC,QAAQ,OAAO;AAAA,MACf,OAAO,OAAO;AAAA,MACd,MAAM,OAAO;AAAA,IACf,EAAE;AAAA,EACJ;AAAA;;;AC1CF;AAWO,IAAM,yBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,eAAe,MAAM,QAAQ,MAAM,QAAQ;AAAA,EAGjD,MAAM,iBAA4C,MAAM,YACtD,OACA,YACA,CAAC,GACD,MACF;AAAA,EAEA,IAAI,cAAc;AAAA,IAChB,MAAM,YAAY,MAAM;AAAA,IACxB,MAAM,WAAW,MAAM;AAAA,IACvB,IAAI,UAAU,WAAW,SAAS,QAAQ;AAAA,MACxC,MAAM,IAAI,MACR,uDAAuD,UAAU,aAAa,SAAS,QACzF;AAAA,IACF;AAAA,IAEA,MAAM,UAAoB,CAAC;AAAA,IAC3B,SAAS,IAAI,EAAG,IAAI,UAAU,QAAQ,KAAK;AAAA,MACzC,MAAM,UAAS,MAAM,eAAe,UAAU,IAAI,SAAS,IAAI,CAAC,CAAQ;AAAA,MACxE,IAAI,cAAa;AAAA,MACjB,IAAI,MAAM,QAAQ,OAAM,GAAG;AAAA,QACzB,cAAc,QAAO,IAAgD,UAAU;AAAA,MACjF,EAAO;AAAA,QACL,cAAc,SAAoD,UAAU;AAAA;AAAA,MAE9E,QAAQ,KAAK,WAAU;AAAA,IACzB;AAAA,IAEA,OAAO,EAAE,MAAM,QAAQ;AAAA,EACzB;AAAA,EAEA,QAAQ,iBAAiB,MAAM,oBAAoB;AAAA,EACnD,MAAM,WAAW,mBAAmB,eAAe,WAAW,YAAY,YAAY;AAAA,EAEtF,MAAM,SAAS,MAAM,eACnB,MAAM,UACN,MAA
M,SACN;AAAA,IACE;AAAA,EACF,CACF;AAAA,EAEA,IAAI,aAAa;AAAA,EACjB,IAAI,MAAM,QAAQ,MAAM,GAAG;AAAA,IACzB,aAAc,OAAO,IAAgD,UAAU;AAAA,EACjF,EAAO;AAAA,IACL,aAAc,QAAoD,UAAU;AAAA;AAAA,EAG9E,OAAO;AAAA,IACL,MAAM;AAAA,EACR;AAAA;AAGK,IAAM,gCAIT,gBAAgB,CAClB,OACA,OACA,QAC0D;AAAA,EAC1D,MAAM,eAAe,MAAM;AAAA,EAC3B,MAAM,iBAA4C,MAAM,YACtD,OACA,cACA,CAAC,GACD,MACF;AAAA,EACA,QAAQ,iBAAiB,MAAM,oBAAoB;AAAA,EAEnD,MAAM,QAAQ,uBAAkE;AAAA,EAChF,MAAM,WAAW,4BAA4B,eAAe,WAAW,OAAO,YAAY;AAAA,EAE1F,IAAI;AAAA,EAIJ,MAAM,kBAAkB,eACtB,MAAM,UACN,MAAM,SACN;AAAA,IACE;AAAA,EACF,CACF,EAAE,KACA,CAAC,WAAW;AAAA,IACV,iBAAiB;AAAA,IACjB,MAAM,KAAK;AAAA,KAEb,CAAC,QAAe,MAAM,MAAM,GAAG,CACjC;AAAA,EAEA,OAAO,MAAM;AAAA,EACb,MAAM;AAAA,EAEN,IAAI,aAAa;AAAA,EACjB,IAAI,mBAAmB,WAAW;AAAA,IAChC,IAAI,MAAM,QAAQ,cAAc,GAAG;AAAA,MACjC,aAAc,eAAe,IAAgD,UAAU;AAAA,IACzF,EAAO;AAAA,MACL,aAAc,gBAA4D,UAAU;AAAA;AAAA,EAExF;AAAA,EACA,MAAM,EAAE,MAAM,UAAU,MAAM,EAAE,MAAM,WAAW,EAAkC;AAAA;;;AC9HrF;AAYO,IAAM,mBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,eAAe,MAAM,QAAQ,MAAM,IAAI;AAAA,EAE7C,MAAM,eAAuC,MAAM,YAAY,OAAQ,YAAY,CAAC,GAAG,MAAM;AAAA,EAC7F,QAAQ,iBAAiB,MAAM,oBAAoB;AAAA,EACnD,MAAM,WAAW,eACb,YACA,mBAAmB,aAAa,WAAW,YAAY,YAAY;AAAA,EAEvE,IAAI,cAAc;AAAA,IAChB,MAAM,QAAQ,MAAM;AAAA,IACpB,MAAM,gBAAgB,MAAM,IAAI,CAAC,OAAO,MAAM,SAAS,MAAM,SAAS;AAAA,IAAO,MAAM,CAAC;AAAA,IAEpF,IAAI,WAAU,MAAM,aAAa,eAAe,CAAC,CAAC;AAAA,IAElD,MAAM,eAAe,MAAM,QAAQ,QAAO,IAAI,WAAU,CAAC,QAAO;AAAA,IAChE,MAAM,cAAc,aAAa,IAAI,CAAC,GAAG,MAAM;AAAA,MAC7C,MAAM,OAAO,MAAM,QAAQ,CAAC,IAAI,IAAI,CAAC,CAAC;AAAA,MACtC,MAAM,QAAO,qBAAsB,KAAK,IAAqC,cAAc;AAAA,MAC3F,IAAI,UAAS,cAAc,IAAI;AAAA,QAC7B,MAAM,IAAI,MAAM,sCAAsC;AAAA,MACxD;AAAA,MACA,OAAO;AAAA,KACR;AAAA,IAED,OAAO,EAAE,MAAM,YAAY;AAAA,EAC7B;AAAA,EAGA,MAAM,gBAAgB,MAAM,SAAS,MAAM,SAAS;AAAA,IAAO,MAAM,MAAM;AAAA,EAEvE,IAAI,UAAU,MAAM,aAAa,cAAc;AAAA,OACzC,WAAW,EAAE,SAAS,IAAI,CAAC;AAAA,EACjC,CAAC;AAAA,EAED,IAAI,CAAC,MAAM,QAAQ,OAAO,GAAG;AAAA,IAC3B,UAAU,CAAC,OAAO;AAAA,EACpB;AAAA,EAEA,MAAM,OAAO,qBAAsB,QAAQ,IAAqC,cAAc;AAAA,EAE9F,IAAI,SAAS,cAAc;AAAA,IACzB,MAAM,IAAI,MAAM,sCAAsC
;AAAA,EACxD;AAAA,EAEA,OAAO;AAAA,IACL;AAAA,EACF;AAAA;AAGK,IAAM,0BAIT,gBAAgB,CAAC,OAAO,OAAO,QAA4D;AAAA,EAC7F,MAAM,eAAe,MAAM;AAAA,EAC3B,MAAM,eAAuC,MAAM,YAAY,OAAQ,cAAc,CAAC,GAAG,MAAM;AAAA,EAC/F,QAAQ,iBAAiB,MAAM,oBAAoB;AAAA,EAEnD,MAAM,QAAQ,uBAA4D;AAAA,EAC1E,MAAM,WAAW,4BAA4B,aAAa,WAAW,OAAO,YAAY;AAAA,EAExF,MAAM,gBAAgB,MAAM,SAAS,MAAM,SAAS;AAAA,IAAO,MAAO,MAAM;AAAA,EAExE,MAAM,kBAAkB,aAAa,cAAc;AAAA,IACjD;AAAA,EACF,CAAC,EAAE,KACD,MAAM,MAAM,KAAK,GACjB,CAAC,QAAe,MAAM,MAAM,GAAG,CACjC;AAAA,EAEA,OAAO,MAAM;AAAA,EACb,MAAM;AAAA,EACN,MAAM,EAAE,MAAM,UAAU,MAAM,CAAC,EAA4B;AAAA;;;ACzF7D;AAWO,IAAM,kBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,eAAe,MAAM,QAAQ,MAAM,IAAI;AAAA,EAE7C,MAAM,kBAAyC,MAAM,YAAY,OAAQ,YAAY,CAAC,GAAG,MAAM;AAAA,EAC/F,QAAQ,iBAAiB,MAAM,oBAAoB;AAAA,EACnD,MAAM,WAAW,eACb,YACA,mBAAmB,gBAAgB,WAAW,YAAY,YAAY;AAAA,EAE1E,MAAM,SAAS,MAAM,gBACnB,MAAM,MACN;AAAA,OACM,WAAW,EAAE,SAAS,IAAI,CAAC;AAAA,EACjC,CACF;AAAA,EAEA,IAAI,cAAc;AAAA,IAChB,MAAM,eAAe,MAAM,QAAQ,MAAM,IAAI,SAAS,CAAC,MAAM;AAAA,IAC7D,OAAO;AAAA,MACL,MAAM,aAAa,IAAI,CAAC,MAAO,GAAmC,gBAAgB,EAAE;AAAA,IACtF;AAAA,EACF;AAAA,EAEA,IAAI,cAAc;AAAA,EAClB,IAAI,MAAM,QAAQ,MAAM,GAAG;AAAA,IACzB,cAAe,OAAO,IAAoC,gBAAgB;AAAA,EAC5E,EAAO;AAAA,IACL,cAAe,QAAwC,gBAAgB;AAAA;AAAA,EAGzE,OAAO;AAAA,IACL,MAAM;AAAA,EACR;AAAA;AAGK,IAAM,yBAIT,gBAAgB,CAAC,OAAO,OAAO,QAA2D;AAAA,EAC5F,MAAM,eAAe,MAAM;AAAA,EAC3B,MAAM,kBAAyC,MAAM,YACnD,OACA,cACA,CAAC,GACD,MACF;AAAA,EACA,QAAQ,iBAAiB,MAAM,oBAAoB;AAAA,EAEnD,MAAM,QAAQ,uBAA2D;AAAA,EACzE,MAAM,WAAW,4BAA4B,gBAAgB,WAAW,OAAO,YAAY;AAAA,EAE3F,MAAM,kBAAkB,gBACtB,MAAM,MACN;AAAA,IACE;AAAA,EACF,CACF,EAAE,KACA,MAAM,MAAM,KAAK,GACjB,CAAC,QAAe,MAAM,MAAM,GAAG,CACjC;AAAA,EAEA,OAAO,MAAM;AAAA,EACb,MAAM;AAAA,EACN,MAAM,EAAE,MAAM,UAAU,MAAM,CAAC,EAA2B;AAAA;;;AC/E5D;AAWO,IAAM,sBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,eAAe,MAAM,QAAQ,MAAM,IAAI;AAAA,EAE7C,MAAM,YAAiC,MAAM,YAAY,OAAQ,YAAY,CAAC,GAAG,MAAM;AAAA,EACvF,QAAQ,iBAAiB,MAAM,oBAAoB;AAAA,EACnD,MAAM,WAAW,eACb,YACA,mBAAmB,UAAU,WAAW,YAAY,YAAY;AAAA,EAEpE,MAAM,SAAS,MAAM,UACnB,MAAM,MACN;AAAA,IACE,UAAU
,MAAM;AAAA,IAChB,UAAU,MAAM;AAAA,OACZ,WAAW,EAAE,SAAS,IAAI,CAAC;AAAA,EACjC,CACF;AAAA,EAEA,IAAI,cAAc;AAAA,IAChB,MAAM,eAAe,MAAM,QAAQ,MAAM,IAAI,SAAS,CAAC,MAAM;AAAA,IAC7D,OAAO;AAAA,MACL,MAAM,aAAa,IAAI,CAAC,MAAO,GAAiC,oBAAoB,EAAE;AAAA,MACtF,aAAa,MAAM;AAAA,IACrB;AAAA,EACF;AAAA,EAEA,MAAM,iBAAiB,MAAM,QAAQ,MAAM,IACtC,OAAO,IAAkC,oBAAoB,KAC7D,QAAsC,oBAAoB;AAAA,EAE/D,OAAO;AAAA,IACL,MAAM;AAAA,IACN,aAAa,MAAM;AAAA,EACrB;AAAA;AAGK,IAAM,6BAIT,gBAAgB,CAAC,OAAO,OAAO,QAA+D;AAAA,EAChG,MAAM,eAAe,MAAM;AAAA,EAC3B,MAAM,YAAiC,MAAM,YAAY,OAAQ,cAAc,CAAC,GAAG,MAAM;AAAA,EACzF,QAAQ,iBAAiB,MAAM,oBAAoB;AAAA,EAEnD,MAAM,QAAQ,uBAA+D;AAAA,EAC7E,MAAM,WAAW,4BAA4B,UAAU,WAAW,OAAO,YAAY;AAAA,EAErF,MAAM,kBAAkB,UACtB,MAAM,MACN;AAAA,IACE,UAAU,MAAM;AAAA,IAChB,UAAU,MAAM;AAAA,IAChB;AAAA,EACF,CACF,EAAE,KACA,MAAM,MAAM,KAAK,GACjB,CAAC,QAAe,MAAM,MAAM,GAAG,CACjC;AAAA,EAEA,OAAO,MAAM;AAAA,EACb,MAAM;AAAA,EACN,MAAM,EAAE,MAAM,UAAU,MAAM,EAAE,aAAa,MAAM,YAAY,EAA+B;AAAA;;;ACvEhG;AAdA;AAAA;AAAA;AAAA;AAAA;AAuBA,SAAS,WAAW,CAAC,OAAsC;AAAA,EACzD,OAAO,MAAM,IAAI,CAAC,OAAO;AAAA,IACvB,MAAM;AAAA,IACN,UAAU;AAAA,MACR,MAAM,EAAE;AAAA,MACR,aAAa,qBAAqB,CAAC;AAAA,MACnC,YAAY,EAAE;AAAA,IAChB;AAAA,EACF,EAAE;AAAA;AAUJ,SAAS,0BAA0B,CACjC,OACA,UAC4C;AAAA,EAC5C,IAAI,MAAM,eAAe,QAAQ;AAAA,IAC/B;AAAA,EACF;AAAA,EAEA,IAAI,MAAM,eAAe,YAAY;AAAA,IACnC,MAAM,sBACJ;AAAA,IACF,IAAI,SAAS,SAAS,KAAK,SAAS,GAAG,SAAS,UAAU;AAAA,MACxD,SAAS,KAAK,KAAK,SAAS,IAAI,SAAS,GAAG,SAAS,GAAG;AAAA;AAAA,EAAc,sBAAsB;AAAA,IAC9F,EAAO;AAAA,MACL,SAAS,QAAQ,EAAE,MAAM,UAAU,SAAS,oBAAoB,CAAC;AAAA;AAAA,IAEnE,OAAO,YAAY,MAAM,KAAK;AAAA,EAChC;AAAA,EAEA,IAAI,OAAO,MAAM,eAAe,YAAY,MAAM,eAAe,QAAQ;AAAA,IAEvE,MAAM,gBAAgB,MAAM,OAAO,OACjC,CAAC,SAAyB,KAAK,SAAS,MAAM,UAChD;AAAA,IACA,MAAM,aAAa,iBAAiB,cAAc,SAAS,IAAI,gBAAgB,MAAM;AAAA,IACrF,OAAO,YAAY,UAAU;AAAA,EAC/B;AAAA,EAEA,OAAO,YAAY,MAAM,KAAK;AAAA;AAGzB,IAAM,kBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,eAAe,MAAM,QAAQ,MAAM,MAAM;AAAA,EAE/C,MAAM,eAAuC,MAAM,YAAY,OAAQ,YAAY,CAAC,GAAG,MAAM;AAAA,EAC7F,QAAQ,iBAAiB,MAAM,oBAAoB;AAAA,EAEnD,IAAI,cAAc;AAAA,IAChB,MAAM,UAAU,MAAM;AAAA
,IAGtB,MAAM,QAAkB,CAAC;AAAA,IACzB,MAAM,gBACJ,CAAC;AAAA,IAEH,WAAW,gBAAgB,SAAS;AAAA,MAClC,MAAM,cAAc,KAAK,OAAO,QAAQ,aAAa;AAAA,MACrD,MAAM,YAAW,mBAAmB,WAAW;AAAA,MAE/C,MAAM,SAAQ,2BAA2B,aAAa,SAAQ;AAAA,MAG9D,MAAM,UAAU,aAAa,UAAkB,oBAAoB,WAAU;AAAA,QAC3E;AAAA,QACA,UAAU;AAAA,QACV,uBAAuB;AAAA,MACzB,CAAC;AAAA,MAED,MAAM,YAAW,mBAAmB,aAAa,WAAW,YAAY,YAAY;AAAA,MAEpF,IAAI,WAAU,MAAM,aAAa,SAAQ;AAAA,QACvC,gBAAgB,MAAM,aAAa;AAAA,QACnC,aAAa,MAAM,eAAe;AAAA,QAClC,kBAAkB;AAAA,QAClB;AAAA,MACF,CAAC;AAAA,MAED,IAAI,CAAC,MAAM,QAAQ,QAAO,GAAG;AAAA,QAC3B,WAAU,CAAC,QAAO;AAAA,MACpB;AAAA,MAEA,MAAM,gBAAe,qBAClB,SAAQ,IAAqC,cAChD,EAAE,KAAK;AAAA,MAEP,QAAQ,aAAM,0BAAc,uBAAuB,aAAY;AAAA,MAC/D,MAAM,KAAK,KAAI;AAAA,MACf,cAAc,KAAK,qBAAqB,YAAW,YAAY,KAAK,CAAC;AAAA,IACvE;AAAA,IAKA,OAAO,EAAE,MAAM,OAAO,WAAW,cAAc;AAAA,EACjD;AAAA,EACA,MAAM,WAAW,mBAAmB,KAAK;AAAA,EAEzC,MAAM,QAAQ,2BAA2B,OAAO,QAAQ;AAAA,EAGxD,MAAM,SAAU,aAAa,UAAkB,oBAAoB,UAAU;AAAA,IAC3E;AAAA,IACA,UAAU;AAAA,IACV,uBAAuB;AAAA,EACzB,CAAC;AAAA,EAED,MAAM,WAAW,mBAAmB,aAAa,WAAW,YAAY,YAAY;AAAA,EAEpF,IAAI,UAAU,MAAM,aAAa,QAAQ;AAAA,IACvC,gBAAgB,MAAM,aAAa;AAAA,IACnC,aAAa,MAAM,eAAe;AAAA,IAClC,kBAAkB;AAAA,IAClB;AAAA,EACF,CAAC;AAAA,EAED,IAAI,CAAC,MAAM,QAAQ,OAAO,GAAG;AAAA,IAC3B,UAAU,CAAC,OAAO;AAAA,EACpB;AAAA,EAEA,MAAM,eAAe,qBAClB,QAAQ,IAAqC,cAChD,EAAE,KAAK;AAAA,EAEP,QAAQ,MAAM,cAAc,uBAAuB,YAAY;AAAA,EAC/D,OAAO;AAAA,IACL;AAAA,IACA,WAAW,qBAAqB,WAAW,MAAM,KAAK;AAAA,EACxD;AAAA;AAGK,IAAM,yBAIT,gBAAgB,CAAC,OAAO,OAAO,QAA2D;AAAA,EAC5F,MAAM,eAAe,MAAM;AAAA,EAC3B,MAAM,eAAuC,MAAM,YAAY,OAAQ,cAAc,CAAC,GAAG,MAAM;AAAA,EAC/F,QAAQ,iBAAiB,MAAM,oBAAoB;AAAA,EAEnD,MAAM,WAAW,mBAAmB,KAAK;AAAA,EAEzC,MAAM,QAAQ,2BAA2B,OAAO,QAAQ;AAAA,EAExD,MAAM,SAAU,aAAa,UAAkB,oBAAoB,UAAU;AAAA,IAC3E;AAAA,IACA,UAAU;AAAA,IACV,uBAAuB;AAAA,EACzB,CAAC;AAAA,EAID,MAAM,aAAa,uBAA2D;AAAA,EAC9E,MAAM,aAAa,uBAA2D;AAAA,EAC9E,MAAM,WAAW,4BAA4B,aAAa,WAAW,YAAY,YAAY;AAAA,EAE7F,IAAI,WAAW;AAAA,EACf,MAAM,SAAS,2BAA2B,CAAC,SAAS;AAAA,IAClD,WAAW,KAAK,EAAE,MAAM,cAAc,MAAM,QAAQ,WAAW,KAAK,CAAC;AAAA,GACtE;AAAA,EAID,MAAM,eAAe,WAAW;AAAA,EAChC,WAAW,OAAO,CAAC,U
AA8C;AAAA,IAC/D,IAAI,MAAM,SAAS,gBAAgB,eAAe,OAAO;AAAA,MACvD,YAAY,MAAM;AAAA,MAClB,OAAO,KAAK,MAAM,SAAS;AAAA,IAC7B,EAAO;AAAA,MACL,WAAW,KAAK,KAAK;AAAA;AAAA,IAGvB,aAAa,KAAK;AAAA;AAAA,EAGpB,MAAM,eAAe,WAAW;AAAA,EAChC,WAAW,OAAO,MAAM;AAAA,IACtB,OAAO,MAAM;AAAA,IACb,WAAW,KAAK;AAAA,IAChB,aAAa;AAAA;AAAA,EAGf,MAAM,gBAAgB,WAAW;AAAA,EACjC,WAAW,QAAQ,CAAC,MAAa;AAAA,IAC/B,OAAO,MAAM;AAAA,IACb,WAAW,MAAM,CAAC;AAAA,IAClB,cAAc,CAAC;AAAA;AAAA,EAGjB,MAAM,kBAAkB,aAAa,QAAQ;AAAA,IAC3C,gBAAgB,MAAM,aAAa;AAAA,IACnC,aAAa,MAAM,eAAe;AAAA,IAClC,kBAAkB;AAAA,IAClB;AAAA,EACF,CAAC,EAAE,KACD,MAAM,WAAW,KAAK,GACtB,CAAC,QAAe,WAAW,MAAM,GAAG,CACtC;AAAA,EAEA,OAAO,WAAW;AAAA,EAClB,MAAM;AAAA,EAKN,QAAQ,MAAM,aAAa,cAAc,uBAAuB,QAAQ;AAAA,EACxE,MAAM,iBAAiB,qBAAqB,WAAW,MAAM,KAAK;AAAA,EAElE,IAAI,eAAe,SAAS,GAAG;AAAA,IAC7B,MAAM,EAAE,MAAM,gBAAgB,MAAM,aAAa,aAAa,CAAC,GAAG,cAAc,EAAE;AAAA,EACpF;AAAA,EAEA,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,MAAM,EAAE,MAAM,aAAa,WAAW,eAAe;AAAA,EACvD;AAAA;;;ACnPF;AAEA,SAAS,sBAAsB,GAAY;AAAA,EACzC,OACE,OAAO,eAAe,eACtB,YAAY,cACZ,OAAQ,WAAoD,QAAQ,SAAS;AAAA;AAOjF,eAAe,2BAA2B,CAAC,YAAmC;AAAA,EAC5E,MAAM,YAAa,WAAmD;AAAA,EACtE,MAAM,QAAQ,MAAM,UAAU,KAAK,cAAc;AAAA,EACjD,MAAM,OAAO,MAAM,MAAM,KAAK;AAAA,EAC9B,MAAM,SAAS,IAAI;AAAA,EAEnB,MAAM,mBAA8B,CAAC;AAAA,EACrC,WAAW,WAAW,MAAM;AAAA,IAC1B,MAAM,MAAM,IAAI,IAAI,QAAQ,GAAG;AAAA,IAC/B,IAAI,IAAI,SAAS,WAAW,MAAM,GAAG;AAAA,MACnC,iBAAiB,KAAK,OAAO;AAAA,IAC/B;AAAA,EACF;AAAA,EAEA,WAAW,WAAW,kBAAkB;AAAA,IACtC,IAAI;AAAA,MACF,MAAM,UAAU,MAAM,MAAM,OAAO,OAAO;AAAA,MAC1C,IAAI,CAAC,SAAS;AAAA,QACZ,MAAM,eAAe,MAAM,MAAM,OAAO,QAAQ,GAAG;AAAA,QACnD,IAAI,CAAC,cAAc,CAEnB;AAAA,MACF;AAAA,MACA,OAAO,OAAO;AAAA,MACd,QAAQ,MAAM,iCAAiC,QAAQ,OAAO,KAAK;AAAA;AAAA,EAEvE;AAAA;AAMF,eAAe,8BAA8B,CAAC,OAAqD;AAAA,EACjG,QAAQ,kBAAkB,MAAM,oBAAoB;AAAA,EACpD,QAAQ,UAAU,cAAc,YAAY,UAAU,MAAM;AAAA,EAC5D,MAAM,cAAc,qBAAqB,cAAc,YAAY;AAAA,OAC7D,QAAQ,EAAE,MAAM,IAAI,CAAC;AAAA,EAC3B,CAAC;AAAA;AAOI,IAAM,aAIT,OAAO,OAAO,OAAO,YAAY,YAAY;AAAA,EAE/C,MAAM,WAAW,oBAAoB,KAAM;AAAA,EAC3C,IAAI,qBAAqB,QAAQ,GAAG;AAAA,IAClC,WAAW,IAAI,8BAA8B;AAAA,EAC/C;AAAA,EA
EA,MAAM,aAAa,MAAO,gBAAgB;AAAA,EAC1C,IAAI,uBAAuB,GAAG;AAAA,IAC5B,MAAM,4BAA4B,UAAU;AAAA,EAC9C,EAAO;AAAA,IACL,MAAM,+BAA+B,KAAM;AAAA;AAAA,EAE7C,WAAW,KAAK,qBAAqB;AAAA,EAErC,OAAO;AAAA,IACL,OAAO,MAAM;AAAA,EACf;AAAA;;;AClDK,IAAM,YAAY;AAAA,EACvB,mBAAmB;AAAA,EACnB,iBAAiB;AAAA,EACjB,eAAe;AAAA,EACf,iBAAiB;AAAA,EACjB,mBAAmB;AAAA,EACnB,oBAAoB;AAAA,EACpB,wBAAwB;AAAA,EACxB,2BAA2B;AAAA,EAC3B,wBAAwB;AAAA,EACxB,kBAAkB;AAAA,EAClB,gCAAgC;AAAA,EAChC,kBAAkB;AAAA,EAClB,iBAAiB;AAAA,EACjB,qBAAqB;AAAA,EACrB,uBAAuB;AAAA,EACvB,iBAAiB;AAAA,EACjB,uBAAuB;AAAA,EACvB,oBAAoB;AAAA,EACpB,yBAAyB;AAAA,EACzB,qBAAqB;AAAA,EACrB,iBAAiB;AAAA,EACjB,0BAA0B;AAAA,EAC1B,iBAAiB;AACnB;AAKO,IAAM,mBAGT;AAAA,EACF,oBAAoB;AAAA,EACpB,kBAAkB;AAAA,EAClB,iBAAiB;AAAA,EACjB,wBAAwB;AAAA,EACxB,qBAAqB;AAAA,EACrB,iBAAiB;AAAA,EACjB,0BAA0B;AAC5B;AAEO,IAAM,qBAGT;AAAA,EACF,iBAAiB;AACnB;;;AClFA;AAAA;AAAA;AAUO,MAAM,8CAA8C,iBAAgD;AAAA,EAChG,OAAO;AAAA,EACP,UAAU;AAAA,EACV,kBAAkB;AAAA,EAElB,YAAY;AAAA,IACnB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA,EAEA,WAAW,CACT,OACA,aACA,eACA;AAAA,IACA,MAAM,OAAO,aAAa,aAAa;AAAA;AAE3C;;;AC3CA;AASA,eAAsB,qCAAqC,CACzD,SACe;AAAA,EACf,QAAQ,QAAQ,MAAM,oBAAoB;AAAA,EAC1C,IAAI,SAAU,KAAM,KAAM,QAAQ;AAAA,EAClC,MAAM,WAAW,IAAI,sCACnB,WACA,kBACA,kBACF;AAAA,EACA,MAAM,cAAc,SAAS,QAAQ,KAAK,QAAQ;AAAA,EAClD,SAAS,UAAU,YAAY;AAAA,IAC7B,MAAM,4BAA4B;AAAA,IAClC,MAAM,YAAY;AAAA;AAAA,EAEpB,MAAM,SAAS,SAAS,WAAW,CAAC,CAAC;AAAA;;ACtBvC,sBAAS;;;ACNT;AAAA;AAAA;AAkBO,MAAM,wCAAwC,WAA0C;AAAA,EACpF,OAAO;AAAA,EACP,UAAU;AAAA,EACV,kBAAkB;AAAA,EAElB,YAAY;AAAA,IACnB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA,EAEA,WAAW,CACT,OACA,aACA,eACA;AAAA,IACA,MAAM,OAAO,aAAa,aAAa;AAAA;AAE3C;;;AD9CA;AAEA,eAA
sB,qCAAqC,GAAkB;AAAA,EAC3E,QAAQ,QAAQ,MAAM,oBAAoB;AAAA,EAC1C,IAAI,SAAU,KAAM,KAAM,QAAQ;AAAA,EAClC,MAAM,eAAe,sBAAsB,IAAI,aAAa;AAAA,EAC5D,IAAI,gCACF,WACA,kBACA,kBACF,EAAE,uBAAuB,YAAY;AAAA,EACrC,aAAa,UAAU;AAAA,EACvB,WAAU,EAAE,KAAK,6DAA6D;AAAA;;;AENhF;",
47
+ "debugId": "8397F27EBE0B0C0B64756E2164756E21",
48
+ "names": []
49
+ }
@@ -3,7 +3,7 @@
3
3
  * Copyright 2025 Steven Roussey <sroussey@gmail.com>
4
4
  * SPDX-License-Identifier: Apache-2.0
5
5
  */
6
- import { AiProvider, type AiProviderReactiveRunFn, type AiProviderRunFn, type AiProviderStreamFn } from "@workglow/ai";
6
+ import { AiProvider, type AiProviderReactiveRunFn, type AiProviderRunFn, type AiProviderStreamFn } from "@workglow/ai/worker";
7
7
  import type { LlamaCppModelConfig } from "./common/LlamaCpp_ModelSchema";
8
8
  /**
9
9
  * AI provider for running GGUF models locally via node-llama-cpp.
@@ -16,25 +16,12 @@ import type { LlamaCppModelConfig } from "./common/LlamaCpp_ModelSchema";
16
16
  *
17
17
  * Models are cached in memory after the first load. Use UnloadModelTask to
18
18
  * release memory when a model is no longer needed.
19
- *
20
- * @example
21
- * ```typescript
22
- * // Inline mode:
23
- * import { LLAMACPP_TASKS, LLAMACPP_STREAM_TASKS } from "@workglow/ai-provider/llamacpp";
24
- * await new LlamaCppProvider(LLAMACPP_TASKS, LLAMACPP_STREAM_TASKS).register({ mode: "inline" });
25
- *
26
- * // Worker mode (main thread) -- lightweight, no SDK import:
27
- * await new LlamaCppProvider().register({
28
- * mode: "worker",
29
- * worker: new Worker(new URL("./worker_llamacpp.ts", import.meta.url), { type: "module" }),
30
- * });
31
- * ```
32
19
  */
33
20
  export declare class LlamaCppProvider extends AiProvider<LlamaCppModelConfig> {
34
21
  readonly name = "LOCAL_LLAMACPP";
35
22
  readonly isLocal = true;
36
23
  readonly supportsBrowser = false;
37
- readonly taskTypes: readonly ["DownloadModelTask", "UnloadModelTask", "ModelInfoTask", "CountTokensTask", "TextGenerationTask", "TextEmbeddingTask", "TextRewriterTask", "TextSummaryTask", "ToolCallingTask"];
24
+ readonly taskTypes: readonly ["DownloadModelTask", "UnloadModelTask", "ModelInfoTask", "CountTokensTask", "TextGenerationTask", "TextEmbeddingTask", "TextRewriterTask", "TextSummaryTask", "ToolCallingTask", "ModelSearchTask"];
38
25
  constructor(tasks?: Record<string, AiProviderRunFn<any, any, LlamaCppModelConfig>>, streamTasks?: Record<string, AiProviderStreamFn<any, any, LlamaCppModelConfig>>, reactiveTasks?: Record<string, AiProviderReactiveRunFn<any, any, LlamaCppModelConfig>>);
39
26
  }
40
27
  //# sourceMappingURL=LlamaCppProvider.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"LlamaCppProvider.d.ts","sourceRoot":"","sources":["../../src/provider-llamacpp/LlamaCppProvider.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EACL,UAAU,EACV,KAAK,uBAAuB,EAC5B,KAAK,eAAe,EACpB,KAAK,kBAAkB,EACxB,MAAM,cAAc,CAAC;AAEtB,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,+BAA+B,CAAC;AAEzE;;;;;;;;;;;;;;;;;;;;;;;;GAwBG;AACH,qBAAa,gBAAiB,SAAQ,UAAU,CAAC,mBAAmB,CAAC;IACnE,QAAQ,CAAC,IAAI,oBAAkB;IAC/B,QAAQ,CAAC,OAAO,QAAQ;IACxB,QAAQ,CAAC,eAAe,SAAS;IAEjC,QAAQ,CAAC,SAAS,6LAUP;gBAGT,KAAK,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,eAAe,CAAC,GAAG,EAAE,GAAG,EAAE,mBAAmB,CAAC,CAAC,EACtE,WAAW,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,kBAAkB,CAAC,GAAG,EAAE,GAAG,EAAE,mBAAmB,CAAC,CAAC,EAC/E,aAAa,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,uBAAuB,CAAC,GAAG,EAAE,GAAG,EAAE,mBAAmB,CAAC,CAAC;CAIzF"}
1
+ {"version":3,"file":"LlamaCppProvider.d.ts","sourceRoot":"","sources":["../../src/provider-llamacpp/LlamaCppProvider.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EACL,UAAU,EACV,KAAK,uBAAuB,EAC5B,KAAK,eAAe,EACpB,KAAK,kBAAkB,EACxB,MAAM,qBAAqB,CAAC;AAE7B,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,+BAA+B,CAAC;AAEzE;;;;;;;;;;;GAWG;AACH,qBAAa,gBAAiB,SAAQ,UAAU,CAAC,mBAAmB,CAAC;IACnE,QAAQ,CAAC,IAAI,oBAAkB;IAC/B,QAAQ,CAAC,OAAO,QAAQ;IACxB,QAAQ,CAAC,eAAe,SAAS;IAEjC,QAAQ,CAAC,SAAS,gNAWP;IAEX,YACE,KAAK,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,eAAe,CAAC,GAAG,EAAE,GAAG,EAAE,mBAAmB,CAAC,CAAC,EACtE,WAAW,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,kBAAkB,CAAC,GAAG,EAAE,GAAG,EAAE,mBAAmB,CAAC,CAAC,EAC/E,aAAa,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,uBAAuB,CAAC,GAAG,EAAE,GAAG,EAAE,mBAAmB,CAAC,CAAC,EAGvF;CACF"}
@@ -0,0 +1,16 @@
1
+ /**
2
+ * @license
3
+ * Copyright 2025 Steven Roussey <sroussey@gmail.com>
4
+ * SPDX-License-Identifier: Apache-2.0
5
+ */
6
+ import { QueuedAiProvider, type AiProviderReactiveRunFn, type AiProviderRunFn, type AiProviderStreamFn } from "@workglow/ai";
7
+ import type { LlamaCppModelConfig } from "./common/LlamaCpp_ModelSchema";
8
+ /** Main-thread registration (inline or worker-backed); creates the default job queue. */
9
+ export declare class LlamaCppQueuedProvider extends QueuedAiProvider<LlamaCppModelConfig> {
10
+ readonly name = "LOCAL_LLAMACPP";
11
+ readonly isLocal = true;
12
+ readonly supportsBrowser = false;
13
+ readonly taskTypes: readonly ["DownloadModelTask", "UnloadModelTask", "ModelInfoTask", "CountTokensTask", "TextGenerationTask", "TextEmbeddingTask", "TextRewriterTask", "TextSummaryTask", "ToolCallingTask", "ModelSearchTask"];
14
+ constructor(tasks?: Record<string, AiProviderRunFn<any, any, LlamaCppModelConfig>>, streamTasks?: Record<string, AiProviderStreamFn<any, any, LlamaCppModelConfig>>, reactiveTasks?: Record<string, AiProviderReactiveRunFn<any, any, LlamaCppModelConfig>>);
15
+ }
16
+ //# sourceMappingURL=LlamaCppQueuedProvider.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"LlamaCppQueuedProvider.d.ts","sourceRoot":"","sources":["../../src/provider-llamacpp/LlamaCppQueuedProvider.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EACL,gBAAgB,EAChB,KAAK,uBAAuB,EAC5B,KAAK,eAAe,EACpB,KAAK,kBAAkB,EACxB,MAAM,cAAc,CAAC;AAEtB,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,+BAA+B,CAAC;AAEzE,yFAAyF;AACzF,qBAAa,sBAAuB,SAAQ,gBAAgB,CAAC,mBAAmB,CAAC;IAC/E,QAAQ,CAAC,IAAI,oBAAkB;IAC/B,QAAQ,CAAC,OAAO,QAAQ;IACxB,QAAQ,CAAC,eAAe,SAAS;IAEjC,QAAQ,CAAC,SAAS,gNAWP;IAEX,YACE,KAAK,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,eAAe,CAAC,GAAG,EAAE,GAAG,EAAE,mBAAmB,CAAC,CAAC,EACtE,WAAW,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,kBAAkB,CAAC,GAAG,EAAE,GAAG,EAAE,mBAAmB,CAAC,CAAC,EAC/E,aAAa,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,uBAAuB,CAAC,GAAG,EAAE,GAAG,EAAE,mBAAmB,CAAC,CAAC,EAGvF;CACF"}
@@ -0,0 +1,10 @@
1
+ /**
2
+ * @license
3
+ * Copyright 2025 Steven Roussey <sroussey@gmail.com>
4
+ * SPDX-License-Identifier: Apache-2.0
5
+ */
6
+ import type { AiProviderReactiveRunFn, AiProviderRunFn, CountTokensTaskInput, CountTokensTaskOutput } from "@workglow/ai";
7
+ import type { LlamaCppModelConfig } from "./LlamaCpp_ModelSchema";
8
+ export declare const LlamaCpp_CountTokens: AiProviderRunFn<CountTokensTaskInput, CountTokensTaskOutput, LlamaCppModelConfig>;
9
+ export declare const LlamaCpp_CountTokens_Reactive: AiProviderReactiveRunFn<CountTokensTaskInput, CountTokensTaskOutput, LlamaCppModelConfig>;
10
+ //# sourceMappingURL=LlamaCpp_CountTokens.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"LlamaCpp_CountTokens.d.ts","sourceRoot":"","sources":["../../../src/provider-llamacpp/common/LlamaCpp_CountTokens.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,KAAK,EACV,uBAAuB,EACvB,eAAe,EACf,oBAAoB,EACpB,qBAAqB,EACtB,MAAM,cAAc,CAAC;AAEtB,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,wBAAwB,CAAC;AAGlE,eAAO,MAAM,oBAAoB,EAAE,eAAe,CAChD,oBAAoB,EACpB,qBAAqB,EACrB,mBAAmB,CAkBpB,CAAC;AAEF,eAAO,MAAM,6BAA6B,EAAE,uBAAuB,CACjE,oBAAoB,EACpB,qBAAqB,EACrB,mBAAmB,CAGpB,CAAC"}
@@ -0,0 +1,9 @@
1
+ /**
2
+ * @license
3
+ * Copyright 2025 Steven Roussey <sroussey@gmail.com>
4
+ * SPDX-License-Identifier: Apache-2.0
5
+ */
6
+ import type { AiProviderRunFn, DownloadModelTaskRunInput, DownloadModelTaskRunOutput } from "@workglow/ai";
7
+ import type { LlamaCppModelConfig } from "./LlamaCpp_ModelSchema";
8
+ export declare const LlamaCpp_Download: AiProviderRunFn<DownloadModelTaskRunInput, DownloadModelTaskRunOutput, LlamaCppModelConfig>;
9
+ //# sourceMappingURL=LlamaCpp_Download.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"LlamaCpp_Download.d.ts","sourceRoot":"","sources":["../../../src/provider-llamacpp/common/LlamaCpp_Download.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,KAAK,EACV,eAAe,EACf,yBAAyB,EACzB,0BAA0B,EAC3B,MAAM,cAAc,CAAC;AAEtB,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,wBAAwB,CAAC;AAGlE,eAAO,MAAM,iBAAiB,EAAE,eAAe,CAC7C,yBAAyB,EACzB,0BAA0B,EAC1B,mBAAmB,CAkCpB,CAAC"}
@@ -3,25 +3,9 @@
3
3
  * Copyright 2025 Steven Roussey <sroussey@gmail.com>
4
4
  * SPDX-License-Identifier: Apache-2.0
5
5
  */
6
- import type { AiProviderReactiveRunFn, AiProviderRunFn, AiProviderStreamFn, CountTokensTaskInput, CountTokensTaskOutput, DownloadModelTaskRunInput, DownloadModelTaskRunOutput, ModelInfoTaskInput, ModelInfoTaskOutput, StructuredGenerationTaskInput, StructuredGenerationTaskOutput, TextEmbeddingTaskInput, TextEmbeddingTaskOutput, TextGenerationTaskInput, TextGenerationTaskOutput, TextRewriterTaskInput, TextRewriterTaskOutput, TextSummaryTaskInput, TextSummaryTaskOutput, ToolCallingTaskInput, ToolCallingTaskOutput, UnloadModelTaskRunInput, UnloadModelTaskRunOutput } from "@workglow/ai";
6
+ import type { AiProviderReactiveRunFn, AiProviderRunFn, AiProviderStreamFn } from "@workglow/ai";
7
7
  import type { LlamaCppModelConfig } from "./LlamaCpp_ModelSchema";
8
- export declare const LlamaCpp_Download: AiProviderRunFn<DownloadModelTaskRunInput, DownloadModelTaskRunOutput, LlamaCppModelConfig>;
9
- export declare const LlamaCpp_Unload: AiProviderRunFn<UnloadModelTaskRunInput, UnloadModelTaskRunOutput, LlamaCppModelConfig>;
10
- export declare const LlamaCpp_TextGeneration: AiProviderRunFn<TextGenerationTaskInput, TextGenerationTaskOutput, LlamaCppModelConfig>;
11
- export declare const LlamaCpp_TextGeneration_Stream: AiProviderStreamFn<TextGenerationTaskInput, TextGenerationTaskOutput, LlamaCppModelConfig>;
12
- export declare const LlamaCpp_TextEmbedding: AiProviderRunFn<TextEmbeddingTaskInput, TextEmbeddingTaskOutput, LlamaCppModelConfig>;
13
- export declare const LlamaCpp_TextRewriter: AiProviderRunFn<TextRewriterTaskInput, TextRewriterTaskOutput, LlamaCppModelConfig>;
14
- export declare const LlamaCpp_TextRewriter_Stream: AiProviderStreamFn<TextRewriterTaskInput, TextRewriterTaskOutput, LlamaCppModelConfig>;
15
- export declare const LlamaCpp_TextSummary: AiProviderRunFn<TextSummaryTaskInput, TextSummaryTaskOutput, LlamaCppModelConfig>;
16
- export declare const LlamaCpp_TextSummary_Stream: AiProviderStreamFn<TextSummaryTaskInput, TextSummaryTaskOutput, LlamaCppModelConfig>;
17
- export declare function disposeLlamaCppResources(): Promise<void>;
18
- export declare const LlamaCpp_CountTokens: AiProviderRunFn<CountTokensTaskInput, CountTokensTaskOutput, LlamaCppModelConfig>;
19
- export declare const LlamaCpp_CountTokens_Reactive: AiProviderReactiveRunFn<CountTokensTaskInput, CountTokensTaskOutput, LlamaCppModelConfig>;
20
- export declare const LlamaCpp_ToolCalling: AiProviderRunFn<ToolCallingTaskInput, ToolCallingTaskOutput, LlamaCppModelConfig>;
21
- export declare const LlamaCpp_ToolCalling_Stream: AiProviderStreamFn<ToolCallingTaskInput, ToolCallingTaskOutput, LlamaCppModelConfig>;
22
- export declare const LlamaCpp_StructuredGeneration: AiProviderRunFn<StructuredGenerationTaskInput, StructuredGenerationTaskOutput, LlamaCppModelConfig>;
23
- export declare const LlamaCpp_StructuredGeneration_Stream: AiProviderStreamFn<StructuredGenerationTaskInput, StructuredGenerationTaskOutput, LlamaCppModelConfig>;
24
- export declare const LlamaCpp_ModelInfo: AiProviderRunFn<ModelInfoTaskInput, ModelInfoTaskOutput, LlamaCppModelConfig>;
8
+ export { disposeLlamaCppResources, loadSdk, getLlamaCppSdk, getLlamaInstance, getOrCreateTextContext, getOrCreateEmbeddingContext, getOrLoadModel, getActualModelPath, getConfigKey, resolvedPaths, streamFromSession, } from "./LlamaCpp_Runtime";
25
9
  export declare const LLAMACPP_TASKS: Record<string, AiProviderRunFn<any, any, LlamaCppModelConfig>>;
26
10
  export declare const LLAMACPP_STREAM_TASKS: Record<string, AiProviderStreamFn<any, any, LlamaCppModelConfig>>;
27
11
  export declare const LLAMACPP_REACTIVE_TASKS: Record<string, AiProviderReactiveRunFn<any, any, LlamaCppModelConfig>>;
@@ -1 +1 @@
1
- {"version":3,"file":"LlamaCpp_JobRunFns.d.ts","sourceRoot":"","sources":["../../../src/provider-llamacpp/common/LlamaCpp_JobRunFns.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAGH,OAAO,KAAK,EACV,uBAAuB,EACvB,eAAe,EACf,kBAAkB,EAClB,oBAAoB,EACpB,qBAAqB,EACrB,yBAAyB,EACzB,0BAA0B,EAC1B,kBAAkB,EAClB,mBAAmB,EACnB,6BAA6B,EAC7B,8BAA8B,EAC9B,sBAAsB,EACtB,uBAAuB,EACvB,uBAAuB,EACvB,wBAAwB,EACxB,qBAAqB,EACrB,sBAAsB,EACtB,oBAAoB,EACpB,qBAAqB,EACrB,oBAAoB,EACpB,qBAAqB,EAGrB,uBAAuB,EACvB,wBAAwB,EACzB,MAAM,cAAc,CAAC;AAItB,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,wBAAwB,CAAC;AA2KlE,eAAO,MAAM,iBAAiB,EAAE,eAAe,CAC7C,yBAAyB,EACzB,0BAA0B,EAC1B,mBAAmB,CAoCpB,CAAC;AAMF,eAAO,MAAM,eAAe,EAAE,eAAe,CAC3C,uBAAuB,EACvB,wBAAwB,EACxB,mBAAmB,CAiCpB,CAAC;AAMF,eAAO,MAAM,uBAAuB,EAAE,eAAe,CACnD,uBAAuB,EACvB,wBAAwB,EACxB,mBAAmB,CA0CpB,CAAC;AAMF,eAAO,MAAM,8BAA8B,EAAE,kBAAkB,CAC7D,uBAAuB,EACvB,wBAAwB,EACxB,mBAAmB,CAsBpB,CAAC;AAMF,eAAO,MAAM,sBAAsB,EAAE,eAAe,CAClD,sBAAsB,EACtB,uBAAuB,EACvB,mBAAmB,CAoBpB,CAAC;AAMF,eAAO,MAAM,qBAAqB,EAAE,eAAe,CACjD,qBAAqB,EACrB,sBAAsB,EACtB,mBAAmB,CAwCpB,CAAC;AAMF,eAAO,MAAM,4BAA4B,EAAE,kBAAkB,CAC3D,qBAAqB,EACrB,sBAAsB,EACtB,mBAAmB,CAmBpB,CAAC;AAMF,eAAO,MAAM,oBAAoB,EAAE,eAAe,CAChD,oBAAoB,EACpB,qBAAqB,EACrB,mBAAmB,CAwCpB,CAAC;AAMF,eAAO,MAAM,2BAA2B,EAAE,kBAAkB,CAC1D,oBAAoB,EACpB,qBAAqB,EACrB,mBAAmB,CAmBpB,CAAC;AAMF,wBAAsB,wBAAwB,IAAI,OAAO,CAAC,IAAI,CAAC,CAkB9D;AAED,eAAO,MAAM,oBAAoB,EAAE,eAAe,CAChD,oBAAoB,EACpB,qBAAqB,EACrB,mBAAmB,CAmBpB,CAAC;AAEF,eAAO,MAAM,6BAA6B,EAAE,uBAAuB,CACjE,oBAAoB,EACpB,qBAAqB,EACrB,mBAAmB,CAGpB,CAAC;AA+DF,eAAO,MAAM,oBAAoB,EAAE,eAAe,CAChD,oBAAoB,EACpB,qBAAqB,EACrB,mBAAmB,CA6DpB,CAAC;AAMF,eAAO,MAAM,2BAA2B,EAAE,kBAAkB,CAC1D,oBAAoB,EACpB,qBAAqB,EACrB,mBAAmB,CA+FpB,CAAC;AAMF,eAAO,MAAM,6BAA6B,EAAE,eAAe,CACzD,6BAA6B,EAC7B,8BAA8B,EAC9B,mBAAmB,CAoCpB,CAAC;AAMF,eAAO,MAAM,oCAAoC,EAAE,kBAAkB,CACnE,6BAA6B,EAC7B,8BAA8B,EAC9B,mBAAmB,CA6FpB,CAAC;AAMF,eAAO,MAAM,kBAAkB,EAAE,eAAe,CAC9C,kBAAkB,EAClB,mBAAmB,EACnB,mBAAmB,CAkCpB,CAAC;AAMF,eAAO,MAAM,cAAc,EAAE,MAAM,CAAC,MAAM,EAAE,eAAe,CAAC
,GAAG,EAAE,GAAG,EAAE,mBAAmB,CAAC,CAWzF,CAAC;AAEF,eAAO,MAAM,qBAAqB,EAAE,MAAM,CACxC,MAAM,EACN,kBAAkB,CAAC,GAAG,EAAE,GAAG,EAAE,mBAAmB,CAAC,CAOlD,CAAC;AAEF,eAAO,MAAM,uBAAuB,EAAE,MAAM,CAC1C,MAAM,EACN,uBAAuB,CAAC,GAAG,EAAE,GAAG,EAAE,mBAAmB,CAAC,CAGvD,CAAC"}
1
+ {"version":3,"file":"LlamaCpp_JobRunFns.d.ts","sourceRoot":"","sources":["../../../src/provider-llamacpp/common/LlamaCpp_JobRunFns.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,KAAK,EAAE,uBAAuB,EAAE,eAAe,EAAE,kBAAkB,EAAE,MAAM,cAAc,CAAC;AACjG,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,wBAAwB,CAAC;AAGlE,OAAO,EACL,wBAAwB,EACxB,OAAO,EACP,cAAc,EACd,gBAAgB,EAChB,sBAAsB,EACtB,2BAA2B,EAC3B,cAAc,EACd,kBAAkB,EAClB,YAAY,EACZ,aAAa,EACb,iBAAiB,GAClB,MAAM,oBAAoB,CAAC;AAgB5B,eAAO,MAAM,cAAc,EAAE,MAAM,CAAC,MAAM,EAAE,eAAe,CAAC,GAAG,EAAE,GAAG,EAAE,mBAAmB,CAAC,CAYzF,CAAC;AAEF,eAAO,MAAM,qBAAqB,EAAE,MAAM,CACxC,MAAM,EACN,kBAAkB,CAAC,GAAG,EAAE,GAAG,EAAE,mBAAmB,CAAC,CAOlD,CAAC;AAEF,eAAO,MAAM,uBAAuB,EAAE,MAAM,CAC1C,MAAM,EACN,uBAAuB,CAAC,GAAG,EAAE,GAAG,EAAE,mBAAmB,CAAC,CAGvD,CAAC"}