beeai-framework 0.1.19 → 0.1.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (375)
  1. package/dist/adapters/a2a/agents/agent.cjs +4 -4
  2. package/dist/adapters/a2a/agents/agent.cjs.map +1 -1
  3. package/dist/adapters/a2a/agents/agent.d.cts +2 -2
  4. package/dist/adapters/a2a/agents/agent.d.ts +2 -2
  5. package/dist/adapters/a2a/agents/agent.js +4 -4
  6. package/dist/adapters/a2a/agents/agent.js.map +1 -1
  7. package/dist/adapters/a2a/agents/events.d.cts +1 -1
  8. package/dist/adapters/a2a/agents/events.d.ts +1 -1
  9. package/dist/adapters/a2a/agents/types.d.cts +2 -2
  10. package/dist/adapters/a2a/agents/types.d.ts +2 -2
  11. package/dist/adapters/a2a/agents/utils.cjs +2 -2
  12. package/dist/adapters/a2a/agents/utils.cjs.map +1 -1
  13. package/dist/adapters/a2a/agents/utils.js +2 -2
  14. package/dist/adapters/a2a/agents/utils.js.map +1 -1
  15. package/dist/adapters/a2a/serve/agent_executor.cjs +1 -3
  16. package/dist/adapters/a2a/serve/agent_executor.cjs.map +1 -1
  17. package/dist/adapters/a2a/serve/agent_executor.d.cts +4 -3
  18. package/dist/adapters/a2a/serve/agent_executor.d.ts +4 -3
  19. package/dist/adapters/a2a/serve/agent_executor.js +1 -3
  20. package/dist/adapters/a2a/serve/agent_executor.js.map +1 -1
  21. package/dist/adapters/a2a/serve/server.cjs +15 -0
  22. package/dist/adapters/a2a/serve/server.cjs.map +1 -1
  23. package/dist/adapters/a2a/serve/server.d.cts +4 -2
  24. package/dist/adapters/a2a/serve/server.d.ts +4 -2
  25. package/dist/adapters/a2a/serve/server.js +15 -0
  26. package/dist/adapters/a2a/serve/server.js.map +1 -1
  27. package/dist/adapters/acp/agents/agent.d.cts +2 -2
  28. package/dist/adapters/acp/agents/agent.d.ts +2 -2
  29. package/dist/adapters/acp/agents/events.d.cts +1 -1
  30. package/dist/adapters/acp/agents/events.d.ts +1 -1
  31. package/dist/adapters/{beeai_platform → agentstack}/agents/agent.cjs +4 -4
  32. package/dist/adapters/agentstack/agents/agent.cjs.map +1 -0
  33. package/dist/adapters/{beeai_platform → agentstack}/agents/agent.d.cts +12 -12
  34. package/dist/adapters/{beeai_platform → agentstack}/agents/agent.d.ts +12 -12
  35. package/dist/adapters/{beeai_platform → agentstack}/agents/agent.js +4 -4
  36. package/dist/adapters/agentstack/agents/agent.js.map +1 -0
  37. package/dist/adapters/agentstack/agents/events.d.cts +23 -0
  38. package/dist/adapters/agentstack/agents/events.d.ts +23 -0
  39. package/dist/adapters/{beeai_platform → agentstack}/agents/types.d.cts +4 -4
  40. package/dist/adapters/{beeai_platform → agentstack}/agents/types.d.ts +4 -4
  41. package/dist/adapters/amazon-bedrock/backend/chat.cjs +3 -2
  42. package/dist/adapters/amazon-bedrock/backend/chat.cjs.map +1 -1
  43. package/dist/adapters/amazon-bedrock/backend/chat.d.cts +5 -6
  44. package/dist/adapters/amazon-bedrock/backend/chat.d.ts +5 -6
  45. package/dist/adapters/amazon-bedrock/backend/chat.js +3 -2
  46. package/dist/adapters/amazon-bedrock/backend/chat.js.map +1 -1
  47. package/dist/adapters/amazon-bedrock/backend/embedding.cjs +2 -2
  48. package/dist/adapters/amazon-bedrock/backend/embedding.cjs.map +1 -1
  49. package/dist/adapters/amazon-bedrock/backend/embedding.d.cts +4 -4
  50. package/dist/adapters/amazon-bedrock/backend/embedding.d.ts +4 -4
  51. package/dist/adapters/amazon-bedrock/backend/embedding.js +2 -2
  52. package/dist/adapters/amazon-bedrock/backend/embedding.js.map +1 -1
  53. package/dist/adapters/anthropic/backend/chat.cjs +3 -2
  54. package/dist/adapters/anthropic/backend/chat.cjs.map +1 -1
  55. package/dist/adapters/anthropic/backend/chat.d.cts +5 -6
  56. package/dist/adapters/anthropic/backend/chat.d.ts +5 -6
  57. package/dist/adapters/anthropic/backend/chat.js +3 -2
  58. package/dist/adapters/anthropic/backend/chat.js.map +1 -1
  59. package/dist/adapters/anthropic/backend/embedding.d.cts +2 -2
  60. package/dist/adapters/anthropic/backend/embedding.d.ts +2 -2
  61. package/dist/adapters/azure-openai/backend/chat.cjs +3 -2
  62. package/dist/adapters/azure-openai/backend/chat.cjs.map +1 -1
  63. package/dist/adapters/azure-openai/backend/chat.d.cts +5 -6
  64. package/dist/adapters/azure-openai/backend/chat.d.ts +5 -6
  65. package/dist/adapters/azure-openai/backend/chat.js +3 -2
  66. package/dist/adapters/azure-openai/backend/chat.js.map +1 -1
  67. package/dist/adapters/azure-openai/backend/embedding.cjs +2 -2
  68. package/dist/adapters/azure-openai/backend/embedding.cjs.map +1 -1
  69. package/dist/adapters/azure-openai/backend/embedding.d.cts +3 -3
  70. package/dist/adapters/azure-openai/backend/embedding.d.ts +3 -3
  71. package/dist/adapters/azure-openai/backend/embedding.js +2 -2
  72. package/dist/adapters/azure-openai/backend/embedding.js.map +1 -1
  73. package/dist/adapters/dummy/backend/chat.d.cts +2 -2
  74. package/dist/adapters/dummy/backend/chat.d.ts +2 -2
  75. package/dist/adapters/dummy/backend/embedding.d.cts +2 -2
  76. package/dist/adapters/dummy/backend/embedding.d.ts +2 -2
  77. package/dist/adapters/google-vertex/backend/chat.cjs +3 -2
  78. package/dist/adapters/google-vertex/backend/chat.cjs.map +1 -1
  79. package/dist/adapters/google-vertex/backend/chat.d.cts +5 -6
  80. package/dist/adapters/google-vertex/backend/chat.d.ts +5 -6
  81. package/dist/adapters/google-vertex/backend/chat.js +3 -2
  82. package/dist/adapters/google-vertex/backend/chat.js.map +1 -1
  83. package/dist/adapters/google-vertex/backend/embedding.d.cts +2 -2
  84. package/dist/adapters/google-vertex/backend/embedding.d.ts +2 -2
  85. package/dist/adapters/groq/backend/chat.cjs +3 -2
  86. package/dist/adapters/groq/backend/chat.cjs.map +1 -1
  87. package/dist/adapters/groq/backend/chat.d.cts +5 -6
  88. package/dist/adapters/groq/backend/chat.d.ts +5 -6
  89. package/dist/adapters/groq/backend/chat.js +3 -2
  90. package/dist/adapters/groq/backend/chat.js.map +1 -1
  91. package/dist/adapters/groq/backend/embedding.d.cts +2 -2
  92. package/dist/adapters/groq/backend/embedding.d.ts +2 -2
  93. package/dist/adapters/langchain/backend/chat.cjs +6 -5
  94. package/dist/adapters/langchain/backend/chat.cjs.map +1 -1
  95. package/dist/adapters/langchain/backend/chat.d.cts +2 -2
  96. package/dist/adapters/langchain/backend/chat.d.ts +2 -2
  97. package/dist/adapters/langchain/backend/chat.js +6 -5
  98. package/dist/adapters/langchain/backend/chat.js.map +1 -1
  99. package/dist/adapters/langchain/backend/embedding.d.cts +3 -3
  100. package/dist/adapters/langchain/backend/embedding.d.ts +3 -3
  101. package/dist/adapters/langchain/tools.d.cts +1 -1
  102. package/dist/adapters/langchain/tools.d.ts +1 -1
  103. package/dist/adapters/mcp/serve/server.cjs +12 -4
  104. package/dist/adapters/mcp/serve/server.cjs.map +1 -1
  105. package/dist/adapters/mcp/serve/server.d.cts +1 -1
  106. package/dist/adapters/mcp/serve/server.d.ts +1 -1
  107. package/dist/adapters/mcp/serve/server.js +12 -4
  108. package/dist/adapters/mcp/serve/server.js.map +1 -1
  109. package/dist/adapters/ollama/backend/chat.cjs +1 -4
  110. package/dist/adapters/ollama/backend/chat.cjs.map +1 -1
  111. package/dist/adapters/ollama/backend/chat.d.cts +4 -4
  112. package/dist/adapters/ollama/backend/chat.d.ts +4 -4
  113. package/dist/adapters/ollama/backend/chat.js +1 -4
  114. package/dist/adapters/ollama/backend/chat.js.map +1 -1
  115. package/dist/adapters/ollama/backend/client.cjs +2 -2
  116. package/dist/adapters/ollama/backend/client.cjs.map +1 -1
  117. package/dist/adapters/ollama/backend/client.d.cts +1 -1
  118. package/dist/adapters/ollama/backend/client.d.ts +1 -1
  119. package/dist/adapters/ollama/backend/client.js +1 -1
  120. package/dist/adapters/ollama/backend/client.js.map +1 -1
  121. package/dist/adapters/ollama/backend/embedding.cjs.map +1 -1
  122. package/dist/adapters/ollama/backend/embedding.d.cts +3 -3
  123. package/dist/adapters/ollama/backend/embedding.d.ts +3 -3
  124. package/dist/adapters/ollama/backend/embedding.js.map +1 -1
  125. package/dist/adapters/openai/backend/chat.cjs +3 -2
  126. package/dist/adapters/openai/backend/chat.cjs.map +1 -1
  127. package/dist/adapters/openai/backend/chat.d.cts +5 -6
  128. package/dist/adapters/openai/backend/chat.d.ts +5 -6
  129. package/dist/adapters/openai/backend/chat.js +3 -2
  130. package/dist/adapters/openai/backend/chat.js.map +1 -1
  131. package/dist/adapters/openai/backend/client.cjs +2 -11
  132. package/dist/adapters/openai/backend/client.cjs.map +1 -1
  133. package/dist/adapters/openai/backend/client.js +3 -12
  134. package/dist/adapters/openai/backend/client.js.map +1 -1
  135. package/dist/adapters/openai/backend/embedding.cjs +2 -2
  136. package/dist/adapters/openai/backend/embedding.cjs.map +1 -1
  137. package/dist/adapters/openai/backend/embedding.d.cts +4 -4
  138. package/dist/adapters/openai/backend/embedding.d.ts +4 -4
  139. package/dist/adapters/openai/backend/embedding.js +2 -2
  140. package/dist/adapters/openai/backend/embedding.js.map +1 -1
  141. package/dist/adapters/vercel/backend/chat.cjs +26 -21
  142. package/dist/adapters/vercel/backend/chat.cjs.map +1 -1
  143. package/dist/adapters/vercel/backend/chat.d.cts +6 -4
  144. package/dist/adapters/vercel/backend/chat.d.ts +6 -4
  145. package/dist/adapters/vercel/backend/chat.js +27 -22
  146. package/dist/adapters/vercel/backend/chat.js.map +1 -1
  147. package/dist/adapters/vercel/backend/embedding.cjs.map +1 -1
  148. package/dist/adapters/vercel/backend/embedding.d.cts +3 -3
  149. package/dist/adapters/vercel/backend/embedding.d.ts +3 -3
  150. package/dist/adapters/vercel/backend/embedding.js.map +1 -1
  151. package/dist/adapters/vercel/backend/utils.cjs +11 -0
  152. package/dist/adapters/vercel/backend/utils.cjs.map +1 -1
  153. package/dist/adapters/vercel/backend/utils.d.cts +19 -2
  154. package/dist/adapters/vercel/backend/utils.d.ts +19 -2
  155. package/dist/adapters/vercel/backend/utils.js +11 -1
  156. package/dist/adapters/vercel/backend/utils.js.map +1 -1
  157. package/dist/adapters/watsonx/backend/chat.cjs +10 -4
  158. package/dist/adapters/watsonx/backend/chat.cjs.map +1 -1
  159. package/dist/adapters/watsonx/backend/chat.d.cts +2 -2
  160. package/dist/adapters/watsonx/backend/chat.d.ts +2 -2
  161. package/dist/adapters/watsonx/backend/chat.js +10 -4
  162. package/dist/adapters/watsonx/backend/chat.js.map +1 -1
  163. package/dist/adapters/watsonx/backend/embedding.d.cts +2 -2
  164. package/dist/adapters/watsonx/backend/embedding.d.ts +2 -2
  165. package/dist/adapters/xai/backend/chat.cjs +3 -2
  166. package/dist/adapters/xai/backend/chat.cjs.map +1 -1
  167. package/dist/adapters/xai/backend/chat.d.cts +5 -6
  168. package/dist/adapters/xai/backend/chat.d.ts +5 -6
  169. package/dist/adapters/xai/backend/chat.js +3 -2
  170. package/dist/adapters/xai/backend/chat.js.map +1 -1
  171. package/dist/{agent-7NzfPC6d.d.ts → agent-CHebzsQB.d.ts} +4 -4
  172. package/dist/{agent-B6pOfx0K.d.cts → agent-DvvVGvLr.d.cts} +4 -4
  173. package/dist/agents/base.d.cts +2 -2
  174. package/dist/agents/base.d.ts +2 -2
  175. package/dist/agents/experimental/replan/agent.d.cts +3 -3
  176. package/dist/agents/experimental/replan/agent.d.ts +3 -3
  177. package/dist/agents/experimental/replan/prompts.d.cts +1 -1
  178. package/dist/agents/experimental/replan/prompts.d.ts +1 -1
  179. package/dist/agents/experimental/streamlit/agent.d.cts +3 -3
  180. package/dist/agents/experimental/streamlit/agent.d.ts +3 -3
  181. package/dist/agents/react/agent.d.cts +5 -4
  182. package/dist/agents/react/agent.d.ts +5 -4
  183. package/dist/agents/react/prompts.d.cts +1 -1
  184. package/dist/agents/react/prompts.d.ts +1 -1
  185. package/dist/agents/react/runners/base.cjs.map +1 -1
  186. package/dist/agents/react/runners/base.d.cts +5 -4
  187. package/dist/agents/react/runners/base.d.ts +5 -4
  188. package/dist/agents/react/runners/base.js.map +1 -1
  189. package/dist/agents/react/runners/deep-think/prompts.d.cts +1 -1
  190. package/dist/agents/react/runners/deep-think/prompts.d.ts +1 -1
  191. package/dist/agents/react/runners/deep-think/runner.d.cts +6 -5
  192. package/dist/agents/react/runners/deep-think/runner.d.ts +6 -5
  193. package/dist/agents/react/runners/default/runner.cjs.map +1 -1
  194. package/dist/agents/react/runners/default/runner.d.cts +6 -5
  195. package/dist/agents/react/runners/default/runner.d.ts +6 -5
  196. package/dist/agents/react/runners/default/runner.js.map +1 -1
  197. package/dist/agents/react/runners/granite/prompts.d.cts +1 -1
  198. package/dist/agents/react/runners/granite/prompts.d.ts +1 -1
  199. package/dist/agents/react/runners/granite/runner.cjs +4 -2
  200. package/dist/agents/react/runners/granite/runner.cjs.map +1 -1
  201. package/dist/agents/react/runners/granite/runner.d.cts +6 -5
  202. package/dist/agents/react/runners/granite/runner.d.ts +6 -5
  203. package/dist/agents/react/runners/granite/runner.js +4 -2
  204. package/dist/agents/react/runners/granite/runner.js.map +1 -1
  205. package/dist/agents/react/types.d.cts +3 -2
  206. package/dist/agents/react/types.d.ts +3 -2
  207. package/dist/agents/toolCalling/agent.cjs +9 -5
  208. package/dist/agents/toolCalling/agent.cjs.map +1 -1
  209. package/dist/agents/toolCalling/agent.d.cts +3 -3
  210. package/dist/agents/toolCalling/agent.d.ts +3 -3
  211. package/dist/agents/toolCalling/agent.js +9 -5
  212. package/dist/agents/toolCalling/agent.js.map +1 -1
  213. package/dist/agents/toolCalling/types.d.cts +1 -1
  214. package/dist/agents/toolCalling/types.d.ts +1 -1
  215. package/dist/agents/types.d.cts +2 -2
  216. package/dist/agents/types.d.ts +2 -2
  217. package/dist/backend/backend.d.cts +2 -2
  218. package/dist/backend/backend.d.ts +2 -2
  219. package/dist/backend/chat.cjs +1 -1
  220. package/dist/backend/chat.cjs.map +1 -1
  221. package/dist/backend/chat.d.cts +2 -2
  222. package/dist/backend/chat.d.ts +2 -2
  223. package/dist/backend/chat.js +1 -1
  224. package/dist/backend/chat.js.map +1 -1
  225. package/dist/backend/core.d.cts +2 -2
  226. package/dist/backend/core.d.ts +2 -2
  227. package/dist/backend/embedding.d.cts +2 -2
  228. package/dist/backend/embedding.d.ts +2 -2
  229. package/dist/backend/message.cjs +59 -3
  230. package/dist/backend/message.cjs.map +1 -1
  231. package/dist/backend/message.d.cts +3 -0
  232. package/dist/backend/message.d.ts +3 -0
  233. package/dist/backend/message.js +59 -3
  234. package/dist/backend/message.js.map +1 -1
  235. package/dist/backend/utils.d.cts +2 -2
  236. package/dist/backend/utils.d.ts +2 -2
  237. package/dist/{base-Bguef5Dg.d.ts → base-BQgDlcGa.d.ts} +1 -1
  238. package/dist/{base-C1hO5EJe.d.cts → base-CwcxwybB.d.cts} +1 -1
  239. package/dist/cache/fileCache.cjs.map +1 -1
  240. package/dist/cache/fileCache.js.map +1 -1
  241. package/dist/{chat-BBoOSvzm.d.ts → chat-C0s-o6ll.d.ts} +4 -2
  242. package/dist/{chat-BZ55YQab.d.cts → chat-CRb3vUVg.d.cts} +4 -2
  243. package/dist/context.d.cts +1 -1
  244. package/dist/context.d.ts +1 -1
  245. package/dist/emitter/emitter.d.cts +1 -1
  246. package/dist/emitter/emitter.d.ts +1 -1
  247. package/dist/emitter/types.d.cts +1 -1
  248. package/dist/emitter/types.d.ts +1 -1
  249. package/dist/{emitter-BqpLJQVb.d.ts → emitter-CZFbzlUi.d.ts} +3 -3
  250. package/dist/{emitter-pJzHC_AM.d.cts → emitter-DpqUYjXH.d.cts} +3 -3
  251. package/dist/index.d.cts +3 -3
  252. package/dist/index.d.ts +3 -3
  253. package/dist/internals/fetcher.d.cts +1 -1
  254. package/dist/internals/fetcher.d.ts +1 -1
  255. package/dist/internals/helpers/array.cjs +35 -0
  256. package/dist/internals/helpers/array.cjs.map +1 -1
  257. package/dist/internals/helpers/array.d.cts +6 -1
  258. package/dist/internals/helpers/array.d.ts +6 -1
  259. package/dist/internals/helpers/array.js +35 -1
  260. package/dist/internals/helpers/array.js.map +1 -1
  261. package/dist/internals/helpers/object.cjs +30 -5
  262. package/dist/internals/helpers/object.cjs.map +1 -1
  263. package/dist/internals/helpers/object.d.cts +3 -2
  264. package/dist/internals/helpers/object.d.ts +3 -2
  265. package/dist/internals/helpers/object.js +29 -5
  266. package/dist/internals/helpers/object.js.map +1 -1
  267. package/dist/logger/logger.cjs +8 -4
  268. package/dist/logger/logger.cjs.map +1 -1
  269. package/dist/logger/logger.d.cts +21 -15
  270. package/dist/logger/logger.d.ts +21 -15
  271. package/dist/logger/logger.js +6 -3
  272. package/dist/logger/logger.js.map +1 -1
  273. package/dist/logger/pretty.cjs +2 -2
  274. package/dist/logger/pretty.cjs.map +1 -1
  275. package/dist/logger/pretty.js +2 -2
  276. package/dist/logger/pretty.js.map +1 -1
  277. package/dist/memory/summarizeMemory.d.cts +2 -2
  278. package/dist/memory/summarizeMemory.d.ts +2 -2
  279. package/dist/parsers/linePrefix.cjs.map +1 -1
  280. package/dist/parsers/linePrefix.d.cts +4 -8
  281. package/dist/parsers/linePrefix.d.ts +4 -8
  282. package/dist/parsers/linePrefix.js.map +1 -1
  283. package/dist/serializer/utils.cjs.map +1 -1
  284. package/dist/serializer/utils.js.map +1 -1
  285. package/dist/tools/arxiv.cjs.map +1 -1
  286. package/dist/tools/arxiv.d.cts +1 -1
  287. package/dist/tools/arxiv.d.ts +1 -1
  288. package/dist/tools/arxiv.js.map +1 -1
  289. package/dist/tools/base.cjs.map +1 -1
  290. package/dist/tools/base.d.cts +1 -1
  291. package/dist/tools/base.d.ts +1 -1
  292. package/dist/tools/base.js.map +1 -1
  293. package/dist/tools/calculator.d.cts +1 -1
  294. package/dist/tools/calculator.d.ts +1 -1
  295. package/dist/tools/custom.d.cts +2 -2
  296. package/dist/tools/custom.d.ts +2 -2
  297. package/dist/tools/database/elasticsearch.cjs.map +1 -1
  298. package/dist/tools/database/elasticsearch.d.cts +1 -1
  299. package/dist/tools/database/elasticsearch.d.ts +1 -1
  300. package/dist/tools/database/elasticsearch.js.map +1 -1
  301. package/dist/tools/database/milvus.cjs +3 -3
  302. package/dist/tools/database/milvus.cjs.map +1 -1
  303. package/dist/tools/database/milvus.d.cts +1 -1
  304. package/dist/tools/database/milvus.d.ts +1 -1
  305. package/dist/tools/database/milvus.js +3 -3
  306. package/dist/tools/database/milvus.js.map +1 -1
  307. package/dist/tools/database/qdrant.cjs +2 -2
  308. package/dist/tools/database/qdrant.cjs.map +1 -1
  309. package/dist/tools/database/qdrant.d.cts +1 -1
  310. package/dist/tools/database/qdrant.d.ts +1 -1
  311. package/dist/tools/database/qdrant.js +2 -2
  312. package/dist/tools/database/qdrant.js.map +1 -1
  313. package/dist/tools/database/sql.cjs.map +1 -1
  314. package/dist/tools/database/sql.d.cts +1 -1
  315. package/dist/tools/database/sql.d.ts +1 -1
  316. package/dist/tools/database/sql.js.map +1 -1
  317. package/dist/tools/llm.d.cts +2 -2
  318. package/dist/tools/llm.d.ts +2 -2
  319. package/dist/tools/mcp.d.cts +1 -1
  320. package/dist/tools/mcp.d.ts +1 -1
  321. package/dist/tools/openapi.d.cts +1 -1
  322. package/dist/tools/openapi.d.ts +1 -1
  323. package/dist/tools/python/output.d.cts +1 -1
  324. package/dist/tools/python/output.d.ts +1 -1
  325. package/dist/tools/python/python.d.cts +2 -2
  326. package/dist/tools/python/python.d.ts +2 -2
  327. package/dist/tools/search/base.cjs.map +1 -1
  328. package/dist/tools/search/base.d.cts +1 -1
  329. package/dist/tools/search/base.d.ts +1 -1
  330. package/dist/tools/search/base.js.map +1 -1
  331. package/dist/tools/search/duckDuckGoSearch.cjs.map +1 -1
  332. package/dist/tools/search/duckDuckGoSearch.d.cts +1 -1
  333. package/dist/tools/search/duckDuckGoSearch.d.ts +1 -1
  334. package/dist/tools/search/duckDuckGoSearch.js.map +1 -1
  335. package/dist/tools/search/googleSearch.cjs.map +1 -1
  336. package/dist/tools/search/googleSearch.d.cts +1 -1
  337. package/dist/tools/search/googleSearch.d.ts +1 -1
  338. package/dist/tools/search/googleSearch.js.map +1 -1
  339. package/dist/tools/search/searXNGSearch.d.cts +1 -1
  340. package/dist/tools/search/searXNGSearch.d.ts +1 -1
  341. package/dist/tools/search/wikipedia.cjs +25 -0
  342. package/dist/tools/search/wikipedia.cjs.map +1 -1
  343. package/dist/tools/search/wikipedia.d.cts +2 -3
  344. package/dist/tools/search/wikipedia.d.ts +2 -3
  345. package/dist/tools/search/wikipedia.js +23 -0
  346. package/dist/tools/search/wikipedia.js.map +1 -1
  347. package/dist/tools/similarity.d.cts +1 -1
  348. package/dist/tools/similarity.d.ts +1 -1
  349. package/dist/tools/weather/openMeteo.cjs.map +1 -1
  350. package/dist/tools/weather/openMeteo.d.cts +1 -1
  351. package/dist/tools/weather/openMeteo.d.ts +1 -1
  352. package/dist/tools/weather/openMeteo.js.map +1 -1
  353. package/dist/tools/web/webCrawler.cjs.map +1 -1
  354. package/dist/tools/web/webCrawler.d.cts +1 -1
  355. package/dist/tools/web/webCrawler.d.ts +1 -1
  356. package/dist/tools/web/webCrawler.js.map +1 -1
  357. package/dist/version.cjs +1 -1
  358. package/dist/version.js +1 -1
  359. package/dist/workflows/agent.d.cts +3 -3
  360. package/dist/workflows/agent.d.ts +3 -3
  361. package/dist/workflows/workflow.d.cts +1 -1
  362. package/dist/workflows/workflow.d.ts +1 -1
  363. package/package.json +68 -67
  364. package/dist/adapters/beeai_platform/agents/agent.cjs.map +0 -1
  365. package/dist/adapters/beeai_platform/agents/agent.js.map +0 -1
  366. package/dist/adapters/beeai_platform/agents/events.d.cts +0 -23
  367. package/dist/adapters/beeai_platform/agents/events.d.ts +0 -23
  368. /package/dist/adapters/{beeai_platform → agentstack}/agents/events.cjs +0 -0
  369. /package/dist/adapters/{beeai_platform → agentstack}/agents/events.cjs.map +0 -0
  370. /package/dist/adapters/{beeai_platform → agentstack}/agents/events.js +0 -0
  371. /package/dist/adapters/{beeai_platform → agentstack}/agents/events.js.map +0 -0
  372. /package/dist/adapters/{beeai_platform → agentstack}/agents/types.cjs +0 -0
  373. /package/dist/adapters/{beeai_platform → agentstack}/agents/types.cjs.map +0 -0
  374. /package/dist/adapters/{beeai_platform → agentstack}/agents/types.js +0 -0
  375. /package/dist/adapters/{beeai_platform → agentstack}/agents/types.js.map +0 -0
@@ -1,12 +1,12 @@
1
1
  import { EmbeddingModel, EmbeddingModelEvents, EmbeddingModelInput, EmbeddingModelOutput } from '../../../backend/embedding.js';
2
2
  import { RunContext } from '../../../context.js';
3
- import { E as Emitter } from '../../../emitter-BqpLJQVb.js';
3
+ import { E as Emitter } from '../../../emitter-CZFbzlUi.js';
4
4
  import { Embeddings } from '@langchain/core/embeddings';
5
5
  import '../../../internals/serializable.js';
6
6
  import '../../../internals/types.js';
7
7
  import '../../../internals/helpers/guards.js';
8
8
  import '../../../errors.js';
9
- import '../../../chat-BBoOSvzm.js';
9
+ import '../../../chat-C0s-o6ll.js';
10
10
  import '../../../backend/message.js';
11
11
  import 'ai';
12
12
  import 'promise-based-task';
@@ -35,7 +35,7 @@ declare class LangChainEmbeddingModel extends EmbeddingModel {
35
35
  get providerId(): string;
36
36
  protected _create(input: EmbeddingModelInput, run: RunContext<this>): Promise<EmbeddingModelOutput>;
37
37
  createSnapshot(): {
38
- lcEmbedding: Embeddings;
38
+ lcEmbedding: Embeddings<number[]>;
39
39
  emitter: Emitter<EmbeddingModelEvents>;
40
40
  };
41
41
  loadSnapshot(snapshot: ReturnType<typeof this.createSnapshot>): void;
@@ -4,7 +4,7 @@ import { BaseToolRunOptions, BaseToolOptions, JSONToolOutput, Tool, ToolEmitter,
4
4
  import { GetRunContext } from '../../context.cjs';
5
5
  import { RunnableConfig } from '@langchain/core/runnables';
6
6
  import * as LCTools from '@langchain/core/tools';
7
- import { E as Emitter } from '../../emitter-pJzHC_AM.cjs';
7
+ import { E as Emitter } from '../../emitter-DpqUYjXH.cjs';
8
8
  import '../../internals/serializable.cjs';
9
9
  import '../../internals/types.cjs';
10
10
  import '../../internals/helpers/guards.cjs';
@@ -4,7 +4,7 @@ import { BaseToolRunOptions, BaseToolOptions, JSONToolOutput, Tool, ToolEmitter,
4
4
  import { GetRunContext } from '../../context.js';
5
5
  import { RunnableConfig } from '@langchain/core/runnables';
6
6
  import * as LCTools from '@langchain/core/tools';
7
- import { E as Emitter } from '../../emitter-BqpLJQVb.js';
7
+ import { E as Emitter } from '../../emitter-CZFbzlUi.js';
8
8
  import '../../internals/serializable.js';
9
9
  import '../../internals/types.js';
10
10
  import '../../internals/helpers/guards.js';
@@ -65,10 +65,18 @@ class MCPServer extends server_cjs.Server {
65
65
  throw new errors_cjs.ValueError("Input type is not supported by this server.");
66
66
  }
67
67
  }
68
- if (this.config.transport === "sse") {
69
- http_server_cjs.runServer(this.server, this.config.hostname, this.config.port);
70
- } else {
71
- await this.server.connect(new stdio_js.StdioServerTransport());
68
+ switch (this.config.transport) {
69
+ case "stdio":
70
+ await this.server.connect(new stdio_js.StdioServerTransport());
71
+ break;
72
+ case "sse":
73
+ case "streamable-http":
74
+ http_server_cjs.runServer(this.server, this.config.hostname, this.config.port);
75
+ break;
76
+ default: {
77
+ this.config.transport;
78
+ throw new Error("Unsupported transport type.");
79
+ }
72
80
  }
73
81
  }
74
82
  getFactory(member) {
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../../src/adapters/mcp/serve/server.ts"],"names":["MCPServerConfig","transport","hostname","port","name","version","settings","partial","Object","assign","MCPServer","Server","server","config","McpServer","serve","member","members","factory","getFactory","entry","type","tool","description","paramsSchema","callback","prompt","argsSchema","resource","uri","template","ValueError","runServer","connect","StdioServerTransport","factories","has","Tool","get","toolFactory","schema","inputSchema","ZodType","shape","args","result","run","content","text","getTextContent","registerFactory"],"mappings":";;;;;;;;;;;;AA8DO,MAAMA,eAAAA,CAAAA;EA9Db;;;EA+DEC,SAAAA,GAA6B,OAAA;EAC7BC,QAAAA,GAAW,WAAA;EACXC,IAAAA,GAAO,GAAA;EACPC,IAAAA,GAAO,YAAA;EACPC,OAAAA,GAAU,OAAA;AACVC,EAAAA,QAAAA;AAEA,EAAA,WAAA,CAAYC,OAAAA,EAAoC;AAC9C,IAAA,IAAIA,OAAAA,EAAS;AACXC,MAAAA,MAAAA,CAAOC,MAAAA,CAAO,MAAMF,OAAAA,CAAAA;AACtB;AACF;AACF;AAEO,MAAMG,kBAAkBC,iBAAAA,CAAAA;EA7E/B;;;AA8EYC,EAAAA,MAAAA;AAEV,EAAA,WAAA,CAAYC,MAAAA,EAA0B;AACpC,IAAA,KAAA,CAAMA,MAAAA,IAAU,IAAIb,eAAAA,EAAAA,CAAAA;AACpB,IAAA,IAAA,CAAKY,MAAAA,GAAS,IAAIE,gBAAAA,CAAU;AAC1BV,MAAAA,IAAAA,EAAM,KAAKS,MAAAA,CAAOT,IAAAA;AAClBC,MAAAA,OAAAA,EAAS,KAAKQ,MAAAA,CAAOR,OAAAA;AACrB,MAAA,GAAG,KAAKQ,MAAAA,CAAOP;KACjB,CAAA;AACF;AAEA,EAAA,MAAMS,KAAAA,GAAQ;AACZ,IAAA,KAAA,MAAWC,MAAAA,IAAU,KAAKC,OAAAA,EAAS;AACjC,MAAA,MAAMC,OAAAA,GAAU,IAAA,CAAKC,UAAAA,CAAWH,MAAAA,CAAAA;AAChC,MAAA,MAAMI,KAAAA,GAAQ,MAAMF,OAAAA,CAAQF,MAAAA,CAAAA;AAE5B,MAAA,QAAQI,MAAMC,IAAAA;QACZ,KAAK,MAAA;AACH,UAAA,IAAA,CAAKT,MAAAA,CAAOU,KAAKF,KAAAA,CAAMhB,IAAAA,EAAMgB,MAAMG,WAAAA,EAAaH,KAAAA,CAAMI,YAAAA,EAAcJ,KAAAA,CAAMK,QAAQ,CAAA;AAClF,UAAA;QACF,KAAK,QAAA;AACH,UAAA,IAAI,gBAAgBL,KAAAA,EAAO;AACzB,YAAA,IAAA,CAAKR,MAAAA,CAAOc,OAAON,KAAAA,CAAMhB,IAAAA,EAAMgB,MAAMG,WAAAA,EAAaH,KAAAA,CAAMO,UAAAA,EAAYP,KAAAA,CAAMK,QAAQ,CAAA;WACpF,MAAO;AACL,YAAA,IAAA,CAAKb,OAAOc,MAAAA,CAAON,KAAAA,CAAMhB,MAAMgB,KAAAA,CAAMG,WAAAA,EAAaH,MAAMK,QAAQ,CAAA;AAClE;AACA,UAAA;QACF,KAAK,UAAA;AACH,UAAA,IAAI,S
AASL,KAAAA,EAAO;AAClB,YAAA,IAAA,CAAKR,OAAOgB,QAAAA,CAASR,KAAAA,CAAMhB,MAAMgB,KAAAA,CAAMS,GAAAA,EAAKT,MAAMK,QAAQ,CAAA;WAC5D,MAAO;AACL,YAAA,IAAA,CAAKb,OAAOgB,QAAAA,CAASR,KAAAA,CAAMhB,MAAMgB,KAAAA,CAAMU,QAAAA,EAAUV,MAAMK,QAAQ,CAAA;AACjE;AACA,UAAA;AACF,QAAA;AACE,UAAA,MAAM,IAAIM,sBAAW,6CAAA,CAAA;AACzB;AACF;AAEA,IAAA,IAAI,IAAA,CAAKlB,MAAAA,CAAOZ,SAAAA,KAAc,KAAA,EAAO;AACnC+B,MAAAA,yBAAAA,CAAU,KAAKpB,MAAAA,EAAQ,IAAA,CAAKC,OAAOX,QAAAA,EAAU,IAAA,CAAKW,OAAOV,IAAI,CAAA;KAC/D,MAAO;AACL,MAAA,MAAM,IAAA,CAAKS,MAAAA,CAAOqB,OAAAA,CAAQ,IAAIC,+BAAAA,CAAAA;AAChC;AACF;AAEAf,EAAAA,UAAAA,CAAWH,MAAAA,EAAa;AACtB,IAAA,MAAMmB,SAAAA,GAAa,KAAK,WAAA,CAA8BA,SAAAA;AACtD,IAAA,OAAO,CAACA,SAAAA,CAAUC,GAAAA,CAAIpB,OAAO,WAAW,CAAA,IAAKA,kBAAkBqB,aAAAA,IAAQF,SAAAA,CAAUC,GAAAA,CAAIC,aAAAA,IACjFF,SAAAA,CAAUG,GAAAA,CAAID,aAAAA,CAAAA,GACd,KAAA,CAAMlB,WAAWH,MAAAA,CAAAA;AACvB;AACF;AAEA,eAAeuB,YAAYjB,IAAAA,EAAa;AACtC,EAAA,MAAMkB,MAAAA,GAAS,MAAMlB,IAAAA,CAAKmB,WAAAA,EAAW;AACrC,EAAA,IAAI,EAAED,kBAAkBE,WAAAA,CAAAA,EAAU;AAChC,IAAA,MAAM,IAAIX,sBAAW,4CAAA,CAAA;AACvB;AACA,EAAA,MAAMP,eAAegB,MAAAA,CAAOG,KAAAA;AAC5B,EAAA,OAAO;IACLtB,IAAAA,EAAM,MAAA;AACNjB,IAAAA,IAAAA,EAAMkB,IAAAA,CAAKlB,IAAAA;AACXmB,IAAAA,WAAAA,EAAaD,IAAAA,CAAKC,WAAAA;AAClBC,IAAAA,YAAAA;AACAC,IAAAA,QAAAA,mCAAoBmB,IAAAA,KAAAA;AAClB,MAAA,MAAMC,MAAAA,GAAS,MAAMvB,IAAAA,CAAKwB,GAAAA,CAAG,GAAIF,IAAAA,CAAAA;AACjC,MAAA,OAAO;QACLG,OAAAA,EAAS;AACP,UAAA;YACE1B,IAAAA,EAAM,MAAA;AACN2B,YAAAA,IAAAA,EAAMH,OAAOI,cAAAA;AACf;;AAEJ,OAAA;KACF,EAVU,UAAA;AAWZ,GAAA;AACF;AAvBeV,MAAAA,CAAAA,WAAAA,EAAAA,aAAAA,CAAAA;AAyBf7B,SAAAA,CAAUwC,eAAAA,CAAgBb,eAAME,WAAAA,CAAAA","file":"server.cjs","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { ValueError } from \"@/errors.js\";\nimport { Server } from \"@/serve/server.js\";\nimport { ServerOptions } from \"@modelcontextprotocol/sdk/server/index.js\";\nimport {\n McpServer,\n PromptCallback,\n ReadResourceCallback,\n ReadResourceTemplateCallback,\n 
ResourceTemplate,\n ToolCallback,\n} from \"@modelcontextprotocol/sdk/server/mcp.js\";\nimport { StdioServerTransport } from \"@modelcontextprotocol/sdk/server/stdio.js\";\nimport { AnyTool, Tool } from \"@/tools/base.js\";\nimport { runServer } from \"./http_server.js\";\nimport { ZodRawShape, ZodType } from \"zod\";\nimport { ToolAnnotations } from \"@modelcontextprotocol/sdk/types.js\";\n\ntype MCPServerPrompt =\n | {\n type: \"prompt\";\n name: string;\n description: string;\n callback: PromptCallback;\n }\n | {\n type: \"prompt\";\n name: string;\n description: string;\n argsSchema: ZodRawShape;\n callback: PromptCallback<ZodRawShape>;\n };\n\ntype MCPServerResource =\n | {\n type: \"resource\";\n name: string;\n uri: string;\n callback: ReadResourceCallback;\n }\n | {\n type: \"resource\";\n name: string;\n template: ResourceTemplate;\n callback: ReadResourceTemplateCallback;\n };\n\ninterface MCPServerTool {\n type: \"tool\";\n name: string;\n description: string;\n paramsSchema: ZodRawShape | ToolAnnotations;\n callback: ToolCallback<ZodRawShape>;\n}\n\ntype MCPServerEntry = MCPServerPrompt | MCPServerResource | MCPServerTool;\n\n// Configuration for the MCPServer.\nexport class MCPServerConfig {\n transport: \"stdio\" | \"sse\" = \"stdio\";\n hostname = \"127.0.0.1\";\n port = 8000;\n name = \"MCP Server\";\n version = \"1.0.0\";\n settings?: ServerOptions;\n\n constructor(partial?: Partial<MCPServerConfig>) {\n if (partial) {\n Object.assign(this, partial);\n }\n }\n}\n\nexport class MCPServer extends Server<any, MCPServerEntry, MCPServerConfig, never> {\n protected server: McpServer;\n\n constructor(config?: MCPServerConfig) {\n super(config || new MCPServerConfig());\n this.server = new McpServer({\n name: this.config.name,\n version: this.config.version,\n ...this.config.settings,\n });\n }\n\n async serve() {\n for (const member of this.members) {\n const factory = this.getFactory(member);\n const entry = await factory(member);\n\n switch (entry.type) 
{\n case \"tool\":\n this.server.tool(entry.name, entry.description, entry.paramsSchema, entry.callback);\n break;\n case \"prompt\":\n if (\"argsSchema\" in entry) {\n this.server.prompt(entry.name, entry.description, entry.argsSchema, entry.callback);\n } else {\n this.server.prompt(entry.name, entry.description, entry.callback);\n }\n break;\n case \"resource\":\n if (\"uri\" in entry) {\n this.server.resource(entry.name, entry.uri, entry.callback);\n } else {\n this.server.resource(entry.name, entry.template, entry.callback);\n }\n break;\n default:\n throw new ValueError(\"Input type is not supported by this server.\");\n }\n }\n\n if (this.config.transport === \"sse\") {\n runServer(this.server, this.config.hostname, this.config.port);\n } else {\n await this.server.connect(new StdioServerTransport());\n }\n }\n\n getFactory(member: any) {\n const factories = (this.constructor as typeof Server).factories;\n return !factories.has(member.constructor) && member instanceof Tool && factories.has(Tool)\n ? factories.get(Tool)!\n : super.getFactory(member);\n }\n}\n\nasync function toolFactory(tool: AnyTool): Promise<MCPServerEntry> {\n const schema = await tool.inputSchema();\n if (!(schema instanceof ZodType)) {\n throw new ValueError(\"JsonSchema is not supported for MCP tools.\");\n }\n const paramsSchema = schema.shape;\n return {\n type: \"tool\",\n name: tool.name,\n description: tool.description,\n paramsSchema: paramsSchema,\n callback: async (...args: Parameters<typeof tool.run>) => {\n const result = await tool.run(...args);\n return {\n content: [\n {\n type: \"text\",\n text: result.getTextContent(),\n },\n ],\n };\n },\n };\n}\n\nMCPServer.registerFactory(Tool, toolFactory);\n"]}
1
+ {"version":3,"sources":["../../../../src/adapters/mcp/serve/server.ts"],"names":["MCPServerConfig","transport","hostname","port","name","version","settings","partial","Object","assign","MCPServer","Server","server","config","McpServer","serve","member","members","factory","getFactory","entry","type","tool","description","paramsSchema","callback","prompt","argsSchema","resource","uri","template","ValueError","connect","StdioServerTransport","runServer","Error","factories","has","Tool","get","toolFactory","schema","inputSchema","ZodType","shape","args","result","run","content","text","getTextContent","registerFactory"],"mappings":";;;;;;;;;;;;AA8DO,MAAMA,eAAAA,CAAAA;EA9Db;;;EA+DEC,SAAAA,GAAiD,OAAA;EACjDC,QAAAA,GAAW,WAAA;EACXC,IAAAA,GAAO,GAAA;EACPC,IAAAA,GAAO,YAAA;EACPC,OAAAA,GAAU,OAAA;AACVC,EAAAA,QAAAA;AAEA,EAAA,WAAA,CAAYC,OAAAA,EAAoC;AAC9C,IAAA,IAAIA,OAAAA,EAAS;AACXC,MAAAA,MAAAA,CAAOC,MAAAA,CAAO,MAAMF,OAAAA,CAAAA;AACtB;AACF;AACF;AAEO,MAAMG,kBAAkBC,iBAAAA,CAAAA;EA7E/B;;;AA8EYC,EAAAA,MAAAA;AAEV,EAAA,WAAA,CAAYC,MAAAA,EAA0B;AACpC,IAAA,KAAA,CAAMA,MAAAA,IAAU,IAAIb,eAAAA,EAAAA,CAAAA;AACpB,IAAA,IAAA,CAAKY,MAAAA,GAAS,IAAIE,gBAAAA,CAAU;AAC1BV,MAAAA,IAAAA,EAAM,KAAKS,MAAAA,CAAOT,IAAAA;AAClBC,MAAAA,OAAAA,EAAS,KAAKQ,MAAAA,CAAOR,OAAAA;AACrB,MAAA,GAAG,KAAKQ,MAAAA,CAAOP;KACjB,CAAA;AACF;AAEA,EAAA,MAAMS,KAAAA,GAAQ;AACZ,IAAA,KAAA,MAAWC,MAAAA,IAAU,KAAKC,OAAAA,EAAS;AACjC,MAAA,MAAMC,OAAAA,GAAU,IAAA,CAAKC,UAAAA,CAAWH,MAAAA,CAAAA;AAChC,MAAA,MAAMI,KAAAA,GAAQ,MAAMF,OAAAA,CAAQF,MAAAA,CAAAA;AAE5B,MAAA,QAAQI,MAAMC,IAAAA;QACZ,KAAK,MAAA;AACH,UAAA,IAAA,CAAKT,MAAAA,CAAOU,KAAKF,KAAAA,CAAMhB,IAAAA,EAAMgB,MAAMG,WAAAA,EAAaH,KAAAA,CAAMI,YAAAA,EAAcJ,KAAAA,CAAMK,QAAQ,CAAA;AAClF,UAAA;QACF,KAAK,QAAA;AACH,UAAA,IAAI,gBAAgBL,KAAAA,EAAO;AACzB,YAAA,IAAA,CAAKR,MAAAA,CAAOc,OAAON,KAAAA,CAAMhB,IAAAA,EAAMgB,MAAMG,WAAAA,EAAaH,KAAAA,CAAMO,UAAAA,EAAYP,KAAAA,CAAMK,QAAQ,CAAA;WACpF,MAAO;AACL,YAAA,IAAA,CAAKb,OAAOc,MAAAA,CAAON,KAAAA,CAAMhB,MAAMgB,KAAAA,CAAMG,WAAAA,EAAaH,MAAMK,QAAQ,CAAA;AAClE;AACA,UAAA;QACF,KAAK,UAAA;AACH,UAA
A,IAAI,SAASL,KAAAA,EAAO;AAClB,YAAA,IAAA,CAAKR,OAAOgB,QAAAA,CAASR,KAAAA,CAAMhB,MAAMgB,KAAAA,CAAMS,GAAAA,EAAKT,MAAMK,QAAQ,CAAA;WAC5D,MAAO;AACL,YAAA,IAAA,CAAKb,OAAOgB,QAAAA,CAASR,KAAAA,CAAMhB,MAAMgB,KAAAA,CAAMU,QAAAA,EAAUV,MAAMK,QAAQ,CAAA;AACjE;AACA,UAAA;AACF,QAAA;AACE,UAAA,MAAM,IAAIM,sBAAW,6CAAA,CAAA;AACzB;AACF;AAEA,IAAA,QAAQ,IAAA,CAAKlB,OAAOZ,SAAAA;MAClB,KAAK,OAAA;AACH,QAAA,MAAM,IAAA,CAAKW,MAAAA,CAAOoB,OAAAA,CAAQ,IAAIC,+BAAAA,CAAAA;AAC9B,QAAA;MACF,KAAK,KAAA;MACL,KAAK,iBAAA;AACHC,QAAAA,yBAAAA,CAAU,KAAKtB,MAAAA,EAAQ,IAAA,CAAKC,OAAOX,QAAAA,EAAU,IAAA,CAAKW,OAAOV,IAAI,CAAA;AAC7D,QAAA;MACF,SAAS;AAEP,QAAgC,KAAKU,MAAAA,CAAOZ;AAC5C,QAAA,MAAM,IAAIkC,MAAM,6BAAA,CAAA;AAClB;AACF;AACF;AAEAhB,EAAAA,UAAAA,CAAWH,MAAAA,EAAa;AACtB,IAAA,MAAMoB,SAAAA,GAAa,KAAK,WAAA,CAA8BA,SAAAA;AACtD,IAAA,OAAO,CAACA,SAAAA,CAAUC,GAAAA,CAAIrB,OAAO,WAAW,CAAA,IAAKA,kBAAkBsB,aAAAA,IAAQF,SAAAA,CAAUC,GAAAA,CAAIC,aAAAA,IACjFF,SAAAA,CAAUG,GAAAA,CAAID,aAAAA,CAAAA,GACd,KAAA,CAAMnB,WAAWH,MAAAA,CAAAA;AACvB;AACF;AAEA,eAAewB,YAAYlB,IAAAA,EAAa;AACtC,EAAA,MAAMmB,MAAAA,GAAS,MAAMnB,IAAAA,CAAKoB,WAAAA,EAAW;AACrC,EAAA,IAAI,EAAED,kBAAkBE,WAAAA,CAAAA,EAAU;AAChC,IAAA,MAAM,IAAIZ,sBAAW,4CAAA,CAAA;AACvB;AACA,EAAA,MAAMP,eAAeiB,MAAAA,CAAOG,KAAAA;AAC5B,EAAA,OAAO;IACLvB,IAAAA,EAAM,MAAA;AACNjB,IAAAA,IAAAA,EAAMkB,IAAAA,CAAKlB,IAAAA;AACXmB,IAAAA,WAAAA,EAAaD,IAAAA,CAAKC,WAAAA;AAClBC,IAAAA,YAAAA;AACAC,IAAAA,QAAAA,mCAAoBoB,IAAAA,KAAAA;AAClB,MAAA,MAAMC,MAAAA,GAAS,MAAMxB,IAAAA,CAAKyB,GAAAA,CAAG,GAAIF,IAAAA,CAAAA;AACjC,MAAA,OAAO;QACLG,OAAAA,EAAS;AACP,UAAA;YACE3B,IAAAA,EAAM,MAAA;AACN4B,YAAAA,IAAAA,EAAMH,OAAOI,cAAAA;AACf;;AAEJ,OAAA;KACF,EAVU,UAAA;AAWZ,GAAA;AACF;AAvBeV,MAAAA,CAAAA,WAAAA,EAAAA,aAAAA,CAAAA;AAyBf9B,SAAAA,CAAUyC,eAAAA,CAAgBb,eAAME,WAAAA,CAAAA","file":"server.cjs","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { ValueError } from \"@/errors.js\";\nimport { Server } from \"@/serve/server.js\";\nimport { ServerOptions } from 
\"@modelcontextprotocol/sdk/server/index.js\";\nimport {\n McpServer,\n PromptCallback,\n ReadResourceCallback,\n ReadResourceTemplateCallback,\n ResourceTemplate,\n ToolCallback,\n} from \"@modelcontextprotocol/sdk/server/mcp.js\";\nimport { StdioServerTransport } from \"@modelcontextprotocol/sdk/server/stdio.js\";\nimport { AnyTool, Tool } from \"@/tools/base.js\";\nimport { runServer } from \"./http_server.js\";\nimport { ZodRawShape, ZodType } from \"zod\";\nimport { ToolAnnotations } from \"@modelcontextprotocol/sdk/types.js\";\n\ntype MCPServerPrompt =\n | {\n type: \"prompt\";\n name: string;\n description: string;\n callback: PromptCallback;\n }\n | {\n type: \"prompt\";\n name: string;\n description: string;\n argsSchema: ZodRawShape;\n callback: PromptCallback<ZodRawShape>;\n };\n\ntype MCPServerResource =\n | {\n type: \"resource\";\n name: string;\n uri: string;\n callback: ReadResourceCallback;\n }\n | {\n type: \"resource\";\n name: string;\n template: ResourceTemplate;\n callback: ReadResourceTemplateCallback;\n };\n\ninterface MCPServerTool {\n type: \"tool\";\n name: string;\n description: string;\n paramsSchema: ZodRawShape | ToolAnnotations;\n callback: ToolCallback<ZodRawShape>;\n}\n\ntype MCPServerEntry = MCPServerPrompt | MCPServerResource | MCPServerTool;\n\n// Configuration for the MCPServer.\nexport class MCPServerConfig {\n transport: \"stdio\" | \"sse\" | \"streamable-http\" = \"stdio\";\n hostname = \"127.0.0.1\";\n port = 8000;\n name = \"MCP Server\";\n version = \"1.0.0\";\n settings?: ServerOptions;\n\n constructor(partial?: Partial<MCPServerConfig>) {\n if (partial) {\n Object.assign(this, partial);\n }\n }\n}\n\nexport class MCPServer extends Server<any, MCPServerEntry, MCPServerConfig, never> {\n protected server: McpServer;\n\n constructor(config?: MCPServerConfig) {\n super(config || new MCPServerConfig());\n this.server = new McpServer({\n name: this.config.name,\n version: this.config.version,\n ...this.config.settings,\n 
});\n }\n\n async serve() {\n for (const member of this.members) {\n const factory = this.getFactory(member);\n const entry = await factory(member);\n\n switch (entry.type) {\n case \"tool\":\n this.server.tool(entry.name, entry.description, entry.paramsSchema, entry.callback);\n break;\n case \"prompt\":\n if (\"argsSchema\" in entry) {\n this.server.prompt(entry.name, entry.description, entry.argsSchema, entry.callback);\n } else {\n this.server.prompt(entry.name, entry.description, entry.callback);\n }\n break;\n case \"resource\":\n if (\"uri\" in entry) {\n this.server.resource(entry.name, entry.uri, entry.callback);\n } else {\n this.server.resource(entry.name, entry.template, entry.callback);\n }\n break;\n default:\n throw new ValueError(\"Input type is not supported by this server.\");\n }\n }\n\n switch (this.config.transport) {\n case \"stdio\":\n await this.server.connect(new StdioServerTransport());\n break;\n case \"sse\":\n case \"streamable-http\":\n runServer(this.server, this.config.hostname, this.config.port);\n break;\n default: {\n // The following line ensures exhaustiveness checking at compile time.\n const _exhaustiveCheck: never = this.config.transport;\n throw new Error(\"Unsupported transport type.\");\n }\n }\n }\n\n getFactory(member: any) {\n const factories = (this.constructor as typeof Server).factories;\n return !factories.has(member.constructor) && member instanceof Tool && factories.has(Tool)\n ? 
factories.get(Tool)!\n : super.getFactory(member);\n }\n}\n\nasync function toolFactory(tool: AnyTool): Promise<MCPServerEntry> {\n const schema = await tool.inputSchema();\n if (!(schema instanceof ZodType)) {\n throw new ValueError(\"JsonSchema is not supported for MCP tools.\");\n }\n const paramsSchema = schema.shape;\n return {\n type: \"tool\",\n name: tool.name,\n description: tool.description,\n paramsSchema: paramsSchema,\n callback: async (...args: Parameters<typeof tool.run>) => {\n const result = await tool.run(...args);\n return {\n content: [\n {\n type: \"text\",\n text: result.getTextContent(),\n },\n ],\n };\n },\n };\n}\n\nMCPServer.registerFactory(Tool, toolFactory);\n"]}
@@ -41,7 +41,7 @@ interface MCPServerTool {
41
41
  }
42
42
  type MCPServerEntry = MCPServerPrompt | MCPServerResource | MCPServerTool;
43
43
  declare class MCPServerConfig {
44
- transport: "stdio" | "sse";
44
+ transport: "stdio" | "sse" | "streamable-http";
45
45
  hostname: string;
46
46
  port: number;
47
47
  name: string;
@@ -41,7 +41,7 @@ interface MCPServerTool {
41
41
  }
42
42
  type MCPServerEntry = MCPServerPrompt | MCPServerResource | MCPServerTool;
43
43
  declare class MCPServerConfig {
44
- transport: "stdio" | "sse";
44
+ transport: "stdio" | "sse" | "streamable-http";
45
45
  hostname: string;
46
46
  port: number;
47
47
  name: string;
@@ -63,10 +63,18 @@ class MCPServer extends Server {
63
63
  throw new ValueError("Input type is not supported by this server.");
64
64
  }
65
65
  }
66
- if (this.config.transport === "sse") {
67
- runServer(this.server, this.config.hostname, this.config.port);
68
- } else {
69
- await this.server.connect(new StdioServerTransport());
66
+ switch (this.config.transport) {
67
+ case "stdio":
68
+ await this.server.connect(new StdioServerTransport());
69
+ break;
70
+ case "sse":
71
+ case "streamable-http":
72
+ runServer(this.server, this.config.hostname, this.config.port);
73
+ break;
74
+ default: {
75
+ this.config.transport;
76
+ throw new Error("Unsupported transport type.");
77
+ }
70
78
  }
71
79
  }
72
80
  getFactory(member) {
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../../src/adapters/mcp/serve/server.ts"],"names":["MCPServerConfig","transport","hostname","port","name","version","settings","partial","Object","assign","MCPServer","Server","server","config","McpServer","serve","member","members","factory","getFactory","entry","type","tool","description","paramsSchema","callback","prompt","argsSchema","resource","uri","template","ValueError","runServer","connect","StdioServerTransport","factories","has","Tool","get","toolFactory","schema","inputSchema","ZodType","shape","args","result","run","content","text","getTextContent","registerFactory"],"mappings":";;;;;;;;;;AA8DO,MAAMA,eAAAA,CAAAA;EA9Db;;;EA+DEC,SAAAA,GAA6B,OAAA;EAC7BC,QAAAA,GAAW,WAAA;EACXC,IAAAA,GAAO,GAAA;EACPC,IAAAA,GAAO,YAAA;EACPC,OAAAA,GAAU,OAAA;AACVC,EAAAA,QAAAA;AAEA,EAAA,WAAA,CAAYC,OAAAA,EAAoC;AAC9C,IAAA,IAAIA,OAAAA,EAAS;AACXC,MAAAA,MAAAA,CAAOC,MAAAA,CAAO,MAAMF,OAAAA,CAAAA;AACtB;AACF;AACF;AAEO,MAAMG,kBAAkBC,MAAAA,CAAAA;EA7E/B;;;AA8EYC,EAAAA,MAAAA;AAEV,EAAA,WAAA,CAAYC,MAAAA,EAA0B;AACpC,IAAA,KAAA,CAAMA,MAAAA,IAAU,IAAIb,eAAAA,EAAAA,CAAAA;AACpB,IAAA,IAAA,CAAKY,MAAAA,GAAS,IAAIE,SAAAA,CAAU;AAC1BV,MAAAA,IAAAA,EAAM,KAAKS,MAAAA,CAAOT,IAAAA;AAClBC,MAAAA,OAAAA,EAAS,KAAKQ,MAAAA,CAAOR,OAAAA;AACrB,MAAA,GAAG,KAAKQ,MAAAA,CAAOP;KACjB,CAAA;AACF;AAEA,EAAA,MAAMS,KAAAA,GAAQ;AACZ,IAAA,KAAA,MAAWC,MAAAA,IAAU,KAAKC,OAAAA,EAAS;AACjC,MAAA,MAAMC,OAAAA,GAAU,IAAA,CAAKC,UAAAA,CAAWH,MAAAA,CAAAA;AAChC,MAAA,MAAMI,KAAAA,GAAQ,MAAMF,OAAAA,CAAQF,MAAAA,CAAAA;AAE5B,MAAA,QAAQI,MAAMC,IAAAA;QACZ,KAAK,MAAA;AACH,UAAA,IAAA,CAAKT,MAAAA,CAAOU,KAAKF,KAAAA,CAAMhB,IAAAA,EAAMgB,MAAMG,WAAAA,EAAaH,KAAAA,CAAMI,YAAAA,EAAcJ,KAAAA,CAAMK,QAAQ,CAAA;AAClF,UAAA;QACF,KAAK,QAAA;AACH,UAAA,IAAI,gBAAgBL,KAAAA,EAAO;AACzB,YAAA,IAAA,CAAKR,MAAAA,CAAOc,OAAON,KAAAA,CAAMhB,IAAAA,EAAMgB,MAAMG,WAAAA,EAAaH,KAAAA,CAAMO,UAAAA,EAAYP,KAAAA,CAAMK,QAAQ,CAAA;WACpF,MAAO;AACL,YAAA,IAAA,CAAKb,OAAOc,MAAAA,CAAON,KAAAA,CAAMhB,MAAMgB,KAAAA,CAAMG,WAAAA,EAAaH,MAAMK,QAAQ,CAAA;AAClE;AACA,UAAA;QACF,KAAK,UAAA;AACH,UAAA,IAAI,SAASL
,KAAAA,EAAO;AAClB,YAAA,IAAA,CAAKR,OAAOgB,QAAAA,CAASR,KAAAA,CAAMhB,MAAMgB,KAAAA,CAAMS,GAAAA,EAAKT,MAAMK,QAAQ,CAAA;WAC5D,MAAO;AACL,YAAA,IAAA,CAAKb,OAAOgB,QAAAA,CAASR,KAAAA,CAAMhB,MAAMgB,KAAAA,CAAMU,QAAAA,EAAUV,MAAMK,QAAQ,CAAA;AACjE;AACA,UAAA;AACF,QAAA;AACE,UAAA,MAAM,IAAIM,WAAW,6CAAA,CAAA;AACzB;AACF;AAEA,IAAA,IAAI,IAAA,CAAKlB,MAAAA,CAAOZ,SAAAA,KAAc,KAAA,EAAO;AACnC+B,MAAAA,SAAAA,CAAU,KAAKpB,MAAAA,EAAQ,IAAA,CAAKC,OAAOX,QAAAA,EAAU,IAAA,CAAKW,OAAOV,IAAI,CAAA;KAC/D,MAAO;AACL,MAAA,MAAM,IAAA,CAAKS,MAAAA,CAAOqB,OAAAA,CAAQ,IAAIC,sBAAAA,CAAAA;AAChC;AACF;AAEAf,EAAAA,UAAAA,CAAWH,MAAAA,EAAa;AACtB,IAAA,MAAMmB,SAAAA,GAAa,KAAK,WAAA,CAA8BA,SAAAA;AACtD,IAAA,OAAO,CAACA,SAAAA,CAAUC,GAAAA,CAAIpB,OAAO,WAAW,CAAA,IAAKA,kBAAkBqB,IAAAA,IAAQF,SAAAA,CAAUC,GAAAA,CAAIC,IAAAA,IACjFF,SAAAA,CAAUG,GAAAA,CAAID,IAAAA,CAAAA,GACd,KAAA,CAAMlB,WAAWH,MAAAA,CAAAA;AACvB;AACF;AAEA,eAAeuB,YAAYjB,IAAAA,EAAa;AACtC,EAAA,MAAMkB,MAAAA,GAAS,MAAMlB,IAAAA,CAAKmB,WAAAA,EAAW;AACrC,EAAA,IAAI,EAAED,kBAAkBE,OAAAA,CAAAA,EAAU;AAChC,IAAA,MAAM,IAAIX,WAAW,4CAAA,CAAA;AACvB;AACA,EAAA,MAAMP,eAAegB,MAAAA,CAAOG,KAAAA;AAC5B,EAAA,OAAO;IACLtB,IAAAA,EAAM,MAAA;AACNjB,IAAAA,IAAAA,EAAMkB,IAAAA,CAAKlB,IAAAA;AACXmB,IAAAA,WAAAA,EAAaD,IAAAA,CAAKC,WAAAA;AAClBC,IAAAA,YAAAA;AACAC,IAAAA,QAAAA,mCAAoBmB,IAAAA,KAAAA;AAClB,MAAA,MAAMC,MAAAA,GAAS,MAAMvB,IAAAA,CAAKwB,GAAAA,CAAG,GAAIF,IAAAA,CAAAA;AACjC,MAAA,OAAO;QACLG,OAAAA,EAAS;AACP,UAAA;YACE1B,IAAAA,EAAM,MAAA;AACN2B,YAAAA,IAAAA,EAAMH,OAAOI,cAAAA;AACf;;AAEJ,OAAA;KACF,EAVU,UAAA;AAWZ,GAAA;AACF;AAvBeV,MAAAA,CAAAA,WAAAA,EAAAA,aAAAA,CAAAA;AAyBf7B,SAAAA,CAAUwC,eAAAA,CAAgBb,MAAME,WAAAA,CAAAA","file":"server.js","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { ValueError } from \"@/errors.js\";\nimport { Server } from \"@/serve/server.js\";\nimport { ServerOptions } from \"@modelcontextprotocol/sdk/server/index.js\";\nimport {\n McpServer,\n PromptCallback,\n ReadResourceCallback,\n ReadResourceTemplateCallback,\n 
ResourceTemplate,\n ToolCallback,\n} from \"@modelcontextprotocol/sdk/server/mcp.js\";\nimport { StdioServerTransport } from \"@modelcontextprotocol/sdk/server/stdio.js\";\nimport { AnyTool, Tool } from \"@/tools/base.js\";\nimport { runServer } from \"./http_server.js\";\nimport { ZodRawShape, ZodType } from \"zod\";\nimport { ToolAnnotations } from \"@modelcontextprotocol/sdk/types.js\";\n\ntype MCPServerPrompt =\n | {\n type: \"prompt\";\n name: string;\n description: string;\n callback: PromptCallback;\n }\n | {\n type: \"prompt\";\n name: string;\n description: string;\n argsSchema: ZodRawShape;\n callback: PromptCallback<ZodRawShape>;\n };\n\ntype MCPServerResource =\n | {\n type: \"resource\";\n name: string;\n uri: string;\n callback: ReadResourceCallback;\n }\n | {\n type: \"resource\";\n name: string;\n template: ResourceTemplate;\n callback: ReadResourceTemplateCallback;\n };\n\ninterface MCPServerTool {\n type: \"tool\";\n name: string;\n description: string;\n paramsSchema: ZodRawShape | ToolAnnotations;\n callback: ToolCallback<ZodRawShape>;\n}\n\ntype MCPServerEntry = MCPServerPrompt | MCPServerResource | MCPServerTool;\n\n// Configuration for the MCPServer.\nexport class MCPServerConfig {\n transport: \"stdio\" | \"sse\" = \"stdio\";\n hostname = \"127.0.0.1\";\n port = 8000;\n name = \"MCP Server\";\n version = \"1.0.0\";\n settings?: ServerOptions;\n\n constructor(partial?: Partial<MCPServerConfig>) {\n if (partial) {\n Object.assign(this, partial);\n }\n }\n}\n\nexport class MCPServer extends Server<any, MCPServerEntry, MCPServerConfig, never> {\n protected server: McpServer;\n\n constructor(config?: MCPServerConfig) {\n super(config || new MCPServerConfig());\n this.server = new McpServer({\n name: this.config.name,\n version: this.config.version,\n ...this.config.settings,\n });\n }\n\n async serve() {\n for (const member of this.members) {\n const factory = this.getFactory(member);\n const entry = await factory(member);\n\n switch (entry.type) 
{\n case \"tool\":\n this.server.tool(entry.name, entry.description, entry.paramsSchema, entry.callback);\n break;\n case \"prompt\":\n if (\"argsSchema\" in entry) {\n this.server.prompt(entry.name, entry.description, entry.argsSchema, entry.callback);\n } else {\n this.server.prompt(entry.name, entry.description, entry.callback);\n }\n break;\n case \"resource\":\n if (\"uri\" in entry) {\n this.server.resource(entry.name, entry.uri, entry.callback);\n } else {\n this.server.resource(entry.name, entry.template, entry.callback);\n }\n break;\n default:\n throw new ValueError(\"Input type is not supported by this server.\");\n }\n }\n\n if (this.config.transport === \"sse\") {\n runServer(this.server, this.config.hostname, this.config.port);\n } else {\n await this.server.connect(new StdioServerTransport());\n }\n }\n\n getFactory(member: any) {\n const factories = (this.constructor as typeof Server).factories;\n return !factories.has(member.constructor) && member instanceof Tool && factories.has(Tool)\n ? factories.get(Tool)!\n : super.getFactory(member);\n }\n}\n\nasync function toolFactory(tool: AnyTool): Promise<MCPServerEntry> {\n const schema = await tool.inputSchema();\n if (!(schema instanceof ZodType)) {\n throw new ValueError(\"JsonSchema is not supported for MCP tools.\");\n }\n const paramsSchema = schema.shape;\n return {\n type: \"tool\",\n name: tool.name,\n description: tool.description,\n paramsSchema: paramsSchema,\n callback: async (...args: Parameters<typeof tool.run>) => {\n const result = await tool.run(...args);\n return {\n content: [\n {\n type: \"text\",\n text: result.getTextContent(),\n },\n ],\n };\n },\n };\n}\n\nMCPServer.registerFactory(Tool, toolFactory);\n"]}
1
+ {"version":3,"sources":["../../../../src/adapters/mcp/serve/server.ts"],"names":["MCPServerConfig","transport","hostname","port","name","version","settings","partial","Object","assign","MCPServer","Server","server","config","McpServer","serve","member","members","factory","getFactory","entry","type","tool","description","paramsSchema","callback","prompt","argsSchema","resource","uri","template","ValueError","connect","StdioServerTransport","runServer","Error","factories","has","Tool","get","toolFactory","schema","inputSchema","ZodType","shape","args","result","run","content","text","getTextContent","registerFactory"],"mappings":";;;;;;;;;;AA8DO,MAAMA,eAAAA,CAAAA;EA9Db;;;EA+DEC,SAAAA,GAAiD,OAAA;EACjDC,QAAAA,GAAW,WAAA;EACXC,IAAAA,GAAO,GAAA;EACPC,IAAAA,GAAO,YAAA;EACPC,OAAAA,GAAU,OAAA;AACVC,EAAAA,QAAAA;AAEA,EAAA,WAAA,CAAYC,OAAAA,EAAoC;AAC9C,IAAA,IAAIA,OAAAA,EAAS;AACXC,MAAAA,MAAAA,CAAOC,MAAAA,CAAO,MAAMF,OAAAA,CAAAA;AACtB;AACF;AACF;AAEO,MAAMG,kBAAkBC,MAAAA,CAAAA;EA7E/B;;;AA8EYC,EAAAA,MAAAA;AAEV,EAAA,WAAA,CAAYC,MAAAA,EAA0B;AACpC,IAAA,KAAA,CAAMA,MAAAA,IAAU,IAAIb,eAAAA,EAAAA,CAAAA;AACpB,IAAA,IAAA,CAAKY,MAAAA,GAAS,IAAIE,SAAAA,CAAU;AAC1BV,MAAAA,IAAAA,EAAM,KAAKS,MAAAA,CAAOT,IAAAA;AAClBC,MAAAA,OAAAA,EAAS,KAAKQ,MAAAA,CAAOR,OAAAA;AACrB,MAAA,GAAG,KAAKQ,MAAAA,CAAOP;KACjB,CAAA;AACF;AAEA,EAAA,MAAMS,KAAAA,GAAQ;AACZ,IAAA,KAAA,MAAWC,MAAAA,IAAU,KAAKC,OAAAA,EAAS;AACjC,MAAA,MAAMC,OAAAA,GAAU,IAAA,CAAKC,UAAAA,CAAWH,MAAAA,CAAAA;AAChC,MAAA,MAAMI,KAAAA,GAAQ,MAAMF,OAAAA,CAAQF,MAAAA,CAAAA;AAE5B,MAAA,QAAQI,MAAMC,IAAAA;QACZ,KAAK,MAAA;AACH,UAAA,IAAA,CAAKT,MAAAA,CAAOU,KAAKF,KAAAA,CAAMhB,IAAAA,EAAMgB,MAAMG,WAAAA,EAAaH,KAAAA,CAAMI,YAAAA,EAAcJ,KAAAA,CAAMK,QAAQ,CAAA;AAClF,UAAA;QACF,KAAK,QAAA;AACH,UAAA,IAAI,gBAAgBL,KAAAA,EAAO;AACzB,YAAA,IAAA,CAAKR,MAAAA,CAAOc,OAAON,KAAAA,CAAMhB,IAAAA,EAAMgB,MAAMG,WAAAA,EAAaH,KAAAA,CAAMO,UAAAA,EAAYP,KAAAA,CAAMK,QAAQ,CAAA;WACpF,MAAO;AACL,YAAA,IAAA,CAAKb,OAAOc,MAAAA,CAAON,KAAAA,CAAMhB,MAAMgB,KAAAA,CAAMG,WAAAA,EAAaH,MAAMK,QAAQ,CAAA;AAClE;AACA,UAAA;QACF,KAAK,UAAA;AACH,UAAA,IA
AI,SAASL,KAAAA,EAAO;AAClB,YAAA,IAAA,CAAKR,OAAOgB,QAAAA,CAASR,KAAAA,CAAMhB,MAAMgB,KAAAA,CAAMS,GAAAA,EAAKT,MAAMK,QAAQ,CAAA;WAC5D,MAAO;AACL,YAAA,IAAA,CAAKb,OAAOgB,QAAAA,CAASR,KAAAA,CAAMhB,MAAMgB,KAAAA,CAAMU,QAAAA,EAAUV,MAAMK,QAAQ,CAAA;AACjE;AACA,UAAA;AACF,QAAA;AACE,UAAA,MAAM,IAAIM,WAAW,6CAAA,CAAA;AACzB;AACF;AAEA,IAAA,QAAQ,IAAA,CAAKlB,OAAOZ,SAAAA;MAClB,KAAK,OAAA;AACH,QAAA,MAAM,IAAA,CAAKW,MAAAA,CAAOoB,OAAAA,CAAQ,IAAIC,sBAAAA,CAAAA;AAC9B,QAAA;MACF,KAAK,KAAA;MACL,KAAK,iBAAA;AACHC,QAAAA,SAAAA,CAAU,KAAKtB,MAAAA,EAAQ,IAAA,CAAKC,OAAOX,QAAAA,EAAU,IAAA,CAAKW,OAAOV,IAAI,CAAA;AAC7D,QAAA;MACF,SAAS;AAEP,QAAgC,KAAKU,MAAAA,CAAOZ;AAC5C,QAAA,MAAM,IAAIkC,MAAM,6BAAA,CAAA;AAClB;AACF;AACF;AAEAhB,EAAAA,UAAAA,CAAWH,MAAAA,EAAa;AACtB,IAAA,MAAMoB,SAAAA,GAAa,KAAK,WAAA,CAA8BA,SAAAA;AACtD,IAAA,OAAO,CAACA,SAAAA,CAAUC,GAAAA,CAAIrB,OAAO,WAAW,CAAA,IAAKA,kBAAkBsB,IAAAA,IAAQF,SAAAA,CAAUC,GAAAA,CAAIC,IAAAA,IACjFF,SAAAA,CAAUG,GAAAA,CAAID,IAAAA,CAAAA,GACd,KAAA,CAAMnB,WAAWH,MAAAA,CAAAA;AACvB;AACF;AAEA,eAAewB,YAAYlB,IAAAA,EAAa;AACtC,EAAA,MAAMmB,MAAAA,GAAS,MAAMnB,IAAAA,CAAKoB,WAAAA,EAAW;AACrC,EAAA,IAAI,EAAED,kBAAkBE,OAAAA,CAAAA,EAAU;AAChC,IAAA,MAAM,IAAIZ,WAAW,4CAAA,CAAA;AACvB;AACA,EAAA,MAAMP,eAAeiB,MAAAA,CAAOG,KAAAA;AAC5B,EAAA,OAAO;IACLvB,IAAAA,EAAM,MAAA;AACNjB,IAAAA,IAAAA,EAAMkB,IAAAA,CAAKlB,IAAAA;AACXmB,IAAAA,WAAAA,EAAaD,IAAAA,CAAKC,WAAAA;AAClBC,IAAAA,YAAAA;AACAC,IAAAA,QAAAA,mCAAoBoB,IAAAA,KAAAA;AAClB,MAAA,MAAMC,MAAAA,GAAS,MAAMxB,IAAAA,CAAKyB,GAAAA,CAAG,GAAIF,IAAAA,CAAAA;AACjC,MAAA,OAAO;QACLG,OAAAA,EAAS;AACP,UAAA;YACE3B,IAAAA,EAAM,MAAA;AACN4B,YAAAA,IAAAA,EAAMH,OAAOI,cAAAA;AACf;;AAEJ,OAAA;KACF,EAVU,UAAA;AAWZ,GAAA;AACF;AAvBeV,MAAAA,CAAAA,WAAAA,EAAAA,aAAAA,CAAAA;AAyBf9B,SAAAA,CAAUyC,eAAAA,CAAgBb,MAAME,WAAAA,CAAAA","file":"server.js","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { ValueError } from \"@/errors.js\";\nimport { Server } from \"@/serve/server.js\";\nimport { ServerOptions } from 
\"@modelcontextprotocol/sdk/server/index.js\";\nimport {\n McpServer,\n PromptCallback,\n ReadResourceCallback,\n ReadResourceTemplateCallback,\n ResourceTemplate,\n ToolCallback,\n} from \"@modelcontextprotocol/sdk/server/mcp.js\";\nimport { StdioServerTransport } from \"@modelcontextprotocol/sdk/server/stdio.js\";\nimport { AnyTool, Tool } from \"@/tools/base.js\";\nimport { runServer } from \"./http_server.js\";\nimport { ZodRawShape, ZodType } from \"zod\";\nimport { ToolAnnotations } from \"@modelcontextprotocol/sdk/types.js\";\n\ntype MCPServerPrompt =\n | {\n type: \"prompt\";\n name: string;\n description: string;\n callback: PromptCallback;\n }\n | {\n type: \"prompt\";\n name: string;\n description: string;\n argsSchema: ZodRawShape;\n callback: PromptCallback<ZodRawShape>;\n };\n\ntype MCPServerResource =\n | {\n type: \"resource\";\n name: string;\n uri: string;\n callback: ReadResourceCallback;\n }\n | {\n type: \"resource\";\n name: string;\n template: ResourceTemplate;\n callback: ReadResourceTemplateCallback;\n };\n\ninterface MCPServerTool {\n type: \"tool\";\n name: string;\n description: string;\n paramsSchema: ZodRawShape | ToolAnnotations;\n callback: ToolCallback<ZodRawShape>;\n}\n\ntype MCPServerEntry = MCPServerPrompt | MCPServerResource | MCPServerTool;\n\n// Configuration for the MCPServer.\nexport class MCPServerConfig {\n transport: \"stdio\" | \"sse\" | \"streamable-http\" = \"stdio\";\n hostname = \"127.0.0.1\";\n port = 8000;\n name = \"MCP Server\";\n version = \"1.0.0\";\n settings?: ServerOptions;\n\n constructor(partial?: Partial<MCPServerConfig>) {\n if (partial) {\n Object.assign(this, partial);\n }\n }\n}\n\nexport class MCPServer extends Server<any, MCPServerEntry, MCPServerConfig, never> {\n protected server: McpServer;\n\n constructor(config?: MCPServerConfig) {\n super(config || new MCPServerConfig());\n this.server = new McpServer({\n name: this.config.name,\n version: this.config.version,\n ...this.config.settings,\n 
});\n }\n\n async serve() {\n for (const member of this.members) {\n const factory = this.getFactory(member);\n const entry = await factory(member);\n\n switch (entry.type) {\n case \"tool\":\n this.server.tool(entry.name, entry.description, entry.paramsSchema, entry.callback);\n break;\n case \"prompt\":\n if (\"argsSchema\" in entry) {\n this.server.prompt(entry.name, entry.description, entry.argsSchema, entry.callback);\n } else {\n this.server.prompt(entry.name, entry.description, entry.callback);\n }\n break;\n case \"resource\":\n if (\"uri\" in entry) {\n this.server.resource(entry.name, entry.uri, entry.callback);\n } else {\n this.server.resource(entry.name, entry.template, entry.callback);\n }\n break;\n default:\n throw new ValueError(\"Input type is not supported by this server.\");\n }\n }\n\n switch (this.config.transport) {\n case \"stdio\":\n await this.server.connect(new StdioServerTransport());\n break;\n case \"sse\":\n case \"streamable-http\":\n runServer(this.server, this.config.hostname, this.config.port);\n break;\n default: {\n // The following line ensures exhaustiveness checking at compile time.\n const _exhaustiveCheck: never = this.config.transport;\n throw new Error(\"Unsupported transport type.\");\n }\n }\n }\n\n getFactory(member: any) {\n const factories = (this.constructor as typeof Server).factories;\n return !factories.has(member.constructor) && member instanceof Tool && factories.has(Tool)\n ? 
factories.get(Tool)!\n : super.getFactory(member);\n }\n}\n\nasync function toolFactory(tool: AnyTool): Promise<MCPServerEntry> {\n const schema = await tool.inputSchema();\n if (!(schema instanceof ZodType)) {\n throw new ValueError(\"JsonSchema is not supported for MCP tools.\");\n }\n const paramsSchema = schema.shape;\n return {\n type: \"tool\",\n name: tool.name,\n description: tool.description,\n paramsSchema: paramsSchema,\n callback: async (...args: Parameters<typeof tool.run>) => {\n const result = await tool.run(...args);\n return {\n content: [\n {\n type: \"text\",\n text: result.getTextContent(),\n },\n ],\n };\n },\n };\n}\n\nMCPServer.registerFactory(Tool, toolFactory);\n"]}
@@ -16,10 +16,7 @@ class OllamaChatModel extends chat_cjs.VercelChatModel {
16
16
  "auto"
17
17
  ];
18
18
  constructor(modelId = env_cjs.getEnv("OLLAMA_CHAT_MODEL", "llama3.1:8b"), settings = {}, client) {
19
- const model = client_cjs.OllamaClient.ensure(client).instance.chat(modelId, {
20
- ...settings,
21
- structuredOutputs: true
22
- });
19
+ const model = client_cjs.OllamaClient.ensure(client).instance.chat(modelId, settings);
23
20
  super(model);
24
21
  }
25
22
  static {
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../../src/adapters/ollama/backend/chat.ts"],"names":["OllamaChatModel","VercelChatModel","supportsToolStreaming","toolChoiceSupport","modelId","getEnv","settings","client","model","OllamaClient","ensure","instance","chat","structuredOutputs","register"],"mappings":";;;;;;;;AAeO,MAAMA,wBAAwBC,wBAAAA,CAAAA;EAfrC;;;EAgBWC,qBAAAA,GAAwB,KAAA;EACjBC,iBAAAA,GAAkD;AAAC,IAAA,MAAA;AAAQ,IAAA;;EAE3E,WAAA,CACEC,OAAAA,GAA6BC,eAAO,mBAAA,EAAqB,aAAA,GACzDC,QAAAA,GAAoC,IACpCC,MAAAA,EACA;AACA,IAAA,MAAMC,QAAQC,uBAAAA,CAAaC,MAAAA,CAAOH,MAAAA,CAAAA,CAAQI,QAAAA,CAASC,KAAKR,OAAAA,EAAS;MAC/D,GAAGE,QAAAA;MACHO,iBAAAA,EAAmB;KACrB,CAAA;AACA,IAAA,KAAA,CAAML,KAAAA,CAAAA;AACR;EAEA;AACE,IAAA,IAAA,CAAKM,QAAAA,EAAQ;AACf;AACF","file":"chat.cjs","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { VercelChatModel } from \"@/adapters/vercel/backend/chat.js\";\nimport { OllamaProvider } from \"ollama-ai-provider\";\nimport { OllamaClient, OllamaClientSettings } from \"@/adapters/ollama/backend/client.js\";\nimport { getEnv } from \"@/internals/env.js\";\nimport { ChatModelToolChoiceSupport } from \"@/backend/chat.js\";\n\ntype OllamaParameters = Parameters<OllamaProvider[\"languageModel\"]>;\nexport type OllamaChatModelId = NonNullable<OllamaParameters[0]>;\nexport type OllamaChatModelSettings = NonNullable<OllamaParameters[1]>;\n\nexport class OllamaChatModel extends VercelChatModel {\n readonly supportsToolStreaming = false;\n public readonly toolChoiceSupport: ChatModelToolChoiceSupport[] = [\"none\", \"auto\"];\n\n constructor(\n modelId: OllamaChatModelId = getEnv(\"OLLAMA_CHAT_MODEL\", \"llama3.1:8b\"),\n settings: OllamaChatModelSettings = {},\n client?: OllamaClient | OllamaClientSettings,\n ) {\n const model = OllamaClient.ensure(client).instance.chat(modelId, {\n ...settings,\n structuredOutputs: true, // otherwise breaks generated structure\n });\n super(model);\n }\n\n static 
{\n this.register();\n }\n}\n"]}
1
+ {"version":3,"sources":["../../../../src/adapters/ollama/backend/chat.ts"],"names":["OllamaChatModel","VercelChatModel","supportsToolStreaming","toolChoiceSupport","modelId","getEnv","settings","client","model","OllamaClient","ensure","instance","chat","register"],"mappings":";;;;;;;;AAeO,MAAMA,wBAAwBC,wBAAAA,CAAAA;EAfrC;;;EAgBWC,qBAAAA,GAAwB,KAAA;EACjBC,iBAAAA,GAAkD;AAAC,IAAA,MAAA;AAAQ,IAAA;;EAE3E,WAAA,CACEC,OAAAA,GAA6BC,eAAO,mBAAA,EAAqB,aAAA,GACzDC,QAAAA,GAAoC,IACpCC,MAAAA,EACA;AACA,IAAA,MAAMC,KAAAA,GAAQC,wBAAaC,MAAAA,CAAOH,MAAAA,EAAQI,QAAAA,CAASC,IAAAA,CAAKR,SAASE,QAAAA,CAAAA;AACjE,IAAA,KAAA,CAAME,KAAAA,CAAAA;AACR;EAEA;AACE,IAAA,IAAA,CAAKK,QAAAA,EAAQ;AACf;AACF","file":"chat.cjs","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { VercelChatModel } from \"@/adapters/vercel/backend/chat.js\";\nimport { OllamaProvider } from \"ollama-ai-provider-v2\";\nimport { OllamaClient, OllamaClientSettings } from \"@/adapters/ollama/backend/client.js\";\nimport { getEnv } from \"@/internals/env.js\";\nimport { ChatModelToolChoiceSupport } from \"@/backend/chat.js\";\n\ntype OllamaParameters = Parameters<OllamaProvider[\"chat\"]>;\nexport type OllamaChatModelId = NonNullable<OllamaParameters[0]>;\nexport type OllamaChatModelSettings = NonNullable<OllamaParameters[1]>;\n\nexport class OllamaChatModel extends VercelChatModel {\n readonly supportsToolStreaming = false;\n public readonly toolChoiceSupport: ChatModelToolChoiceSupport[] = [\"none\", \"auto\"];\n\n constructor(\n modelId: OllamaChatModelId = getEnv(\"OLLAMA_CHAT_MODEL\", \"llama3.1:8b\"),\n settings: OllamaChatModelSettings = {},\n client?: OllamaClient | OllamaClientSettings,\n ) {\n const model = OllamaClient.ensure(client).instance.chat(modelId, settings);\n super(model);\n }\n\n static {\n this.register();\n }\n}\n"]}
@@ -1,7 +1,7 @@
1
1
  import { VercelChatModel } from '../../vercel/backend/chat.cjs';
2
- import { OllamaProvider } from 'ollama-ai-provider';
2
+ import { OllamaProvider } from 'ollama-ai-provider-v2';
3
3
  import { OllamaClient, OllamaClientSettings } from './client.cjs';
4
- import { l as ChatModelToolChoiceSupport } from '../../../chat-BZ55YQab.cjs';
4
+ import { l as ChatModelToolChoiceSupport } from '../../../chat-CRb3vUVg.cjs';
5
5
  import '../../../logger/logger.cjs';
6
6
  import 'pino';
7
7
  import '../../../errors.cjs';
@@ -9,7 +9,7 @@ import '../../../internals/types.cjs';
9
9
  import '../../../internals/helpers/guards.cjs';
10
10
  import '../../../internals/serializable.cjs';
11
11
  import 'ai';
12
- import '../../../emitter-pJzHC_AM.cjs';
12
+ import '../../../emitter-DpqUYjXH.cjs';
13
13
  import '../../../backend/message.cjs';
14
14
  import '../../../context.cjs';
15
15
  import '../../../internals/helpers/promise.cjs';
@@ -29,7 +29,7 @@ import '../../../backend/client.cjs';
29
29
  * SPDX-License-Identifier: Apache-2.0
30
30
  */
31
31
 
32
- type OllamaParameters = Parameters<OllamaProvider["languageModel"]>;
32
+ type OllamaParameters = Parameters<OllamaProvider["chat"]>;
33
33
  type OllamaChatModelId = NonNullable<OllamaParameters[0]>;
34
34
  type OllamaChatModelSettings = NonNullable<OllamaParameters[1]>;
35
35
  declare class OllamaChatModel extends VercelChatModel {
@@ -1,7 +1,7 @@
1
1
  import { VercelChatModel } from '../../vercel/backend/chat.js';
2
- import { OllamaProvider } from 'ollama-ai-provider';
2
+ import { OllamaProvider } from 'ollama-ai-provider-v2';
3
3
  import { OllamaClient, OllamaClientSettings } from './client.js';
4
- import { l as ChatModelToolChoiceSupport } from '../../../chat-BBoOSvzm.js';
4
+ import { l as ChatModelToolChoiceSupport } from '../../../chat-C0s-o6ll.js';
5
5
  import '../../../logger/logger.js';
6
6
  import 'pino';
7
7
  import '../../../errors.js';
@@ -9,7 +9,7 @@ import '../../../internals/types.js';
9
9
  import '../../../internals/helpers/guards.js';
10
10
  import '../../../internals/serializable.js';
11
11
  import 'ai';
12
- import '../../../emitter-BqpLJQVb.js';
12
+ import '../../../emitter-CZFbzlUi.js';
13
13
  import '../../../backend/message.js';
14
14
  import '../../../context.js';
15
15
  import '../../../internals/helpers/promise.js';
@@ -29,7 +29,7 @@ import '../../../backend/client.js';
29
29
  * SPDX-License-Identifier: Apache-2.0
30
30
  */
31
31
 
32
- type OllamaParameters = Parameters<OllamaProvider["languageModel"]>;
32
+ type OllamaParameters = Parameters<OllamaProvider["chat"]>;
33
33
  type OllamaChatModelId = NonNullable<OllamaParameters[0]>;
34
34
  type OllamaChatModelSettings = NonNullable<OllamaParameters[1]>;
35
35
  declare class OllamaChatModel extends VercelChatModel {
@@ -14,10 +14,7 @@ class OllamaChatModel extends VercelChatModel {
14
14
  "auto"
15
15
  ];
16
16
  constructor(modelId = getEnv("OLLAMA_CHAT_MODEL", "llama3.1:8b"), settings = {}, client) {
17
- const model = OllamaClient.ensure(client).instance.chat(modelId, {
18
- ...settings,
19
- structuredOutputs: true
20
- });
17
+ const model = OllamaClient.ensure(client).instance.chat(modelId, settings);
21
18
  super(model);
22
19
  }
23
20
  static {
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../../src/adapters/ollama/backend/chat.ts"],"names":["OllamaChatModel","VercelChatModel","supportsToolStreaming","toolChoiceSupport","modelId","getEnv","settings","client","model","OllamaClient","ensure","instance","chat","structuredOutputs","register"],"mappings":";;;;;;AAeO,MAAMA,wBAAwBC,eAAAA,CAAAA;EAfrC;;;EAgBWC,qBAAAA,GAAwB,KAAA;EACjBC,iBAAAA,GAAkD;AAAC,IAAA,MAAA;AAAQ,IAAA;;EAE3E,WAAA,CACEC,OAAAA,GAA6BC,OAAO,mBAAA,EAAqB,aAAA,GACzDC,QAAAA,GAAoC,IACpCC,MAAAA,EACA;AACA,IAAA,MAAMC,QAAQC,YAAAA,CAAaC,MAAAA,CAAOH,MAAAA,CAAAA,CAAQI,QAAAA,CAASC,KAAKR,OAAAA,EAAS;MAC/D,GAAGE,QAAAA;MACHO,iBAAAA,EAAmB;KACrB,CAAA;AACA,IAAA,KAAA,CAAML,KAAAA,CAAAA;AACR;EAEA;AACE,IAAA,IAAA,CAAKM,QAAAA,EAAQ;AACf;AACF","file":"chat.js","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { VercelChatModel } from \"@/adapters/vercel/backend/chat.js\";\nimport { OllamaProvider } from \"ollama-ai-provider\";\nimport { OllamaClient, OllamaClientSettings } from \"@/adapters/ollama/backend/client.js\";\nimport { getEnv } from \"@/internals/env.js\";\nimport { ChatModelToolChoiceSupport } from \"@/backend/chat.js\";\n\ntype OllamaParameters = Parameters<OllamaProvider[\"languageModel\"]>;\nexport type OllamaChatModelId = NonNullable<OllamaParameters[0]>;\nexport type OllamaChatModelSettings = NonNullable<OllamaParameters[1]>;\n\nexport class OllamaChatModel extends VercelChatModel {\n readonly supportsToolStreaming = false;\n public readonly toolChoiceSupport: ChatModelToolChoiceSupport[] = [\"none\", \"auto\"];\n\n constructor(\n modelId: OllamaChatModelId = getEnv(\"OLLAMA_CHAT_MODEL\", \"llama3.1:8b\"),\n settings: OllamaChatModelSettings = {},\n client?: OllamaClient | OllamaClientSettings,\n ) {\n const model = OllamaClient.ensure(client).instance.chat(modelId, {\n ...settings,\n structuredOutputs: true, // otherwise breaks generated structure\n });\n super(model);\n }\n\n static {\n 
this.register();\n }\n}\n"]}
1
+ {"version":3,"sources":["../../../../src/adapters/ollama/backend/chat.ts"],"names":["OllamaChatModel","VercelChatModel","supportsToolStreaming","toolChoiceSupport","modelId","getEnv","settings","client","model","OllamaClient","ensure","instance","chat","register"],"mappings":";;;;;;AAeO,MAAMA,wBAAwBC,eAAAA,CAAAA;EAfrC;;;EAgBWC,qBAAAA,GAAwB,KAAA;EACjBC,iBAAAA,GAAkD;AAAC,IAAA,MAAA;AAAQ,IAAA;;EAE3E,WAAA,CACEC,OAAAA,GAA6BC,OAAO,mBAAA,EAAqB,aAAA,GACzDC,QAAAA,GAAoC,IACpCC,MAAAA,EACA;AACA,IAAA,MAAMC,KAAAA,GAAQC,aAAaC,MAAAA,CAAOH,MAAAA,EAAQI,QAAAA,CAASC,IAAAA,CAAKR,SAASE,QAAAA,CAAAA;AACjE,IAAA,KAAA,CAAME,KAAAA,CAAAA;AACR;EAEA;AACE,IAAA,IAAA,CAAKK,QAAAA,EAAQ;AACf;AACF","file":"chat.js","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { VercelChatModel } from \"@/adapters/vercel/backend/chat.js\";\nimport { OllamaProvider } from \"ollama-ai-provider-v2\";\nimport { OllamaClient, OllamaClientSettings } from \"@/adapters/ollama/backend/client.js\";\nimport { getEnv } from \"@/internals/env.js\";\nimport { ChatModelToolChoiceSupport } from \"@/backend/chat.js\";\n\ntype OllamaParameters = Parameters<OllamaProvider[\"chat\"]>;\nexport type OllamaChatModelId = NonNullable<OllamaParameters[0]>;\nexport type OllamaChatModelSettings = NonNullable<OllamaParameters[1]>;\n\nexport class OllamaChatModel extends VercelChatModel {\n readonly supportsToolStreaming = false;\n public readonly toolChoiceSupport: ChatModelToolChoiceSupport[] = [\"none\", \"auto\"];\n\n constructor(\n modelId: OllamaChatModelId = getEnv(\"OLLAMA_CHAT_MODEL\", \"llama3.1:8b\"),\n settings: OllamaChatModelSettings = {},\n client?: OllamaClient | OllamaClientSettings,\n ) {\n const model = OllamaClient.ensure(client).instance.chat(modelId, settings);\n super(model);\n }\n\n static {\n this.register();\n }\n}\n"]}
@@ -1,7 +1,7 @@
1
1
  'use strict';
2
2
 
3
3
  var env_cjs = require('../../../internals/env.cjs');
4
- var ollamaAiProvider = require('ollama-ai-provider');
4
+ var ollamaAiProviderV2 = require('ollama-ai-provider-v2');
5
5
  var client_cjs = require('../../../backend/client.cjs');
6
6
  var utils_cjs = require('../../vercel/backend/utils.cjs');
7
7
 
@@ -14,7 +14,7 @@ class OllamaClient extends client_cjs.BackendClient {
14
14
  create() {
15
15
  const { apiKey: _apiKey, baseURL, headers, ...settings } = this.settings ?? {};
16
16
  const apiKey = _apiKey || env_cjs.getEnv("OLLAMA_API_KEY");
17
- return ollamaAiProvider.createOllama({
17
+ return ollamaAiProviderV2.createOllama({
18
18
  ...settings,
19
19
  baseURL: baseURL || env_cjs.getEnv("OLLAMA_BASE_URL"),
20
20
  fetch: utils_cjs.vercelFetcher(this.settings?.fetch),
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../../src/adapters/ollama/backend/client.ts"],"names":["OllamaClient","BackendClient","create","apiKey","_apiKey","baseURL","headers","settings","getEnv","createOllama","fetch","vercelFetcher","parseHeadersFromEnv","Authorization"],"mappings":";;;;;;;;;AAYO,MAAMA,qBAAqBC,wBAAAA,CAAAA;EAZlC;;;EAaYC,MAAAA,GAAyB;AACjC,IAAA,MAAM,EAAEC,MAAAA,EAAQC,OAAAA,EAASC,OAAAA,EAASC,OAAAA,EAAS,GAAGC,QAAAA,EAAAA,GAAa,IAAA,CAAKA,QAAAA,IAAY,EAAC;AAC7E,IAAA,MAAMJ,MAAAA,GAASC,OAAAA,IAAWI,cAAAA,CAAO,gBAAA,CAAA;AAEjC,IAAA,OAAOC,6BAAAA,CAAa;MAClB,GAAGF,QAAAA;MACHF,OAAAA,EAASA,OAAAA,IAAWG,eAAO,iBAAA,CAAA;MAC3BE,KAAAA,EAAOC,uBAAAA,CAAc,IAAA,CAAKJ,QAAAA,EAAUG,KAAAA,CAAAA;MACpCJ,OAAAA,EAAS;AACP,QAAA,GAAGM,8BAAoB,oBAAA,CAAA;QACvB,GAAGN,OAAAA;AACH,QAAA,GAAIH,MAAAA,IAAU;AAAEU,UAAAA,aAAAA,EAAe,UAAUV,MAAAA,CAAAA;AAAS;AACpD;KACF,CAAA;AACF;AACF","file":"client.cjs","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { getEnv } from \"@/internals/env.js\";\nimport { createOllama, OllamaProvider, OllamaProviderSettings } from \"ollama-ai-provider\";\nimport { BackendClient } from \"@/backend/client.js\";\nimport { parseHeadersFromEnv, vercelFetcher } from \"@/adapters/vercel/backend/utils.js\";\n\nexport type OllamaClientSettings = OllamaProviderSettings & { apiKey?: string };\n\nexport class OllamaClient extends BackendClient<OllamaClientSettings, OllamaProvider> {\n protected create(): OllamaProvider {\n const { apiKey: _apiKey, baseURL, headers, ...settings } = this.settings ?? {};\n const apiKey = _apiKey || getEnv(\"OLLAMA_API_KEY\");\n\n return createOllama({\n ...settings,\n baseURL: baseURL || getEnv(\"OLLAMA_BASE_URL\"),\n fetch: vercelFetcher(this.settings?.fetch),\n headers: {\n ...parseHeadersFromEnv(\"OLLAMA_API_HEADERS\"),\n ...headers,\n ...(apiKey && { Authorization: `Bearer ${apiKey}` }),\n },\n });\n }\n}\n"]}
1
+ {"version":3,"sources":["../../../../src/adapters/ollama/backend/client.ts"],"names":["OllamaClient","BackendClient","create","apiKey","_apiKey","baseURL","headers","settings","getEnv","createOllama","fetch","vercelFetcher","parseHeadersFromEnv","Authorization"],"mappings":";;;;;;;;;AAYO,MAAMA,qBAAqBC,wBAAAA,CAAAA;EAZlC;;;EAaYC,MAAAA,GAAyB;AACjC,IAAA,MAAM,EAAEC,MAAAA,EAAQC,OAAAA,EAASC,OAAAA,EAASC,OAAAA,EAAS,GAAGC,QAAAA,EAAAA,GAAa,IAAA,CAAKA,QAAAA,IAAY,EAAC;AAC7E,IAAA,MAAMJ,MAAAA,GAASC,OAAAA,IAAWI,cAAAA,CAAO,gBAAA,CAAA;AAEjC,IAAA,OAAOC,+BAAAA,CAAa;MAClB,GAAGF,QAAAA;MACHF,OAAAA,EAASA,OAAAA,IAAWG,eAAO,iBAAA,CAAA;MAC3BE,KAAAA,EAAOC,uBAAAA,CAAc,IAAA,CAAKJ,QAAAA,EAAUG,KAAAA,CAAAA;MACpCJ,OAAAA,EAAS;AACP,QAAA,GAAGM,8BAAoB,oBAAA,CAAA;QACvB,GAAGN,OAAAA;AACH,QAAA,GAAIH,MAAAA,IAAU;AAAEU,UAAAA,aAAAA,EAAe,UAAUV,MAAAA,CAAAA;AAAS;AACpD;KACF,CAAA;AACF;AACF","file":"client.cjs","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { getEnv } from \"@/internals/env.js\";\nimport { createOllama, OllamaProvider, OllamaProviderSettings } from \"ollama-ai-provider-v2\";\nimport { BackendClient } from \"@/backend/client.js\";\nimport { parseHeadersFromEnv, vercelFetcher } from \"@/adapters/vercel/backend/utils.js\";\n\nexport type OllamaClientSettings = OllamaProviderSettings & { apiKey?: string };\n\nexport class OllamaClient extends BackendClient<OllamaClientSettings, OllamaProvider> {\n protected create(): OllamaProvider {\n const { apiKey: _apiKey, baseURL, headers, ...settings } = this.settings ?? {};\n const apiKey = _apiKey || getEnv(\"OLLAMA_API_KEY\");\n\n return createOllama({\n ...settings,\n baseURL: baseURL || getEnv(\"OLLAMA_BASE_URL\"),\n fetch: vercelFetcher(this.settings?.fetch),\n headers: {\n ...parseHeadersFromEnv(\"OLLAMA_API_HEADERS\"),\n ...headers,\n ...(apiKey && { Authorization: `Bearer ${apiKey}` }),\n },\n });\n }\n}\n"]}
@@ -1,4 +1,4 @@
1
- import { OllamaProviderSettings, OllamaProvider } from 'ollama-ai-provider';
1
+ import { OllamaProviderSettings, OllamaProvider } from 'ollama-ai-provider-v2';
2
2
  import { BackendClient } from '../../../backend/client.cjs';
3
3
  import '../../../internals/serializable.cjs';
4
4
  import '../../../internals/types.cjs';
@@ -1,4 +1,4 @@
1
- import { OllamaProviderSettings, OllamaProvider } from 'ollama-ai-provider';
1
+ import { OllamaProviderSettings, OllamaProvider } from 'ollama-ai-provider-v2';
2
2
  import { BackendClient } from '../../../backend/client.js';
3
3
  import '../../../internals/serializable.js';
4
4
  import '../../../internals/types.js';
@@ -1,5 +1,5 @@
1
1
  import { getEnv } from '../../../internals/env.js';
2
- import { createOllama } from 'ollama-ai-provider';
2
+ import { createOllama } from 'ollama-ai-provider-v2';
3
3
  import { BackendClient } from '../../../backend/client.js';
4
4
  import { parseHeadersFromEnv, vercelFetcher } from '../../vercel/backend/utils.js';
5
5
 
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../../src/adapters/ollama/backend/client.ts"],"names":["OllamaClient","BackendClient","create","apiKey","_apiKey","baseURL","headers","settings","getEnv","createOllama","fetch","vercelFetcher","parseHeadersFromEnv","Authorization"],"mappings":";;;;;;;AAYO,MAAMA,qBAAqBC,aAAAA,CAAAA;EAZlC;;;EAaYC,MAAAA,GAAyB;AACjC,IAAA,MAAM,EAAEC,MAAAA,EAAQC,OAAAA,EAASC,OAAAA,EAASC,OAAAA,EAAS,GAAGC,QAAAA,EAAAA,GAAa,IAAA,CAAKA,QAAAA,IAAY,EAAC;AAC7E,IAAA,MAAMJ,MAAAA,GAASC,OAAAA,IAAWI,MAAAA,CAAO,gBAAA,CAAA;AAEjC,IAAA,OAAOC,YAAAA,CAAa;MAClB,GAAGF,QAAAA;MACHF,OAAAA,EAASA,OAAAA,IAAWG,OAAO,iBAAA,CAAA;MAC3BE,KAAAA,EAAOC,aAAAA,CAAc,IAAA,CAAKJ,QAAAA,EAAUG,KAAAA,CAAAA;MACpCJ,OAAAA,EAAS;AACP,QAAA,GAAGM,oBAAoB,oBAAA,CAAA;QACvB,GAAGN,OAAAA;AACH,QAAA,GAAIH,MAAAA,IAAU;AAAEU,UAAAA,aAAAA,EAAe,UAAUV,MAAAA,CAAAA;AAAS;AACpD;KACF,CAAA;AACF;AACF","file":"client.js","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { getEnv } from \"@/internals/env.js\";\nimport { createOllama, OllamaProvider, OllamaProviderSettings } from \"ollama-ai-provider\";\nimport { BackendClient } from \"@/backend/client.js\";\nimport { parseHeadersFromEnv, vercelFetcher } from \"@/adapters/vercel/backend/utils.js\";\n\nexport type OllamaClientSettings = OllamaProviderSettings & { apiKey?: string };\n\nexport class OllamaClient extends BackendClient<OllamaClientSettings, OllamaProvider> {\n protected create(): OllamaProvider {\n const { apiKey: _apiKey, baseURL, headers, ...settings } = this.settings ?? {};\n const apiKey = _apiKey || getEnv(\"OLLAMA_API_KEY\");\n\n return createOllama({\n ...settings,\n baseURL: baseURL || getEnv(\"OLLAMA_BASE_URL\"),\n fetch: vercelFetcher(this.settings?.fetch),\n headers: {\n ...parseHeadersFromEnv(\"OLLAMA_API_HEADERS\"),\n ...headers,\n ...(apiKey && { Authorization: `Bearer ${apiKey}` }),\n },\n });\n }\n}\n"]}
1
+ {"version":3,"sources":["../../../../src/adapters/ollama/backend/client.ts"],"names":["OllamaClient","BackendClient","create","apiKey","_apiKey","baseURL","headers","settings","getEnv","createOllama","fetch","vercelFetcher","parseHeadersFromEnv","Authorization"],"mappings":";;;;;;;AAYO,MAAMA,qBAAqBC,aAAAA,CAAAA;EAZlC;;;EAaYC,MAAAA,GAAyB;AACjC,IAAA,MAAM,EAAEC,MAAAA,EAAQC,OAAAA,EAASC,OAAAA,EAASC,OAAAA,EAAS,GAAGC,QAAAA,EAAAA,GAAa,IAAA,CAAKA,QAAAA,IAAY,EAAC;AAC7E,IAAA,MAAMJ,MAAAA,GAASC,OAAAA,IAAWI,MAAAA,CAAO,gBAAA,CAAA;AAEjC,IAAA,OAAOC,YAAAA,CAAa;MAClB,GAAGF,QAAAA;MACHF,OAAAA,EAASA,OAAAA,IAAWG,OAAO,iBAAA,CAAA;MAC3BE,KAAAA,EAAOC,aAAAA,CAAc,IAAA,CAAKJ,QAAAA,EAAUG,KAAAA,CAAAA;MACpCJ,OAAAA,EAAS;AACP,QAAA,GAAGM,oBAAoB,oBAAA,CAAA;QACvB,GAAGN,OAAAA;AACH,QAAA,GAAIH,MAAAA,IAAU;AAAEU,UAAAA,aAAAA,EAAe,UAAUV,MAAAA,CAAAA;AAAS;AACpD;KACF,CAAA;AACF;AACF","file":"client.js","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { getEnv } from \"@/internals/env.js\";\nimport { createOllama, OllamaProvider, OllamaProviderSettings } from \"ollama-ai-provider-v2\";\nimport { BackendClient } from \"@/backend/client.js\";\nimport { parseHeadersFromEnv, vercelFetcher } from \"@/adapters/vercel/backend/utils.js\";\n\nexport type OllamaClientSettings = OllamaProviderSettings & { apiKey?: string };\n\nexport class OllamaClient extends BackendClient<OllamaClientSettings, OllamaProvider> {\n protected create(): OllamaProvider {\n const { apiKey: _apiKey, baseURL, headers, ...settings } = this.settings ?? {};\n const apiKey = _apiKey || getEnv(\"OLLAMA_API_KEY\");\n\n return createOllama({\n ...settings,\n baseURL: baseURL || getEnv(\"OLLAMA_BASE_URL\"),\n fetch: vercelFetcher(this.settings?.fetch),\n headers: {\n ...parseHeadersFromEnv(\"OLLAMA_API_HEADERS\"),\n ...headers,\n ...(apiKey && { Authorization: `Bearer ${apiKey}` }),\n },\n });\n }\n}\n"]}
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../../src/adapters/ollama/backend/embedding.ts"],"names":["OllamaEmbeddingModel","VercelEmbeddingModel","modelId","getEnv","settings","client","model","OllamaClient","ensure","instance","embedding"],"mappings":";;;;;;;;AAcO,MAAMA,6BAA6BC,kCAAAA,CAAAA;EAd1C;;;EAeE,WAAA,CACEC,OAAAA,GAAkCC,eAAO,wBAAA,EAA0B,kBAAA,GACnEC,QAAAA,GAAyC,IACzCC,MAAAA,EACA;AACA,IAAA,MAAMC,KAAAA,GAAQC,wBAAaC,MAAAA,CAAOH,MAAAA,EAAQI,QAAAA,CAASC,SAAAA,CAAUR,SAASE,QAAAA,CAAAA;AACtE,IAAA,KAAA,CAAME,KAAAA,CAAAA;AACR;AACF","file":"embedding.cjs","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { OllamaProvider } from \"ollama-ai-provider\";\nimport { OllamaClient, OllamaClientSettings } from \"@/adapters/ollama/backend/client.js\";\nimport { VercelEmbeddingModel } from \"@/adapters/vercel/backend/embedding.js\";\nimport { getEnv } from \"@/internals/env.js\";\n\ntype OllamaParameters = Parameters<OllamaProvider[\"textEmbeddingModel\"]>;\nexport type OllamaEmbeddingModelId = NonNullable<OllamaParameters[0]>;\nexport type OllamaEmbeddingModelSettings = NonNullable<OllamaParameters[1]>;\n\nexport class OllamaEmbeddingModel extends VercelEmbeddingModel {\n constructor(\n modelId: OllamaEmbeddingModelId = getEnv(\"OLLAMA_EMBEDDING_MODEL\", \"nomic-embed-text\"),\n settings: OllamaEmbeddingModelSettings = {},\n client?: OllamaClient | OllamaClientSettings,\n ) {\n const model = OllamaClient.ensure(client).instance.embedding(modelId, settings);\n super(model);\n }\n}\n"]}
1
+ {"version":3,"sources":["../../../../src/adapters/ollama/backend/embedding.ts"],"names":["OllamaEmbeddingModel","VercelEmbeddingModel","modelId","getEnv","settings","client","model","OllamaClient","ensure","instance","embedding"],"mappings":";;;;;;;;AAcO,MAAMA,6BAA6BC,kCAAAA,CAAAA;EAd1C;;;EAeE,WAAA,CACEC,OAAAA,GAAkCC,eAAO,wBAAA,EAA0B,kBAAA,GACnEC,QAAAA,GAAyC,IACzCC,MAAAA,EACA;AACA,IAAA,MAAMC,KAAAA,GAAQC,wBAAaC,MAAAA,CAAOH,MAAAA,EAAQI,QAAAA,CAASC,SAAAA,CAAUR,SAASE,QAAAA,CAAAA;AACtE,IAAA,KAAA,CAAME,KAAAA,CAAAA;AACR;AACF","file":"embedding.cjs","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { OllamaProvider } from \"ollama-ai-provider-v2\";\nimport { OllamaClient, OllamaClientSettings } from \"@/adapters/ollama/backend/client.js\";\nimport { VercelEmbeddingModel } from \"@/adapters/vercel/backend/embedding.js\";\nimport { getEnv } from \"@/internals/env.js\";\n\ntype OllamaParameters = Parameters<OllamaProvider[\"textEmbeddingModel\"]>;\nexport type OllamaEmbeddingModelId = NonNullable<OllamaParameters[0]>;\nexport type OllamaEmbeddingModelSettings = NonNullable<OllamaParameters[1]>;\n\nexport class OllamaEmbeddingModel extends VercelEmbeddingModel {\n constructor(\n modelId: OllamaEmbeddingModelId = getEnv(\"OLLAMA_EMBEDDING_MODEL\", \"nomic-embed-text\"),\n settings: OllamaEmbeddingModelSettings = {},\n client?: OllamaClient | OllamaClientSettings,\n ) {\n const model = OllamaClient.ensure(client).instance.embedding(modelId, settings);\n super(model);\n }\n}\n"]}
@@ -1,4 +1,4 @@
1
- import { OllamaProvider } from 'ollama-ai-provider';
1
+ import { OllamaProvider } from 'ollama-ai-provider-v2';
2
2
  import { OllamaClient, OllamaClientSettings } from './client.cjs';
3
3
  import { VercelEmbeddingModel } from '../../vercel/backend/embedding.cjs';
4
4
  import '../../../backend/client.cjs';
@@ -7,10 +7,10 @@ import '../../../internals/types.cjs';
7
7
  import '../../../internals/helpers/guards.cjs';
8
8
  import '../../../backend/embedding.cjs';
9
9
  import '../../../context.cjs';
10
- import '../../../emitter-pJzHC_AM.cjs';
10
+ import '../../../emitter-DpqUYjXH.cjs';
11
11
  import '../../../internals/helpers/promise.cjs';
12
12
  import '../../../errors.cjs';
13
- import '../../../chat-BZ55YQab.cjs';
13
+ import '../../../chat-CRb3vUVg.cjs';
14
14
  import '../../../backend/message.cjs';
15
15
  import 'ai';
16
16
  import 'promise-based-task';
@@ -1,4 +1,4 @@
1
- import { OllamaProvider } from 'ollama-ai-provider';
1
+ import { OllamaProvider } from 'ollama-ai-provider-v2';
2
2
  import { OllamaClient, OllamaClientSettings } from './client.js';
3
3
  import { VercelEmbeddingModel } from '../../vercel/backend/embedding.js';
4
4
  import '../../../backend/client.js';
@@ -7,10 +7,10 @@ import '../../../internals/types.js';
7
7
  import '../../../internals/helpers/guards.js';
8
8
  import '../../../backend/embedding.js';
9
9
  import '../../../context.js';
10
- import '../../../emitter-BqpLJQVb.js';
10
+ import '../../../emitter-CZFbzlUi.js';
11
11
  import '../../../internals/helpers/promise.js';
12
12
  import '../../../errors.js';
13
- import '../../../chat-BBoOSvzm.js';
13
+ import '../../../chat-C0s-o6ll.js';
14
14
  import '../../../backend/message.js';
15
15
  import 'ai';
16
16
  import 'promise-based-task';
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../../src/adapters/ollama/backend/embedding.ts"],"names":["OllamaEmbeddingModel","VercelEmbeddingModel","modelId","getEnv","settings","client","model","OllamaClient","ensure","instance","embedding"],"mappings":";;;;;;AAcO,MAAMA,6BAA6BC,oBAAAA,CAAAA;EAd1C;;;EAeE,WAAA,CACEC,OAAAA,GAAkCC,OAAO,wBAAA,EAA0B,kBAAA,GACnEC,QAAAA,GAAyC,IACzCC,MAAAA,EACA;AACA,IAAA,MAAMC,KAAAA,GAAQC,aAAaC,MAAAA,CAAOH,MAAAA,EAAQI,QAAAA,CAASC,SAAAA,CAAUR,SAASE,QAAAA,CAAAA;AACtE,IAAA,KAAA,CAAME,KAAAA,CAAAA;AACR;AACF","file":"embedding.js","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { OllamaProvider } from \"ollama-ai-provider\";\nimport { OllamaClient, OllamaClientSettings } from \"@/adapters/ollama/backend/client.js\";\nimport { VercelEmbeddingModel } from \"@/adapters/vercel/backend/embedding.js\";\nimport { getEnv } from \"@/internals/env.js\";\n\ntype OllamaParameters = Parameters<OllamaProvider[\"textEmbeddingModel\"]>;\nexport type OllamaEmbeddingModelId = NonNullable<OllamaParameters[0]>;\nexport type OllamaEmbeddingModelSettings = NonNullable<OllamaParameters[1]>;\n\nexport class OllamaEmbeddingModel extends VercelEmbeddingModel {\n constructor(\n modelId: OllamaEmbeddingModelId = getEnv(\"OLLAMA_EMBEDDING_MODEL\", \"nomic-embed-text\"),\n settings: OllamaEmbeddingModelSettings = {},\n client?: OllamaClient | OllamaClientSettings,\n ) {\n const model = OllamaClient.ensure(client).instance.embedding(modelId, settings);\n super(model);\n }\n}\n"]}
1
+ {"version":3,"sources":["../../../../src/adapters/ollama/backend/embedding.ts"],"names":["OllamaEmbeddingModel","VercelEmbeddingModel","modelId","getEnv","settings","client","model","OllamaClient","ensure","instance","embedding"],"mappings":";;;;;;AAcO,MAAMA,6BAA6BC,oBAAAA,CAAAA;EAd1C;;;EAeE,WAAA,CACEC,OAAAA,GAAkCC,OAAO,wBAAA,EAA0B,kBAAA,GACnEC,QAAAA,GAAyC,IACzCC,MAAAA,EACA;AACA,IAAA,MAAMC,KAAAA,GAAQC,aAAaC,MAAAA,CAAOH,MAAAA,EAAQI,QAAAA,CAASC,SAAAA,CAAUR,SAASE,QAAAA,CAAAA;AACtE,IAAA,KAAA,CAAME,KAAAA,CAAAA;AACR;AACF","file":"embedding.js","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { OllamaProvider } from \"ollama-ai-provider-v2\";\nimport { OllamaClient, OllamaClientSettings } from \"@/adapters/ollama/backend/client.js\";\nimport { VercelEmbeddingModel } from \"@/adapters/vercel/backend/embedding.js\";\nimport { getEnv } from \"@/internals/env.js\";\n\ntype OllamaParameters = Parameters<OllamaProvider[\"textEmbeddingModel\"]>;\nexport type OllamaEmbeddingModelId = NonNullable<OllamaParameters[0]>;\nexport type OllamaEmbeddingModelSettings = NonNullable<OllamaParameters[1]>;\n\nexport class OllamaEmbeddingModel extends VercelEmbeddingModel {\n constructor(\n modelId: OllamaEmbeddingModelId = getEnv(\"OLLAMA_EMBEDDING_MODEL\", \"nomic-embed-text\"),\n settings: OllamaEmbeddingModelSettings = {},\n client?: OllamaClient | OllamaClientSettings,\n ) {\n const model = OllamaClient.ensure(client).instance.embedding(modelId, settings);\n super(model);\n }\n}\n"]}
@@ -10,9 +10,10 @@ class OpenAIChatModel extends chat_cjs.VercelChatModel {
10
10
  static {
11
11
  __name(this, "OpenAIChatModel");
12
12
  }
13
- constructor(modelId = env_cjs.getEnv("OPENAI_CHAT_MODEL", "gpt-4o"), settings = {}, client) {
14
- const model = client_cjs.OpenAIClient.ensure(client).instance.chat(modelId, settings);
13
+ constructor(modelId = env_cjs.getEnv("OPENAI_CHAT_MODEL", "gpt-4o"), parameters = {}, client) {
14
+ const model = client_cjs.OpenAIClient.ensure(client).instance.chat(modelId);
15
15
  super(model);
16
+ Object.assign(this.parameters, parameters ?? {});
16
17
  }
17
18
  }
18
19
 
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../../src/adapters/openai/backend/chat.ts"],"names":["OpenAIChatModel","VercelChatModel","modelId","getEnv","settings","client","model","OpenAIClient","ensure","instance","chat"],"mappings":";;;;;;;;AAcO,MAAMA,wBAAwBC,wBAAAA,CAAAA;EAdrC;;;EAeE,WAAA,CACEC,OAAAA,GAA6BC,eAAO,mBAAA,EAAqB,QAAA,GACzDC,QAAAA,GAAoC,IACpCC,MAAAA,EACA;AACA,IAAA,MAAMC,KAAAA,GAAQC,wBAAaC,MAAAA,CAAOH,MAAAA,EAAQI,QAAAA,CAASC,IAAAA,CAAKR,SAASE,QAAAA,CAAAA;AACjE,IAAA,KAAA,CAAME,KAAAA,CAAAA;AACR;AACF","file":"chat.cjs","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { OpenAIProvider } from \"@ai-sdk/openai\";\nimport { OpenAIClient, OpenAIClientSettings } from \"@/adapters/openai/backend/client.js\";\nimport { VercelChatModel } from \"@/adapters/vercel/backend/chat.js\";\nimport { getEnv } from \"@/internals/env.js\";\n\ntype OpenAIParameters = Parameters<OpenAIProvider[\"chat\"]>;\nexport type OpenAIChatModelId = NonNullable<OpenAIParameters[0]>;\nexport type OpenAIChatModelSettings = NonNullable<OpenAIParameters[1]>;\n\nexport class OpenAIChatModel extends VercelChatModel {\n constructor(\n modelId: OpenAIChatModelId = getEnv(\"OPENAI_CHAT_MODEL\", \"gpt-4o\"),\n settings: OpenAIChatModelSettings = {},\n client?: OpenAIClient | OpenAIClientSettings,\n ) {\n const model = OpenAIClient.ensure(client).instance.chat(modelId, settings);\n super(model);\n }\n}\n"]}
1
+ {"version":3,"sources":["../../../../src/adapters/openai/backend/chat.ts"],"names":["OpenAIChatModel","VercelChatModel","modelId","getEnv","parameters","client","model","OpenAIClient","ensure","instance","chat","Object","assign"],"mappings":";;;;;;;;AAcO,MAAMA,wBAAwBC,wBAAAA,CAAAA;EAdrC;;;EAeE,WAAA,CACEC,OAAAA,GAA6BC,eAAO,mBAAA,EAAqB,QAAA,GACzDC,UAAAA,GAAkC,IAClCC,MAAAA,EACA;AACA,IAAA,MAAMC,QAAQC,uBAAAA,CAAaC,MAAAA,CAAOH,MAAAA,CAAAA,CAAQI,QAAAA,CAASC,KAAKR,OAAAA,CAAAA;AACxD,IAAA,KAAA,CAAMI,KAAAA,CAAAA;AACNK,IAAAA,MAAAA,CAAOC,MAAAA,CAAO,IAAA,CAAKR,UAAAA,EAAYA,UAAAA,IAAc,EAAC,CAAA;AAChD;AACF","file":"chat.cjs","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { OpenAIProvider } from \"@ai-sdk/openai\";\nimport { OpenAIClient, OpenAIClientSettings } from \"@/adapters/openai/backend/client.js\";\nimport { VercelChatModel } from \"@/adapters/vercel/backend/chat.js\";\nimport { getEnv } from \"@/internals/env.js\";\nimport { ChatModelParameters } from \"@/backend/chat.js\";\n\ntype OpenAIParameters = Parameters<OpenAIProvider[\"chat\"]>;\nexport type OpenAIChatModelId = NonNullable<OpenAIParameters[0]>;\n\nexport class OpenAIChatModel extends VercelChatModel {\n constructor(\n modelId: OpenAIChatModelId = getEnv(\"OPENAI_CHAT_MODEL\", \"gpt-4o\"),\n parameters: ChatModelParameters = {},\n client?: OpenAIClient | OpenAIClientSettings,\n ) {\n const model = OpenAIClient.ensure(client).instance.chat(modelId);\n super(model);\n Object.assign(this.parameters, parameters ?? {});\n }\n}\n"]}
@@ -1,6 +1,7 @@
1
1
  import { OpenAIProvider } from '@ai-sdk/openai';
2
2
  import { OpenAIClient, OpenAIClientSettings } from './client.cjs';
3
3
  import { VercelChatModel } from '../../vercel/backend/chat.cjs';
4
+ import { C as ChatModelParameters } from '../../../chat-CRb3vUVg.cjs';
4
5
  import '../../../backend/client.cjs';
5
6
  import '../../../internals/serializable.cjs';
6
7
  import '../../../internals/types.cjs';
@@ -8,11 +9,10 @@ import '../../../internals/helpers/guards.cjs';
8
9
  import '../../../logger/logger.cjs';
9
10
  import 'pino';
10
11
  import '../../../errors.cjs';
11
- import '../../../chat-BZ55YQab.cjs';
12
- import '../../../backend/message.cjs';
13
12
  import 'ai';
13
+ import '../../../emitter-DpqUYjXH.cjs';
14
+ import '../../../backend/message.cjs';
14
15
  import '../../../context.cjs';
15
- import '../../../emitter-pJzHC_AM.cjs';
16
16
  import '../../../internals/helpers/promise.cjs';
17
17
  import 'promise-based-task';
18
18
  import '../../../cache/base.cjs';
@@ -31,9 +31,8 @@ import '../../../template.cjs';
31
31
 
32
32
  type OpenAIParameters = Parameters<OpenAIProvider["chat"]>;
33
33
  type OpenAIChatModelId = NonNullable<OpenAIParameters[0]>;
34
- type OpenAIChatModelSettings = NonNullable<OpenAIParameters[1]>;
35
34
  declare class OpenAIChatModel extends VercelChatModel {
36
- constructor(modelId?: OpenAIChatModelId, settings?: OpenAIChatModelSettings, client?: OpenAIClient | OpenAIClientSettings);
35
+ constructor(modelId?: OpenAIChatModelId, parameters?: ChatModelParameters, client?: OpenAIClient | OpenAIClientSettings);
37
36
  }
38
37
 
39
- export { OpenAIChatModel, type OpenAIChatModelId, type OpenAIChatModelSettings };
38
+ export { OpenAIChatModel, type OpenAIChatModelId };
@@ -1,6 +1,7 @@
1
1
  import { OpenAIProvider } from '@ai-sdk/openai';
2
2
  import { OpenAIClient, OpenAIClientSettings } from './client.js';
3
3
  import { VercelChatModel } from '../../vercel/backend/chat.js';
4
+ import { C as ChatModelParameters } from '../../../chat-C0s-o6ll.js';
4
5
  import '../../../backend/client.js';
5
6
  import '../../../internals/serializable.js';
6
7
  import '../../../internals/types.js';
@@ -8,11 +9,10 @@ import '../../../internals/helpers/guards.js';
8
9
  import '../../../logger/logger.js';
9
10
  import 'pino';
10
11
  import '../../../errors.js';
11
- import '../../../chat-BBoOSvzm.js';
12
- import '../../../backend/message.js';
13
12
  import 'ai';
13
+ import '../../../emitter-CZFbzlUi.js';
14
+ import '../../../backend/message.js';
14
15
  import '../../../context.js';
15
- import '../../../emitter-BqpLJQVb.js';
16
16
  import '../../../internals/helpers/promise.js';
17
17
  import 'promise-based-task';
18
18
  import '../../../cache/base.js';
@@ -31,9 +31,8 @@ import '../../../template.js';
31
31
 
32
32
  type OpenAIParameters = Parameters<OpenAIProvider["chat"]>;
33
33
  type OpenAIChatModelId = NonNullable<OpenAIParameters[0]>;
34
- type OpenAIChatModelSettings = NonNullable<OpenAIParameters[1]>;
35
34
  declare class OpenAIChatModel extends VercelChatModel {
36
- constructor(modelId?: OpenAIChatModelId, settings?: OpenAIChatModelSettings, client?: OpenAIClient | OpenAIClientSettings);
35
+ constructor(modelId?: OpenAIChatModelId, parameters?: ChatModelParameters, client?: OpenAIClient | OpenAIClientSettings);
37
36
  }
38
37
 
39
- export { OpenAIChatModel, type OpenAIChatModelId, type OpenAIChatModelSettings };
38
+ export { OpenAIChatModel, type OpenAIChatModelId };
@@ -8,9 +8,10 @@ class OpenAIChatModel extends VercelChatModel {
8
8
  static {
9
9
  __name(this, "OpenAIChatModel");
10
10
  }
11
- constructor(modelId = getEnv("OPENAI_CHAT_MODEL", "gpt-4o"), settings = {}, client) {
12
- const model = OpenAIClient.ensure(client).instance.chat(modelId, settings);
11
+ constructor(modelId = getEnv("OPENAI_CHAT_MODEL", "gpt-4o"), parameters = {}, client) {
12
+ const model = OpenAIClient.ensure(client).instance.chat(modelId);
13
13
  super(model);
14
+ Object.assign(this.parameters, parameters ?? {});
14
15
  }
15
16
  }
16
17
 
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../../src/adapters/openai/backend/chat.ts"],"names":["OpenAIChatModel","VercelChatModel","modelId","getEnv","settings","client","model","OpenAIClient","ensure","instance","chat"],"mappings":";;;;;;AAcO,MAAMA,wBAAwBC,eAAAA,CAAAA;EAdrC;;;EAeE,WAAA,CACEC,OAAAA,GAA6BC,OAAO,mBAAA,EAAqB,QAAA,GACzDC,QAAAA,GAAoC,IACpCC,MAAAA,EACA;AACA,IAAA,MAAMC,KAAAA,GAAQC,aAAaC,MAAAA,CAAOH,MAAAA,EAAQI,QAAAA,CAASC,IAAAA,CAAKR,SAASE,QAAAA,CAAAA;AACjE,IAAA,KAAA,CAAME,KAAAA,CAAAA;AACR;AACF","file":"chat.js","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { OpenAIProvider } from \"@ai-sdk/openai\";\nimport { OpenAIClient, OpenAIClientSettings } from \"@/adapters/openai/backend/client.js\";\nimport { VercelChatModel } from \"@/adapters/vercel/backend/chat.js\";\nimport { getEnv } from \"@/internals/env.js\";\n\ntype OpenAIParameters = Parameters<OpenAIProvider[\"chat\"]>;\nexport type OpenAIChatModelId = NonNullable<OpenAIParameters[0]>;\nexport type OpenAIChatModelSettings = NonNullable<OpenAIParameters[1]>;\n\nexport class OpenAIChatModel extends VercelChatModel {\n constructor(\n modelId: OpenAIChatModelId = getEnv(\"OPENAI_CHAT_MODEL\", \"gpt-4o\"),\n settings: OpenAIChatModelSettings = {},\n client?: OpenAIClient | OpenAIClientSettings,\n ) {\n const model = OpenAIClient.ensure(client).instance.chat(modelId, settings);\n super(model);\n }\n}\n"]}
1
+ {"version":3,"sources":["../../../../src/adapters/openai/backend/chat.ts"],"names":["OpenAIChatModel","VercelChatModel","modelId","getEnv","parameters","client","model","OpenAIClient","ensure","instance","chat","Object","assign"],"mappings":";;;;;;AAcO,MAAMA,wBAAwBC,eAAAA,CAAAA;EAdrC;;;EAeE,WAAA,CACEC,OAAAA,GAA6BC,OAAO,mBAAA,EAAqB,QAAA,GACzDC,UAAAA,GAAkC,IAClCC,MAAAA,EACA;AACA,IAAA,MAAMC,QAAQC,YAAAA,CAAaC,MAAAA,CAAOH,MAAAA,CAAAA,CAAQI,QAAAA,CAASC,KAAKR,OAAAA,CAAAA;AACxD,IAAA,KAAA,CAAMI,KAAAA,CAAAA;AACNK,IAAAA,MAAAA,CAAOC,MAAAA,CAAO,IAAA,CAAKR,UAAAA,EAAYA,UAAAA,IAAc,EAAC,CAAA;AAChD;AACF","file":"chat.js","sourcesContent":["/**\n * Copyright 2025 © BeeAI a Series of LF Projects, LLC\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { OpenAIProvider } from \"@ai-sdk/openai\";\nimport { OpenAIClient, OpenAIClientSettings } from \"@/adapters/openai/backend/client.js\";\nimport { VercelChatModel } from \"@/adapters/vercel/backend/chat.js\";\nimport { getEnv } from \"@/internals/env.js\";\nimport { ChatModelParameters } from \"@/backend/chat.js\";\n\ntype OpenAIParameters = Parameters<OpenAIProvider[\"chat\"]>;\nexport type OpenAIChatModelId = NonNullable<OpenAIParameters[0]>;\n\nexport class OpenAIChatModel extends VercelChatModel {\n constructor(\n modelId: OpenAIChatModelId = getEnv(\"OPENAI_CHAT_MODEL\", \"gpt-4o\"),\n parameters: ChatModelParameters = {},\n client?: OpenAIClient | OpenAIClientSettings,\n ) {\n const model = OpenAIClient.ensure(client).instance.chat(modelId);\n super(model);\n Object.assign(this.parameters, parameters ?? {});\n }\n}\n"]}
@@ -12,21 +12,12 @@ class OpenAIClient extends client_cjs.BackendClient {
12
12
  __name(this, "OpenAIClient");
13
13
  }
14
14
  create() {
15
- const extraHeaders = utils_cjs.parseHeadersFromEnv("OPENAI_API_HEADERS");
16
- const baseURL = this.settings?.baseURL || env_cjs.getEnv("OPENAI_API_ENDPOINT");
17
- let compatibility = this.settings?.compatibility || env_cjs.getEnv("OPENAI_COMPATIBILITY_MODE");
18
- if (baseURL && !compatibility) {
19
- compatibility = "compatible";
20
- } else if (!baseURL && !compatibility) {
21
- compatibility = "strict";
22
- }
23
15
  return openai.createOpenAI({
24
16
  ...this.settings,
25
- compatibility,
26
17
  apiKey: this.settings?.apiKey || env_cjs.getEnv("OPENAI_API_KEY"),
27
- baseURL,
18
+ baseURL: this.settings?.baseURL || env_cjs.getEnv("OPENAI_API_ENDPOINT"),
28
19
  headers: {
29
- ...extraHeaders,
20
+ ...utils_cjs.parseHeadersFromEnv("OPENAI_API_HEADERS"),
30
21
  ...this.settings?.headers
31
22
  },
32
23
  fetch: utils_cjs.vercelFetcher(this.settings?.fetch)