praisonai-3.0.0-py3-none-any.whl

This diff represents the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (393)
  1. praisonai/__init__.py +54 -0
  2. praisonai/__main__.py +15 -0
  3. praisonai/acp/__init__.py +54 -0
  4. praisonai/acp/config.py +159 -0
  5. praisonai/acp/server.py +587 -0
  6. praisonai/acp/session.py +219 -0
  7. praisonai/adapters/__init__.py +50 -0
  8. praisonai/adapters/readers.py +395 -0
  9. praisonai/adapters/rerankers.py +315 -0
  10. praisonai/adapters/retrievers.py +394 -0
  11. praisonai/adapters/vector_stores.py +409 -0
  12. praisonai/agent_scheduler.py +337 -0
  13. praisonai/agents_generator.py +903 -0
  14. praisonai/api/call.py +292 -0
  15. praisonai/auto.py +1197 -0
  16. praisonai/capabilities/__init__.py +275 -0
  17. praisonai/capabilities/a2a.py +140 -0
  18. praisonai/capabilities/assistants.py +283 -0
  19. praisonai/capabilities/audio.py +320 -0
  20. praisonai/capabilities/batches.py +469 -0
  21. praisonai/capabilities/completions.py +336 -0
  22. praisonai/capabilities/container_files.py +155 -0
  23. praisonai/capabilities/containers.py +93 -0
  24. praisonai/capabilities/embeddings.py +158 -0
  25. praisonai/capabilities/files.py +467 -0
  26. praisonai/capabilities/fine_tuning.py +293 -0
  27. praisonai/capabilities/guardrails.py +182 -0
  28. praisonai/capabilities/images.py +330 -0
  29. praisonai/capabilities/mcp.py +190 -0
  30. praisonai/capabilities/messages.py +270 -0
  31. praisonai/capabilities/moderations.py +154 -0
  32. praisonai/capabilities/ocr.py +217 -0
  33. praisonai/capabilities/passthrough.py +204 -0
  34. praisonai/capabilities/rag.py +207 -0
  35. praisonai/capabilities/realtime.py +160 -0
  36. praisonai/capabilities/rerank.py +165 -0
  37. praisonai/capabilities/responses.py +266 -0
  38. praisonai/capabilities/search.py +109 -0
  39. praisonai/capabilities/skills.py +133 -0
  40. praisonai/capabilities/vector_store_files.py +334 -0
  41. praisonai/capabilities/vector_stores.py +304 -0
  42. praisonai/capabilities/videos.py +141 -0
  43. praisonai/chainlit_ui.py +304 -0
  44. praisonai/chat/__init__.py +106 -0
  45. praisonai/chat/app.py +125 -0
  46. praisonai/cli/__init__.py +26 -0
  47. praisonai/cli/app.py +213 -0
  48. praisonai/cli/commands/__init__.py +75 -0
  49. praisonai/cli/commands/acp.py +70 -0
  50. praisonai/cli/commands/completion.py +333 -0
  51. praisonai/cli/commands/config.py +166 -0
  52. praisonai/cli/commands/debug.py +142 -0
  53. praisonai/cli/commands/diag.py +55 -0
  54. praisonai/cli/commands/doctor.py +166 -0
  55. praisonai/cli/commands/environment.py +179 -0
  56. praisonai/cli/commands/lsp.py +112 -0
  57. praisonai/cli/commands/mcp.py +210 -0
  58. praisonai/cli/commands/profile.py +457 -0
  59. praisonai/cli/commands/run.py +228 -0
  60. praisonai/cli/commands/schedule.py +150 -0
  61. praisonai/cli/commands/serve.py +97 -0
  62. praisonai/cli/commands/session.py +212 -0
  63. praisonai/cli/commands/traces.py +145 -0
  64. praisonai/cli/commands/version.py +101 -0
  65. praisonai/cli/configuration/__init__.py +18 -0
  66. praisonai/cli/configuration/loader.py +353 -0
  67. praisonai/cli/configuration/paths.py +114 -0
  68. praisonai/cli/configuration/schema.py +164 -0
  69. praisonai/cli/features/__init__.py +268 -0
  70. praisonai/cli/features/acp.py +236 -0
  71. praisonai/cli/features/action_orchestrator.py +546 -0
  72. praisonai/cli/features/agent_scheduler.py +773 -0
  73. praisonai/cli/features/agent_tools.py +474 -0
  74. praisonai/cli/features/agents.py +375 -0
  75. praisonai/cli/features/at_mentions.py +471 -0
  76. praisonai/cli/features/auto_memory.py +182 -0
  77. praisonai/cli/features/autonomy_mode.py +490 -0
  78. praisonai/cli/features/background.py +356 -0
  79. praisonai/cli/features/base.py +168 -0
  80. praisonai/cli/features/capabilities.py +1326 -0
  81. praisonai/cli/features/checkpoints.py +338 -0
  82. praisonai/cli/features/code_intelligence.py +652 -0
  83. praisonai/cli/features/compaction.py +294 -0
  84. praisonai/cli/features/compare.py +534 -0
  85. praisonai/cli/features/cost_tracker.py +514 -0
  86. praisonai/cli/features/debug.py +810 -0
  87. praisonai/cli/features/deploy.py +517 -0
  88. praisonai/cli/features/diag.py +289 -0
  89. praisonai/cli/features/doctor/__init__.py +63 -0
  90. praisonai/cli/features/doctor/checks/__init__.py +24 -0
  91. praisonai/cli/features/doctor/checks/acp_checks.py +240 -0
  92. praisonai/cli/features/doctor/checks/config_checks.py +366 -0
  93. praisonai/cli/features/doctor/checks/db_checks.py +366 -0
  94. praisonai/cli/features/doctor/checks/env_checks.py +543 -0
  95. praisonai/cli/features/doctor/checks/lsp_checks.py +199 -0
  96. praisonai/cli/features/doctor/checks/mcp_checks.py +349 -0
  97. praisonai/cli/features/doctor/checks/memory_checks.py +268 -0
  98. praisonai/cli/features/doctor/checks/network_checks.py +251 -0
  99. praisonai/cli/features/doctor/checks/obs_checks.py +328 -0
  100. praisonai/cli/features/doctor/checks/performance_checks.py +235 -0
  101. praisonai/cli/features/doctor/checks/permissions_checks.py +259 -0
  102. praisonai/cli/features/doctor/checks/selftest_checks.py +322 -0
  103. praisonai/cli/features/doctor/checks/serve_checks.py +426 -0
  104. praisonai/cli/features/doctor/checks/skills_checks.py +231 -0
  105. praisonai/cli/features/doctor/checks/tools_checks.py +371 -0
  106. praisonai/cli/features/doctor/engine.py +266 -0
  107. praisonai/cli/features/doctor/formatters.py +310 -0
  108. praisonai/cli/features/doctor/handler.py +397 -0
  109. praisonai/cli/features/doctor/models.py +264 -0
  110. praisonai/cli/features/doctor/registry.py +239 -0
  111. praisonai/cli/features/endpoints.py +1019 -0
  112. praisonai/cli/features/eval.py +560 -0
  113. praisonai/cli/features/external_agents.py +231 -0
  114. praisonai/cli/features/fast_context.py +410 -0
  115. praisonai/cli/features/flow_display.py +566 -0
  116. praisonai/cli/features/git_integration.py +651 -0
  117. praisonai/cli/features/guardrail.py +171 -0
  118. praisonai/cli/features/handoff.py +185 -0
  119. praisonai/cli/features/hooks.py +583 -0
  120. praisonai/cli/features/image.py +384 -0
  121. praisonai/cli/features/interactive_runtime.py +585 -0
  122. praisonai/cli/features/interactive_tools.py +380 -0
  123. praisonai/cli/features/interactive_tui.py +603 -0
  124. praisonai/cli/features/jobs.py +632 -0
  125. praisonai/cli/features/knowledge.py +531 -0
  126. praisonai/cli/features/lite.py +244 -0
  127. praisonai/cli/features/lsp_cli.py +225 -0
  128. praisonai/cli/features/mcp.py +169 -0
  129. praisonai/cli/features/message_queue.py +587 -0
  130. praisonai/cli/features/metrics.py +211 -0
  131. praisonai/cli/features/n8n.py +673 -0
  132. praisonai/cli/features/observability.py +293 -0
  133. praisonai/cli/features/ollama.py +361 -0
  134. praisonai/cli/features/output_style.py +273 -0
  135. praisonai/cli/features/package.py +631 -0
  136. praisonai/cli/features/performance.py +308 -0
  137. praisonai/cli/features/persistence.py +636 -0
  138. praisonai/cli/features/profile.py +226 -0
  139. praisonai/cli/features/profiler/__init__.py +81 -0
  140. praisonai/cli/features/profiler/core.py +558 -0
  141. praisonai/cli/features/profiler/optimizations.py +652 -0
  142. praisonai/cli/features/profiler/suite.py +386 -0
  143. praisonai/cli/features/profiling.py +350 -0
  144. praisonai/cli/features/queue/__init__.py +73 -0
  145. praisonai/cli/features/queue/manager.py +395 -0
  146. praisonai/cli/features/queue/models.py +286 -0
  147. praisonai/cli/features/queue/persistence.py +564 -0
  148. praisonai/cli/features/queue/scheduler.py +484 -0
  149. praisonai/cli/features/queue/worker.py +372 -0
  150. praisonai/cli/features/recipe.py +1723 -0
  151. praisonai/cli/features/recipes.py +449 -0
  152. praisonai/cli/features/registry.py +229 -0
  153. praisonai/cli/features/repo_map.py +860 -0
  154. praisonai/cli/features/router.py +466 -0
  155. praisonai/cli/features/sandbox_executor.py +515 -0
  156. praisonai/cli/features/serve.py +829 -0
  157. praisonai/cli/features/session.py +222 -0
  158. praisonai/cli/features/skills.py +856 -0
  159. praisonai/cli/features/slash_commands.py +650 -0
  160. praisonai/cli/features/telemetry.py +179 -0
  161. praisonai/cli/features/templates.py +1384 -0
  162. praisonai/cli/features/thinking.py +305 -0
  163. praisonai/cli/features/todo.py +334 -0
  164. praisonai/cli/features/tools.py +680 -0
  165. praisonai/cli/features/tui/__init__.py +83 -0
  166. praisonai/cli/features/tui/app.py +580 -0
  167. praisonai/cli/features/tui/cli.py +566 -0
  168. praisonai/cli/features/tui/debug.py +511 -0
  169. praisonai/cli/features/tui/events.py +99 -0
  170. praisonai/cli/features/tui/mock_provider.py +328 -0
  171. praisonai/cli/features/tui/orchestrator.py +652 -0
  172. praisonai/cli/features/tui/screens/__init__.py +50 -0
  173. praisonai/cli/features/tui/screens/main.py +245 -0
  174. praisonai/cli/features/tui/screens/queue.py +174 -0
  175. praisonai/cli/features/tui/screens/session.py +124 -0
  176. praisonai/cli/features/tui/screens/settings.py +148 -0
  177. praisonai/cli/features/tui/widgets/__init__.py +56 -0
  178. praisonai/cli/features/tui/widgets/chat.py +261 -0
  179. praisonai/cli/features/tui/widgets/composer.py +224 -0
  180. praisonai/cli/features/tui/widgets/queue_panel.py +200 -0
  181. praisonai/cli/features/tui/widgets/status.py +167 -0
  182. praisonai/cli/features/tui/widgets/tool_panel.py +248 -0
  183. praisonai/cli/features/workflow.py +720 -0
  184. praisonai/cli/legacy.py +236 -0
  185. praisonai/cli/main.py +5559 -0
  186. praisonai/cli/schedule_cli.py +54 -0
  187. praisonai/cli/state/__init__.py +31 -0
  188. praisonai/cli/state/identifiers.py +161 -0
  189. praisonai/cli/state/sessions.py +313 -0
  190. praisonai/code/__init__.py +93 -0
  191. praisonai/code/agent_tools.py +344 -0
  192. praisonai/code/diff/__init__.py +21 -0
  193. praisonai/code/diff/diff_strategy.py +432 -0
  194. praisonai/code/tools/__init__.py +27 -0
  195. praisonai/code/tools/apply_diff.py +221 -0
  196. praisonai/code/tools/execute_command.py +275 -0
  197. praisonai/code/tools/list_files.py +274 -0
  198. praisonai/code/tools/read_file.py +206 -0
  199. praisonai/code/tools/search_replace.py +248 -0
  200. praisonai/code/tools/write_file.py +217 -0
  201. praisonai/code/utils/__init__.py +46 -0
  202. praisonai/code/utils/file_utils.py +307 -0
  203. praisonai/code/utils/ignore_utils.py +308 -0
  204. praisonai/code/utils/text_utils.py +276 -0
  205. praisonai/db/__init__.py +64 -0
  206. praisonai/db/adapter.py +531 -0
  207. praisonai/deploy/__init__.py +62 -0
  208. praisonai/deploy/api.py +231 -0
  209. praisonai/deploy/docker.py +454 -0
  210. praisonai/deploy/doctor.py +367 -0
  211. praisonai/deploy/main.py +327 -0
  212. praisonai/deploy/models.py +179 -0
  213. praisonai/deploy/providers/__init__.py +33 -0
  214. praisonai/deploy/providers/aws.py +331 -0
  215. praisonai/deploy/providers/azure.py +358 -0
  216. praisonai/deploy/providers/base.py +101 -0
  217. praisonai/deploy/providers/gcp.py +314 -0
  218. praisonai/deploy/schema.py +208 -0
  219. praisonai/deploy.py +185 -0
  220. praisonai/endpoints/__init__.py +53 -0
  221. praisonai/endpoints/a2u_server.py +410 -0
  222. praisonai/endpoints/discovery.py +165 -0
  223. praisonai/endpoints/providers/__init__.py +28 -0
  224. praisonai/endpoints/providers/a2a.py +253 -0
  225. praisonai/endpoints/providers/a2u.py +208 -0
  226. praisonai/endpoints/providers/agents_api.py +171 -0
  227. praisonai/endpoints/providers/base.py +231 -0
  228. praisonai/endpoints/providers/mcp.py +263 -0
  229. praisonai/endpoints/providers/recipe.py +206 -0
  230. praisonai/endpoints/providers/tools_mcp.py +150 -0
  231. praisonai/endpoints/registry.py +131 -0
  232. praisonai/endpoints/server.py +161 -0
  233. praisonai/inbuilt_tools/__init__.py +24 -0
  234. praisonai/inbuilt_tools/autogen_tools.py +117 -0
  235. praisonai/inc/__init__.py +2 -0
  236. praisonai/inc/config.py +96 -0
  237. praisonai/inc/models.py +155 -0
  238. praisonai/integrations/__init__.py +56 -0
  239. praisonai/integrations/base.py +303 -0
  240. praisonai/integrations/claude_code.py +270 -0
  241. praisonai/integrations/codex_cli.py +255 -0
  242. praisonai/integrations/cursor_cli.py +195 -0
  243. praisonai/integrations/gemini_cli.py +222 -0
  244. praisonai/jobs/__init__.py +67 -0
  245. praisonai/jobs/executor.py +425 -0
  246. praisonai/jobs/models.py +230 -0
  247. praisonai/jobs/router.py +314 -0
  248. praisonai/jobs/server.py +186 -0
  249. praisonai/jobs/store.py +203 -0
  250. praisonai/llm/__init__.py +66 -0
  251. praisonai/llm/registry.py +382 -0
  252. praisonai/mcp_server/__init__.py +152 -0
  253. praisonai/mcp_server/adapters/__init__.py +74 -0
  254. praisonai/mcp_server/adapters/agents.py +128 -0
  255. praisonai/mcp_server/adapters/capabilities.py +168 -0
  256. praisonai/mcp_server/adapters/cli_tools.py +568 -0
  257. praisonai/mcp_server/adapters/extended_capabilities.py +462 -0
  258. praisonai/mcp_server/adapters/knowledge.py +93 -0
  259. praisonai/mcp_server/adapters/memory.py +104 -0
  260. praisonai/mcp_server/adapters/prompts.py +306 -0
  261. praisonai/mcp_server/adapters/resources.py +124 -0
  262. praisonai/mcp_server/adapters/tools_bridge.py +280 -0
  263. praisonai/mcp_server/auth/__init__.py +48 -0
  264. praisonai/mcp_server/auth/api_key.py +291 -0
  265. praisonai/mcp_server/auth/oauth.py +460 -0
  266. praisonai/mcp_server/auth/oidc.py +289 -0
  267. praisonai/mcp_server/auth/scopes.py +260 -0
  268. praisonai/mcp_server/cli.py +852 -0
  269. praisonai/mcp_server/elicitation.py +445 -0
  270. praisonai/mcp_server/icons.py +302 -0
  271. praisonai/mcp_server/recipe_adapter.py +573 -0
  272. praisonai/mcp_server/recipe_cli.py +824 -0
  273. praisonai/mcp_server/registry.py +703 -0
  274. praisonai/mcp_server/sampling.py +422 -0
  275. praisonai/mcp_server/server.py +490 -0
  276. praisonai/mcp_server/tasks.py +443 -0
  277. praisonai/mcp_server/transports/__init__.py +18 -0
  278. praisonai/mcp_server/transports/http_stream.py +376 -0
  279. praisonai/mcp_server/transports/stdio.py +132 -0
  280. praisonai/persistence/__init__.py +84 -0
  281. praisonai/persistence/config.py +238 -0
  282. praisonai/persistence/conversation/__init__.py +25 -0
  283. praisonai/persistence/conversation/async_mysql.py +427 -0
  284. praisonai/persistence/conversation/async_postgres.py +410 -0
  285. praisonai/persistence/conversation/async_sqlite.py +371 -0
  286. praisonai/persistence/conversation/base.py +151 -0
  287. praisonai/persistence/conversation/json_store.py +250 -0
  288. praisonai/persistence/conversation/mysql.py +387 -0
  289. praisonai/persistence/conversation/postgres.py +401 -0
  290. praisonai/persistence/conversation/singlestore.py +240 -0
  291. praisonai/persistence/conversation/sqlite.py +341 -0
  292. praisonai/persistence/conversation/supabase.py +203 -0
  293. praisonai/persistence/conversation/surrealdb.py +287 -0
  294. praisonai/persistence/factory.py +301 -0
  295. praisonai/persistence/hooks/__init__.py +18 -0
  296. praisonai/persistence/hooks/agent_hooks.py +297 -0
  297. praisonai/persistence/knowledge/__init__.py +26 -0
  298. praisonai/persistence/knowledge/base.py +144 -0
  299. praisonai/persistence/knowledge/cassandra.py +232 -0
  300. praisonai/persistence/knowledge/chroma.py +295 -0
  301. praisonai/persistence/knowledge/clickhouse.py +242 -0
  302. praisonai/persistence/knowledge/cosmosdb_vector.py +438 -0
  303. praisonai/persistence/knowledge/couchbase.py +286 -0
  304. praisonai/persistence/knowledge/lancedb.py +216 -0
  305. praisonai/persistence/knowledge/langchain_adapter.py +291 -0
  306. praisonai/persistence/knowledge/lightrag_adapter.py +212 -0
  307. praisonai/persistence/knowledge/llamaindex_adapter.py +256 -0
  308. praisonai/persistence/knowledge/milvus.py +277 -0
  309. praisonai/persistence/knowledge/mongodb_vector.py +306 -0
  310. praisonai/persistence/knowledge/pgvector.py +335 -0
  311. praisonai/persistence/knowledge/pinecone.py +253 -0
  312. praisonai/persistence/knowledge/qdrant.py +301 -0
  313. praisonai/persistence/knowledge/redis_vector.py +291 -0
  314. praisonai/persistence/knowledge/singlestore_vector.py +299 -0
  315. praisonai/persistence/knowledge/surrealdb_vector.py +309 -0
  316. praisonai/persistence/knowledge/upstash_vector.py +266 -0
  317. praisonai/persistence/knowledge/weaviate.py +223 -0
  318. praisonai/persistence/migrations/__init__.py +10 -0
  319. praisonai/persistence/migrations/manager.py +251 -0
  320. praisonai/persistence/orchestrator.py +406 -0
  321. praisonai/persistence/state/__init__.py +21 -0
  322. praisonai/persistence/state/async_mongodb.py +200 -0
  323. praisonai/persistence/state/base.py +107 -0
  324. praisonai/persistence/state/dynamodb.py +226 -0
  325. praisonai/persistence/state/firestore.py +175 -0
  326. praisonai/persistence/state/gcs.py +155 -0
  327. praisonai/persistence/state/memory.py +245 -0
  328. praisonai/persistence/state/mongodb.py +158 -0
  329. praisonai/persistence/state/redis.py +190 -0
  330. praisonai/persistence/state/upstash.py +144 -0
  331. praisonai/persistence/tests/__init__.py +3 -0
  332. praisonai/persistence/tests/test_all_backends.py +633 -0
  333. praisonai/profiler.py +1214 -0
  334. praisonai/recipe/__init__.py +134 -0
  335. praisonai/recipe/bridge.py +278 -0
  336. praisonai/recipe/core.py +893 -0
  337. praisonai/recipe/exceptions.py +54 -0
  338. praisonai/recipe/history.py +402 -0
  339. praisonai/recipe/models.py +266 -0
  340. praisonai/recipe/operations.py +440 -0
  341. praisonai/recipe/policy.py +422 -0
  342. praisonai/recipe/registry.py +849 -0
  343. praisonai/recipe/runtime.py +214 -0
  344. praisonai/recipe/security.py +711 -0
  345. praisonai/recipe/serve.py +859 -0
  346. praisonai/recipe/server.py +613 -0
  347. praisonai/scheduler/__init__.py +45 -0
  348. praisonai/scheduler/agent_scheduler.py +552 -0
  349. praisonai/scheduler/base.py +124 -0
  350. praisonai/scheduler/daemon_manager.py +225 -0
  351. praisonai/scheduler/state_manager.py +155 -0
  352. praisonai/scheduler/yaml_loader.py +193 -0
  353. praisonai/scheduler.py +194 -0
  354. praisonai/setup/__init__.py +1 -0
  355. praisonai/setup/build.py +21 -0
  356. praisonai/setup/post_install.py +23 -0
  357. praisonai/setup/setup_conda_env.py +25 -0
  358. praisonai/setup.py +16 -0
  359. praisonai/templates/__init__.py +116 -0
  360. praisonai/templates/cache.py +364 -0
  361. praisonai/templates/dependency_checker.py +358 -0
  362. praisonai/templates/discovery.py +391 -0
  363. praisonai/templates/loader.py +564 -0
  364. praisonai/templates/registry.py +511 -0
  365. praisonai/templates/resolver.py +206 -0
  366. praisonai/templates/security.py +327 -0
  367. praisonai/templates/tool_override.py +498 -0
  368. praisonai/templates/tools_doctor.py +256 -0
  369. praisonai/test.py +105 -0
  370. praisonai/train.py +562 -0
  371. praisonai/train_vision.py +306 -0
  372. praisonai/ui/agents.py +824 -0
  373. praisonai/ui/callbacks.py +57 -0
  374. praisonai/ui/chainlit_compat.py +246 -0
  375. praisonai/ui/chat.py +532 -0
  376. praisonai/ui/code.py +717 -0
  377. praisonai/ui/colab.py +474 -0
  378. praisonai/ui/colab_chainlit.py +81 -0
  379. praisonai/ui/components/aicoder.py +284 -0
  380. praisonai/ui/context.py +283 -0
  381. praisonai/ui/database_config.py +56 -0
  382. praisonai/ui/db.py +294 -0
  383. praisonai/ui/realtime.py +488 -0
  384. praisonai/ui/realtimeclient/__init__.py +756 -0
  385. praisonai/ui/realtimeclient/tools.py +242 -0
  386. praisonai/ui/sql_alchemy.py +710 -0
  387. praisonai/upload_vision.py +140 -0
  388. praisonai/version.py +1 -0
  389. praisonai-3.0.0.dist-info/METADATA +3493 -0
  390. praisonai-3.0.0.dist-info/RECORD +393 -0
  391. praisonai-3.0.0.dist-info/WHEEL +5 -0
  392. praisonai-3.0.0.dist-info/entry_points.txt +4 -0
  393. praisonai-3.0.0.dist-info/top_level.txt +1 -0
praisonai/cli/features/observability.py
@@ -0,0 +1,293 @@
+ """
+ Observability CLI Handler
+
+ CLI commands for managing observability providers.
+ Usage: praisonai obs <command>
+ """
+
+ import os
+ from typing import Any, Dict, List, Optional, Tuple
+
+ from .base import FlagHandler
+
+
+ class ObservabilityHandler(FlagHandler):
+     """
+     Handler for observability CLI commands.
+
+     Commands:
+         praisonai obs list - List available providers
+         praisonai obs doctor - Check provider connectivity
+         praisonai obs init <name> - Initialize a provider
+     """
+
+     @property
+     def feature_name(self) -> str:
+         return "observability"
+
+     @property
+     def flag_name(self) -> str:
+         return "obs"
+
+     @property
+     def flag_help(self) -> str:
+         return "Observability commands (list, doctor, init)"
+
+     def check_dependencies(self) -> Tuple[bool, str]:
+         """Check if observability module is available."""
+         try:
+             from praisonai_tools.observability import obs
+             return True, ""
+         except ImportError:
+             return False, "Observability requires praisonai-tools. Install with: pip install praisonai-tools"
+
+     def list_providers(self) -> Dict[str, Any]:
+         """List all available observability providers."""
+         result = {
+             "registered": [],
+             "available": [],
+             "configured": [],
+         }
+
+         try:
+             from praisonai_tools.observability.manager import ObservabilityManager
+             from praisonai_tools.observability.config import PROVIDER_ENV_KEYS
+
+             mgr = ObservabilityManager()
+
+             # Get all registered providers
+             result["registered"] = mgr.list_providers()
+
+             # Check which have env vars configured
+             for provider, keys in PROVIDER_ENV_KEYS.items():
+                 if all(os.getenv(key) for key in keys):
+                     result["configured"].append(provider)
+
+             # Check which SDKs are available
+             for provider in result["registered"]:
+                 try:
+                     mgr._load_provider(provider)
+                     if provider in mgr._providers:
+                         provider_class = mgr._providers[provider]
+                         instance = provider_class()
+                         if instance.is_available():
+                             result["available"].append(provider)
+                 except Exception:
+                     pass
+
+         except ImportError:
+             pass
+
+         return result
+
+     def doctor(self, provider: Optional[str] = None) -> Dict[str, Any]:
+         """Run diagnostics on observability setup."""
+         try:
+             from praisonai_tools.observability import obs
+
+             if provider:
+                 obs.init(provider=provider)
+
+             return obs.doctor()
+
+         except ImportError:
+             return {
+                 "error": "praisonai-tools not installed",
+                 "enabled": False,
+             }
+
+     def init_provider(self, provider: str, **kwargs) -> bool:
+         """Initialize a specific provider."""
+         try:
+             from praisonai_tools.observability import obs
+             return obs.init(provider=provider, **kwargs)
+         except ImportError:
+             return False
+
+     def execute(self, action: str = "list", **kwargs) -> Any:
+         """
+         Execute observability command.
+
+         Args:
+             action: Command to execute (list, doctor, init)
+             **kwargs: Additional arguments
+
+         Returns:
+             Command result
+         """
+         if action == "list":
+             return self._execute_list()
+         elif action == "doctor":
+             return self._execute_doctor(kwargs.get("provider"))
+         elif action == "init":
+             return self._execute_init(kwargs.get("provider"), **kwargs)
+         else:
+             self.print_status(f"Unknown action: {action}", "error")
+             return None
+
+     def _execute_list(self) -> Dict[str, Any]:
+         """Execute list command."""
+         result = self.list_providers()
+
+         self.print_status("\n📊 Observability Providers", "info")
+         self.print_status("=" * 40, "info")
+
+         # Provider status table
+         all_providers = [
+             "agentops", "langfuse", "langsmith", "traceloop",
+             "arize_phoenix", "openlit", "langtrace", "langwatch",
+             "datadog", "mlflow", "opik", "portkey",
+             "braintrust", "maxim", "weave", "neatlogs",
+             "langdb", "atla", "patronus", "truefoundry",
+         ]
+
+         for provider in all_providers:
+             status_parts = []
+             if provider in result.get("available", []):
+                 status_parts.append("✅ SDK")
+             else:
+                 status_parts.append("❌ SDK")
+
+             if provider in result.get("configured", []):
+                 status_parts.append("🔑 Keys")
+             else:
+                 status_parts.append("⚪ Keys")
+
+             status = " | ".join(status_parts)
+             self.print_status(f" {provider:20} {status}", "info")
+
+         self.print_status("\n✅ = Available ❌ = Not installed 🔑 = API key configured", "info")
+
+         return result
+
+     def _execute_doctor(self, provider: Optional[str] = None) -> Dict[str, Any]:
+         """Execute doctor command."""
+         result = self.doctor(provider)
+
+         self.print_status("\n🩺 Observability Diagnostics", "info")
+         self.print_status("=" * 40, "info")
+
+         self.print_status(f" Enabled: {result.get('enabled', False)}", "info")
+         self.print_status(f" Provider: {result.get('provider', 'None')}", "info")
+
+         if result.get("connection_status") is not None:
+             status = "✅" if result["connection_status"] else "❌"
+             self.print_status(f" Connection: {status} {result.get('connection_message', '')}", "info")
+
+         if result.get("available_providers"):
+             self.print_status(f" Available: {', '.join(result['available_providers'])}", "info")
+
+         return result
+
+     def _execute_init(self, provider: str, **kwargs) -> bool:
+         """Execute init command."""
+         if not provider:
+             self.print_status("Provider name required. Usage: praisonai obs init <provider>", "error")
+             return False
+
+         success = self.init_provider(provider, **kwargs)
+
+         if success:
+             self.print_status(f"✅ {provider} initialized successfully", "success")
+         else:
+             self.print_status(f"❌ Failed to initialize {provider}", "error")
+             self.print_status(f" Check that {provider} SDK is installed and API keys are set", "info")
+
+         return success
+
+     def apply_to_agent_config(self, config: Dict[str, Any], flag_value: Any) -> Dict[str, Any]:
+         """Apply observability to agent config."""
+         if flag_value:
+             try:
+                 from praisonai_tools.observability import obs
+
+                 if isinstance(flag_value, str):
+                     obs.init(provider=flag_value)
+                 else:
+                     obs.init()
+
+                 self.print_status("📊 Observability enabled", "success")
+             except ImportError:
+                 self.print_status("Observability requires praisonai-tools", "warning")
+
+         return config
+
+
+ # Provider environment variable reference
+ PROVIDER_SETUP_GUIDE = {
+     "agentops": {
+         "env_vars": ["AGENTOPS_API_KEY"],
+         "install": "pip install agentops",
+         "docs": "https://agentops.ai/",
+     },
+     "langfuse": {
+         "env_vars": ["LANGFUSE_PUBLIC_KEY", "LANGFUSE_SECRET_KEY"],
+         "install": "pip install opentelemetry-sdk opentelemetry-exporter-otlp",
+         "docs": "https://langfuse.com/",
+     },
+     "langsmith": {
+         "env_vars": ["LANGSMITH_API_KEY"],
+         "install": "pip install opentelemetry-sdk opentelemetry-exporter-otlp",
+         "docs": "https://smith.langchain.com/",
+     },
+     "traceloop": {
+         "env_vars": ["TRACELOOP_API_KEY"],
+         "install": "pip install traceloop-sdk",
+         "docs": "https://traceloop.com/",
+     },
+     "arize_phoenix": {
+         "env_vars": ["PHOENIX_API_KEY"],
+         "install": "pip install arize-phoenix openinference-instrumentation",
+         "docs": "https://phoenix.arize.com/",
+     },
+     "openlit": {
+         "env_vars": [],
+         "install": "pip install openlit",
+         "docs": "https://github.com/openlit/openlit",
+     },
+     "langtrace": {
+         "env_vars": ["LANGTRACE_API_KEY"],
+         "install": "pip install langtrace-python-sdk",
+         "docs": "https://langtrace.ai/",
+     },
+     "langwatch": {
+         "env_vars": ["LANGWATCH_API_KEY"],
+         "install": "pip install langwatch",
+         "docs": "https://langwatch.ai/",
+     },
+     "datadog": {
+         "env_vars": ["DD_API_KEY"],
+         "install": "pip install ddtrace",
+         "docs": "https://www.datadoghq.com/product/llm-observability/",
+     },
+     "mlflow": {
+         "env_vars": ["MLFLOW_TRACKING_URI"],
+         "install": "pip install mlflow",
+         "docs": "https://mlflow.org/",
+     },
+     "opik": {
+         "env_vars": ["OPIK_API_KEY"],
+         "install": "pip install opik",
+         "docs": "https://www.comet.com/docs/opik/",
+     },
+     "portkey": {
+         "env_vars": ["PORTKEY_API_KEY"],
+         "install": "pip install portkey-ai",
+         "docs": "https://portkey.ai/",
+     },
+     "braintrust": {
+         "env_vars": ["BRAINTRUST_API_KEY"],
+         "install": "pip install braintrust",
+         "docs": "https://www.braintrust.dev/",
+     },
+     "maxim": {
+         "env_vars": ["MAXIM_API_KEY"],
+         "install": "pip install maxim-py",
+         "docs": "https://getmaxim.ai/",
+     },
+     "weave": {
+         "env_vars": ["WANDB_API_KEY"],
+         "install": "pip install weave",
+         "docs": "https://weave-docs.wandb.ai/",
+     },
+ }
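
Usage note (not part of the published file): the handler above is what backs `praisonai obs list`, `praisonai obs doctor`, and `praisonai obs init`. Below is a minimal sketch of driving it programmatically, assuming the FlagHandler base class (praisonai/cli/features/base.py, not shown in this diff) can be constructed without arguments; provider API keys are read from the environment variables listed in PROVIDER_SETUP_GUIDE.

    from praisonai.cli.features.observability import ObservabilityHandler, PROVIDER_SETUP_GUIDE

    # e.g. Langfuse expects LANGFUSE_PUBLIC_KEY and LANGFUSE_SECRET_KEY to be exported
    print(PROVIDER_SETUP_GUIDE["langfuse"]["env_vars"])

    handler = ObservabilityHandler()        # assumption: base class needs no constructor args

    ok, msg = handler.check_dependencies()  # verifies praisonai-tools is importable
    if not ok:
        print(msg)
    else:
        handler.execute("list")                          # same as: praisonai obs list
        handler.execute("doctor", provider="langfuse")   # same as: praisonai obs doctor
        handler.init_provider("langfuse")                # same as: praisonai obs init langfuse
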
praisonai/cli/features/ollama.py
@@ -0,0 +1,361 @@
+ """
+ Ollama CLI Handler
+
+ CLI support for Ollama provider and Weak-Model-Proof execution.
+ """
+
+ import logging
+ import os
+ from typing import Any, Dict, List, Optional
+
+ logger = logging.getLogger(__name__)
+
+
+ class OllamaHandler:
+     """
+     Handler for Ollama CLI commands.
+
+     Supports:
+     - --provider ollama
+     - --weak-model-proof / --wmp
+     - --ollama-host
+     - --ollama-model
+     """
+
+     def __init__(
+         self,
+         model: str = "llama3.2:3b",
+         host: Optional[str] = None,
+         weak_model_proof: bool = True,
+         verbose: bool = False,
+         **kwargs
+     ):
+         """
+         Initialize Ollama handler.
+
+         Args:
+             model: Ollama model name
+             host: Ollama server host
+             weak_model_proof: Enable WMP
+             verbose: Verbose output
+             **kwargs: Additional options
+         """
+         self.model = model
+         self.host = host or os.environ.get("OLLAMA_HOST", "http://localhost:11434")
+         self.weak_model_proof = weak_model_proof
+         self.verbose = verbose
+         self.extra_kwargs = kwargs
+
+         self._provider = None
+         self._wmp_hooks = None
+
+     @property
+     def provider(self):
+         """Lazy-load Ollama provider."""
+         if self._provider is None:
+             try:
+                 from praisonai.providers import OllamaProvider
+                 self._provider = OllamaProvider(
+                     model=self.model,
+                     host=self.host,
+                     weak_model_proof=self.weak_model_proof,
+                     **self.extra_kwargs
+                 )
+             except ImportError as e:
+                 logger.error(f"Failed to import OllamaProvider: {e}")
+                 raise
+         return self._provider
+
+     @property
+     def wmp_hooks(self):
+         """Lazy-load WMP hooks."""
+         if self._wmp_hooks is None and self.weak_model_proof:
+             try:
+                 from praisonai.wmp import WMPHooks, WeakModelProofConfig
+
+                 # Auto-configure based on model
+                 config = WeakModelProofConfig.for_ollama(
+                     self._detect_model_size()
+                 )
+
+                 self._wmp_hooks = WMPHooks(
+                     config=config,
+                     llm_fn=self._llm_fn,
+                 )
+             except ImportError as e:
+                 logger.warning(f"WMP not available: {e}")
+                 self._wmp_hooks = None
+         return self._wmp_hooks
+
+     def _detect_model_size(self) -> str:
+         """Detect model size from name."""
+         model_lower = self.model.lower()
+
+         if any(x in model_lower for x in ["70b", "72b", "65b"]):
+             return "large"
+         elif any(x in model_lower for x in ["13b", "14b", "7b", "8b"]):
+             return "medium"
+         else:
+             return "small"
+
+     def _llm_fn(self, prompt: str) -> str:
+         """LLM function for WMP."""
+         response = self.provider.chat(prompt)
+         return response.content
+
+     def chat(
+         self,
+         message: str,
+         system: Optional[str] = None,
+         stream: bool = False,
+     ) -> str:
+         """
+         Send a chat message.
+
+         Args:
+             message: User message
+             system: System prompt
+             stream: Enable streaming
+
+         Returns:
+             Response text
+         """
+         if self.weak_model_proof and self.wmp_hooks:
+             # Use WMP execution
+             self.wmp_hooks.on_task_start(message)
+
+             messages = [{"role": "user", "content": message}]
+             if system:
+                 messages.insert(0, {"role": "system", "content": system})
+
+             enhanced_messages = self.wmp_hooks.on_before_llm_call(messages)
+
+             # Call Ollama
+             response = self.provider.chat(
+                 messages=enhanced_messages,
+                 stream=stream,
+             )
+
+             if stream:
+                 # Collect streamed response
+                 full_response = ""
+                 for chunk in response:
+                     full_response += chunk
+                     if self.verbose:
+                         print(chunk, end="", flush=True)
+                 if self.verbose:
+                     print()
+                 response_text = full_response
+             else:
+                 response_text = response.content
+
+             # Post-process with WMP
+             response_text = self.wmp_hooks.on_after_llm_call(response_text)
+
+             return response_text
+         else:
+             # Direct Ollama call
+             response = self.provider.chat(
+                 messages=message,
+                 system=system,
+                 stream=stream,
+             )
+
+             if stream:
+                 full_response = ""
+                 for chunk in response:
+                     full_response += chunk
+                     if self.verbose:
+                         print(chunk, end="", flush=True)
+                 if self.verbose:
+                     print()
+                 return full_response
+             else:
+                 return response.content
+
+     def execute(
+         self,
+         task: str,
+         tools: Optional[List] = None,
+     ) -> Dict[str, Any]:
+         """
+         Execute a task with WMP.
+
+         Args:
+             task: Task description
+             tools: Available tools
+
+         Returns:
+             Execution result
+         """
+         if self.weak_model_proof:
+             try:
+                 from praisonai.wmp import WMPExecutor, WeakModelProofConfig
+
+                 config = WeakModelProofConfig.for_ollama(
+                     self._detect_model_size()
+                 )
+
+                 executor = WMPExecutor(config=config)
+                 result = executor.execute(
+                     task=task,
+                     llm_fn=self._llm_fn,
+                     tools=tools,
+                 )
+
+                 return result.to_dict()
+
+             except ImportError:
+                 logger.warning("WMP not available, falling back to direct execution")
+
+         # Direct execution
+         response = self.chat(task)
+         return {
+             "success": True,
+             "result": response,
+             "wmp_enabled": False,
+         }
+
+     def list_models(self) -> List[Dict[str, Any]]:
+         """List available Ollama models."""
+         return self.provider.list_models()
+
+     def pull_model(self, model: str) -> None:
+         """Pull a model from Ollama registry."""
+         if self.verbose:
+             print(f"Pulling model: {model}")
+
+         for progress in self.provider.pull_model(model, stream=True):
+             if self.verbose and hasattr(progress, 'status'):
+                 print(f" {progress.status}", end="\r")
+
+         if self.verbose:
+             print(f"\nModel {model} pulled successfully")
+
+     def is_available(self) -> bool:
+         """Check if Ollama is available."""
+         return self.provider.is_available()
+
+
+ def handle_ollama_command(args, unknown_args: List[str] = None) -> int:
+     """
+     Handle Ollama CLI commands.
+
+     Args:
+         args: Parsed arguments
+         unknown_args: Unknown arguments
+
+     Returns:
+         Exit code
+     """
+     from rich.console import Console
+     console = Console()
+
+     # Get options from args
+     model = getattr(args, 'ollama_model', None) or getattr(args, 'model', 'llama3.2:3b')
+     host = getattr(args, 'ollama_host', None)
+     wmp = getattr(args, 'weak_model_proof', True)
+     verbose = getattr(args, 'verbose', False)
+
+     try:
+         handler = OllamaHandler(
+             model=model,
+             host=host,
+             weak_model_proof=wmp,
+             verbose=verbose,
+         )
+
+         # Check availability
+         if not handler.is_available():
+             console.print("[red]Error: Ollama server not available[/red]")
+             console.print(f"Make sure Ollama is running at {handler.host}")
+             return 1
+
+         # Get command/prompt
+         prompt = getattr(args, 'command', None) or getattr(args, 'direct_prompt', None)
+
+         if not prompt:
+             console.print("[yellow]No prompt provided[/yellow]")
+             return 1
+
+         # Execute
+         if wmp:
+             console.print(f"[dim]Using Ollama with Weak-Model-Proof ({model})[/dim]")
+         else:
+             console.print(f"[dim]Using Ollama ({model})[/dim]")
+
+         result = handler.execute(prompt)
+
+         if result.get("success"):
+             console.print(result.get("result", ""))
+
+             if verbose and result.get("wmp_enabled"):
+                 console.print(f"\n[dim]Steps: {result.get('steps_executed', 0)}, "
+                               f"Retries: {result.get('retries', 0)}[/dim]")
+         else:
+             console.print(f"[red]Error: {result.get('error', 'Unknown error')}[/red]")
+             return 1
+
+         return 0
+
+     except Exception as e:
+         console.print(f"[red]Error: {e}[/red]")
+         if verbose:
+             import traceback
+             console.print(traceback.format_exc())
+         return 1
+
+
+ def add_ollama_arguments(parser) -> None:
+     """
+     Add Ollama-related arguments to parser.
+
+     Args:
+         parser: ArgumentParser instance
+     """
+     parser.add_argument(
+         "--provider",
+         type=str,
+         choices=["ollama", "litellm", "openai"],
+         help="LLM provider to use"
+     )
+     parser.add_argument(
+         "--ollama-model",
+         type=str,
+         default="llama3.2:3b",
+         help="Ollama model name (e.g., llama3.2:3b, mistral, qwen2.5:7b)"
+     )
+     parser.add_argument(
+         "--ollama-host",
+         type=str,
+         help="Ollama server host (default: http://localhost:11434)"
+     )
+     parser.add_argument(
+         "--weak-model-proof", "--wmp",
+         action="store_true",
+         default=None,
+         dest="weak_model_proof",
+         help="Enable Weak-Model-Proof execution (default: auto for Ollama)"
+     )
+     parser.add_argument(
+         "--no-wmp",
+         action="store_false",
+         dest="weak_model_proof",
+         help="Disable Weak-Model-Proof execution"
+     )
+     parser.add_argument(
+         "--wmp-strict",
+         action="store_true",
+         help="Use strict WMP mode (more retries, stricter validation)"
+     )
+     parser.add_argument(
+         "--wmp-fast",
+         action="store_true",
+         help="Use fast WMP mode (fewer retries, faster execution)"
+     )
+     parser.add_argument(
+         "--step-budget",
+         type=int,
+         default=10,
+         help="Maximum steps for WMP execution (default: 10)"
+     )
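
Usage note (not part of the published file): a rough sketch of how `add_ollama_arguments` and `handle_ollama_command` could be wired together. The real hookup lives in praisonai/cli/main.py (not reproduced in this diff); the `command` and `--verbose` arguments below are illustrative assumptions, and whether the optional praisonai.providers and praisonai.wmp imports resolve depends on the installed extras.

    import argparse

    from praisonai.cli.features.ollama import add_ollama_arguments, handle_ollama_command

    parser = argparse.ArgumentParser(prog="praisonai")
    # handle_ollama_command() reads args.command (or args.direct_prompt) and args.verbose,
    # so this sketch defines them explicitly; the real CLI defines its own equivalents.
    parser.add_argument("command", nargs="?", help="Prompt or task to run")
    parser.add_argument("--verbose", action="store_true")
    add_ollama_arguments(parser)  # --provider, --ollama-model, --ollama-host, --wmp, --no-wmp, ...

    args = parser.parse_args([
        "Summarise the project README",
        "--provider", "ollama",
        "--ollama-model", "llama3.2:3b",
        "--wmp",
    ])
    exit_code = handle_ollama_command(args)  # 0 on success, 1 if Ollama is unreachable or the task fails
    print("exit code:", exit_code)
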