ai-parrot 0.17.2__cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agentui/.prettierrc +15 -0
- agentui/QUICKSTART.md +272 -0
- agentui/README.md +59 -0
- agentui/env.example +16 -0
- agentui/jsconfig.json +14 -0
- agentui/package-lock.json +4242 -0
- agentui/package.json +34 -0
- agentui/scripts/postinstall/apply-patches.mjs +260 -0
- agentui/src/app.css +61 -0
- agentui/src/app.d.ts +13 -0
- agentui/src/app.html +12 -0
- agentui/src/components/LoadingSpinner.svelte +64 -0
- agentui/src/components/ThemeSwitcher.svelte +159 -0
- agentui/src/components/index.js +4 -0
- agentui/src/lib/api/bots.ts +60 -0
- agentui/src/lib/api/chat.ts +22 -0
- agentui/src/lib/api/http.ts +25 -0
- agentui/src/lib/components/BotCard.svelte +33 -0
- agentui/src/lib/components/ChatBubble.svelte +63 -0
- agentui/src/lib/components/Toast.svelte +21 -0
- agentui/src/lib/config.ts +20 -0
- agentui/src/lib/stores/auth.svelte.ts +73 -0
- agentui/src/lib/stores/theme.svelte.js +64 -0
- agentui/src/lib/stores/toast.svelte.ts +31 -0
- agentui/src/lib/utils/conversation.ts +39 -0
- agentui/src/routes/+layout.svelte +20 -0
- agentui/src/routes/+page.svelte +232 -0
- agentui/src/routes/login/+page.svelte +200 -0
- agentui/src/routes/talk/[agentId]/+page.svelte +297 -0
- agentui/src/routes/talk/[agentId]/+page.ts +7 -0
- agentui/static/README.md +1 -0
- agentui/svelte.config.js +11 -0
- agentui/tailwind.config.ts +53 -0
- agentui/tsconfig.json +3 -0
- agentui/vite.config.ts +10 -0
- ai_parrot-0.17.2.dist-info/METADATA +472 -0
- ai_parrot-0.17.2.dist-info/RECORD +535 -0
- ai_parrot-0.17.2.dist-info/WHEEL +6 -0
- ai_parrot-0.17.2.dist-info/entry_points.txt +2 -0
- ai_parrot-0.17.2.dist-info/licenses/LICENSE +21 -0
- ai_parrot-0.17.2.dist-info/top_level.txt +6 -0
- crew-builder/.prettierrc +15 -0
- crew-builder/QUICKSTART.md +259 -0
- crew-builder/README.md +113 -0
- crew-builder/env.example +17 -0
- crew-builder/jsconfig.json +14 -0
- crew-builder/package-lock.json +4182 -0
- crew-builder/package.json +37 -0
- crew-builder/scripts/postinstall/apply-patches.mjs +260 -0
- crew-builder/src/app.css +62 -0
- crew-builder/src/app.d.ts +13 -0
- crew-builder/src/app.html +12 -0
- crew-builder/src/components/LoadingSpinner.svelte +64 -0
- crew-builder/src/components/ThemeSwitcher.svelte +149 -0
- crew-builder/src/components/index.js +9 -0
- crew-builder/src/lib/api/bots.ts +60 -0
- crew-builder/src/lib/api/chat.ts +80 -0
- crew-builder/src/lib/api/client.ts +56 -0
- crew-builder/src/lib/api/crew/crew.ts +136 -0
- crew-builder/src/lib/api/index.ts +5 -0
- crew-builder/src/lib/api/o365/auth.ts +65 -0
- crew-builder/src/lib/auth/auth.ts +54 -0
- crew-builder/src/lib/components/AgentNode.svelte +43 -0
- crew-builder/src/lib/components/BotCard.svelte +33 -0
- crew-builder/src/lib/components/ChatBubble.svelte +67 -0
- crew-builder/src/lib/components/ConfigPanel.svelte +278 -0
- crew-builder/src/lib/components/JsonTreeNode.svelte +76 -0
- crew-builder/src/lib/components/JsonViewer.svelte +24 -0
- crew-builder/src/lib/components/MarkdownEditor.svelte +48 -0
- crew-builder/src/lib/components/ThemeToggle.svelte +36 -0
- crew-builder/src/lib/components/Toast.svelte +67 -0
- crew-builder/src/lib/components/Toolbar.svelte +157 -0
- crew-builder/src/lib/components/index.ts +10 -0
- crew-builder/src/lib/config.ts +8 -0
- crew-builder/src/lib/stores/auth.svelte.ts +228 -0
- crew-builder/src/lib/stores/crewStore.ts +369 -0
- crew-builder/src/lib/stores/theme.svelte.js +145 -0
- crew-builder/src/lib/stores/toast.svelte.ts +69 -0
- crew-builder/src/lib/utils/conversation.ts +39 -0
- crew-builder/src/lib/utils/markdown.ts +122 -0
- crew-builder/src/lib/utils/talkHistory.ts +47 -0
- crew-builder/src/routes/+layout.svelte +20 -0
- crew-builder/src/routes/+page.svelte +539 -0
- crew-builder/src/routes/agents/+page.svelte +247 -0
- crew-builder/src/routes/agents/[agentId]/+page.svelte +288 -0
- crew-builder/src/routes/agents/[agentId]/+page.ts +7 -0
- crew-builder/src/routes/builder/+page.svelte +204 -0
- crew-builder/src/routes/crew/ask/+page.svelte +1052 -0
- crew-builder/src/routes/crew/ask/+page.ts +1 -0
- crew-builder/src/routes/integrations/o365/+page.svelte +304 -0
- crew-builder/src/routes/login/+page.svelte +197 -0
- crew-builder/src/routes/talk/[agentId]/+page.svelte +487 -0
- crew-builder/src/routes/talk/[agentId]/+page.ts +7 -0
- crew-builder/static/README.md +1 -0
- crew-builder/svelte.config.js +11 -0
- crew-builder/tailwind.config.ts +53 -0
- crew-builder/tsconfig.json +3 -0
- crew-builder/vite.config.ts +10 -0
- mcp_servers/calculator_server.py +309 -0
- parrot/__init__.py +27 -0
- parrot/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/__pycache__/version.cpython-310.pyc +0 -0
- parrot/_version.py +34 -0
- parrot/a2a/__init__.py +48 -0
- parrot/a2a/client.py +658 -0
- parrot/a2a/discovery.py +89 -0
- parrot/a2a/mixin.py +257 -0
- parrot/a2a/models.py +376 -0
- parrot/a2a/server.py +770 -0
- parrot/agents/__init__.py +29 -0
- parrot/bots/__init__.py +12 -0
- parrot/bots/a2a_agent.py +19 -0
- parrot/bots/abstract.py +3139 -0
- parrot/bots/agent.py +1129 -0
- parrot/bots/basic.py +9 -0
- parrot/bots/chatbot.py +669 -0
- parrot/bots/data.py +1618 -0
- parrot/bots/database/__init__.py +5 -0
- parrot/bots/database/abstract.py +3071 -0
- parrot/bots/database/cache.py +286 -0
- parrot/bots/database/models.py +468 -0
- parrot/bots/database/prompts.py +154 -0
- parrot/bots/database/retries.py +98 -0
- parrot/bots/database/router.py +269 -0
- parrot/bots/database/sql.py +41 -0
- parrot/bots/db/__init__.py +6 -0
- parrot/bots/db/abstract.py +556 -0
- parrot/bots/db/bigquery.py +602 -0
- parrot/bots/db/cache.py +85 -0
- parrot/bots/db/documentdb.py +668 -0
- parrot/bots/db/elastic.py +1014 -0
- parrot/bots/db/influx.py +898 -0
- parrot/bots/db/mock.py +96 -0
- parrot/bots/db/multi.py +783 -0
- parrot/bots/db/prompts.py +185 -0
- parrot/bots/db/sql.py +1255 -0
- parrot/bots/db/tools.py +212 -0
- parrot/bots/document.py +680 -0
- parrot/bots/hrbot.py +15 -0
- parrot/bots/kb.py +170 -0
- parrot/bots/mcp.py +36 -0
- parrot/bots/orchestration/README.md +463 -0
- parrot/bots/orchestration/__init__.py +1 -0
- parrot/bots/orchestration/agent.py +155 -0
- parrot/bots/orchestration/crew.py +3330 -0
- parrot/bots/orchestration/fsm.py +1179 -0
- parrot/bots/orchestration/hr.py +434 -0
- parrot/bots/orchestration/storage/__init__.py +4 -0
- parrot/bots/orchestration/storage/memory.py +100 -0
- parrot/bots/orchestration/storage/mixin.py +119 -0
- parrot/bots/orchestration/verify.py +202 -0
- parrot/bots/product.py +204 -0
- parrot/bots/prompts/__init__.py +96 -0
- parrot/bots/prompts/agents.py +155 -0
- parrot/bots/prompts/data.py +216 -0
- parrot/bots/prompts/output_generation.py +8 -0
- parrot/bots/scraper/__init__.py +3 -0
- parrot/bots/scraper/models.py +122 -0
- parrot/bots/scraper/scraper.py +1173 -0
- parrot/bots/scraper/templates.py +115 -0
- parrot/bots/stores/__init__.py +5 -0
- parrot/bots/stores/local.py +172 -0
- parrot/bots/webdev.py +81 -0
- parrot/cli.py +17 -0
- parrot/clients/__init__.py +16 -0
- parrot/clients/base.py +1491 -0
- parrot/clients/claude.py +1191 -0
- parrot/clients/factory.py +129 -0
- parrot/clients/google.py +4567 -0
- parrot/clients/gpt.py +1975 -0
- parrot/clients/grok.py +432 -0
- parrot/clients/groq.py +986 -0
- parrot/clients/hf.py +582 -0
- parrot/clients/models.py +18 -0
- parrot/conf.py +395 -0
- parrot/embeddings/__init__.py +9 -0
- parrot/embeddings/base.py +157 -0
- parrot/embeddings/google.py +98 -0
- parrot/embeddings/huggingface.py +74 -0
- parrot/embeddings/openai.py +84 -0
- parrot/embeddings/processor.py +88 -0
- parrot/exceptions.c +13868 -0
- parrot/exceptions.cpython-310-x86_64-linux-gnu.so +0 -0
- parrot/exceptions.pxd +22 -0
- parrot/exceptions.pxi +15 -0
- parrot/exceptions.pyx +44 -0
- parrot/generators/__init__.py +29 -0
- parrot/generators/base.py +200 -0
- parrot/generators/html.py +293 -0
- parrot/generators/react.py +205 -0
- parrot/generators/streamlit.py +203 -0
- parrot/generators/template.py +105 -0
- parrot/handlers/__init__.py +4 -0
- parrot/handlers/agent.py +861 -0
- parrot/handlers/agents/__init__.py +1 -0
- parrot/handlers/agents/abstract.py +900 -0
- parrot/handlers/bots.py +338 -0
- parrot/handlers/chat.py +915 -0
- parrot/handlers/creation.sql +192 -0
- parrot/handlers/crew/ARCHITECTURE.md +362 -0
- parrot/handlers/crew/README_BOTMANAGER_PERSISTENCE.md +303 -0
- parrot/handlers/crew/README_REDIS_PERSISTENCE.md +366 -0
- parrot/handlers/crew/__init__.py +0 -0
- parrot/handlers/crew/handler.py +801 -0
- parrot/handlers/crew/models.py +229 -0
- parrot/handlers/crew/redis_persistence.py +523 -0
- parrot/handlers/jobs/__init__.py +10 -0
- parrot/handlers/jobs/job.py +384 -0
- parrot/handlers/jobs/mixin.py +627 -0
- parrot/handlers/jobs/models.py +115 -0
- parrot/handlers/jobs/worker.py +31 -0
- parrot/handlers/models.py +596 -0
- parrot/handlers/o365_auth.py +105 -0
- parrot/handlers/stream.py +337 -0
- parrot/interfaces/__init__.py +6 -0
- parrot/interfaces/aws.py +143 -0
- parrot/interfaces/credentials.py +113 -0
- parrot/interfaces/database.py +27 -0
- parrot/interfaces/google.py +1123 -0
- parrot/interfaces/hierarchy.py +1227 -0
- parrot/interfaces/http.py +651 -0
- parrot/interfaces/images/__init__.py +0 -0
- parrot/interfaces/images/plugins/__init__.py +24 -0
- parrot/interfaces/images/plugins/abstract.py +58 -0
- parrot/interfaces/images/plugins/analisys.py +148 -0
- parrot/interfaces/images/plugins/classify.py +150 -0
- parrot/interfaces/images/plugins/classifybase.py +182 -0
- parrot/interfaces/images/plugins/detect.py +150 -0
- parrot/interfaces/images/plugins/exif.py +1103 -0
- parrot/interfaces/images/plugins/hash.py +52 -0
- parrot/interfaces/images/plugins/vision.py +104 -0
- parrot/interfaces/images/plugins/yolo.py +66 -0
- parrot/interfaces/images/plugins/zerodetect.py +197 -0
- parrot/interfaces/o365.py +978 -0
- parrot/interfaces/onedrive.py +822 -0
- parrot/interfaces/sharepoint.py +1435 -0
- parrot/interfaces/soap.py +257 -0
- parrot/loaders/__init__.py +8 -0
- parrot/loaders/abstract.py +1131 -0
- parrot/loaders/audio.py +199 -0
- parrot/loaders/basepdf.py +53 -0
- parrot/loaders/basevideo.py +1568 -0
- parrot/loaders/csv.py +409 -0
- parrot/loaders/docx.py +116 -0
- parrot/loaders/epubloader.py +316 -0
- parrot/loaders/excel.py +199 -0
- parrot/loaders/factory.py +55 -0
- parrot/loaders/files/__init__.py +0 -0
- parrot/loaders/files/abstract.py +39 -0
- parrot/loaders/files/html.py +26 -0
- parrot/loaders/files/text.py +63 -0
- parrot/loaders/html.py +152 -0
- parrot/loaders/markdown.py +442 -0
- parrot/loaders/pdf.py +373 -0
- parrot/loaders/pdfmark.py +320 -0
- parrot/loaders/pdftables.py +506 -0
- parrot/loaders/ppt.py +476 -0
- parrot/loaders/qa.py +63 -0
- parrot/loaders/splitters/__init__.py +10 -0
- parrot/loaders/splitters/base.py +138 -0
- parrot/loaders/splitters/md.py +228 -0
- parrot/loaders/splitters/token.py +143 -0
- parrot/loaders/txt.py +26 -0
- parrot/loaders/video.py +89 -0
- parrot/loaders/videolocal.py +218 -0
- parrot/loaders/videounderstanding.py +377 -0
- parrot/loaders/vimeo.py +167 -0
- parrot/loaders/web.py +599 -0
- parrot/loaders/youtube.py +504 -0
- parrot/manager/__init__.py +5 -0
- parrot/manager/manager.py +1030 -0
- parrot/mcp/__init__.py +28 -0
- parrot/mcp/adapter.py +105 -0
- parrot/mcp/cli.py +174 -0
- parrot/mcp/client.py +119 -0
- parrot/mcp/config.py +75 -0
- parrot/mcp/integration.py +842 -0
- parrot/mcp/oauth.py +933 -0
- parrot/mcp/server.py +225 -0
- parrot/mcp/transports/__init__.py +3 -0
- parrot/mcp/transports/base.py +279 -0
- parrot/mcp/transports/grpc_session.py +163 -0
- parrot/mcp/transports/http.py +312 -0
- parrot/mcp/transports/mcp.proto +108 -0
- parrot/mcp/transports/quic.py +1082 -0
- parrot/mcp/transports/sse.py +330 -0
- parrot/mcp/transports/stdio.py +309 -0
- parrot/mcp/transports/unix.py +395 -0
- parrot/mcp/transports/websocket.py +547 -0
- parrot/memory/__init__.py +16 -0
- parrot/memory/abstract.py +209 -0
- parrot/memory/agent.py +32 -0
- parrot/memory/cache.py +175 -0
- parrot/memory/core.py +555 -0
- parrot/memory/file.py +153 -0
- parrot/memory/mem.py +131 -0
- parrot/memory/redis.py +613 -0
- parrot/models/__init__.py +46 -0
- parrot/models/basic.py +118 -0
- parrot/models/compliance.py +208 -0
- parrot/models/crew.py +395 -0
- parrot/models/detections.py +654 -0
- parrot/models/generation.py +85 -0
- parrot/models/google.py +223 -0
- parrot/models/groq.py +23 -0
- parrot/models/openai.py +30 -0
- parrot/models/outputs.py +285 -0
- parrot/models/responses.py +938 -0
- parrot/notifications/__init__.py +743 -0
- parrot/openapi/__init__.py +3 -0
- parrot/openapi/components.yaml +641 -0
- parrot/openapi/config.py +322 -0
- parrot/outputs/__init__.py +32 -0
- parrot/outputs/formats/__init__.py +108 -0
- parrot/outputs/formats/altair.py +359 -0
- parrot/outputs/formats/application.py +122 -0
- parrot/outputs/formats/base.py +351 -0
- parrot/outputs/formats/bokeh.py +356 -0
- parrot/outputs/formats/card.py +424 -0
- parrot/outputs/formats/chart.py +436 -0
- parrot/outputs/formats/d3.py +255 -0
- parrot/outputs/formats/echarts.py +310 -0
- parrot/outputs/formats/generators/__init__.py +0 -0
- parrot/outputs/formats/generators/abstract.py +61 -0
- parrot/outputs/formats/generators/panel.py +145 -0
- parrot/outputs/formats/generators/streamlit.py +86 -0
- parrot/outputs/formats/generators/terminal.py +63 -0
- parrot/outputs/formats/holoviews.py +310 -0
- parrot/outputs/formats/html.py +147 -0
- parrot/outputs/formats/jinja2.py +46 -0
- parrot/outputs/formats/json.py +87 -0
- parrot/outputs/formats/map.py +933 -0
- parrot/outputs/formats/markdown.py +172 -0
- parrot/outputs/formats/matplotlib.py +237 -0
- parrot/outputs/formats/mixins/__init__.py +0 -0
- parrot/outputs/formats/mixins/emaps.py +855 -0
- parrot/outputs/formats/plotly.py +341 -0
- parrot/outputs/formats/seaborn.py +310 -0
- parrot/outputs/formats/table.py +397 -0
- parrot/outputs/formats/template_report.py +138 -0
- parrot/outputs/formats/yaml.py +125 -0
- parrot/outputs/formatter.py +152 -0
- parrot/outputs/templates/__init__.py +95 -0
- parrot/pipelines/__init__.py +0 -0
- parrot/pipelines/abstract.py +210 -0
- parrot/pipelines/detector.py +124 -0
- parrot/pipelines/models.py +90 -0
- parrot/pipelines/planogram.py +3002 -0
- parrot/pipelines/table.sql +97 -0
- parrot/plugins/__init__.py +106 -0
- parrot/plugins/importer.py +80 -0
- parrot/py.typed +0 -0
- parrot/registry/__init__.py +18 -0
- parrot/registry/registry.py +594 -0
- parrot/scheduler/__init__.py +1189 -0
- parrot/scheduler/models.py +60 -0
- parrot/security/__init__.py +16 -0
- parrot/security/prompt_injection.py +268 -0
- parrot/security/security_events.sql +25 -0
- parrot/services/__init__.py +1 -0
- parrot/services/mcp/__init__.py +8 -0
- parrot/services/mcp/config.py +13 -0
- parrot/services/mcp/server.py +295 -0
- parrot/services/o365_remote_auth.py +235 -0
- parrot/stores/__init__.py +7 -0
- parrot/stores/abstract.py +352 -0
- parrot/stores/arango.py +1090 -0
- parrot/stores/bigquery.py +1377 -0
- parrot/stores/cache.py +106 -0
- parrot/stores/empty.py +10 -0
- parrot/stores/faiss_store.py +1157 -0
- parrot/stores/kb/__init__.py +9 -0
- parrot/stores/kb/abstract.py +68 -0
- parrot/stores/kb/cache.py +165 -0
- parrot/stores/kb/doc.py +325 -0
- parrot/stores/kb/hierarchy.py +346 -0
- parrot/stores/kb/local.py +457 -0
- parrot/stores/kb/prompt.py +28 -0
- parrot/stores/kb/redis.py +659 -0
- parrot/stores/kb/store.py +115 -0
- parrot/stores/kb/user.py +374 -0
- parrot/stores/models.py +59 -0
- parrot/stores/pgvector.py +3 -0
- parrot/stores/postgres.py +2853 -0
- parrot/stores/utils/__init__.py +0 -0
- parrot/stores/utils/chunking.py +197 -0
- parrot/telemetry/__init__.py +3 -0
- parrot/telemetry/mixin.py +111 -0
- parrot/template/__init__.py +3 -0
- parrot/template/engine.py +259 -0
- parrot/tools/__init__.py +23 -0
- parrot/tools/abstract.py +644 -0
- parrot/tools/agent.py +363 -0
- parrot/tools/arangodbsearch.py +537 -0
- parrot/tools/arxiv_tool.py +188 -0
- parrot/tools/calculator/__init__.py +3 -0
- parrot/tools/calculator/operations/__init__.py +38 -0
- parrot/tools/calculator/operations/calculus.py +80 -0
- parrot/tools/calculator/operations/statistics.py +76 -0
- parrot/tools/calculator/tool.py +150 -0
- parrot/tools/cloudwatch.py +988 -0
- parrot/tools/codeinterpreter/__init__.py +127 -0
- parrot/tools/codeinterpreter/executor.py +371 -0
- parrot/tools/codeinterpreter/internals.py +473 -0
- parrot/tools/codeinterpreter/models.py +643 -0
- parrot/tools/codeinterpreter/prompts.py +224 -0
- parrot/tools/codeinterpreter/tool.py +664 -0
- parrot/tools/company_info/__init__.py +6 -0
- parrot/tools/company_info/tool.py +1138 -0
- parrot/tools/correlationanalysis.py +437 -0
- parrot/tools/database/abstract.py +286 -0
- parrot/tools/database/bq.py +115 -0
- parrot/tools/database/cache.py +284 -0
- parrot/tools/database/models.py +95 -0
- parrot/tools/database/pg.py +343 -0
- parrot/tools/databasequery.py +1159 -0
- parrot/tools/db.py +1800 -0
- parrot/tools/ddgo.py +370 -0
- parrot/tools/decorators.py +271 -0
- parrot/tools/dftohtml.py +282 -0
- parrot/tools/document.py +549 -0
- parrot/tools/ecs.py +819 -0
- parrot/tools/edareport.py +368 -0
- parrot/tools/elasticsearch.py +1049 -0
- parrot/tools/employees.py +462 -0
- parrot/tools/epson/__init__.py +96 -0
- parrot/tools/excel.py +683 -0
- parrot/tools/file/__init__.py +13 -0
- parrot/tools/file/abstract.py +76 -0
- parrot/tools/file/gcs.py +378 -0
- parrot/tools/file/local.py +284 -0
- parrot/tools/file/s3.py +511 -0
- parrot/tools/file/tmp.py +309 -0
- parrot/tools/file/tool.py +501 -0
- parrot/tools/file_reader.py +129 -0
- parrot/tools/flowtask/__init__.py +19 -0
- parrot/tools/flowtask/tool.py +761 -0
- parrot/tools/gittoolkit.py +508 -0
- parrot/tools/google/__init__.py +18 -0
- parrot/tools/google/base.py +169 -0
- parrot/tools/google/tools.py +1251 -0
- parrot/tools/googlelocation.py +5 -0
- parrot/tools/googleroutes.py +5 -0
- parrot/tools/googlesearch.py +5 -0
- parrot/tools/googlesitesearch.py +5 -0
- parrot/tools/googlevoice.py +2 -0
- parrot/tools/gvoice.py +695 -0
- parrot/tools/ibisworld/README.md +225 -0
- parrot/tools/ibisworld/__init__.py +11 -0
- parrot/tools/ibisworld/tool.py +366 -0
- parrot/tools/jiratoolkit.py +1718 -0
- parrot/tools/manager.py +1098 -0
- parrot/tools/math.py +152 -0
- parrot/tools/metadata.py +476 -0
- parrot/tools/msteams.py +1621 -0
- parrot/tools/msword.py +635 -0
- parrot/tools/multidb.py +580 -0
- parrot/tools/multistoresearch.py +369 -0
- parrot/tools/networkninja.py +167 -0
- parrot/tools/nextstop/__init__.py +4 -0
- parrot/tools/nextstop/base.py +286 -0
- parrot/tools/nextstop/employee.py +733 -0
- parrot/tools/nextstop/store.py +462 -0
- parrot/tools/notification.py +435 -0
- parrot/tools/o365/__init__.py +42 -0
- parrot/tools/o365/base.py +295 -0
- parrot/tools/o365/bundle.py +522 -0
- parrot/tools/o365/events.py +554 -0
- parrot/tools/o365/mail.py +992 -0
- parrot/tools/o365/onedrive.py +497 -0
- parrot/tools/o365/sharepoint.py +641 -0
- parrot/tools/openapi_toolkit.py +904 -0
- parrot/tools/openweather.py +527 -0
- parrot/tools/pdfprint.py +1001 -0
- parrot/tools/powerbi.py +518 -0
- parrot/tools/powerpoint.py +1113 -0
- parrot/tools/pricestool.py +146 -0
- parrot/tools/products/__init__.py +246 -0
- parrot/tools/prophet_tool.py +171 -0
- parrot/tools/pythonpandas.py +630 -0
- parrot/tools/pythonrepl.py +910 -0
- parrot/tools/qsource.py +436 -0
- parrot/tools/querytoolkit.py +395 -0
- parrot/tools/quickeda.py +827 -0
- parrot/tools/resttool.py +553 -0
- parrot/tools/retail/__init__.py +0 -0
- parrot/tools/retail/bby.py +528 -0
- parrot/tools/sandboxtool.py +703 -0
- parrot/tools/sassie/__init__.py +352 -0
- parrot/tools/scraping/__init__.py +7 -0
- parrot/tools/scraping/docs/select.md +466 -0
- parrot/tools/scraping/documentation.md +1278 -0
- parrot/tools/scraping/driver.py +436 -0
- parrot/tools/scraping/models.py +576 -0
- parrot/tools/scraping/options.py +85 -0
- parrot/tools/scraping/orchestrator.py +517 -0
- parrot/tools/scraping/readme.md +740 -0
- parrot/tools/scraping/tool.py +3115 -0
- parrot/tools/seasonaldetection.py +642 -0
- parrot/tools/shell_tool/__init__.py +5 -0
- parrot/tools/shell_tool/actions.py +408 -0
- parrot/tools/shell_tool/engine.py +155 -0
- parrot/tools/shell_tool/models.py +322 -0
- parrot/tools/shell_tool/tool.py +442 -0
- parrot/tools/site_search.py +214 -0
- parrot/tools/textfile.py +418 -0
- parrot/tools/think.py +378 -0
- parrot/tools/toolkit.py +298 -0
- parrot/tools/webapp_tool.py +187 -0
- parrot/tools/whatif.py +1279 -0
- parrot/tools/workday/MULTI_WSDL_EXAMPLE.md +249 -0
- parrot/tools/workday/__init__.py +6 -0
- parrot/tools/workday/models.py +1389 -0
- parrot/tools/workday/tool.py +1293 -0
- parrot/tools/yfinance_tool.py +306 -0
- parrot/tools/zipcode.py +217 -0
- parrot/utils/__init__.py +2 -0
- parrot/utils/helpers.py +73 -0
- parrot/utils/parsers/__init__.py +5 -0
- parrot/utils/parsers/toml.c +12078 -0
- parrot/utils/parsers/toml.cpython-310-x86_64-linux-gnu.so +0 -0
- parrot/utils/parsers/toml.pyx +21 -0
- parrot/utils/toml.py +11 -0
- parrot/utils/types.cpp +20936 -0
- parrot/utils/types.cpython-310-x86_64-linux-gnu.so +0 -0
- parrot/utils/types.pyx +213 -0
- parrot/utils/uv.py +11 -0
- parrot/version.py +10 -0
- parrot/yaml-rs/Cargo.lock +350 -0
- parrot/yaml-rs/Cargo.toml +19 -0
- parrot/yaml-rs/pyproject.toml +19 -0
- parrot/yaml-rs/python/yaml_rs/__init__.py +81 -0
- parrot/yaml-rs/src/lib.rs +222 -0
- requirements/docker-compose.yml +24 -0
- requirements/requirements-dev.txt +21 -0
|
@@ -0,0 +1,1049 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Elasticsearch/OpenSearch Tool for AI-Parrot
|
|
3
|
+
Enables AI agents to query Elasticsearch indices, search logs, and extract metrics
|
|
4
|
+
"""
|
|
5
|
+
from typing import Optional, List, Dict, Any, Literal
|
|
6
|
+
from datetime import datetime, timedelta, timezone
|
|
7
|
+
from enum import Enum
|
|
8
|
+
import re
|
|
9
|
+
import json
|
|
10
|
+
from pydantic import Field, field_validator
|
|
11
|
+
from navconfig import config
|
|
12
|
+
from asyncdb import AsyncDB
|
|
13
|
+
from asyncdb.drivers.elastic import ElasticConfig
|
|
14
|
+
from .abstract import (
|
|
15
|
+
AbstractTool,
|
|
16
|
+
AbstractToolArgsSchema,
|
|
17
|
+
ToolResult
|
|
18
|
+
)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class ElasticsearchOperation(str, Enum):
    """Available Elasticsearch operations.

    Subclassing ``str`` makes each member compare equal to its plain string
    value (e.g. ``ElasticsearchOperation.SEARCH == "search"``), so raw
    strings coming from an agent can be validated into this enum.
    """
    SEARCH = "search"                    # free-form Elasticsearch DSL search
    QUERY_LOGS = "query_logs"            # log query with level/message/time filters
    GET_METRICS = "get_metrics"          # numeric metric extraction from log fields
    AGGREGATE = "aggregate"              # arbitrary aggregation request
    LIST_INDICES = "list_indices"        # enumerate available indices
    GET_DOCUMENT = "get_document"        # fetch a single document by ID
    COUNT_DOCUMENTS = "count_documents"  # count documents matching criteria
    ANALYZE_LOGS = "analyze_logs"        # pattern/insight analysis over logs
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class ElasticsearchToolArgs(AbstractToolArgsSchema):
    """Arguments schema for Elasticsearch operations.

    Only ``operation`` is required; every other field is optional and is
    consumed (or ignored) depending on the operation selected.
    """

    operation: ElasticsearchOperation = Field(
        ...,
        description=(
            "Elasticsearch operation to perform:\n"
            "- 'search': Execute a search query using Elasticsearch DSL\n"
            "- 'query_logs': Query logs with filters and time ranges (Logstash format)\n"
            "- 'get_metrics': Extract metrics from log entries\n"
            "- 'aggregate': Perform aggregations on data\n"
            "- 'list_indices': List all available indices\n"
            "- 'get_document': Get a specific document by ID\n"
            "- 'count_documents': Count documents matching criteria\n"
            "- 'analyze_logs': Analyze log patterns and extract insights"
        )
    )

    # Index parameters
    index: Optional[str] = Field(
        None,
        description="Elasticsearch index name (e.g., 'logstash-*', 'app-logs-2024')"
    )

    # Query parameters
    query: Optional[Dict[str, Any]] = Field(
        None,
        description=(
            "Elasticsearch query DSL as a dictionary. "
            "Example: {'match': {'message': 'error'}}"
        )
    )

    query_string: Optional[str] = Field(
        None,
        description=(
            "Simple query string for searching. "
            "Examples: 'status:500', 'error AND timeout', 'user:john'"
        )
    )

    # Log-specific parameters
    log_level: Optional[Literal["DEBUG", "INFO", "WARN", "ERROR", "FATAL"]] = Field(
        None,
        description="Filter logs by level (for Logstash-formatted logs)"
    )

    message_filter: Optional[str] = Field(
        None,
        description="Filter log messages containing this text"
    )

    # Time range parameters
    start_time: Optional[str] = Field(
        None,
        description=(
            "Start time for query (ISO format or relative like '-1h', '-30m', '-7d'). "
            "Examples: '2024-01-01T00:00:00', '-1h', '-24h'"
        )
    )

    end_time: Optional[str] = Field(
        None,
        description="End time for query (ISO format or 'now'). Default: now"
    )

    # Aggregation parameters
    aggregation: Optional[Dict[str, Any]] = Field(
        None,
        description=(
            "Elasticsearch aggregation specification. "
            "Example: {'terms': {'field': 'status.keyword', 'size': 10}}"
        )
    )

    metric_field: Optional[str] = Field(
        None,
        description="Field to extract metrics from (e.g., 'response_time', 'cpu_usage')"
    )

    metric_type: Optional[Literal["avg", "sum", "min", "max", "count", "percentiles"]] = Field(
        "avg",
        description="Type of metric aggregation to perform"
    )

    # General parameters
    size: Optional[int] = Field(
        100,
        description="Maximum number of results to return (default: 100)"
    )

    sort: Optional[List[Dict[str, Any]]] = Field(
        None,
        description=(
            "Sort specification as list of dicts. "
            "Example: [{'@timestamp': {'order': 'desc'}}]"
        )
    )

    fields: Optional[List[str]] = Field(
        None,
        description="Specific fields to return in results"
    )

    # Document operations
    document_id: Optional[str] = Field(
        None,
        description="Document ID for get_document operation"
    )

    # Analysis parameters
    group_by: Optional[str] = Field(
        None,
        description="Field to group by for log analysis (e.g., 'host.keyword', 'level.keyword')"
    )

    time_interval: Optional[str] = Field(
        "1h",
        description="Time interval for bucketing (e.g., '5m', '1h', '1d')"
    )

    # One validator covers both time fields: pydantic v2 field_validator
    # accepts multiple field names, so the two previously duplicated
    # no-op validators are consolidated here.
    @field_validator('start_time', 'end_time', mode='before')
    @classmethod
    def parse_time(cls, v):
        """Accept the raw time string unchanged; the tool parses it later."""
        return v  # Will be parsed in the tool
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
class ElasticsearchTool(AbstractTool):
    """
    Tool for querying Elasticsearch/OpenSearch indices and analyzing logs.

    Capabilities:
    - Execute complex searches using Elasticsearch DSL
    - Query and analyze logs (especially Logstash-formatted logs)
    - Extract metrics from log entries
    - Perform aggregations and analytics
    - List indices and explore data structure
    - Retrieve specific documents

    Example Usage:
        # Query error logs in last hour
        {
            "operation": "query_logs",
            "index": "logstash-*",
            "log_level": "ERROR",
            "start_time": "-1h"
        }

        # Get average response time metrics
        {
            "operation": "get_metrics",
            "index": "app-logs-*",
            "metric_field": "response_time",
            "metric_type": "avg",
            "start_time": "-24h"
        }

        # Analyze log patterns
        {
            "operation": "analyze_logs",
            "index": "logstash-*",
            "group_by": "level.keyword",
            "time_interval": "1h",
            "start_time": "-7d"
        }
    """

    # Tool identifier (presumably used by the agent framework to select
    # this tool by name — TODO confirm against AbstractTool).
    name: str = "elasticsearch_tool"
    # Human/LLM-readable summary of what the tool can do.
    description: str = (
        "Query Elasticsearch/OpenSearch indices, search logs, and extract metrics. "
        "Supports complex queries, aggregations, and log analysis."
    )
    # Pydantic schema validating the arguments supplied per invocation.
    args_schema: type[AbstractToolArgsSchema] = ElasticsearchToolArgs
|
|
213
|
+
|
|
214
|
+
def __init__(
|
|
215
|
+
self,
|
|
216
|
+
host: str = None,
|
|
217
|
+
port: int = None,
|
|
218
|
+
user: str = None,
|
|
219
|
+
password: str = None,
|
|
220
|
+
protocol: str = "http",
|
|
221
|
+
default_index: str = "logstash-*",
|
|
222
|
+
client_type: str = "auto",
|
|
223
|
+
**kwargs
|
|
224
|
+
):
|
|
225
|
+
"""
|
|
226
|
+
Initialize Elasticsearch tool.
|
|
227
|
+
|
|
228
|
+
Args:
|
|
229
|
+
host: Elasticsearch host (from ELASTICSEARCH_HOST env var if not provided)
|
|
230
|
+
port: Elasticsearch port (from ELASTICSEARCH_PORT env var if not provided)
|
|
231
|
+
user: Elasticsearch user (from ELASTICSEARCH_USER env var if not provided)
|
|
232
|
+
password: Elasticsearch password (from ELASTICSEARCH_PASSWORD env var if not provided)
|
|
233
|
+
protocol: Connection protocol (http or https, from ELASTICSEARCH_PROTOCOL env var)
|
|
234
|
+
default_index: Default index to query
|
|
235
|
+
client_type: Client type ('elasticsearch', 'opensearch', or 'auto')
|
|
236
|
+
**kwargs: Additional parameters
|
|
237
|
+
"""
|
|
238
|
+
super().__init__(**kwargs)
|
|
239
|
+
# Import config for environment variables
|
|
240
|
+
self.host = host or config.get('ELASTICSEARCH_HOST', fallback='localhost')
|
|
241
|
+
self.port = port or int(config.get('ELASTICSEARCH_PORT', fallback='9200'))
|
|
242
|
+
self.user = user or config.get('ELASTICSEARCH_USER')
|
|
243
|
+
self.password = password or config.get('ELASTICSEARCH_PASSWORD')
|
|
244
|
+
self.protocol = protocol or config.get('ELASTICSEARCH_PROTOCOL', fallback='http')
|
|
245
|
+
self.default_index = default_index or config.get(
|
|
246
|
+
'ELASTICSEARCH_INDEX',
|
|
247
|
+
fallback='logstash-*'
|
|
248
|
+
)
|
|
249
|
+
self.client_type = client_type or config.get(
|
|
250
|
+
'ELASTICSEARCH_CLIENT_TYPE', fallback='auto'
|
|
251
|
+
)
|
|
252
|
+
|
|
253
|
+
# Initialize AsyncDB connection
|
|
254
|
+
self.config = ElasticConfig(
|
|
255
|
+
host=self.host,
|
|
256
|
+
port=self.port,
|
|
257
|
+
user=self.user,
|
|
258
|
+
password=self.password,
|
|
259
|
+
db=self.default_index,
|
|
260
|
+
protocol=self.protocol
|
|
261
|
+
)
|
|
262
|
+
|
|
263
|
+
self._db = AsyncDB(
|
|
264
|
+
'elastic',
|
|
265
|
+
params=self.config,
|
|
266
|
+
client_type=self.client_type
|
|
267
|
+
)
|
|
268
|
+
|
|
269
|
+
def _parse_relative_time(self, time_str: str) -> datetime:
|
|
270
|
+
"""
|
|
271
|
+
Parse relative time strings like '-1h', '-30m', '-7d'.
|
|
272
|
+
|
|
273
|
+
Args:
|
|
274
|
+
time_str: Time string (ISO format, 'now', or relative like '-1h')
|
|
275
|
+
|
|
276
|
+
Returns:
|
|
277
|
+
datetime object
|
|
278
|
+
"""
|
|
279
|
+
if time_str == 'now' or time_str is None:
|
|
280
|
+
return datetime.now(timezone.utc)
|
|
281
|
+
|
|
282
|
+
# Check if it's a relative time
|
|
283
|
+
if time_str.startswith('-'):
|
|
284
|
+
time_str = time_str[1:] # Remove the minus sign
|
|
285
|
+
|
|
286
|
+
# Parse the number and unit
|
|
287
|
+
match = re.match(r'^(\d+)([smhd])$', time_str)
|
|
288
|
+
if not match:
|
|
289
|
+
raise ValueError(
|
|
290
|
+
f"Invalid relative time format: {time_str}. "
|
|
291
|
+
"Use format like '1h', '30m', '7d'"
|
|
292
|
+
)
|
|
293
|
+
|
|
294
|
+
amount, unit = match.groups()
|
|
295
|
+
amount = int(amount)
|
|
296
|
+
|
|
297
|
+
# Calculate the timedelta
|
|
298
|
+
if unit == 's':
|
|
299
|
+
delta = timedelta(seconds=amount)
|
|
300
|
+
elif unit == 'm':
|
|
301
|
+
delta = timedelta(minutes=amount)
|
|
302
|
+
elif unit == 'h':
|
|
303
|
+
delta = timedelta(hours=amount)
|
|
304
|
+
elif unit == 'd':
|
|
305
|
+
delta = timedelta(days=amount)
|
|
306
|
+
else:
|
|
307
|
+
raise ValueError(f"Unknown time unit: {unit}")
|
|
308
|
+
|
|
309
|
+
return datetime.now(timezone.utc) - delta
|
|
310
|
+
|
|
311
|
+
# Parse as ISO format
|
|
312
|
+
try:
|
|
313
|
+
return datetime.fromisoformat(time_str.replace('Z', '+00:00'))
|
|
314
|
+
except ValueError as e:
|
|
315
|
+
raise ValueError(
|
|
316
|
+
f"Invalid time format: {time_str}. Use ISO format or relative like '-1h'"
|
|
317
|
+
) from e
|
|
318
|
+
|
|
319
|
+
def _build_time_range_filter(
|
|
320
|
+
self,
|
|
321
|
+
start_time: str = None,
|
|
322
|
+
end_time: str = None
|
|
323
|
+
) -> Dict[str, Any]:
|
|
324
|
+
"""
|
|
325
|
+
Build Elasticsearch time range filter.
|
|
326
|
+
|
|
327
|
+
Args:
|
|
328
|
+
start_time: Start time string
|
|
329
|
+
end_time: End time string
|
|
330
|
+
|
|
331
|
+
Returns:
|
|
332
|
+
Time range filter dict
|
|
333
|
+
"""
|
|
334
|
+
time_filter = {}
|
|
335
|
+
|
|
336
|
+
if start_time:
|
|
337
|
+
start_dt = self._parse_relative_time(start_time)
|
|
338
|
+
time_filter['gte'] = start_dt.isoformat()
|
|
339
|
+
|
|
340
|
+
if end_time:
|
|
341
|
+
end_dt = self._parse_relative_time(end_time)
|
|
342
|
+
time_filter['lte'] = end_dt.isoformat()
|
|
343
|
+
|
|
344
|
+
return {'range': {'@timestamp': time_filter}} if time_filter else {}
|
|
345
|
+
|
|
346
|
+
async def _search(
    self,
    index: str,
    query: Dict[str, Any],
    size: int = 100,
    sort: List[Dict[str, Any]] = None,
    fields: List[str] = None
) -> List[Dict[str, Any]]:
    """
    Run a search and return the matching documents.

    Args:
        index: Index name. NOTE(review): not forwarded to ``conn.query``
            here — presumably the asyncdb connection targets its configured
            index; confirm against the elastic driver.
        query: Elasticsearch query DSL (the ``query`` clause only).
        size: Maximum number of hits to return.
        sort: Optional sort specification.
        fields: Optional ``_source`` field whitelist.

    Returns:
        The ``_source`` payload of each hit (the raw hit when ``_source``
        is absent).

    Raises:
        RuntimeError: If the backend reports an error.
    """
    request = {'query': query, 'size': size}
    if sort:
        request['sort'] = sort
    if fields:
        request['_source'] = fields

    # asyncdb returns a (results, error) pair instead of raising.
    async with await self._db.connection() as conn:  # pylint: disable=E1101
        hits, error = await conn.query(json.dumps(request))
        if error:
            raise RuntimeError(f"Elasticsearch query failed: {error}")
        return [hit.get('_source', hit) for hit in hits]
|
|
390
|
+
|
|
391
|
+
async def _query_logs(
    self,
    index: str,
    start_time: str = None,
    end_time: str = None,
    log_level: str = None,
    message_filter: str = None,
    size: int = 100
) -> List[Dict[str, Any]]:
    """
    Fetch log entries matching optional time/level/message filters.

    Args:
        index: Index name.
        start_time: Lower time bound (ISO, 'now', or relative like '-1h').
        end_time: Upper time bound, same formats.
        log_level: Exact level (matched upper-cased on ``level.keyword``).
        message_filter: Full-text match on the ``message`` field.
        size: Maximum number of entries.

    Returns:
        Matching log documents, newest first.
    """
    clauses = []

    time_clause = self._build_time_range_filter(start_time, end_time)
    if time_clause:
        clauses.append(time_clause)

    if log_level:
        # Levels are conventionally indexed upper-case.
        clauses.append({'term': {'level.keyword': log_level.upper()}})

    if message_filter:
        clauses.append({'match': {'message': message_filter}})

    es_query = {'bool': {'must': clauses}} if clauses else {'match_all': {}}

    # Newest entries first.
    newest_first = [{'@timestamp': {'order': 'desc'}}]
    return await self._search(index, es_query, size, newest_first)
|
|
445
|
+
|
|
446
|
+
async def _get_metrics(
    self,
    index: str,
    metric_field: str,
    metric_type: str = "avg",
    start_time: str = None,
    end_time: str = None,
    time_interval: str = "1h"
) -> Dict[str, Any]:
    """
    Aggregate a numeric field into time buckets plus an overall figure.

    Args:
        index: Index name.
        metric_field: Numeric field to aggregate.
        metric_type: Aggregation type (avg, sum, min, max, count, percentiles).
        start_time: Lower time bound (ISO, 'now', or relative like '-1h').
        end_time: Upper time bound.
        time_interval: ``date_histogram`` fixed interval (e.g. '1h').

    Returns:
        Dict with 'field', 'metric_type', 'overall', 'time_series'
        (one point per interval) and 'total_documents'.
    """
    # Restrict to the requested time window, if any.
    time_filter = self._build_time_range_filter(start_time, end_time)
    query = {'bool': {'must': [time_filter]}} if time_filter else {'match_all': {}}

    # Build the metric aggregation once and reuse it for both the
    # per-bucket and the overall aggregation (previously the same
    # percentiles/simple-metric branching was written out twice).
    if metric_type == 'percentiles':
        metric_agg = {
            'percentiles': {
                'field': metric_field,
                'percents': [50, 95, 99]
            }
        }
    else:
        metric_agg = {metric_type: {'field': metric_field}}

    agg_body = {
        'time_buckets': {
            'date_histogram': {
                'field': '@timestamp',
                'fixed_interval': time_interval
            },
            # Copy so the two aggregations remain independent objects.
            'aggs': {'metric': dict(metric_agg)}
        },
        'overall_metric': dict(metric_agg)
    }

    # size=0: we only want aggregations, not hits.
    body = {
        'query': query,
        'size': 0,
        'aggs': agg_body
    }

    # NOTE(review): reaches into the driver's private ``_connection``
    # because asyncdb's query() wrapper does not expose aggregations.
    async with await self._db.connection() as conn:  # pylint: disable=E1101
        result = await conn._connection.search(
            index=index,
            body=body
        )

    buckets = result.get('aggregations', {}).get('time_buckets', {}).get('buckets', [])
    overall = result.get('aggregations', {}).get('overall_metric', {})

    def _metric_value(agg: Dict[str, Any]):
        # Simple metrics expose 'value'; percentiles expose 'values'.
        return agg.get('value') if 'value' in agg else agg.get('values')

    return {
        'field': metric_field,
        'metric_type': metric_type,
        'overall': _metric_value(overall),
        'time_series': [
            {
                'timestamp': bucket['key_as_string'] if 'key_as_string' in bucket else bucket['key'],
                'value': _metric_value(bucket['metric'])
            }
            for bucket in buckets
        ],
        'total_documents': result.get('hits', {}).get('total', {}).get('value', 0)
    }
|
|
550
|
+
|
|
551
|
+
async def _aggregate(
    self,
    index: str,
    aggregation: Dict[str, Any],
    query: Dict[str, Any] = None,
    start_time: str = None,
    end_time: str = None
) -> Dict[str, Any]:
    """
    Run a caller-supplied aggregation, optionally filtered by query/time.

    Args:
        index: Index name.
        aggregation: Elasticsearch ``aggs`` specification.
        query: Optional query clause to combine with the time filter.
        start_time: Lower time bound.
        end_time: Upper time bound.

    Returns:
        Dict with 'aggregations' (raw ES output) and 'total_documents'.
    """
    time_filter = self._build_time_range_filter(start_time, end_time)

    # Fold the optional caller query and time filter into one clause.
    if query and time_filter:
        effective_query = {'bool': {'must': [query, time_filter]}}
    elif query:
        effective_query = query
    elif time_filter:
        effective_query = {'bool': {'must': [time_filter]}}
    else:
        effective_query = {'match_all': {}}

    # size=0: aggregations only, no hits.
    body = {'query': effective_query, 'size': 0, 'aggs': aggregation}

    # Aggregations need the native client; asyncdb's query() wrapper
    # only returns hits.
    async with await self._db.connection() as conn:  # pylint: disable=E1101
        result = await conn._connection.search(index=index, body=body)

    return {
        'aggregations': result.get('aggregations', {}),
        'total_documents': result.get('hits', {}).get('total', {}).get('value', 0)
    }
|
|
607
|
+
|
|
608
|
+
async def _list_indices(self) -> List[str]:
    """
    List the names of all indices visible to the connection.

    Returns:
        Index names as reported by the cat-indices API.
    """
    # The cat API lives on the native client, not on asyncdb's wrapper.
    async with await self._db.connection() as conn:  # pylint: disable=E1101
        rows = await conn._connection.cat.indices(format='json')
        return [row['index'] for row in rows]
|
|
619
|
+
|
|
620
|
+
async def _get_document(self, index: str, document_id: str) -> Dict[str, Any]:
    """
    Get a specific document by ID.

    Args:
        index: Index name.
            NOTE(review): currently unused — ``conn.get`` is called with
            the document id only, so the lookup presumably targets the
            connection's configured default index; confirm against the
            asyncdb elastic driver.
        document_id: Document ID

    Returns:
        Document content as returned by the driver.
    """
    async with await self._db.connection() as conn:  # pylint: disable=E1101
        result = await conn.get(document_id)

    return result
|
|
635
|
+
|
|
636
|
+
async def _count_documents(
    self,
    index: str,
    query: Dict[str, Any] = None,
    start_time: str = None,
    end_time: str = None
) -> int:
    """
    Count documents matching an optional query and/or time range.

    Args:
        index: Index name.
        query: Optional query clause.
        start_time: Lower time bound (ISO, 'now', or relative like '-1h').
        end_time: Upper time bound.

    Returns:
        Number of matching documents (0 when the response has no count).
    """
    time_filter = self._build_time_range_filter(start_time, end_time)

    # Fold the optional caller query and time filter into one clause.
    if query and time_filter:
        effective_query = {'bool': {'must': [query, time_filter]}}
    elif query:
        effective_query = query
    elif time_filter:
        effective_query = {'bool': {'must': [time_filter]}}
    else:
        effective_query = {'match_all': {}}

    # The count API is only available on the native client.
    async with await self._db.connection() as conn:  # pylint: disable=E1101
        result = await conn._connection.count(
            index=index,
            body={'query': effective_query}
        )

    return result.get('count', 0)
|
|
680
|
+
|
|
681
|
+
async def _analyze_logs(
    self,
    index: str,
    group_by: str,
    start_time: str = None,
    end_time: str = None,
    time_interval: str = "1h",
    size: int = 10
) -> Dict[str, Any]:
    """
    Analyze log patterns: top groups overall and per time bucket.

    Runs a single search combining a ``date_histogram`` (with a nested
    ``terms`` sub-aggregation per bucket) and a global ``terms``
    aggregation ordered by document count.

    Args:
        index: Index name.
        group_by: Field to group by (should be a keyword-typed field for
            the ``terms`` aggregation — TODO confirm against the mapping).
        start_time: Lower time bound (ISO, 'now', or relative like '-1h').
        end_time: Upper time bound.
        time_interval: ``date_histogram`` fixed interval (e.g. '1h').
        size: Number of top groups to return (both overall and per bucket).

    Returns:
        Dict with 'group_by', 'total_documents', 'top_groups'
        (name/count pairs) and 'time_series' (per-interval totals with
        per-group breakdowns).
    """
    # Restrict to the requested time window, if any.
    time_filter = self._build_time_range_filter(start_time, end_time)
    query = {
        'bool': {
            'must': [time_filter]
        }
    } if time_filter else {'match_all': {}}

    # Two sibling aggregations: per-interval breakdown and overall top-N.
    agg_body = {
        'time_buckets': {
            'date_histogram': {
                'field': '@timestamp',
                'fixed_interval': time_interval
            },
            'aggs': {
                'groups': {
                    'terms': {
                        'field': group_by,
                        'size': size
                    }
                }
            }
        },
        'top_groups': {
            'terms': {
                'field': group_by,
                'size': size,
                'order': {'_count': 'desc'}
            }
        }
    }

    # size=0: aggregations only, no hits payload.
    body = {
        'query': query,
        'size': 0,
        'aggs': agg_body
    }

    # NOTE(review): uses the driver's private ``_connection`` because
    # asyncdb's query() wrapper does not expose aggregations.
    async with await self._db.connection() as conn:  # pylint: disable=E1101
        result = await conn._connection.search(
            index=index,
            body=body
        )

    # Extract analysis results from the two aggregations.
    time_buckets = result.get('aggregations', {}).get('time_buckets', {}).get('buckets', [])
    top_groups = result.get('aggregations', {}).get('top_groups', {}).get('buckets', [])

    return {
        'group_by': group_by,
        'total_documents': result.get('hits', {}).get('total', {}).get('value', 0),
        'top_groups': [
            {
                'name': bucket['key'],
                'count': bucket['doc_count']
            }
            for bucket in top_groups
        ],
        'time_series': [
            {
                # Prefer the human-readable timestamp when ES provides one.
                'timestamp': bucket['key_as_string'] if 'key_as_string' in bucket else bucket['key'],
                'total': bucket['doc_count'],
                'groups': [
                    {
                        'name': group['key'],
                        'count': group['doc_count']
                    }
                    for group in bucket.get('groups', {}).get('buckets', [])
                ]
            }
            for bucket in time_buckets
        ]
    }
|
|
780
|
+
|
|
781
|
+
async def _execute(self, **kwargs) -> ToolResult:
    """
    Execute an Elasticsearch operation and wrap the outcome in a ToolResult.

    Dispatches on ``kwargs['operation']`` (an ``ElasticsearchOperation``)
    to the matching private helper. ``kwargs['index']`` falls back to
    ``self.default_index``. Missing required parameters for an operation
    (metric_field, aggregation, document_id, group_by) produce an error
    ToolResult rather than raising; any exception from a helper is caught
    and converted to an error ToolResult as well, so this method never
    raises to the caller.
    """

    try:
        # KeyError here (missing 'operation') is caught by the blanket
        # handler below and reported as a failed ToolResult.
        operation = kwargs['operation']
        index = kwargs.get('index') or self.default_index

        # Route to appropriate method
        if operation == ElasticsearchOperation.SEARCH:
            # Build query from parameters: explicit DSL wins, then a
            # Lucene query_string, then match_all.
            query = kwargs.get('query')
            if not query:
                # Build query from query_string if provided
                query_string = kwargs.get('query_string')
                if query_string:
                    query = {
                        'query_string': {
                            'query': query_string
                        }
                    }
                else:
                    query = {'match_all': {}}

            # Add time range if provided (AND-ed with the base query).
            start_time = kwargs.get('start_time')
            end_time = kwargs.get('end_time')
            if start_time or end_time:
                if time_filter := self._build_time_range_filter(start_time, end_time):
                    query = {
                        'bool': {
                            'must': [query, time_filter]
                        }
                    }

            results = await self._search(
                index=index,
                query=query,
                size=kwargs.get('size', 100),
                sort=kwargs.get('sort'),
                fields=kwargs.get('fields')
            )

            return ToolResult(
                success=True,
                status="completed",
                result={
                    'documents': results,
                    'count': len(results)
                },
                error=None,
                metadata={
                    'operation': 'search',
                    'index': index
                },
                timestamp=datetime.now(timezone.utc).isoformat()
            )

        elif operation == ElasticsearchOperation.QUERY_LOGS:
            results = await self._query_logs(
                index=index,
                start_time=kwargs.get('start_time'),
                end_time=kwargs.get('end_time'),
                log_level=kwargs.get('log_level'),
                message_filter=kwargs.get('message_filter'),
                size=kwargs.get('size', 100)
            )

            return ToolResult(
                success=True,
                status="completed",
                result={
                    'logs': results,
                    'count': len(results)
                },
                error=None,
                metadata={
                    'operation': 'query_logs',
                    'index': index,
                    'log_level': kwargs.get('log_level')
                },
                timestamp=datetime.now(timezone.utc).isoformat()
            )

        elif operation == ElasticsearchOperation.GET_METRICS:
            # metric_field is mandatory for this operation.
            metric_field = kwargs.get('metric_field')
            if not metric_field:
                return ToolResult(
                    success=False,
                    status="error",
                    result=None,
                    error="metric_field is required for get_metrics operation",
                    metadata={},
                    timestamp=datetime.now(timezone.utc).isoformat()
                )

            results = await self._get_metrics(
                index=index,
                metric_field=metric_field,
                metric_type=kwargs.get('metric_type', 'avg'),
                start_time=kwargs.get('start_time'),
                end_time=kwargs.get('end_time'),
                time_interval=kwargs.get('time_interval', '1h')
            )

            return ToolResult(
                success=True,
                status="completed",
                result=results,
                error=None,
                metadata={
                    'operation': 'get_metrics',
                    'index': index,
                    'metric_field': metric_field
                },
                timestamp=datetime.now(timezone.utc).isoformat()
            )

        elif operation == ElasticsearchOperation.AGGREGATE:
            # aggregation is mandatory for this operation.
            aggregation = kwargs.get('aggregation')
            if not aggregation:
                return ToolResult(
                    success=False,
                    status="error",
                    result=None,
                    error="aggregation is required for aggregate operation",
                    metadata={},
                    timestamp=datetime.now(timezone.utc).isoformat()
                )

            results = await self._aggregate(
                index=index,
                aggregation=aggregation,
                query=kwargs.get('query'),
                start_time=kwargs.get('start_time'),
                end_time=kwargs.get('end_time')
            )

            return ToolResult(
                success=True,
                status="completed",
                result=results,
                error=None,
                metadata={
                    'operation': 'aggregate',
                    'index': index
                },
                timestamp=datetime.now(timezone.utc).isoformat()
            )

        elif operation == ElasticsearchOperation.LIST_INDICES:
            indices = await self._list_indices()

            return ToolResult(
                success=True,
                status="completed",
                result={
                    'indices': indices,
                    'count': len(indices)
                },
                error=None,
                metadata={
                    'operation': 'list_indices'
                },
                timestamp=datetime.now(timezone.utc).isoformat()
            )

        elif operation == ElasticsearchOperation.GET_DOCUMENT:
            # document_id is mandatory for this operation.
            document_id = kwargs.get('document_id')
            if not document_id:
                return ToolResult(
                    success=False,
                    status="error",
                    result=None,
                    error="document_id is required for get_document operation",
                    metadata={},
                    timestamp=datetime.now(timezone.utc).isoformat()
                )

            document = await self._get_document(index, document_id)

            return ToolResult(
                success=True,
                status="completed",
                result={'document': document},
                error=None,
                metadata={
                    'operation': 'get_document',
                    'index': index,
                    'document_id': document_id
                },
                timestamp=datetime.now(timezone.utc).isoformat()
            )

        elif operation == ElasticsearchOperation.COUNT_DOCUMENTS:
            count = await self._count_documents(
                index=index,
                query=kwargs.get('query'),
                start_time=kwargs.get('start_time'),
                end_time=kwargs.get('end_time')
            )

            return ToolResult(
                success=True,
                status="completed",
                result={'count': count},
                error=None,
                metadata={
                    'operation': 'count_documents',
                    'index': index
                },
                timestamp=datetime.now(timezone.utc).isoformat()
            )

        elif operation == ElasticsearchOperation.ANALYZE_LOGS:
            # group_by is mandatory for this operation.
            group_by = kwargs.get('group_by')
            if not group_by:
                return ToolResult(
                    success=False,
                    status="error",
                    result=None,
                    error="group_by is required for analyze_logs operation",
                    metadata={},
                    timestamp=datetime.now(timezone.utc).isoformat()
                )

            results = await self._analyze_logs(
                index=index,
                group_by=group_by,
                start_time=kwargs.get('start_time'),
                end_time=kwargs.get('end_time'),
                time_interval=kwargs.get('time_interval', '1h'),
                size=kwargs.get('size', 10)
            )

            return ToolResult(
                success=True,
                status="completed",
                result=results,
                error=None,
                metadata={
                    'operation': 'analyze_logs',
                    'index': index,
                    'group_by': group_by
                },
                timestamp=datetime.now(timezone.utc).isoformat()
            )

        else:
            # Unrecognized operation value.
            return ToolResult(
                success=False,
                status="error",
                result=None,
                error=f"Unknown operation: {operation}",
                metadata={'operation': str(operation)},
                timestamp=datetime.now(timezone.utc).isoformat()
            )

    except Exception as e:
        # Blanket boundary handler: the tool contract is to report
        # failures as ToolResult, never to raise to the agent runtime.
        return ToolResult(
            success=False,
            status="error",
            result=None,
            error=f"Elasticsearch operation failed: {str(e)}",
            metadata={
                'operation': kwargs.get('operation', 'unknown'),
                'exception_type': type(e).__name__
            },
            timestamp=datetime.now(timezone.utc).isoformat()
        )
|