@revealui/ai 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +22 -0
- package/LICENSE.commercial +112 -0
- package/README.md +314 -0
- package/dist/a2a/card.d.ts +26 -0
- package/dist/a2a/card.d.ts.map +1 -0
- package/dist/a2a/card.js +173 -0
- package/dist/a2a/handler.d.ts +26 -0
- package/dist/a2a/handler.d.ts.map +1 -0
- package/dist/a2a/handler.js +170 -0
- package/dist/a2a/index.d.ts +10 -0
- package/dist/a2a/index.d.ts.map +1 -0
- package/dist/a2a/index.js +9 -0
- package/dist/a2a/task-store.d.ts +42 -0
- package/dist/a2a/task-store.d.ts.map +1 -0
- package/dist/a2a/task-store.js +99 -0
- package/dist/audit/emitter.d.ts +34 -0
- package/dist/audit/emitter.d.ts.map +1 -0
- package/dist/audit/emitter.js +34 -0
- package/dist/audit/index.d.ts +44 -0
- package/dist/audit/index.d.ts.map +1 -0
- package/dist/audit/index.js +48 -0
- package/dist/audit/observer.d.ts +108 -0
- package/dist/audit/observer.d.ts.map +1 -0
- package/dist/audit/observer.js +271 -0
- package/dist/audit/policy.d.ts +70 -0
- package/dist/audit/policy.d.ts.map +1 -0
- package/dist/audit/policy.js +209 -0
- package/dist/audit/store.d.ts +42 -0
- package/dist/audit/store.d.ts.map +1 -0
- package/dist/audit/store.js +80 -0
- package/dist/audit/types.d.ts +169 -0
- package/dist/audit/types.d.ts.map +1 -0
- package/dist/audit/types.js +80 -0
- package/dist/client/hooks/index.d.ts +22 -0
- package/dist/client/hooks/index.d.ts.map +1 -0
- package/dist/client/hooks/index.js +21 -0
- package/dist/client/hooks/useAgentContext.d.ts +30 -0
- package/dist/client/hooks/useAgentContext.d.ts.map +1 -0
- package/dist/client/hooks/useAgentContext.js +161 -0
- package/dist/client/hooks/useAgentEvents.d.ts +126 -0
- package/dist/client/hooks/useAgentEvents.d.ts.map +1 -0
- package/dist/client/hooks/useAgentEvents.js +232 -0
- package/dist/client/hooks/useAgentStream.d.ts +44 -0
- package/dist/client/hooks/useAgentStream.d.ts.map +1 -0
- package/dist/client/hooks/useAgentStream.js +101 -0
- package/dist/client/hooks/useEpisodicMemory.d.ts +25 -0
- package/dist/client/hooks/useEpisodicMemory.d.ts.map +1 -0
- package/dist/client/hooks/useEpisodicMemory.js +174 -0
- package/dist/client/hooks/useWorkingMemory.d.ts +57 -0
- package/dist/client/hooks/useWorkingMemory.d.ts.map +1 -0
- package/dist/client/hooks/useWorkingMemory.js +276 -0
- package/dist/client/index.d.ts +14 -0
- package/dist/client/index.d.ts.map +1 -0
- package/dist/client/index.js +13 -0
- package/dist/embeddings/index.d.ts +51 -0
- package/dist/embeddings/index.d.ts.map +1 -0
- package/dist/embeddings/index.js +73 -0
- package/dist/index.d.ts +83 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +103 -0
- package/dist/inference/context-assembly.d.ts +27 -0
- package/dist/inference/context-assembly.d.ts.map +1 -0
- package/dist/inference/context-assembly.js +81 -0
- package/dist/inference/overflow-compressor.d.ts +17 -0
- package/dist/inference/overflow-compressor.d.ts.map +1 -0
- package/dist/inference/overflow-compressor.js +40 -0
- package/dist/inference/runRag.d.ts +35 -0
- package/dist/inference/runRag.d.ts.map +1 -0
- package/dist/inference/runRag.js +53 -0
- package/dist/ingestion/bm25.d.ts +29 -0
- package/dist/ingestion/bm25.d.ts.map +1 -0
- package/dist/ingestion/bm25.js +161 -0
- package/dist/ingestion/cms-indexer.d.ts +39 -0
- package/dist/ingestion/cms-indexer.d.ts.map +1 -0
- package/dist/ingestion/cms-indexer.js +74 -0
- package/dist/ingestion/file-parsers.d.ts +51 -0
- package/dist/ingestion/file-parsers.d.ts.map +1 -0
- package/dist/ingestion/file-parsers.js +247 -0
- package/dist/ingestion/hybrid-search.d.ts +22 -0
- package/dist/ingestion/hybrid-search.d.ts.map +1 -0
- package/dist/ingestion/hybrid-search.js +63 -0
- package/dist/ingestion/index.d.ts +9 -0
- package/dist/ingestion/index.d.ts.map +1 -0
- package/dist/ingestion/index.js +8 -0
- package/dist/ingestion/pipeline.d.ts +35 -0
- package/dist/ingestion/pipeline.d.ts.map +1 -0
- package/dist/ingestion/pipeline.js +114 -0
- package/dist/ingestion/rag-vector-service.d.ts +34 -0
- package/dist/ingestion/rag-vector-service.d.ts.map +1 -0
- package/dist/ingestion/rag-vector-service.js +98 -0
- package/dist/ingestion/reranker.d.ts +10 -0
- package/dist/ingestion/reranker.d.ts.map +1 -0
- package/dist/ingestion/reranker.js +41 -0
- package/dist/ingestion/text-splitter.d.ts +25 -0
- package/dist/ingestion/text-splitter.d.ts.map +1 -0
- package/dist/ingestion/text-splitter.js +119 -0
- package/dist/llm/cache-utils.d.ts +146 -0
- package/dist/llm/cache-utils.d.ts.map +1 -0
- package/dist/llm/cache-utils.js +204 -0
- package/dist/llm/client.d.ts +134 -0
- package/dist/llm/client.d.ts.map +1 -0
- package/dist/llm/client.js +497 -0
- package/dist/llm/key-validator.d.ts +25 -0
- package/dist/llm/key-validator.d.ts.map +1 -0
- package/dist/llm/key-validator.js +101 -0
- package/dist/llm/provider-health.d.ts +40 -0
- package/dist/llm/provider-health.d.ts.map +1 -0
- package/dist/llm/provider-health.js +97 -0
- package/dist/llm/providers/anthropic.d.ts +31 -0
- package/dist/llm/providers/anthropic.d.ts.map +1 -0
- package/dist/llm/providers/anthropic.js +248 -0
- package/dist/llm/providers/base.d.ts +111 -0
- package/dist/llm/providers/base.d.ts.map +1 -0
- package/dist/llm/providers/base.js +6 -0
- package/dist/llm/providers/groq.d.ts +23 -0
- package/dist/llm/providers/groq.d.ts.map +1 -0
- package/dist/llm/providers/groq.js +27 -0
- package/dist/llm/providers/ollama.d.ts +27 -0
- package/dist/llm/providers/ollama.d.ts.map +1 -0
- package/dist/llm/providers/ollama.js +48 -0
- package/dist/llm/providers/openai.d.ts +19 -0
- package/dist/llm/providers/openai.d.ts.map +1 -0
- package/dist/llm/providers/openai.js +245 -0
- package/dist/llm/providers/vultr.d.ts +18 -0
- package/dist/llm/providers/vultr.d.ts.map +1 -0
- package/dist/llm/providers/vultr.js +168 -0
- package/dist/llm/response-cache.d.ts +166 -0
- package/dist/llm/response-cache.d.ts.map +1 -0
- package/dist/llm/response-cache.js +233 -0
- package/dist/llm/semantic-cache.d.ts +179 -0
- package/dist/llm/semantic-cache.d.ts.map +1 -0
- package/dist/llm/semantic-cache.js +306 -0
- package/dist/llm/server.d.ts +14 -0
- package/dist/llm/server.d.ts.map +1 -0
- package/dist/llm/server.js +15 -0
- package/dist/llm/token-counter.d.ts +48 -0
- package/dist/llm/token-counter.d.ts.map +1 -0
- package/dist/llm/token-counter.js +77 -0
- package/dist/llm/workspace-provider-config.d.ts +38 -0
- package/dist/llm/workspace-provider-config.d.ts.map +1 -0
- package/dist/llm/workspace-provider-config.js +47 -0
- package/dist/memory/agent/context-manager.d.ts +148 -0
- package/dist/memory/agent/context-manager.d.ts.map +1 -0
- package/dist/memory/agent/context-manager.js +284 -0
- package/dist/memory/agent/index.d.ts +7 -0
- package/dist/memory/agent/index.d.ts.map +1 -0
- package/dist/memory/agent/index.js +6 -0
- package/dist/memory/crdt/index.d.ts +13 -0
- package/dist/memory/crdt/index.d.ts.map +1 -0
- package/dist/memory/crdt/index.js +12 -0
- package/dist/memory/crdt/lww-register.d.ts +108 -0
- package/dist/memory/crdt/lww-register.d.ts.map +1 -0
- package/dist/memory/crdt/lww-register.js +169 -0
- package/dist/memory/crdt/or-set.d.ts +141 -0
- package/dist/memory/crdt/or-set.d.ts.map +1 -0
- package/dist/memory/crdt/or-set.js +291 -0
- package/dist/memory/crdt/pn-counter.d.ts +116 -0
- package/dist/memory/crdt/pn-counter.d.ts.map +1 -0
- package/dist/memory/crdt/pn-counter.js +174 -0
- package/dist/memory/crdt/vector-clock.d.ts +115 -0
- package/dist/memory/crdt/vector-clock.d.ts.map +1 -0
- package/dist/memory/crdt/vector-clock.js +179 -0
- package/dist/memory/errors/index.d.ts +56 -0
- package/dist/memory/errors/index.d.ts.map +1 -0
- package/dist/memory/errors/index.js +85 -0
- package/dist/memory/index.d.ts +21 -0
- package/dist/memory/index.d.ts.map +1 -0
- package/dist/memory/index.js +20 -0
- package/dist/memory/persistence/crdt-persistence.d.ts +85 -0
- package/dist/memory/persistence/crdt-persistence.d.ts.map +1 -0
- package/dist/memory/persistence/crdt-persistence.js +204 -0
- package/dist/memory/persistence/index.d.ts +7 -0
- package/dist/memory/persistence/index.d.ts.map +1 -0
- package/dist/memory/persistence/index.js +6 -0
- package/dist/memory/preferences/index.d.ts +7 -0
- package/dist/memory/preferences/index.d.ts.map +1 -0
- package/dist/memory/preferences/index.js +6 -0
- package/dist/memory/preferences/user-preferences-manager.d.ts +133 -0
- package/dist/memory/preferences/user-preferences-manager.d.ts.map +1 -0
- package/dist/memory/preferences/user-preferences-manager.js +342 -0
- package/dist/memory/services/index.d.ts +8 -0
- package/dist/memory/services/index.d.ts.map +1 -0
- package/dist/memory/services/index.js +6 -0
- package/dist/memory/services/node-id-service.d.ts +75 -0
- package/dist/memory/services/node-id-service.d.ts.map +1 -0
- package/dist/memory/services/node-id-service.js +190 -0
- package/dist/memory/stores/episodic-memory.d.ts +182 -0
- package/dist/memory/stores/episodic-memory.d.ts.map +1 -0
- package/dist/memory/stores/episodic-memory.js +378 -0
- package/dist/memory/stores/index.d.ts +16 -0
- package/dist/memory/stores/index.d.ts.map +1 -0
- package/dist/memory/stores/index.js +15 -0
- package/dist/memory/stores/procedural-memory.d.ts +89 -0
- package/dist/memory/stores/procedural-memory.d.ts.map +1 -0
- package/dist/memory/stores/procedural-memory.js +152 -0
- package/dist/memory/stores/semantic-memory.d.ts +92 -0
- package/dist/memory/stores/semantic-memory.d.ts.map +1 -0
- package/dist/memory/stores/semantic-memory.js +155 -0
- package/dist/memory/stores/working-memory.d.ts +225 -0
- package/dist/memory/stores/working-memory.d.ts.map +1 -0
- package/dist/memory/stores/working-memory.js +336 -0
- package/dist/memory/utils/deep-clone.d.ts +10 -0
- package/dist/memory/utils/deep-clone.d.ts.map +1 -0
- package/dist/memory/utils/deep-clone.js +9 -0
- package/dist/memory/utils/index.d.ts +8 -0
- package/dist/memory/utils/index.d.ts.map +1 -0
- package/dist/memory/utils/index.js +7 -0
- package/dist/memory/utils/logger.d.ts +21 -0
- package/dist/memory/utils/logger.d.ts.map +1 -0
- package/dist/memory/utils/logger.js +62 -0
- package/dist/memory/utils/sql-helpers.d.ts +97 -0
- package/dist/memory/utils/sql-helpers.d.ts.map +1 -0
- package/dist/memory/utils/sql-helpers.js +214 -0
- package/dist/memory/utils/validation.d.ts +62 -0
- package/dist/memory/utils/validation.d.ts.map +1 -0
- package/dist/memory/utils/validation.js +244 -0
- package/dist/memory/vector/index.d.ts +12 -0
- package/dist/memory/vector/index.d.ts.map +1 -0
- package/dist/memory/vector/index.js +14 -0
- package/dist/memory/vector/vector-memory-service.d.ts +88 -0
- package/dist/memory/vector/vector-memory-service.d.ts.map +1 -0
- package/dist/memory/vector/vector-memory-service.js +335 -0
- package/dist/observability/logger.d.ts +79 -0
- package/dist/observability/logger.d.ts.map +1 -0
- package/dist/observability/logger.js +165 -0
- package/dist/observability/metrics.d.ts +43 -0
- package/dist/observability/metrics.d.ts.map +1 -0
- package/dist/observability/metrics.js +197 -0
- package/dist/observability/query.d.ts +150 -0
- package/dist/observability/query.d.ts.map +1 -0
- package/dist/observability/query.js +339 -0
- package/dist/observability/types.d.ts +140 -0
- package/dist/observability/types.d.ts.map +1 -0
- package/dist/observability/types.js +6 -0
- package/dist/orchestration/agent.d.ts +98 -0
- package/dist/orchestration/agent.d.ts.map +1 -0
- package/dist/orchestration/agent.js +6 -0
- package/dist/orchestration/defaults.d.ts +21 -0
- package/dist/orchestration/defaults.d.ts.map +1 -0
- package/dist/orchestration/defaults.js +22 -0
- package/dist/orchestration/memory-integration.d.ts +58 -0
- package/dist/orchestration/memory-integration.d.ts.map +1 -0
- package/dist/orchestration/memory-integration.js +130 -0
- package/dist/orchestration/orchestrator.d.ts +67 -0
- package/dist/orchestration/orchestrator.d.ts.map +1 -0
- package/dist/orchestration/orchestrator.js +174 -0
- package/dist/orchestration/runtime.d.ts +82 -0
- package/dist/orchestration/runtime.d.ts.map +1 -0
- package/dist/orchestration/runtime.js +251 -0
- package/dist/orchestration/streaming-runtime.d.ts +36 -0
- package/dist/orchestration/streaming-runtime.d.ts.map +1 -0
- package/dist/orchestration/streaming-runtime.js +175 -0
- package/dist/orchestration/ticket-agent.d.ts +70 -0
- package/dist/orchestration/ticket-agent.d.ts.map +1 -0
- package/dist/orchestration/ticket-agent.js +146 -0
- package/dist/skills/activation/index.d.ts +7 -0
- package/dist/skills/activation/index.d.ts.map +1 -0
- package/dist/skills/activation/index.js +6 -0
- package/dist/skills/activation/skill-activator.d.ts +68 -0
- package/dist/skills/activation/skill-activator.d.ts.map +1 -0
- package/dist/skills/activation/skill-activator.js +224 -0
- package/dist/skills/catalog/catalog-search.d.ts +55 -0
- package/dist/skills/catalog/catalog-search.d.ts.map +1 -0
- package/dist/skills/catalog/catalog-search.js +111 -0
- package/dist/skills/catalog/catalog-types.d.ts +81 -0
- package/dist/skills/catalog/catalog-types.d.ts.map +1 -0
- package/dist/skills/catalog/catalog-types.js +66 -0
- package/dist/skills/catalog/index.d.ts +9 -0
- package/dist/skills/catalog/index.d.ts.map +1 -0
- package/dist/skills/catalog/index.js +7 -0
- package/dist/skills/catalog/vercel-catalog.d.ts +42 -0
- package/dist/skills/catalog/vercel-catalog.d.ts.map +1 -0
- package/dist/skills/catalog/vercel-catalog.js +189 -0
- package/dist/skills/compat/index.d.ts +9 -0
- package/dist/skills/compat/index.d.ts.map +1 -0
- package/dist/skills/compat/index.js +8 -0
- package/dist/skills/compat/skill-enhancer.d.ts +37 -0
- package/dist/skills/compat/skill-enhancer.d.ts.map +1 -0
- package/dist/skills/compat/skill-enhancer.js +76 -0
- package/dist/skills/compat/tool-mapper.d.ts +61 -0
- package/dist/skills/compat/tool-mapper.d.ts.map +1 -0
- package/dist/skills/compat/tool-mapper.js +168 -0
- package/dist/skills/compat/vercel-compat.d.ts +33 -0
- package/dist/skills/compat/vercel-compat.d.ts.map +1 -0
- package/dist/skills/compat/vercel-compat.js +132 -0
- package/dist/skills/index.d.ts +40 -0
- package/dist/skills/index.d.ts.map +1 -0
- package/dist/skills/index.js +47 -0
- package/dist/skills/integration/agent-skill-provider.d.ts +94 -0
- package/dist/skills/integration/agent-skill-provider.d.ts.map +1 -0
- package/dist/skills/integration/agent-skill-provider.js +161 -0
- package/dist/skills/integration/index.d.ts +7 -0
- package/dist/skills/integration/index.d.ts.map +1 -0
- package/dist/skills/integration/index.js +6 -0
- package/dist/skills/loader/github-loader.d.ts +61 -0
- package/dist/skills/loader/github-loader.d.ts.map +1 -0
- package/dist/skills/loader/github-loader.js +176 -0
- package/dist/skills/loader/index.d.ts +10 -0
- package/dist/skills/loader/index.d.ts.map +1 -0
- package/dist/skills/loader/index.js +9 -0
- package/dist/skills/loader/local-loader.d.ts +56 -0
- package/dist/skills/loader/local-loader.d.ts.map +1 -0
- package/dist/skills/loader/local-loader.js +186 -0
- package/dist/skills/loader/vercel-loader.d.ts +64 -0
- package/dist/skills/loader/vercel-loader.d.ts.map +1 -0
- package/dist/skills/loader/vercel-loader.js +313 -0
- package/dist/skills/loader/vercel-types.d.ts +64 -0
- package/dist/skills/loader/vercel-types.d.ts.map +1 -0
- package/dist/skills/loader/vercel-types.js +55 -0
- package/dist/skills/parser/index.d.ts +7 -0
- package/dist/skills/parser/index.d.ts.map +1 -0
- package/dist/skills/parser/index.js +6 -0
- package/dist/skills/parser/skill-md-parser.d.ts +64 -0
- package/dist/skills/parser/skill-md-parser.d.ts.map +1 -0
- package/dist/skills/parser/skill-md-parser.js +242 -0
- package/dist/skills/registry/index.d.ts +7 -0
- package/dist/skills/registry/index.d.ts.map +1 -0
- package/dist/skills/registry/index.js +6 -0
- package/dist/skills/registry/skill-registry.d.ts +133 -0
- package/dist/skills/registry/skill-registry.d.ts.map +1 -0
- package/dist/skills/registry/skill-registry.js +373 -0
- package/dist/skills/types.d.ts +216 -0
- package/dist/skills/types.d.ts.map +1 -0
- package/dist/skills/types.js +176 -0
- package/dist/templates/agent-spec.d.ts +138 -0
- package/dist/templates/agent-spec.d.ts.map +1 -0
- package/dist/templates/agent-spec.js +138 -0
- package/dist/templates/index.d.ts +56 -0
- package/dist/templates/index.d.ts.map +1 -0
- package/dist/templates/index.js +58 -0
- package/dist/templates/prompt-spec.d.ts +140 -0
- package/dist/templates/prompt-spec.d.ts.map +1 -0
- package/dist/templates/prompt-spec.js +210 -0
- package/dist/templates/skill-spec.d.ts +106 -0
- package/dist/templates/skill-spec.d.ts.map +1 -0
- package/dist/templates/skill-spec.js +119 -0
- package/dist/tools/base.d.ts +74 -0
- package/dist/tools/base.d.ts.map +1 -0
- package/dist/tools/base.js +6 -0
- package/dist/tools/cms/collection-tools.d.ts +36 -0
- package/dist/tools/cms/collection-tools.d.ts.map +1 -0
- package/dist/tools/cms/collection-tools.js +178 -0
- package/dist/tools/cms/factory.d.ts +89 -0
- package/dist/tools/cms/factory.d.ts.map +1 -0
- package/dist/tools/cms/factory.js +462 -0
- package/dist/tools/cms/global-tools.d.ts +21 -0
- package/dist/tools/cms/global-tools.d.ts.map +1 -0
- package/dist/tools/cms/global-tools.js +92 -0
- package/dist/tools/cms/index.d.ts +11 -0
- package/dist/tools/cms/index.d.ts.map +1 -0
- package/dist/tools/cms/index.js +11 -0
- package/dist/tools/cms/media-tools.d.ts +31 -0
- package/dist/tools/cms/media-tools.d.ts.map +1 -0
- package/dist/tools/cms/media-tools.js +140 -0
- package/dist/tools/cms/user-tools.d.ts +31 -0
- package/dist/tools/cms/user-tools.d.ts.map +1 -0
- package/dist/tools/cms/user-tools.js +135 -0
- package/dist/tools/deduplicator.d.ts +19 -0
- package/dist/tools/deduplicator.d.ts.map +1 -0
- package/dist/tools/deduplicator.js +53 -0
- package/dist/tools/document-summarizer.d.ts +11 -0
- package/dist/tools/document-summarizer.d.ts.map +1 -0
- package/dist/tools/document-summarizer.js +82 -0
- package/dist/tools/mcp-adapter.d.ts +66 -0
- package/dist/tools/mcp-adapter.d.ts.map +1 -0
- package/dist/tools/mcp-adapter.js +152 -0
- package/dist/tools/memory/index.d.ts +3 -0
- package/dist/tools/memory/index.d.ts.map +1 -0
- package/dist/tools/memory/index.js +1 -0
- package/dist/tools/memory/store-memory.d.ts +39 -0
- package/dist/tools/memory/store-memory.d.ts.map +1 -0
- package/dist/tools/memory/store-memory.js +94 -0
- package/dist/tools/registry.d.ts +14 -0
- package/dist/tools/registry.d.ts.map +1 -0
- package/dist/tools/registry.js +48 -0
- package/dist/tools/ticket-tools.d.ts +31 -0
- package/dist/tools/ticket-tools.d.ts.map +1 -0
- package/dist/tools/ticket-tools.js +74 -0
- package/dist/tools/web/duck-duck-go.d.ts +52 -0
- package/dist/tools/web/duck-duck-go.d.ts.map +1 -0
- package/dist/tools/web/duck-duck-go.js +202 -0
- package/dist/tools/web/exa.d.ts +34 -0
- package/dist/tools/web/exa.d.ts.map +1 -0
- package/dist/tools/web/exa.js +80 -0
- package/dist/tools/web/index.d.ts +6 -0
- package/dist/tools/web/index.d.ts.map +1 -0
- package/dist/tools/web/index.js +4 -0
- package/dist/tools/web/scraper.d.ts +9 -0
- package/dist/tools/web/scraper.d.ts.map +1 -0
- package/dist/tools/web/scraper.js +118 -0
- package/dist/tools/web/tavily.d.ts +32 -0
- package/dist/tools/web/tavily.d.ts.map +1 -0
- package/dist/tools/web/tavily.js +73 -0
- package/dist/tools/web/types.d.ts +31 -0
- package/dist/tools/web/types.d.ts.map +1 -0
- package/dist/tools/web/types.js +9 -0
- package/package.json +143 -0
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Provider Health Monitor
|
|
3
|
+
*
|
|
4
|
+
* Sliding window of last 100 calls per provider.
|
|
5
|
+
* Tracks latency (p50) and error rate to rank fallback candidates.
|
|
6
|
+
*
|
|
7
|
+
* AnythingLLM lesson: no health checks → silent failures cascade.
|
|
8
|
+
*/
|
|
9
|
+
// Sliding-window size: only the most recent calls per provider are retained.
const WINDOW_SIZE = 100;
// Error-rate thresholds used by getHealth() to classify a provider.
const DEGRADED_ERROR_RATE = 0.1; // 10%
const UNHEALTHY_ERROR_RATE = 0.3; // 30%
// p50-latency thresholds (milliseconds) for the same classification.
const DEGRADED_LATENCY_MS = 5_000;
const UNHEALTHY_LATENCY_MS = 15_000;
|
|
14
|
+
/**
 * Median (50th percentile) of a numeric sample.
 *
 * Returns 0 for an empty sample. For even-length samples this yields the
 * upper of the two middle values (no interpolation) — adequate for the
 * coarse health ranking done here.
 */
function p50(values) {
    if (!values.length) {
        return 0;
    }
    const ascending = Array.from(values).sort((left, right) => left - right);
    return ascending[Math.floor(ascending.length / 2)] ?? 0;
}
|
|
21
|
+
/**
 * Per-provider health monitor.
 *
 * Keeps a sliding window (WINDOW_SIZE entries) of call outcomes for each
 * provider and classifies providers as healthy / degraded / unhealthy from
 * their error rate and p50 latency, so fallback candidates can be ranked.
 */
export class ProviderHealthMonitor {
    // provider name -> array of { latencyMs, error, timestamp } records
    windows = new Map();
    getWindow(provider) {
        const existing = this.windows.get(provider);
        if (existing !== undefined) {
            return existing;
        }
        const created = [];
        this.windows.set(provider, created);
        return created;
    }
    /**
     * Record a completed LLM API call.
     */
    recordCall(provider, latencyMs, error) {
        const window = this.getWindow(provider);
        window.push({ latencyMs, error: error != null, timestamp: Date.now() });
        // Evict the oldest records so the window never exceeds WINDOW_SIZE.
        while (window.length > WINDOW_SIZE) {
            window.shift();
        }
    }
    /**
     * Get health metrics for a provider.
     * Returns 'healthy' with 0 latency if no calls recorded yet.
     */
    getHealth(provider) {
        const window = this.getWindow(provider);
        if (!window.length) {
            return {
                provider,
                status: 'healthy',
                latencyP50Ms: 0,
                errorRate: 0,
                sampleCount: 0,
            };
        }
        let errorCount = 0;
        const successLatencies = [];
        for (const record of window) {
            if (record.error) {
                errorCount += 1;
            }
            else {
                // Latency is only meaningful for calls that completed successfully.
                successLatencies.push(record.latencyMs);
            }
        }
        const errorRate = errorCount / window.length;
        const latencyP50Ms = p50(successLatencies);
        const isUnhealthy = errorRate >= UNHEALTHY_ERROR_RATE || latencyP50Ms >= UNHEALTHY_LATENCY_MS;
        const isDegraded = errorRate >= DEGRADED_ERROR_RATE || latencyP50Ms >= DEGRADED_LATENCY_MS;
        const status = isUnhealthy ? 'unhealthy' : isDegraded ? 'degraded' : 'healthy';
        return { provider, status, latencyP50Ms, errorRate, sampleCount: window.length };
    }
    /**
     * Pick the best provider from a set of candidates.
     * Prefers healthy > degraded > unhealthy, then by p50 latency;
     * earlier candidates win exact ties.
     */
    getBestProvider(candidates) {
        if (candidates.length === 0) {
            throw new Error('No provider candidates provided');
        }
        if (candidates.length === 1) {
            return candidates[0];
        }
        const statusRank = { healthy: 0, degraded: 1, unhealthy: 2 };
        let best = { provider: candidates[0], health: this.getHealth(candidates[0]) };
        for (const candidate of candidates.slice(1)) {
            const health = this.getHealth(candidate);
            const rankDelta = statusRank[health.status] - statusRank[best.health.status];
            // Replace only on a strict improvement, so the first candidate wins ties
            // (same result as the original stable sort).
            if (rankDelta < 0 || (rankDelta === 0 && health.latencyP50Ms < best.health.latencyP50Ms)) {
                best = { provider: candidate, health };
            }
        }
        return best.provider;
    }
    /**
     * Reset health data for a provider (useful for testing).
     */
    reset(provider) {
        this.windows.set(provider, []);
    }
}
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Anthropic Provider
|
|
3
|
+
*
|
|
4
|
+
* Implementation of LLMProvider for Anthropic Claude API
|
|
5
|
+
*/
|
|
6
|
+
import type { Embedding, LLMChatOptions, LLMChunk, LLMEmbedOptions, LLMProvider, LLMProviderConfig, LLMResponse, LLMStreamOptions, Message } from './base.js';
|
|
7
|
+
/**
 * Configuration for the Anthropic provider.
 * Extends the shared LLMProviderConfig with Anthropic-specific options.
 * NOTE(review): this is a generated .d.ts — edits here drift from the sourcemap.
 */
export interface AnthropicProviderConfig extends LLMProviderConfig {
    /** Value sent as the `anthropic-version` header; implementation defaults apply when omitted. */
    apiVersion?: string;
    /** Enable prompt caching by default (5min TTL, 90% cost reduction on cache hits) */
    enableCacheByDefault?: boolean;
}
|
|
12
|
+
/** LLMProvider implementation backed by the Anthropic Claude Messages API. */
export declare class AnthropicProvider implements LLMProvider {
    private config;
    private baseURL;
    constructor(config: AnthropicProviderConfig);
    /** Non-streaming chat completion; resolves with a normalized LLMResponse. */
    chat(messages: Message[], options?: LLMChatOptions): Promise<LLMResponse>;
    /** Not supported by Anthropic — the implementation always rejects; use an embeddings-capable provider. */
    embed(text: string | string[], options?: LLMEmbedOptions): Promise<Embedding | Embedding[]>;
    /** Streaming chat completion; yields incremental LLMChunk values. */
    stream(messages: Message[], options?: LLMStreamOptions): AsyncIterable<LLMChunk>;
    /**
     * Format system messages with optional caching
     * Caches the last system message for maximum benefit
     */
    private formatSystemMessages;
    /**
     * Format tools with optional caching
     * Caches the last tool definition for maximum benefit
     */
    private formatTools;
    private formatMessages;
}
|
|
31
|
+
//# sourceMappingURL=anthropic.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"anthropic.d.ts","sourceRoot":"","sources":["../../../src/llm/providers/anthropic.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,KAAK,EACV,SAAS,EACT,cAAc,EACd,QAAQ,EACR,eAAe,EACf,WAAW,EACX,iBAAiB,EACjB,WAAW,EACX,gBAAgB,EAChB,OAAO,EAER,MAAM,WAAW,CAAA;AAElB,MAAM,WAAW,uBAAwB,SAAQ,iBAAiB;IAChE,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,oFAAoF;IACpF,oBAAoB,CAAC,EAAE,OAAO,CAAA;CAC/B;AAqDD,qBAAa,iBAAkB,YAAW,WAAW;IACnD,OAAO,CAAC,MAAM,CAAyB;IACvC,OAAO,CAAC,OAAO,CAAQ;gBAEX,MAAM,EAAE,uBAAuB;IAKrC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;IA6F/E,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC;IAUpF,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC;IAoGvF;;;OAGG;IACH,OAAO,CAAC,oBAAoB;IA0B5B;;;OAGG;IACH,OAAO,CAAC,WAAW;IAmBnB,OAAO,CAAC,cAAc;CAiBvB"}
|
|
@@ -0,0 +1,248 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Anthropic Provider
|
|
3
|
+
*
|
|
4
|
+
* Implementation of LLMProvider for Anthropic Claude API
|
|
5
|
+
*/
|
|
6
|
+
// Narrowing helpers for the untyped JSON payloads returned by the Anthropic API.
const isRecord = (value) => value !== null && !Array.isArray(value) && typeof value === 'object';
const isTextBlock = (block) => typeof block.text === 'string' && block.type === 'text';
const isToolUseBlock = (block) => block.type === 'tool_use';
// Anthropic wire-format field names (snake_case on the wire).
const maxTokensKey = 'max_tokens';
const inputTokensKey = 'input_tokens';
const outputTokensKey = 'output_tokens';
const stopReasonKey = 'stop_reason';
const cacheCreationTokensKey = 'cache_creation_input_tokens';
const cacheReadTokensKey = 'cache_read_input_tokens';
|
|
15
|
+
export class AnthropicProvider {
|
|
16
|
+
config; // AnthropicProviderConfig captured at construction
baseURL; // API root URL; falls back to the public Anthropic endpoint
constructor(config) {
    this.config = config;
    // `||` (not `??`) so an empty-string baseURL also falls back to the default.
    this.baseURL = config.baseURL || 'https://api.anthropic.com/v1';
}
|
|
22
|
+
/**
 * Non-streaming chat completion against the Anthropic Messages API.
 *
 * System messages are split out of `messages` (Anthropic takes them as a
 * top-level `system` field), system content and tools may be annotated for
 * prompt caching, and the raw response is normalized into the shared
 * LLMResponse shape (camelCase usage fields, OpenAI-style toolCalls).
 */
async chat(messages, options) {
    // Anthropic API format is slightly different
    const systemMessages = messages.filter((m) => m.role === 'system');
    const conversationMessages = messages.filter((m) => m.role !== 'system');
    // Per-call override wins over the provider-level default.
    const enableCache = options?.enableCache ?? this.config.enableCacheByDefault ?? false;
    // Use 2024-07-15 API version for prompt caching support
    // NOTE(review): prompt caching was shipped by Anthropic as the
    // 'anthropic-beta: prompt-caching-2024-07-31' header, not as an
    // 'anthropic-version' value — confirm '2024-07-15' against current API docs.
    const apiVersion = enableCache ? '2024-07-15' : this.config.apiVersion || '2023-06-01';
    // Format system messages with caching
    const systemContent = this.formatSystemMessages(systemMessages, enableCache);
    // Format tools with caching (cache last tool if enabled)
    const tools = this.formatTools(options?.tools, enableCache);
    const response = await fetch(`${this.baseURL}/messages`, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'x-api-key': this.config.apiKey,
            'anthropic-version': apiVersion,
        },
        body: JSON.stringify({
            model: this.config.model || 'claude-3-5-sonnet-20241022',
            system: systemContent,
            messages: this.formatMessages(conversationMessages),
            temperature: options?.temperature ?? this.config.temperature ?? 0.7,
            [maxTokensKey]: options?.maxTokens ?? this.config.maxTokens ?? 4096,
            tools,
        }),
    });
    if (!response.ok) {
        // Prefer the API's structured error message; fall back to the HTTP status text
        // when the error body is missing or not JSON.
        const errorPayload = (await response.json().catch(() => undefined));
        const errorMessage = isRecord(errorPayload) &&
            isRecord(errorPayload.error) &&
            typeof errorPayload.error.message === 'string'
            ? errorPayload.error.message
            : response.statusText;
        throw new Error(`Anthropic API error: ${errorMessage}`);
    }
    const data = (await response.json());
    const contentBlocks = Array.isArray(data.content)
        ? data.content
        : [];
    // Only the FIRST text block is returned as `content`; additional text blocks are dropped.
    const textBlock = contentBlocks.find(isTextBlock);
    // Map Anthropic tool_use blocks onto OpenAI-style function tool calls.
    const toolCalls = contentBlocks.filter(isToolUseBlock).map((tc) => ({
        id: tc.id,
        type: 'function',
        function: {
            name: tc.name,
            arguments: JSON.stringify(tc.input),
        },
    }));
    // Usage fields are snake_case on the wire; each resolves to undefined when
    // absent or mistyped rather than throwing.
    const usage = data.usage && typeof data.usage === 'object'
        ? data.usage
        : undefined;
    const inputTokens = usage && typeof usage[inputTokensKey] === 'number' ? usage[inputTokensKey] : undefined;
    const outputTokens = usage && typeof usage[outputTokensKey] === 'number' ? usage[outputTokensKey] : undefined;
    const cacheCreationTokens = usage && typeof usage[cacheCreationTokensKey] === 'number'
        ? usage[cacheCreationTokensKey]
        : undefined;
    const cacheReadTokens = usage && typeof usage[cacheReadTokensKey] === 'number' ? usage[cacheReadTokensKey] : undefined;
    const finishReason = typeof data[stopReasonKey] === 'string'
        ? data[stopReasonKey]
        : undefined;
    return {
        content: textBlock?.text || '',
        role: 'assistant',
        toolCalls,
        finishReason,
        // `usage` is only reported when both token counts are present.
        usage: inputTokens !== undefined && outputTokens !== undefined
            ? {
                promptTokens: inputTokens,
                completionTokens: outputTokens,
                totalTokens: inputTokens + outputTokens,
                cacheCreationTokens,
                cacheReadTokens,
            }
            : undefined,
    };
}
|
|
99
|
+
embed(text, options) {
|
|
100
|
+
void text;
|
|
101
|
+
void options;
|
|
102
|
+
// Anthropic doesn't have a separate embeddings API
|
|
103
|
+
// Would need to use a different provider or service
|
|
104
|
+
return Promise.reject(new Error('Anthropic does not support embeddings. Use OpenAI provider for embeddings.'));
|
|
105
|
+
}
|
|
106
|
+
    /**
     * Stream a chat completion from the Anthropic Messages API.
     *
     * Splits system messages out of the conversation (Anthropic takes them
     * via the top-level `system` field), POSTs with `stream: true`, and
     * parses the SSE response line-by-line, yielding LLMChunk objects.
     * Only `text_delta` content is surfaced; a terminal
     * `{ content: '', done: true }` chunk is yielded on `message_stop`,
     * `[DONE]`, or when the network stream ends.
     *
     * NOTE(review): tool-call deltas and usage events in the stream are not
     * surfaced to callers, and any partial line left in `buffer` when the
     * reader reports `done` is discarded — confirm this is acceptable.
     */
    async *stream(messages, options) {
        // System prompts are sent separately from the conversation turns.
        const systemMessages = messages.filter((m) => m.role === 'system');
        const conversationMessages = messages.filter((m) => m.role !== 'system');
        // Per-call flag wins over the provider-level default.
        const enableCache = options?.enableCache ?? this.config.enableCacheByDefault ?? false;
        // Use 2024-07-15 API version for prompt caching support
        // NOTE(review): '2024-07-15' does not appear in Anthropic's published
        // `anthropic-version` list; prompt caching was originally gated behind
        // an `anthropic-beta` header — confirm against Anthropic's versioning docs.
        const apiVersion = enableCache ? '2024-07-15' : this.config.apiVersion || '2023-06-01';
        // Format system messages with caching
        const systemContent = this.formatSystemMessages(systemMessages, enableCache);
        // Format tools with caching
        const tools = this.formatTools(options?.tools, enableCache);
        const response = await fetch(`${this.baseURL}/messages`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'x-api-key': this.config.apiKey,
                'anthropic-version': apiVersion,
            },
            body: JSON.stringify({
                model: this.config.model || 'claude-3-5-sonnet-20241022',
                system: systemContent,
                messages: this.formatMessages(conversationMessages),
                temperature: options?.temperature ?? this.config.temperature ?? 0.7,
                // maxTokensKey is a module-level constant naming the API's
                // token-limit field (not visible in this chunk).
                [maxTokensKey]: options?.maxTokens ?? this.config.maxTokens ?? 4096,
                tools,
                stream: true,
            }),
        });
        if (!response.ok) {
            // Prefer the structured API error message; fall back to the HTTP
            // status text when the body is absent or not JSON.
            const errorPayload = (await response.json().catch(() => undefined));
            const errorMessage = isRecord(errorPayload) &&
                isRecord(errorPayload.error) &&
                typeof errorPayload.error.message === 'string'
                ? errorPayload.error.message
                : response.statusText;
            throw new Error(`Anthropic API error: ${errorMessage}`);
        }
        const reader = response.body?.getReader();
        const decoder = new TextDecoder();
        if (!reader) {
            throw new Error('Response body is not readable');
        }
        // Accumulates partial SSE lines across network reads.
        let buffer = '';
        while (true) {
            const { done, value } = await reader.read();
            if (done) {
                // Stream ended without an explicit stop event.
                yield { content: '', done: true };
                break;
            }
            buffer += decoder.decode(value, { stream: true });
            const lines = buffer.split('\n');
            // The last element may be a partial line; keep it for the next read.
            buffer = lines.pop() || '';
            for (const line of lines) {
                if (line.startsWith('data: ')) {
                    const data = line.slice(6);
                    if (data === '[DONE]') {
                        yield { content: '', done: true };
                        return;
                    }
                    try {
                        const parsed = JSON.parse(data);
                        if (!isRecord(parsed)) {
                            continue;
                        }
                        const eventType = typeof parsed.type === 'string' ? parsed.type : undefined;
                        if (eventType === 'content_block_delta' && isRecord(parsed.delta)) {
                            const deltaType = typeof parsed.delta.type === 'string' ? parsed.delta.type : undefined;
                            if (deltaType === 'text_delta') {
                                yield {
                                    content: typeof parsed.delta.text === 'string' ? parsed.delta.text : '',
                                    done: false,
                                };
                            }
                        }
                        else if (eventType === 'message_stop') {
                            yield { content: '', done: true };
                            return;
                        }
                    }
                    catch {
                        // Ignore parse errors for incomplete chunks
                    }
                }
            }
        }
    }
|
|
191
|
+
/**
|
|
192
|
+
* Format system messages with optional caching
|
|
193
|
+
* Caches the last system message for maximum benefit
|
|
194
|
+
*/
|
|
195
|
+
formatSystemMessages(systemMessages, enableCache) {
|
|
196
|
+
if (systemMessages.length === 0) {
|
|
197
|
+
return '';
|
|
198
|
+
}
|
|
199
|
+
// If caching disabled, use simple string format
|
|
200
|
+
if (!enableCache) {
|
|
201
|
+
return systemMessages.map((m) => m.content).join('\n');
|
|
202
|
+
}
|
|
203
|
+
// With caching, use structured format and cache the last block
|
|
204
|
+
return systemMessages.map((msg, index) => ({
|
|
205
|
+
type: 'text',
|
|
206
|
+
text: msg.content,
|
|
207
|
+
// Cache the last system message (most likely to be reused)
|
|
208
|
+
...(index === systemMessages.length - 1 && msg.cacheControl
|
|
209
|
+
? { cache_control: msg.cacheControl }
|
|
210
|
+
: index === systemMessages.length - 1
|
|
211
|
+
? { cache_control: { type: 'ephemeral' } }
|
|
212
|
+
: {}),
|
|
213
|
+
}));
|
|
214
|
+
}
|
|
215
|
+
/**
|
|
216
|
+
* Format tools with optional caching
|
|
217
|
+
* Caches the last tool definition for maximum benefit
|
|
218
|
+
*/
|
|
219
|
+
formatTools(tools, enableCache) {
|
|
220
|
+
if (!tools || tools.length === 0) {
|
|
221
|
+
return undefined;
|
|
222
|
+
}
|
|
223
|
+
return tools.map((tool, index) => ({
|
|
224
|
+
name: tool.function.name,
|
|
225
|
+
description: tool.function.description,
|
|
226
|
+
input_schema: tool.function.parameters,
|
|
227
|
+
// Cache the last tool (most likely to be reused across calls)
|
|
228
|
+
...(enableCache && index === tools.length - 1
|
|
229
|
+
? { cache_control: { type: 'ephemeral' } }
|
|
230
|
+
: {}),
|
|
231
|
+
}));
|
|
232
|
+
}
|
|
233
|
+
formatMessages(messages) {
|
|
234
|
+
return messages
|
|
235
|
+
.map((msg) => {
|
|
236
|
+
if (msg.role === 'system') {
|
|
237
|
+
// System messages are handled separately in Anthropic API
|
|
238
|
+
return null;
|
|
239
|
+
}
|
|
240
|
+
const formatted = {
|
|
241
|
+
role: msg.role === 'assistant' ? 'assistant' : 'user',
|
|
242
|
+
content: msg.content,
|
|
243
|
+
};
|
|
244
|
+
return formatted;
|
|
245
|
+
})
|
|
246
|
+
.filter((message) => Boolean(message));
|
|
247
|
+
}
|
|
248
|
+
}
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
/**
 * LLM Provider Base Interface
 *
 * Abstract interface for all LLM providers (OpenAI, Anthropic, etc.)
 */
/** A single conversation message exchanged with a provider. */
export interface Message {
    role: 'system' | 'user' | 'assistant' | 'tool';
    /** Plain-text body of the message. */
    content: string;
    name?: string;
    /** Tool invocations requested by an assistant message. */
    toolCalls?: ToolCall[];
    /** Presumably the id of the ToolCall this message answers (role 'tool') — confirm against callers. */
    toolCallId?: string;
    /** Anthropic prompt caching - marks content for caching (5min TTL, 90% cost reduction) */
    cacheControl?: {
        type: 'ephemeral';
    };
}
/** A tool (function) invocation requested by the model. */
export interface ToolCall {
    id: string;
    type: 'function';
    function: {
        name: string;
        /** Serialized call arguments — presumably a JSON string; confirm against provider implementations. */
        arguments: string;
    };
}
/** Non-streaming chat completion result. */
export interface LLMResponse {
    content: string;
    role: 'assistant';
    toolCalls?: ToolCall[];
    /** Why generation ended, when the provider reports it. */
    finishReason?: 'stop' | 'length' | 'tool_calls' | 'content_filter';
    /** Token accounting, when the provider reports it. */
    usage?: {
        promptTokens: number;
        completionTokens: number;
        totalTokens: number;
        /** Anthropic cache stats */
        cacheCreationTokens?: number;
        cacheReadTokens?: number;
    };
}
/** A single embedding vector plus the model that produced it. */
export interface Embedding {
    vector: number[];
    /** Length of `vector`. */
    dimension: number;
    model: string;
}
/** One increment of a streamed chat completion. */
export interface LLMChunk {
    /** Text delta for this chunk (may be empty on the terminal chunk). */
    content: string;
    /** True on the final chunk of the stream. */
    done: boolean;
    toolCalls?: ToolCall[];
}
|
|
49
|
+
/** Common constructor options shared by all providers. */
export interface LLMProviderConfig {
    apiKey: string;
    /** Override the provider's default API endpoint. */
    baseURL?: string;
    /** Override the provider's default model. */
    model?: string;
    temperature?: number;
    maxTokens?: number;
}
/**
 * Base interface for LLM providers
 */
export interface LLMProvider {
    /**
     * Chat completion (single request/response).
     */
    chat(messages: Message[], options?: LLMChatOptions): Promise<LLMResponse>;
    /**
     * Generate embeddings; an array input yields an array result.
     * Not every provider supports this — some implementations reject.
     */
    embed(text: string | string[], options?: LLMEmbedOptions): Promise<Embedding | Embedding[]>;
    /**
     * Stream chat completion as incremental LLMChunk values.
     */
    stream(messages: Message[], options?: LLMStreamOptions): AsyncIterable<LLMChunk>;
}
|
|
73
|
+
/** Per-call options for LLMProvider.chat(). */
export interface LLMChatOptions {
    temperature?: number;
    maxTokens?: number;
    /** Tool definitions the model may call. */
    tools?: ToolDefinition[];
    /** Tool selection: 'auto', 'none', or force one named function. */
    toolChoice?: 'auto' | 'none' | {
        type: 'function';
        function: {
            name: string;
        };
    };
    /** Enable prompt caching (Anthropic only) - caches system prompts and tools */
    enableCache?: boolean;
    /**
     * Extended thinking token budget (Anthropic only).
     * Enables Claude's internal reasoning before responding.
     * Typical values: 512 (minimal) → 31999 (xhigh). 0 or undefined = disabled.
     */
    thinkingBudget?: number;
}
/** Per-call options for LLMProvider.embed(). */
export interface LLMEmbedOptions {
    /** Embedding model override. */
    model?: string;
}
/** Per-call options for LLMProvider.stream(). */
export interface LLMStreamOptions {
    temperature?: number;
    maxTokens?: number;
    tools?: ToolDefinition[];
    /** Enable prompt caching (Anthropic only) - caches system prompts and tools */
    enableCache?: boolean;
}
/** OpenAI-style function tool description. */
export interface ToolDefinition {
    type: 'function';
    function: {
        name: string;
        description: string;
        /** Presumably a JSON Schema object describing the arguments — confirm against callers. */
        parameters: Record<string, unknown>;
    };
}
/** Reasons a completion can stop; mirrors LLMResponse.finishReason. */
export type FinishReason = 'stop' | 'length' | 'tool_calls' | 'content_filter';
|
|
111
|
+
//# sourceMappingURL=base.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"base.d.ts","sourceRoot":"","sources":["../../../src/llm/providers/base.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,MAAM,WAAW,OAAO;IACtB,IAAI,EAAE,QAAQ,GAAG,MAAM,GAAG,WAAW,GAAG,MAAM,CAAA;IAC9C,OAAO,EAAE,MAAM,CAAA;IACf,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,SAAS,CAAC,EAAE,QAAQ,EAAE,CAAA;IACtB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,0FAA0F;IAC1F,YAAY,CAAC,EAAE;QAAE,IAAI,EAAE,WAAW,CAAA;KAAE,CAAA;CACrC;AAED,MAAM,WAAW,QAAQ;IACvB,EAAE,EAAE,MAAM,CAAA;IACV,IAAI,EAAE,UAAU,CAAA;IAChB,QAAQ,EAAE;QACR,IAAI,EAAE,MAAM,CAAA;QACZ,SAAS,EAAE,MAAM,CAAA;KAClB,CAAA;CACF;AAED,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,MAAM,CAAA;IACf,IAAI,EAAE,WAAW,CAAA;IACjB,SAAS,CAAC,EAAE,QAAQ,EAAE,CAAA;IACtB,YAAY,CAAC,EAAE,MAAM,GAAG,QAAQ,GAAG,YAAY,GAAG,gBAAgB,CAAA;IAClE,KAAK,CAAC,EAAE;QACN,YAAY,EAAE,MAAM,CAAA;QACpB,gBAAgB,EAAE,MAAM,CAAA;QACxB,WAAW,EAAE,MAAM,CAAA;QACnB,4BAA4B;QAC5B,mBAAmB,CAAC,EAAE,MAAM,CAAA;QAC5B,eAAe,CAAC,EAAE,MAAM,CAAA;KACzB,CAAA;CACF;AAED,MAAM,WAAW,SAAS;IACxB,MAAM,EAAE,MAAM,EAAE,CAAA;IAChB,SAAS,EAAE,MAAM,CAAA;IACjB,KAAK,EAAE,MAAM,CAAA;CACd;AAED,MAAM,WAAW,QAAQ;IACvB,OAAO,EAAE,MAAM,CAAA;IACf,IAAI,EAAE,OAAO,CAAA;IACb,SAAS,CAAC,EAAE,QAAQ,EAAE,CAAA;CACvB;AAED,MAAM,WAAW,iBAAiB;IAChC,MAAM,EAAE,MAAM,CAAA;IACd,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,WAAW,CAAC,EAAE,MAAM,CAAA;IACpB,SAAS,CAAC,EAAE,MAAM,CAAA;CACnB;AAED;;GAEG;AACH,MAAM,WAAW,WAAW;IAC1B;;OAEG;IACH,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC,CAAA;IAEzE;;OAEG;IACH,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,OAAO,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC,CAAA;IAE3F;;OAEG;IACH,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC,CAAA;CACjF;AAED,MAAM,WAAW,cAAc;IAC7B,WAAW,CAAC,EAAE,MAAM,CAAA;IACpB,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,KAAK,CAAC,EAAE,cAAc,EAAE,CAAA;IACxB,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,GAAG;QAAE,IAAI,EAAE,UAAU,CAAC;QAAC,QAAQ,EAAE;YAAE,IAAI,EAAE,MAAM,CAAA;SAAE,CAAA;KAAE,CAAA;IAC/E,+EAA+E;IAC/E,WAAW,CAAC,EAAE,OAAO,CAAA;IACrB;;;;OAIG;IACH,cAAc,
CAAC,EAAE,MAAM,CAAA;CACxB;AAED,MAAM,WAAW,eAAe;IAC9B,KAAK,CAAC,EAAE,MAAM,CAAA;CACf;AAED,MAAM,WAAW,gBAAgB;IAC/B,WAAW,CAAC,EAAE,MAAM,CAAA;IACpB,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,KAAK,CAAC,EAAE,cAAc,EAAE,CAAA;IACxB,+EAA+E;IAC/E,WAAW,CAAC,EAAE,OAAO,CAAA;CACtB;AAED,MAAM,WAAW,cAAc;IAC7B,IAAI,EAAE,UAAU,CAAA;IAChB,QAAQ,EAAE;QACR,IAAI,EAAE,MAAM,CAAA;QACZ,WAAW,EAAE,MAAM,CAAA;QACnB,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;KACpC,CAAA;CACF;AAED,MAAM,MAAM,YAAY,GAAG,MAAM,GAAG,QAAQ,GAAG,YAAY,GAAG,gBAAgB,CAAA"}
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
/**
 * Groq Provider
 *
 * Thin wrapper over OpenAIProvider using Groq's OpenAI-compatible API.
 * Free tier: 6,000 TPM / 500k TPD for Llama 3.3 70B.
 * Sign up: console.groq.com
 */
import type { Embedding, LLMChatOptions, LLMChunk, LLMEmbedOptions, LLMProvider, LLMProviderConfig, LLMResponse, LLMStreamOptions, Message } from './base.js';
/** Constructor options; apiKey is required (unlike Ollama). */
export interface GroqProviderConfig extends Omit<LLMProviderConfig, 'apiKey'> {
    apiKey: string;
    /** Defaults to https://api.groq.com/openai/v1 */
    baseURL?: string;
    /** Defaults to llama-3.3-70b-versatile */
    model?: string;
}
export declare class GroqProvider implements LLMProvider {
    /** Underlying OpenAI-compatible client pointed at Groq's endpoint. */
    private inner;
    constructor(config: GroqProviderConfig);
    /** Chat completion, delegated to the OpenAI-compatible client. */
    chat(messages: Message[], options?: LLMChatOptions): Promise<LLMResponse>;
    /** Streaming chat completion, delegated to the OpenAI-compatible client. */
    stream(messages: Message[], options?: LLMStreamOptions): AsyncIterable<LLMChunk>;
    /** Not supported — Groq has no embeddings endpoint; this always fails. */
    embed(_text: string | string[], _options?: LLMEmbedOptions): Promise<Embedding | Embedding[]>;
}
|
|
23
|
+
//# sourceMappingURL=groq.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"groq.d.ts","sourceRoot":"","sources":["../../../src/llm/providers/groq.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,EACV,SAAS,EACT,cAAc,EACd,QAAQ,EACR,eAAe,EACf,WAAW,EACX,iBAAiB,EACjB,WAAW,EACX,gBAAgB,EAChB,OAAO,EACR,MAAM,WAAW,CAAA;AAGlB,MAAM,WAAW,kBAAmB,SAAQ,IAAI,CAAC,iBAAiB,EAAE,QAAQ,CAAC;IAC3E,MAAM,EAAE,MAAM,CAAA;IACd,iDAAiD;IACjD,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,0CAA0C;IAC1C,KAAK,CAAC,EAAE,MAAM,CAAA;CACf;AAED,qBAAa,YAAa,YAAW,WAAW;IAC9C,OAAO,CAAC,KAAK,CAAgB;gBAEjB,MAAM,EAAE,kBAAkB;IAQtC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;IAIzE,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC;IAIhF,KAAK,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,QAAQ,CAAC,EAAE,eAAe,GAAG,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC;CAG9F"}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Groq Provider
|
|
3
|
+
*
|
|
4
|
+
* Thin wrapper over OpenAIProvider using Groq's OpenAI-compatible API.
|
|
5
|
+
* Free tier: 6,000 TPM / 500k TPD for Llama 3.3 70B.
|
|
6
|
+
* Sign up: console.groq.com
|
|
7
|
+
*/
|
|
8
|
+
import { OpenAIProvider } from './openai.js';
|
|
9
|
+
export class GroqProvider {
|
|
10
|
+
inner;
|
|
11
|
+
constructor(config) {
|
|
12
|
+
this.inner = new OpenAIProvider({
|
|
13
|
+
...config,
|
|
14
|
+
baseURL: config.baseURL ?? 'https://api.groq.com/openai/v1',
|
|
15
|
+
model: config.model ?? 'llama-3.3-70b-versatile',
|
|
16
|
+
});
|
|
17
|
+
}
|
|
18
|
+
chat(messages, options) {
|
|
19
|
+
return this.inner.chat(messages, options);
|
|
20
|
+
}
|
|
21
|
+
stream(messages, options) {
|
|
22
|
+
return this.inner.stream(messages, options);
|
|
23
|
+
}
|
|
24
|
+
embed(_text, _options) {
|
|
25
|
+
throw new Error('Groq does not support embeddings. Use Ollama or HuggingFace.');
|
|
26
|
+
}
|
|
27
|
+
}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
/**
 * Ollama Provider
 *
 * Local inference via Ollama's OpenAI-compatible API (http://localhost:11434/v1).
 * No API key required. Zero cost, fully offline.
 * Install: https://ollama.com
 */
import type { Embedding, LLMChatOptions, LLMChunk, LLMEmbedOptions, LLMProvider, LLMProviderConfig, LLMResponse, LLMStreamOptions, Message } from './base.js';
/** Constructor options; apiKey is optional since Ollama ignores it. */
export interface OllamaProviderConfig extends Omit<LLMProviderConfig, 'apiKey'> {
    apiKey?: string;
    /** Defaults to http://localhost:11434/v1 */
    baseURL?: string;
    /** Chat model. Defaults to llama3.2:3b — run `ollama pull llama3.2:3b` first */
    model?: string;
    /** Embedding model. Defaults to nomic-embed-text — run `ollama pull nomic-embed-text` first */
    embedModel?: string;
}
export declare class OllamaProvider implements LLMProvider {
    /** OpenAI-compatible client pointed at the local Ollama endpoint. */
    private inner;
    /** Model used by embed(); independent of the chat model. */
    private embedModel;
    /** Resolved endpoint, also used directly for the /embeddings call. */
    private baseURL;
    constructor(config: OllamaProviderConfig);
    /** Chat completion, delegated to the OpenAI-compatible client. */
    chat(messages: Message[], options?: LLMChatOptions): Promise<LLMResponse>;
    /** Streaming chat completion, delegated to the OpenAI-compatible client. */
    stream(messages: Message[], options?: LLMStreamOptions): AsyncIterable<LLMChunk>;
    /** Embeddings via Ollama's /embeddings endpoint; array in → array out. */
    embed(text: string | string[], _options?: LLMEmbedOptions): Promise<Embedding | Embedding[]>;
}
|
|
27
|
+
//# sourceMappingURL=ollama.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"ollama.d.ts","sourceRoot":"","sources":["../../../src/llm/providers/ollama.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,EACV,SAAS,EACT,cAAc,EACd,QAAQ,EACR,eAAe,EACf,WAAW,EACX,iBAAiB,EACjB,WAAW,EACX,gBAAgB,EAChB,OAAO,EACR,MAAM,WAAW,CAAA;AAGlB,MAAM,WAAW,oBAAqB,SAAQ,IAAI,CAAC,iBAAiB,EAAE,QAAQ,CAAC;IAC7E,MAAM,CAAC,EAAE,MAAM,CAAA;IACf,4CAA4C;IAC5C,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,gFAAgF;IAChF,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,+FAA+F;IAC/F,UAAU,CAAC,EAAE,MAAM,CAAA;CACpB;AAED,qBAAa,cAAe,YAAW,WAAW;IAChD,OAAO,CAAC,KAAK,CAAgB;IAC7B,OAAO,CAAC,UAAU,CAAQ;IAC1B,OAAO,CAAC,OAAO,CAAQ;gBAEX,MAAM,EAAE,oBAAoB;IAaxC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;IAIzE,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC;IAI1E,KAAK,CACT,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,EACvB,QAAQ,CAAC,EAAE,eAAe,GACzB,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC;CAsBpC"}
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Ollama Provider
|
|
3
|
+
*
|
|
4
|
+
* Local inference via Ollama's OpenAI-compatible API (http://localhost:11434/v1).
|
|
5
|
+
* No API key required. Zero cost, fully offline.
|
|
6
|
+
* Install: https://ollama.com
|
|
7
|
+
*/
|
|
8
|
+
import { OpenAIProvider } from './openai.js';
|
|
9
|
+
export class OllamaProvider {
    /** OpenAI-compatible client pointed at the local Ollama endpoint. */
    inner;
    /** Model used for embeddings (independent of the chat model). */
    embedModel;
    /** Resolved endpoint, also used directly for the /embeddings call. */
    baseURL;
    /**
     * @param config Optional overrides; everything has a local-friendly
     * default so `new OllamaProvider({})` works out of the box.
     */
    constructor(config) {
        const baseURL = config.baseURL ?? 'http://localhost:11434/v1';
        this.baseURL = baseURL;
        this.embedModel = config.embedModel ?? 'nomic-embed-text';
        this.inner = new OpenAIProvider({
            ...config,
            // Ollama ignores the API key but the OpenAI client requires a non-empty value
            apiKey: config.apiKey ?? 'ollama',
            baseURL,
            model: config.model ?? 'llama3.2:3b',
        });
    }
    /** Chat completion, delegated to the OpenAI-compatible client. */
    chat(messages, options) {
        return this.inner.chat(messages, options);
    }
    /** Streaming chat completion, delegated to the OpenAI-compatible client. */
    stream(messages, options) {
        return this.inner.stream(messages, options);
    }
    /**
     * Generate embeddings via Ollama's OpenAI-compatible /embeddings endpoint.
     * A string input returns one Embedding; an array returns an array.
     *
     * @throws Error on a non-OK HTTP response, or when the response does not
     * contain exactly one embedding per input (e.g. the embedding model has
     * not been pulled). Previously a malformed response silently produced
     * `undefined` / empty vectors.
     */
    async embed(text, _options) {
        const texts = Array.isArray(text) ? text : [text];
        const response = await fetch(`${this.baseURL}/embeddings`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ model: this.embedModel, input: texts }),
        });
        if (!response.ok) {
            throw new Error(`Ollama embeddings error: ${response.statusText}`);
        }
        const data = (await response.json());
        const items = Array.isArray(data?.data) ? data.data : [];
        // Fail loudly instead of returning undefined or empty vectors when the
        // payload is malformed or incomplete.
        if (items.length !== texts.length) {
            throw new Error(`Ollama embeddings error: expected ${texts.length} embeddings, got ${items.length}`);
        }
        const embeddings = items.map((item) => {
            const vector = Array.isArray(item?.embedding) ? item.embedding : [];
            return { vector, dimension: vector.length, model: this.embedModel };
        });
        return Array.isArray(text) ? embeddings : embeddings[0];
    }
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
/**
 * OpenAI Provider
 *
 * Implementation of LLMProvider for OpenAI API
 */
import type { Embedding, LLMChatOptions, LLMChunk, LLMEmbedOptions, LLMProvider, LLMProviderConfig, LLMResponse, LLMStreamOptions, Message } from './base.js';
export interface OpenAIProviderConfig extends LLMProviderConfig {
    /** OpenAI organization id — presumably forwarded with requests; confirm in openai.js. */
    organization?: string;
}
export declare class OpenAIProvider implements LLMProvider {
    private config;
    /** Resolved API base URL. */
    private baseURL;
    constructor(config: OpenAIProviderConfig);
    /** Chat completion against the OpenAI (or compatible) API. */
    chat(messages: Message[], options?: LLMChatOptions): Promise<LLMResponse>;
    /** Embeddings; a string input returns one Embedding, an array returns an array. */
    embed(text: string | string[], options?: LLMEmbedOptions): Promise<Embedding | Embedding[]>;
    /** Streaming chat completion yielding incremental LLMChunk values. */
    stream(messages: Message[], options?: LLMStreamOptions): AsyncIterable<LLMChunk>;
    private formatMessages;
}
|
|
19
|
+
//# sourceMappingURL=openai.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../../src/llm/providers/openai.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,KAAK,EACV,SAAS,EAET,cAAc,EACd,QAAQ,EACR,eAAe,EACf,WAAW,EACX,iBAAiB,EACjB,WAAW,EACX,gBAAgB,EAChB,OAAO,EAER,MAAM,WAAW,CAAA;AAElB,MAAM,WAAW,oBAAqB,SAAQ,iBAAiB;IAC7D,YAAY,CAAC,EAAE,MAAM,CAAA;CACtB;AAsDD,qBAAa,cAAe,YAAW,WAAW;IAChD,OAAO,CAAC,MAAM,CAAsB;IACpC,OAAO,CAAC,OAAO,CAAQ;gBAEX,MAAM,EAAE,oBAAoB;IAKlC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC;IAgFzE,KAAK,CACT,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,EACvB,OAAO,CAAC,EAAE,eAAe,GACxB,OAAO,CAAC,SAAS,GAAG,SAAS,EAAE,CAAC;IA2C5B,MAAM,CAAC,QAAQ,EAAE,OAAO,EAAE,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,aAAa,CAAC,QAAQ,CAAC;IA0FvF,OAAO,CAAC,cAAc;CA0BvB"}
|