synapsekit 0.6.2__tar.gz → 0.6.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {synapsekit-0.6.2 → synapsekit-0.6.3}/CHANGELOG.md +32 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/PKG-INFO +1 -1
- {synapsekit-0.6.2 → synapsekit-0.6.3}/pyproject.toml +2 -1
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/__init__.py +19 -1
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/__init__.py +6 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/tools/__init__.py +6 -0
- synapsekit-0.6.3/src/synapsekit/agents/tools/sentiment.py +57 -0
- synapsekit-0.6.3/src/synapsekit/agents/tools/summarization.py +79 -0
- synapsekit-0.6.3/src/synapsekit/agents/tools/translation.py +77 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/graph/__init__.py +9 -1
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/graph/compiled.py +46 -5
- synapsekit-0.6.3/src/synapsekit/graph/fan_out.py +72 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/graph/graph.py +12 -2
- synapsekit-0.6.3/src/synapsekit/graph/state.py +73 -0
- synapsekit-0.6.3/src/synapsekit/graph/streaming.py +126 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/graph/subgraph.py +1 -1
- synapsekit-0.6.3/src/synapsekit/llm/_semantic_cache.py +107 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/memory/hybrid.py +2 -1
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/memory/sqlite.py +3 -3
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/test_v060_features.py +1 -1
- synapsekit-0.6.3/tests/test_v063_features.py +472 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/uv.lock +1 -1
- synapsekit-0.6.2/src/synapsekit/graph/state.py +0 -9
- {synapsekit-0.6.2 → synapsekit-0.6.3}/.github/DISCUSSION_TEMPLATE/ideas.yml +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/.github/ISSUE_TEMPLATE/bug_report.yml +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/.github/ISSUE_TEMPLATE/config.yml +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/.github/ISSUE_TEMPLATE/feature_request.yml +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/.github/PULL_REQUEST_TEMPLATE.md +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/.github/profile/README.md +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/.github/workflows/ci.yml +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/.gitignore +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/.pre-commit-config.yaml +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/CODE_OF_CONDUCT.md +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/CONTRIBUTING.md +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/LICENSE +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/Makefile +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/README.md +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/SECURITY.md +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/assets/banner.svg +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/assets/favicon.svg +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/assets/logo.svg +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/_compat.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/base.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/executor.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/function_calling.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/memory.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/react.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/registry.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/tool_decorator.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/tools/calculator.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/tools/datetime_tool.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/tools/file_list.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/tools/file_read.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/tools/file_write.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/tools/http_request.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/tools/human_input.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/tools/json_query.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/tools/python_repl.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/tools/regex_tool.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/tools/sql_query.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/tools/web_search.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/agents/tools/wikipedia.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/embeddings/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/embeddings/backend.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/graph/checkpointers/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/graph/checkpointers/base.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/graph/checkpointers/memory.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/graph/checkpointers/sqlite.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/graph/edge.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/graph/errors.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/graph/interrupt.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/graph/mermaid.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/graph/node.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/_cache.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/_rate_limit.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/_retry.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/_sqlite_cache.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/anthropic.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/azure_openai.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/base.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/bedrock.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/cohere.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/deepseek.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/fireworks.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/gemini.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/groq.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/mistral.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/ollama.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/openai.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/openrouter.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/structured.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/llm/together.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/loaders/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/loaders/base.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/loaders/csv.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/loaders/directory.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/loaders/excel.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/loaders/html.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/loaders/json_loader.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/loaders/pdf.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/loaders/pptx.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/loaders/text.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/loaders/web.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/memory/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/memory/conversation.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/memory/summary_buffer.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/observability/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/observability/tracer.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/parsers/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/parsers/json_parser.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/parsers/list_parser.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/parsers/pydantic_parser.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/prompts/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/prompts/template.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/py.typed +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/rag/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/rag/facade.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/rag/pipeline.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/retrieval/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/retrieval/base.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/retrieval/chroma.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/retrieval/contextual.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/retrieval/contextual_compression.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/retrieval/crag.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/retrieval/cross_encoder.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/retrieval/ensemble.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/retrieval/faiss.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/retrieval/parent_document.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/retrieval/pinecone.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/retrieval/qdrant.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/retrieval/query_decomposition.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/retrieval/rag_fusion.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/retrieval/retriever.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/retrieval/self_query.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/retrieval/sentence_window.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/retrieval/vectorstore.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/text_splitters/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/text_splitters/base.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/text_splitters/character.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/text_splitters/recursive.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/text_splitters/semantic.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/src/synapsekit/text_splitters/token.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/agents/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/agents/test_executor.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/agents/test_function_calling.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/agents/test_memory.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/agents/test_react.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/agents/test_tool_decorator.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/agents/test_tools.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/conftest.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/graph/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/graph/test_build.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/graph/test_checkpointing.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/graph/test_cycles.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/graph/test_mermaid.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/graph/test_run.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/graph/test_state.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/graph/test_stream.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/llm/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/llm/test_cache_retry.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/llm/test_function_calling_providers.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/llm/test_llm.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/llm/test_providers.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/loaders/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/loaders/test_loaders.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/memory/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/memory/test_memory.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/observability/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/observability/test_tracer.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/parsers/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/parsers/test_parsers.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/prompts/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/prompts/test_prompts.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/rag/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/rag/test_facade.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/rag/test_pipeline.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/retrieval/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/retrieval/test_backends.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/retrieval/test_retriever.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/retrieval/test_vectorstore.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/test_v051_features.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/test_v052_features.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/test_v053_features.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/test_v061_features.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/test_v062_features.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/text_splitters/__init__.py +0 -0
- {synapsekit-0.6.2 → synapsekit-0.6.3}/tests/text_splitters/test_splitters.py +0 -0
|
@@ -7,6 +7,38 @@ SynapseKit uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
|
|
7
7
|
|
|
8
8
|
---
|
|
9
9
|
|
|
10
|
+
## [0.6.3] — 2026-03-14
|
|
11
|
+
|
|
12
|
+
### Added
|
|
13
|
+
|
|
14
|
+
- **Typed state with reducers** — `TypedState` and `StateField` for safe parallel state merging in graph workflows; per-field reducers control how concurrent node outputs are combined (closes #253)
|
|
15
|
+
- **Parallel subgraph execution** — `fan_out_node()` runs multiple subgraphs concurrently with `asyncio.gather()`, supports per-subgraph input mappings and custom merge functions (closes #248)
|
|
16
|
+
- **SSE streaming** — `sse_stream()` streams graph execution as Server-Sent Events for HTTP responses (closes #238)
|
|
17
|
+
- **Event callbacks** — `EventHooks` and `GraphEvent` for registering callbacks on node_start, node_complete, wave_start, wave_complete events during graph execution (closes #240)
|
|
18
|
+
- **Semantic LLM cache** — `SemanticCache` uses embeddings for similarity-based cache lookup instead of exact match; configurable threshold and maxsize (closes #196)
|
|
19
|
+
- **Summarization tool** — `SummarizationTool` summarizes text using an LLM with concise, bullet_points, or detailed styles (closes #223)
|
|
20
|
+
- **Sentiment analysis tool** — `SentimentAnalysisTool` analyzes text sentiment (positive/negative/neutral/mixed) with confidence and explanation (closes #225)
|
|
21
|
+
- **Translation tool** — `TranslationTool` translates text between languages with optional source language specification (closes #224)
|
|
22
|
+
- 28 new tests (540 total)
|
|
23
|
+
|
|
24
|
+
---
|
|
25
|
+
|
|
26
|
+
## [0.6.2] — 2026-03-13
|
|
27
|
+
|
|
28
|
+
### Added
|
|
29
|
+
|
|
30
|
+
- **CRAG (Corrective RAG)** — `CRAGRetriever` grades retrieved documents for relevance using an LLM, rewrites the query and retries when too few are relevant (closes #152)
|
|
31
|
+
- **Query Decomposition** — `QueryDecompositionRetriever` breaks complex queries into sub-queries, retrieves for each, and deduplicates results (closes #156)
|
|
32
|
+
- **Contextual Compression** — `ContextualCompressionRetriever` compresses retrieved documents to only the relevant excerpts using an LLM (closes #146)
|
|
33
|
+
- **Ensemble Retrieval** — `EnsembleRetriever` fuses results from multiple retrievers using weighted Reciprocal Rank Fusion (closes #147)
|
|
34
|
+
- **SQLite Conversation Memory** — `SQLiteConversationMemory` persists chat history to SQLite with multi-conversation support and optional sliding window (closes #138)
|
|
35
|
+
- **Summary Buffer Memory** — `SummaryBufferMemory` tracks token budget and progressively summarizes older messages when the buffer exceeds the limit (closes #135)
|
|
36
|
+
- **Human Input Tool** — `HumanInputTool` pauses agent execution to ask the user a question, supports custom sync/async input functions (closes #228)
|
|
37
|
+
- **Wikipedia Tool** — `WikipediaTool` searches and fetches Wikipedia article summaries using the REST API, no extra dependencies (closes #202)
|
|
38
|
+
- 30 new tests (512 total)
|
|
39
|
+
|
|
40
|
+
---
|
|
41
|
+
|
|
10
42
|
## [0.6.1] — 2026-03-13
|
|
11
43
|
|
|
12
44
|
### Added
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: synapsekit
|
|
3
|
-
Version: 0.6.2
|
|
3
|
+
Version: 0.6.3
|
|
4
4
|
Summary: Async-native Python framework for building production-grade LLM applications. Streaming-first, 2 dependencies, fully transparent.
|
|
5
5
|
Project-URL: Homepage, https://github.com/SynapseKit/SynapseKit
|
|
6
6
|
Project-URL: Repository, https://github.com/SynapseKit/SynapseKit
|
|
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
|
|
|
4
4
|
|
|
5
5
|
[project]
|
|
6
6
|
name = "synapsekit"
|
|
7
|
-
version = "0.6.2"
|
|
7
|
+
version = "0.6.3"
|
|
8
8
|
description = "Async-native Python framework for building production-grade LLM applications. Streaming-first, 2 dependencies, fully transparent."
|
|
9
9
|
authors = [{ name = "Amit", email = "de.amit.nautiyal@gmail.com" }]
|
|
10
10
|
license = { text = "MIT" }
|
|
@@ -13,6 +13,7 @@ requires-python = ">=3.14"
|
|
|
13
13
|
keywords = ["rag", "llm", "ai", "async", "streaming", "retrieval"]
|
|
14
14
|
classifiers = [
|
|
15
15
|
"Development Status :: 3 - Alpha",
|
|
16
|
+
|
|
16
17
|
"Intended Audience :: Developers",
|
|
17
18
|
"License :: OSI Approved :: MIT License",
|
|
18
19
|
"Programming Language :: Python :: 3",
|
|
@@ -32,9 +32,12 @@ from .agents import (
|
|
|
32
32
|
PythonREPLTool,
|
|
33
33
|
ReActAgent,
|
|
34
34
|
RegexTool,
|
|
35
|
+
SentimentAnalysisTool,
|
|
35
36
|
SQLQueryTool,
|
|
37
|
+
SummarizationTool,
|
|
36
38
|
ToolRegistry,
|
|
37
39
|
ToolResult,
|
|
40
|
+
TranslationTool,
|
|
38
41
|
WebSearchTool,
|
|
39
42
|
WikipediaTool,
|
|
40
43
|
tool,
|
|
@@ -47,7 +50,9 @@ from .graph import (
|
|
|
47
50
|
ConditionalEdge,
|
|
48
51
|
ConditionFn,
|
|
49
52
|
Edge,
|
|
53
|
+
EventHooks,
|
|
50
54
|
GraphConfigError,
|
|
55
|
+
GraphEvent,
|
|
51
56
|
GraphInterrupt,
|
|
52
57
|
GraphRuntimeError,
|
|
53
58
|
GraphState,
|
|
@@ -56,10 +61,14 @@ from .graph import (
|
|
|
56
61
|
Node,
|
|
57
62
|
NodeFn,
|
|
58
63
|
SQLiteCheckpointer,
|
|
64
|
+
StateField,
|
|
59
65
|
StateGraph,
|
|
66
|
+
TypedState,
|
|
60
67
|
agent_node,
|
|
68
|
+
fan_out_node,
|
|
61
69
|
llm_node,
|
|
62
70
|
rag_node,
|
|
71
|
+
sse_stream,
|
|
63
72
|
subgraph_node,
|
|
64
73
|
)
|
|
65
74
|
from .llm.base import BaseLLM, LLMConfig
|
|
@@ -104,7 +113,7 @@ from .text_splitters import (
|
|
|
104
113
|
TokenAwareSplitter,
|
|
105
114
|
)
|
|
106
115
|
|
|
107
|
-
__version__ = "0.6.2"
|
|
116
|
+
__version__ = "0.6.3"
|
|
108
117
|
__all__ = [
|
|
109
118
|
# Facade
|
|
110
119
|
"RAG",
|
|
@@ -190,7 +199,10 @@ __all__ = [
|
|
|
190
199
|
"JSONQueryTool",
|
|
191
200
|
"PythonREPLTool",
|
|
192
201
|
"RegexTool",
|
|
202
|
+
"SentimentAnalysisTool",
|
|
193
203
|
"SQLQueryTool",
|
|
204
|
+
"SummarizationTool",
|
|
205
|
+
"TranslationTool",
|
|
194
206
|
"WebSearchTool",
|
|
195
207
|
"WikipediaTool",
|
|
196
208
|
# Text splitters
|
|
@@ -215,8 +227,14 @@ __all__ = [
|
|
|
215
227
|
"Edge",
|
|
216
228
|
"ConditionalEdge",
|
|
217
229
|
"ConditionFn",
|
|
230
|
+
"EventHooks",
|
|
231
|
+
"GraphEvent",
|
|
232
|
+
"StateField",
|
|
218
233
|
"StateGraph",
|
|
234
|
+
"TypedState",
|
|
219
235
|
"CompiledGraph",
|
|
236
|
+
"fan_out_node",
|
|
237
|
+
"sse_stream",
|
|
220
238
|
# Checkpointers
|
|
221
239
|
"BaseCheckpointer",
|
|
222
240
|
"InMemoryCheckpointer",
|
|
@@ -16,7 +16,10 @@ from .tools import (
|
|
|
16
16
|
JSONQueryTool,
|
|
17
17
|
PythonREPLTool,
|
|
18
18
|
RegexTool,
|
|
19
|
+
SentimentAnalysisTool,
|
|
19
20
|
SQLQueryTool,
|
|
21
|
+
SummarizationTool,
|
|
22
|
+
TranslationTool,
|
|
20
23
|
WebSearchTool,
|
|
21
24
|
WikipediaTool,
|
|
22
25
|
)
|
|
@@ -46,7 +49,10 @@ __all__ = [
|
|
|
46
49
|
"JSONQueryTool",
|
|
47
50
|
"PythonREPLTool",
|
|
48
51
|
"RegexTool",
|
|
52
|
+
"SentimentAnalysisTool",
|
|
49
53
|
"SQLQueryTool",
|
|
54
|
+
"SummarizationTool",
|
|
55
|
+
"TranslationTool",
|
|
50
56
|
"WebSearchTool",
|
|
51
57
|
"WikipediaTool",
|
|
52
58
|
]
|
|
@@ -8,7 +8,10 @@ from .human_input import HumanInputTool
|
|
|
8
8
|
from .json_query import JSONQueryTool
|
|
9
9
|
from .python_repl import PythonREPLTool
|
|
10
10
|
from .regex_tool import RegexTool
|
|
11
|
+
from .sentiment import SentimentAnalysisTool
|
|
11
12
|
from .sql_query import SQLQueryTool
|
|
13
|
+
from .summarization import SummarizationTool
|
|
14
|
+
from .translation import TranslationTool
|
|
12
15
|
from .web_search import WebSearchTool
|
|
13
16
|
from .wikipedia import WikipediaTool
|
|
14
17
|
|
|
@@ -23,7 +26,10 @@ __all__ = [
|
|
|
23
26
|
"JSONQueryTool",
|
|
24
27
|
"PythonREPLTool",
|
|
25
28
|
"RegexTool",
|
|
29
|
+
"SentimentAnalysisTool",
|
|
26
30
|
"SQLQueryTool",
|
|
31
|
+
"SummarizationTool",
|
|
32
|
+
"TranslationTool",
|
|
27
33
|
"WebSearchTool",
|
|
28
34
|
"WikipediaTool",
|
|
29
35
|
]
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
"""Sentiment Analysis Tool: analyze sentiment using an LLM."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from ..base import BaseTool, ToolResult
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class SentimentAnalysisTool(BaseTool):
|
|
11
|
+
"""Analyze the sentiment of text using an LLM.
|
|
12
|
+
|
|
13
|
+
Usage::
|
|
14
|
+
|
|
15
|
+
tool = SentimentAnalysisTool(llm=llm)
|
|
16
|
+
result = await tool.run(text="I love this product!")
|
|
17
|
+
"""
|
|
18
|
+
|
|
19
|
+
name = "sentiment_analysis"
|
|
20
|
+
description = (
|
|
21
|
+
"Analyze the sentiment of a piece of text. "
|
|
22
|
+
"Input: the text to analyze. "
|
|
23
|
+
"Returns: sentiment (positive/negative/neutral), confidence, and explanation."
|
|
24
|
+
)
|
|
25
|
+
parameters = {
|
|
26
|
+
"type": "object",
|
|
27
|
+
"properties": {
|
|
28
|
+
"text": {
|
|
29
|
+
"type": "string",
|
|
30
|
+
"description": "The text to analyze for sentiment",
|
|
31
|
+
},
|
|
32
|
+
},
|
|
33
|
+
"required": ["text"],
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
def __init__(self, llm: Any) -> None:
|
|
37
|
+
self._llm = llm
|
|
38
|
+
|
|
39
|
+
async def run(self, text: str = "", **kwargs: Any) -> ToolResult:
|
|
40
|
+
input_text = text or kwargs.get("input", "")
|
|
41
|
+
if not input_text:
|
|
42
|
+
return ToolResult(output="", error="No text provided for sentiment analysis.")
|
|
43
|
+
|
|
44
|
+
prompt = (
|
|
45
|
+
"Analyze the sentiment of the following text. "
|
|
46
|
+
"Respond with exactly three lines:\n"
|
|
47
|
+
"Sentiment: positive/negative/neutral/mixed\n"
|
|
48
|
+
"Confidence: high/medium/low\n"
|
|
49
|
+
"Explanation: one sentence explaining why\n\n"
|
|
50
|
+
f"Text:\n{input_text}"
|
|
51
|
+
)
|
|
52
|
+
|
|
53
|
+
try:
|
|
54
|
+
result: str = await self._llm.generate(prompt)
|
|
55
|
+
return ToolResult(output=result.strip())
|
|
56
|
+
except Exception as e:
|
|
57
|
+
return ToolResult(output="", error=f"Sentiment analysis failed: {e}")
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
"""Text Summarization Tool: summarize text using an LLM."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from ..base import BaseTool, ToolResult
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class SummarizationTool(BaseTool):
|
|
11
|
+
"""Summarize text using an LLM.
|
|
12
|
+
|
|
13
|
+
Usage::
|
|
14
|
+
|
|
15
|
+
tool = SummarizationTool(llm=llm)
|
|
16
|
+
result = await tool.run(text="Long article text here...")
|
|
17
|
+
"""
|
|
18
|
+
|
|
19
|
+
name = "summarize"
|
|
20
|
+
description = (
|
|
21
|
+
"Summarize a piece of text. "
|
|
22
|
+
"Input: the text to summarize, and optionally a max_sentences count. "
|
|
23
|
+
"Returns: a concise summary of the text."
|
|
24
|
+
)
|
|
25
|
+
parameters = {
|
|
26
|
+
"type": "object",
|
|
27
|
+
"properties": {
|
|
28
|
+
"text": {
|
|
29
|
+
"type": "string",
|
|
30
|
+
"description": "The text to summarize",
|
|
31
|
+
},
|
|
32
|
+
"max_sentences": {
|
|
33
|
+
"type": "integer",
|
|
34
|
+
"description": "Maximum number of sentences in the summary (default: 3)",
|
|
35
|
+
"default": 3,
|
|
36
|
+
},
|
|
37
|
+
"style": {
|
|
38
|
+
"type": "string",
|
|
39
|
+
"description": "Summary style: 'concise', 'bullet_points', or 'detailed' (default: 'concise')",
|
|
40
|
+
"default": "concise",
|
|
41
|
+
},
|
|
42
|
+
},
|
|
43
|
+
"required": ["text"],
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
def __init__(self, llm: Any) -> None:
|
|
47
|
+
self._llm = llm
|
|
48
|
+
|
|
49
|
+
async def run(
|
|
50
|
+
self, text: str = "", max_sentences: int = 3, style: str = "concise", **kwargs: Any
|
|
51
|
+
) -> ToolResult:
|
|
52
|
+
input_text = text or kwargs.get("input", "")
|
|
53
|
+
if not input_text:
|
|
54
|
+
return ToolResult(output="", error="No text provided to summarize.")
|
|
55
|
+
|
|
56
|
+
if style == "bullet_points":
|
|
57
|
+
prompt = (
|
|
58
|
+
f"Summarize the following text as {max_sentences} bullet points. "
|
|
59
|
+
f"Return only the bullet points, one per line starting with '- '.\n\n"
|
|
60
|
+
f"Text:\n{input_text}"
|
|
61
|
+
)
|
|
62
|
+
elif style == "detailed":
|
|
63
|
+
prompt = (
|
|
64
|
+
f"Provide a detailed summary of the following text in {max_sentences} sentences. "
|
|
65
|
+
f"Capture all key points and nuances.\n\n"
|
|
66
|
+
f"Text:\n{input_text}"
|
|
67
|
+
)
|
|
68
|
+
else:
|
|
69
|
+
prompt = (
|
|
70
|
+
f"Summarize the following text in {max_sentences} sentences or fewer. "
|
|
71
|
+
f"Be concise and capture the main points.\n\n"
|
|
72
|
+
f"Text:\n{input_text}"
|
|
73
|
+
)
|
|
74
|
+
|
|
75
|
+
try:
|
|
76
|
+
result: str = await self._llm.generate(prompt)
|
|
77
|
+
return ToolResult(output=result.strip())
|
|
78
|
+
except Exception as e:
|
|
79
|
+
return ToolResult(output="", error=f"Summarization failed: {e}")
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
"""Translation Tool: translate text using an LLM."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from ..base import BaseTool, ToolResult
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class TranslationTool(BaseTool):
|
|
11
|
+
"""Translate text between languages using an LLM.
|
|
12
|
+
|
|
13
|
+
Usage::
|
|
14
|
+
|
|
15
|
+
tool = TranslationTool(llm=llm)
|
|
16
|
+
result = await tool.run(text="Hello world!", target_language="Spanish")
|
|
17
|
+
"""
|
|
18
|
+
|
|
19
|
+
name = "translate"
|
|
20
|
+
description = (
|
|
21
|
+
"Translate text from one language to another. "
|
|
22
|
+
"Input: the text and the target language. "
|
|
23
|
+
"Returns: the translated text."
|
|
24
|
+
)
|
|
25
|
+
parameters = {
|
|
26
|
+
"type": "object",
|
|
27
|
+
"properties": {
|
|
28
|
+
"text": {
|
|
29
|
+
"type": "string",
|
|
30
|
+
"description": "The text to translate",
|
|
31
|
+
},
|
|
32
|
+
"target_language": {
|
|
33
|
+
"type": "string",
|
|
34
|
+
"description": "The language to translate to (e.g., 'Spanish', 'French', 'Japanese')",
|
|
35
|
+
},
|
|
36
|
+
"source_language": {
|
|
37
|
+
"type": "string",
|
|
38
|
+
"description": "The source language (optional, auto-detected if omitted)",
|
|
39
|
+
},
|
|
40
|
+
},
|
|
41
|
+
"required": ["text", "target_language"],
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
def __init__(self, llm: Any) -> None:
|
|
45
|
+
self._llm = llm
|
|
46
|
+
|
|
47
|
+
async def run(
|
|
48
|
+
self,
|
|
49
|
+
text: str = "",
|
|
50
|
+
target_language: str = "",
|
|
51
|
+
source_language: str = "",
|
|
52
|
+
**kwargs: Any,
|
|
53
|
+
) -> ToolResult:
|
|
54
|
+
input_text = text or kwargs.get("input", "")
|
|
55
|
+
if not input_text:
|
|
56
|
+
return ToolResult(output="", error="No text provided to translate.")
|
|
57
|
+
if not target_language:
|
|
58
|
+
return ToolResult(output="", error="No target language specified.")
|
|
59
|
+
|
|
60
|
+
if source_language:
|
|
61
|
+
prompt = (
|
|
62
|
+
f"Translate the following text from {source_language} to {target_language}. "
|
|
63
|
+
f"Return only the translated text, nothing else.\n\n"
|
|
64
|
+
f"Text:\n{input_text}"
|
|
65
|
+
)
|
|
66
|
+
else:
|
|
67
|
+
prompt = (
|
|
68
|
+
f"Translate the following text to {target_language}. "
|
|
69
|
+
f"Return only the translated text, nothing else.\n\n"
|
|
70
|
+
f"Text:\n{input_text}"
|
|
71
|
+
)
|
|
72
|
+
|
|
73
|
+
try:
|
|
74
|
+
result: str = await self._llm.generate(prompt)
|
|
75
|
+
return ToolResult(output=result.strip())
|
|
76
|
+
except Exception as e:
|
|
77
|
+
return ToolResult(output="", error=f"Translation failed: {e}")
|
|
@@ -2,10 +2,12 @@ from .checkpointers import BaseCheckpointer, InMemoryCheckpointer, SQLiteCheckpo
|
|
|
2
2
|
from .compiled import CompiledGraph
|
|
3
3
|
from .edge import ConditionalEdge, ConditionFn, Edge
|
|
4
4
|
from .errors import GraphConfigError, GraphRuntimeError
|
|
5
|
+
from .fan_out import fan_out_node
|
|
5
6
|
from .graph import StateGraph
|
|
6
7
|
from .interrupt import GraphInterrupt, InterruptState
|
|
7
8
|
from .node import Node, NodeFn, agent_node, llm_node, rag_node
|
|
8
|
-
from .state import END, GraphState
|
|
9
|
+
from .state import END, GraphState, StateField, TypedState
|
|
10
|
+
from .streaming import EventHooks, GraphEvent, sse_stream
|
|
9
11
|
from .subgraph import subgraph_node
|
|
10
12
|
|
|
11
13
|
__all__ = [
|
|
@@ -15,7 +17,9 @@ __all__ = [
|
|
|
15
17
|
"ConditionFn",
|
|
16
18
|
"ConditionalEdge",
|
|
17
19
|
"Edge",
|
|
20
|
+
"EventHooks",
|
|
18
21
|
"GraphConfigError",
|
|
22
|
+
"GraphEvent",
|
|
19
23
|
"GraphInterrupt",
|
|
20
24
|
"GraphRuntimeError",
|
|
21
25
|
"GraphState",
|
|
@@ -24,9 +28,13 @@ __all__ = [
|
|
|
24
28
|
"Node",
|
|
25
29
|
"NodeFn",
|
|
26
30
|
"SQLiteCheckpointer",
|
|
31
|
+
"StateField",
|
|
27
32
|
"StateGraph",
|
|
33
|
+
"TypedState",
|
|
28
34
|
"agent_node",
|
|
35
|
+
"fan_out_node",
|
|
29
36
|
"llm_node",
|
|
30
37
|
"rag_node",
|
|
38
|
+
"sse_stream",
|
|
31
39
|
"subgraph_node",
|
|
32
40
|
]
|
|
@@ -10,6 +10,7 @@ from .errors import GraphRuntimeError
|
|
|
10
10
|
from .interrupt import GraphInterrupt
|
|
11
11
|
from .mermaid import get_mermaid
|
|
12
12
|
from .state import END
|
|
13
|
+
from .streaming import EventHooks, GraphEvent
|
|
13
14
|
|
|
14
15
|
if TYPE_CHECKING:
|
|
15
16
|
from .checkpointers.base import BaseCheckpointer
|
|
@@ -42,15 +43,24 @@ class CompiledGraph:
|
|
|
42
43
|
# Public API
|
|
43
44
|
# ------------------------------------------------------------------ #
|
|
44
45
|
|
|
46
|
+
def _merge_state(self, state: dict[str, Any], partial: dict[str, Any]) -> None:
|
|
47
|
+
"""Merge partial state into current state, using reducers if available."""
|
|
48
|
+
schema = self._graph._state_schema
|
|
49
|
+
if schema is not None:
|
|
50
|
+
schema.merge(state, partial)
|
|
51
|
+
else:
|
|
52
|
+
state.update(partial)
|
|
53
|
+
|
|
45
54
|
async def run(
|
|
46
55
|
self,
|
|
47
56
|
state: dict[str, Any],
|
|
48
57
|
checkpointer: BaseCheckpointer | None = None,
|
|
49
58
|
graph_id: str | None = None,
|
|
59
|
+
hooks: EventHooks | None = None,
|
|
50
60
|
) -> dict[str, Any]:
|
|
51
61
|
"""Run the graph to completion and return the final state."""
|
|
52
62
|
state = dict(state)
|
|
53
|
-
async for _ in self._execute(state, checkpointer=checkpointer, graph_id=graph_id):
|
|
63
|
+
async for _ in self._execute(state, checkpointer=checkpointer, graph_id=graph_id, hooks=hooks):
|
|
54
64
|
pass
|
|
55
65
|
return state
|
|
56
66
|
|
|
@@ -59,13 +69,14 @@ class CompiledGraph:
|
|
|
59
69
|
state: dict[str, Any],
|
|
60
70
|
checkpointer: BaseCheckpointer | None = None,
|
|
61
71
|
graph_id: str | None = None,
|
|
72
|
+
hooks: EventHooks | None = None,
|
|
62
73
|
) -> AsyncGenerator[dict[str, Any]]:
|
|
63
74
|
"""
|
|
64
75
|
Yield ``{"node": name, "state": snapshot}`` for each completed node.
|
|
65
76
|
The caller receives incremental state updates as nodes finish.
|
|
66
77
|
"""
|
|
67
78
|
state = dict(state)
|
|
68
|
-
async for event in self._execute(state, checkpointer=checkpointer, graph_id=graph_id):
|
|
79
|
+
async for event in self._execute(state, checkpointer=checkpointer, graph_id=graph_id, hooks=hooks):
|
|
69
80
|
yield event
|
|
70
81
|
|
|
71
82
|
async def resume(
|
|
@@ -95,11 +106,12 @@ class CompiledGraph:
|
|
|
95
106
|
state: dict[str, Any],
|
|
96
107
|
checkpointer: BaseCheckpointer | None = None,
|
|
97
108
|
graph_id: str | None = None,
|
|
109
|
+
hooks: EventHooks | None = None,
|
|
98
110
|
) -> dict[str, Any]:
|
|
99
111
|
"""Synchronous wrapper — works inside and outside a running event loop."""
|
|
100
112
|
from .._compat import run_sync
|
|
101
113
|
|
|
102
|
-
return run_sync(self.run(state, checkpointer=checkpointer, graph_id=graph_id))
|
|
114
|
+
return run_sync(self.run(state, checkpointer=checkpointer, graph_id=graph_id, hooks=hooks))
|
|
103
115
|
|
|
104
116
|
async def stream_tokens(
|
|
105
117
|
self,
|
|
@@ -152,7 +164,7 @@ class CompiledGraph:
|
|
|
152
164
|
if "__stream_key__" in result:
|
|
153
165
|
result[result.pop("__stream_key__")] = "".join(collected)
|
|
154
166
|
|
|
155
|
-
|
|
167
|
+
self._merge_state(state, result)
|
|
156
168
|
yield {"type": "node_complete", "node": name, "state": dict(state)}
|
|
157
169
|
|
|
158
170
|
current_wave = await self._next_wave(current_wave, state)
|
|
@@ -169,6 +181,7 @@ class CompiledGraph:
|
|
|
169
181
|
state: dict[str, Any],
|
|
170
182
|
checkpointer: BaseCheckpointer | None = None,
|
|
171
183
|
graph_id: str | None = None,
|
|
184
|
+
hooks: EventHooks | None = None,
|
|
172
185
|
) -> AsyncGenerator[dict[str, Any]]:
|
|
173
186
|
graph = self._graph
|
|
174
187
|
current_wave: list[str] = [graph._entry_point] # type: ignore[list-item]
|
|
@@ -182,6 +195,18 @@ class CompiledGraph:
|
|
|
182
195
|
)
|
|
183
196
|
steps += 1
|
|
184
197
|
|
|
198
|
+
# Emit wave_start event
|
|
199
|
+
if hooks is not None:
|
|
200
|
+
await hooks.emit(GraphEvent(
|
|
201
|
+
event_type="wave_start",
|
|
202
|
+
data={"wave": current_wave, "step": steps},
|
|
203
|
+
))
|
|
204
|
+
|
|
205
|
+
# Emit node_start events
|
|
206
|
+
if hooks is not None:
|
|
207
|
+
for name in current_wave:
|
|
208
|
+
await hooks.emit(GraphEvent(event_type="node_start", node=name))
|
|
209
|
+
|
|
185
210
|
# Run all nodes in this wave concurrently
|
|
186
211
|
try:
|
|
187
212
|
results = await asyncio.gather(
|
|
@@ -195,9 +220,25 @@ class CompiledGraph:
|
|
|
195
220
|
|
|
196
221
|
# Merge partial results into state and yield events
|
|
197
222
|
for name, partial in zip(current_wave, results, strict=False):
|
|
198
|
-
|
|
223
|
+
self._merge_state(state, partial)
|
|
199
224
|
yield {"node": name, "state": dict(state)}
|
|
200
225
|
|
|
226
|
+
# Emit node_complete event
|
|
227
|
+
if hooks is not None:
|
|
228
|
+
await hooks.emit(GraphEvent(
|
|
229
|
+
event_type="node_complete",
|
|
230
|
+
node=name,
|
|
231
|
+
state=dict(state),
|
|
232
|
+
))
|
|
233
|
+
|
|
234
|
+
# Emit wave_complete event
|
|
235
|
+
if hooks is not None:
|
|
236
|
+
await hooks.emit(GraphEvent(
|
|
237
|
+
event_type="wave_complete",
|
|
238
|
+
data={"wave": current_wave, "step": steps},
|
|
239
|
+
state=dict(state),
|
|
240
|
+
))
|
|
241
|
+
|
|
201
242
|
# Save checkpoint after wave completion
|
|
202
243
|
if checkpointer is not None and graph_id is not None:
|
|
203
244
|
checkpointer.save(graph_id, steps, dict(state))
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
"""Parallel subgraph execution — fan-out/fan-in pattern."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import asyncio
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
from .node import NodeFn
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def fan_out_node(
|
|
12
|
+
subgraphs: list[Any],
|
|
13
|
+
input_mappings: list[dict[str, str]] | None = None,
|
|
14
|
+
output_key: str = "fan_out_results",
|
|
15
|
+
merge_fn: Any | None = None,
|
|
16
|
+
) -> NodeFn:
|
|
17
|
+
"""Run multiple subgraphs in parallel and collect their results.
|
|
18
|
+
|
|
19
|
+
Args:
|
|
20
|
+
subgraphs: List of ``CompiledGraph`` instances to run concurrently.
|
|
21
|
+
input_mappings: Optional per-subgraph input key mappings.
|
|
22
|
+
Each dict maps ``{parent_key: sub_key}``.
|
|
23
|
+
If ``None``, each subgraph receives the full parent state.
|
|
24
|
+
output_key: State key to store the list of results.
|
|
25
|
+
merge_fn: Optional function ``(list[dict]) -> dict`` to merge results
|
|
26
|
+
into a single dict. If ``None``, results are stored as a list.
|
|
27
|
+
|
|
28
|
+
Usage::
|
|
29
|
+
|
|
30
|
+
fan = fan_out_node(
|
|
31
|
+
subgraphs=[compiled_a, compiled_b, compiled_c],
|
|
32
|
+
input_mappings=[
|
|
33
|
+
{"query": "input"},
|
|
34
|
+
{"query": "input"},
|
|
35
|
+
{"query": "input"},
|
|
36
|
+
],
|
|
37
|
+
output_key="results",
|
|
38
|
+
)
|
|
39
|
+
graph.add_node("parallel", fan)
|
|
40
|
+
|
|
41
|
+
With a merge function::
|
|
42
|
+
|
|
43
|
+
def merge(results):
|
|
44
|
+
return {"combined": " | ".join(r.get("output", "") for r in results)}
|
|
45
|
+
|
|
46
|
+
fan = fan_out_node(
|
|
47
|
+
subgraphs=[sub_a, sub_b],
|
|
48
|
+
merge_fn=merge,
|
|
49
|
+
)
|
|
50
|
+
"""
|
|
51
|
+
mappings = input_mappings or [None] * len(subgraphs) # type: ignore[list-item]
|
|
52
|
+
|
|
53
|
+
if len(mappings) != len(subgraphs):
|
|
54
|
+
raise ValueError("input_mappings must have the same length as subgraphs.")
|
|
55
|
+
|
|
56
|
+
async def _fn(state: dict[str, Any]) -> dict[str, Any]:
|
|
57
|
+
tasks = []
|
|
58
|
+
for sg, mapping in zip(subgraphs, mappings, strict=True):
|
|
59
|
+
if mapping:
|
|
60
|
+
sub_state = {sub_key: state[parent_key] for parent_key, sub_key in mapping.items()}
|
|
61
|
+
else:
|
|
62
|
+
sub_state = dict(state)
|
|
63
|
+
tasks.append(sg.run(sub_state))
|
|
64
|
+
|
|
65
|
+
results = await asyncio.gather(*tasks)
|
|
66
|
+
|
|
67
|
+
if merge_fn is not None:
|
|
68
|
+
merged: dict[str, Any] = merge_fn(list(results))
|
|
69
|
+
return merged
|
|
70
|
+
return {output_key: list(results)}
|
|
71
|
+
|
|
72
|
+
return _fn
|
|
@@ -3,7 +3,7 @@ from __future__ import annotations
|
|
|
3
3
|
from .edge import ConditionalEdge, ConditionFn, Edge
|
|
4
4
|
from .errors import GraphConfigError
|
|
5
5
|
from .node import Node, NodeFn
|
|
6
|
-
from .state import END
|
|
6
|
+
from .state import END, TypedState
|
|
7
7
|
|
|
8
8
|
|
|
9
9
|
class StateGraph:
|
|
@@ -18,12 +18,22 @@ class StateGraph:
|
|
|
18
18
|
graph.set_entry_point("a").set_finish_point("b")
|
|
19
19
|
compiled = graph.compile()
|
|
20
20
|
result = await compiled.run({"input": "hello"})
|
|
21
|
+
|
|
22
|
+
With typed state and reducers::
|
|
23
|
+
|
|
24
|
+
from synapsekit.graph.state import StateField, TypedState
|
|
25
|
+
|
|
26
|
+
schema = TypedState(fields={
|
|
27
|
+
"messages": StateField(default=list, reducer=lambda cur, new: cur + new),
|
|
28
|
+
})
|
|
29
|
+
graph = StateGraph(state_schema=schema)
|
|
21
30
|
"""
|
|
22
31
|
|
|
23
|
-
def __init__(self) -> None:
|
|
32
|
+
def __init__(self, state_schema: TypedState | None = None) -> None:
|
|
24
33
|
self._nodes: dict[str, Node] = {}
|
|
25
34
|
self._edges: list[Edge | ConditionalEdge] = []
|
|
26
35
|
self._entry_point: str | None = None
|
|
36
|
+
self._state_schema = state_schema
|
|
27
37
|
|
|
28
38
|
def __repr__(self) -> str:
|
|
29
39
|
return f"StateGraph(nodes={len(self._nodes)}, edges={len(self._edges)})"
|