py-context-graph 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- py_context_graph-0.1.0/.github/ISSUE_TEMPLATE/bug_report.yml +51 -0
- py_context_graph-0.1.0/.github/ISSUE_TEMPLATE/feature_request.yml +23 -0
- py_context_graph-0.1.0/.github/PULL_REQUEST_TEMPLATE.md +13 -0
- py_context_graph-0.1.0/.github/workflows/publish.yml +54 -0
- py_context_graph-0.1.0/.github/workflows/test.yml +24 -0
- py_context_graph-0.1.0/.gitignore +19 -0
- py_context_graph-0.1.0/CHANGELOG.md +25 -0
- py_context_graph-0.1.0/CONTRIBUTING.md +48 -0
- py_context_graph-0.1.0/LICENSE +21 -0
- py_context_graph-0.1.0/PKG-INFO +271 -0
- py_context_graph-0.1.0/README.md +231 -0
- py_context_graph-0.1.0/SECURITY.md +21 -0
- py_context_graph-0.1.0/examples/README.md +77 -0
- py_context_graph-0.1.0/examples/run.py +234 -0
- py_context_graph-0.1.0/examples/sample_conversation_1.txt +21 -0
- py_context_graph-0.1.0/examples/sample_conversation_2.txt +15 -0
- py_context_graph-0.1.0/examples/sample_conversation_3.txt +19 -0
- py_context_graph-0.1.0/examples/viewer.html +1278 -0
- py_context_graph-0.1.0/pyproject.toml +50 -0
- py_context_graph-0.1.0/src/decision_graph/__init__.py +8 -0
- py_context_graph-0.1.0/src/decision_graph/backends/__init__.py +0 -0
- py_context_graph-0.1.0/src/decision_graph/backends/firestore/__init__.py +41 -0
- py_context_graph-0.1.0/src/decision_graph/backends/firestore/stores.py +254 -0
- py_context_graph-0.1.0/src/decision_graph/backends/memory/__init__.py +30 -0
- py_context_graph-0.1.0/src/decision_graph/backends/memory/stores.py +323 -0
- py_context_graph-0.1.0/src/decision_graph/clustering_service.py +301 -0
- py_context_graph-0.1.0/src/decision_graph/context_graph/__init__.py +0 -0
- py_context_graph-0.1.0/src/decision_graph/context_graph/planner.py +102 -0
- py_context_graph-0.1.0/src/decision_graph/context_graph/post_processing.py +247 -0
- py_context_graph-0.1.0/src/decision_graph/context_graph/registry.py +35 -0
- py_context_graph-0.1.0/src/decision_graph/context_graph/service.py +360 -0
- py_context_graph-0.1.0/src/decision_graph/context_graph/templates.py +138 -0
- py_context_graph-0.1.0/src/decision_graph/context_retrieval.py +298 -0
- py_context_graph-0.1.0/src/decision_graph/core/__init__.py +0 -0
- py_context_graph-0.1.0/src/decision_graph/core/config.py +44 -0
- py_context_graph-0.1.0/src/decision_graph/core/decision_trace_profiles.py +76 -0
- py_context_graph-0.1.0/src/decision_graph/core/domain.py +307 -0
- py_context_graph-0.1.0/src/decision_graph/core/interfaces.py +160 -0
- py_context_graph-0.1.0/src/decision_graph/core/matching.py +383 -0
- py_context_graph-0.1.0/src/decision_graph/core/registry.py +35 -0
- py_context_graph-0.1.0/src/decision_graph/decision_enrichment.py +22 -0
- py_context_graph-0.1.0/src/decision_graph/decision_trace_pipeline.py +293 -0
- py_context_graph-0.1.0/src/decision_graph/enrichment_service.py +209 -0
- py_context_graph-0.1.0/src/decision_graph/extraction_service.py +50 -0
- py_context_graph-0.1.0/src/decision_graph/graph.py +51 -0
- py_context_graph-0.1.0/src/decision_graph/ingestion.py +171 -0
- py_context_graph-0.1.0/src/decision_graph/llm/__init__.py +3 -0
- py_context_graph-0.1.0/src/decision_graph/llm/litellm_adapter.py +63 -0
- py_context_graph-0.1.0/src/decision_graph/markdown_chunker.py +50 -0
- py_context_graph-0.1.0/src/decision_graph/prompt_loader.py +19 -0
- py_context_graph-0.1.0/src/decision_graph/prompts/decision_enrichment.txt +35 -0
- py_context_graph-0.1.0/src/decision_graph/prompts/decision_trace.txt +177 -0
- py_context_graph-0.1.0/src/decision_graph/py.typed +0 -0
- py_context_graph-0.1.0/src/decision_graph/retrieval.py +274 -0
- py_context_graph-0.1.0/src/decision_graph/services.py +362 -0
- py_context_graph-0.1.0/src/decision_graph/visualization.py +133 -0
- py_context_graph-0.1.0/tests/__init__.py +1 -0
- py_context_graph-0.1.0/tests/context_graph/__init__.py +0 -0
- py_context_graph-0.1.0/tests/context_graph/test_planner.py +58 -0
- py_context_graph-0.1.0/tests/context_graph/test_post_processing.py +153 -0
- py_context_graph-0.1.0/tests/context_graph/test_registry.py +32 -0
- py_context_graph-0.1.0/tests/context_graph/test_service.py +108 -0
- py_context_graph-0.1.0/tests/test_clustering_service.py +496 -0
- py_context_graph-0.1.0/tests/test_decision_enrichment_merge.py +45 -0
- py_context_graph-0.1.0/tests/test_decision_enrichment_models.py +10 -0
- py_context_graph-0.1.0/tests/test_decision_models.py +391 -0
- py_context_graph-0.1.0/tests/test_firestore_backend.py +413 -0
- py_context_graph-0.1.0/tests/test_ingestion.py +705 -0
- py_context_graph-0.1.0/tests/test_litellm_adapter.py +120 -0
- py_context_graph-0.1.0/tests/test_matching_functions.py +673 -0
- py_context_graph-0.1.0/tests/test_pipeline_memory.py +206 -0
- py_context_graph-0.1.0/tests/test_search_hydration.py +69 -0
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
name: Bug Report
|
|
2
|
+
description: Report something that isn't working correctly
|
|
3
|
+
labels: ["bug"]
|
|
4
|
+
body:
|
|
5
|
+
- type: textarea
|
|
6
|
+
id: description
|
|
7
|
+
attributes:
|
|
8
|
+
label: What happened?
|
|
9
|
+
description: A clear description of the bug.
|
|
10
|
+
validations:
|
|
11
|
+
required: true
|
|
12
|
+
- type: textarea
|
|
13
|
+
id: reproduce
|
|
14
|
+
attributes:
|
|
15
|
+
label: Steps to reproduce
|
|
16
|
+
description: Minimal code or steps to reproduce the issue.
|
|
17
|
+
placeholder: |
|
|
18
|
+
        1. Install with `pip install "py-context-graph[all]"`
|
|
19
|
+
2. Run `python run.py`
|
|
20
|
+
3. ...
|
|
21
|
+
validations:
|
|
22
|
+
required: true
|
|
23
|
+
- type: textarea
|
|
24
|
+
id: expected
|
|
25
|
+
attributes:
|
|
26
|
+
label: Expected behavior
|
|
27
|
+
description: What did you expect to happen?
|
|
28
|
+
validations:
|
|
29
|
+
required: true
|
|
30
|
+
- type: input
|
|
31
|
+
id: version
|
|
32
|
+
attributes:
|
|
33
|
+
label: Version
|
|
34
|
+
description: Output of `pip show py-context-graph | grep Version`
|
|
35
|
+
placeholder: "0.1.0"
|
|
36
|
+
validations:
|
|
37
|
+
required: true
|
|
38
|
+
- type: input
|
|
39
|
+
id: python
|
|
40
|
+
attributes:
|
|
41
|
+
label: Python version
|
|
42
|
+
description: Output of `python --version`
|
|
43
|
+
placeholder: "3.12.0"
|
|
44
|
+
validations:
|
|
45
|
+
required: true
|
|
46
|
+
- type: textarea
|
|
47
|
+
id: logs
|
|
48
|
+
attributes:
|
|
49
|
+
label: Relevant logs or errors
|
|
50
|
+
description: Paste any error output here.
|
|
51
|
+
render: shell
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
name: Feature Request
|
|
2
|
+
description: Suggest a new feature or improvement
|
|
3
|
+
labels: ["enhancement"]
|
|
4
|
+
body:
|
|
5
|
+
- type: textarea
|
|
6
|
+
id: problem
|
|
7
|
+
attributes:
|
|
8
|
+
label: Problem or motivation
|
|
9
|
+
description: What problem does this solve, or why would it be useful?
|
|
10
|
+
validations:
|
|
11
|
+
required: true
|
|
12
|
+
- type: textarea
|
|
13
|
+
id: solution
|
|
14
|
+
attributes:
|
|
15
|
+
label: Proposed solution
|
|
16
|
+
description: How would you like it to work?
|
|
17
|
+
validations:
|
|
18
|
+
required: true
|
|
19
|
+
- type: textarea
|
|
20
|
+
id: alternatives
|
|
21
|
+
attributes:
|
|
22
|
+
label: Alternatives considered
|
|
23
|
+
description: Any other approaches you've thought about.
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
## What does this PR do?
|
|
2
|
+
|
|
3
|
+
<!-- Brief description of the change -->
|
|
4
|
+
|
|
5
|
+
## Why?
|
|
6
|
+
|
|
7
|
+
<!-- Link to issue or explain the motivation -->
|
|
8
|
+
|
|
9
|
+
## Checklist
|
|
10
|
+
|
|
11
|
+
- [ ] Tests pass (`PYTHONPATH=src:tests python -m unittest discover -s tests -p 'test_*.py'`)
|
|
12
|
+
- [ ] New code has tests (if applicable)
|
|
13
|
+
- [ ] CHANGELOG.md updated under `[Unreleased]` (if user-facing)
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
name: Release
|
|
2
|
+
|
|
3
|
+
on:
|
|
4
|
+
push:
|
|
5
|
+
tags:
|
|
6
|
+
- "v*"
|
|
7
|
+
|
|
8
|
+
jobs:
|
|
9
|
+
build:
|
|
10
|
+
runs-on: ubuntu-latest
|
|
11
|
+
permissions:
|
|
12
|
+
contents: read
|
|
13
|
+
steps:
|
|
14
|
+
- uses: actions/checkout@v6
|
|
15
|
+
- uses: actions/setup-python@v6
|
|
16
|
+
with:
|
|
17
|
+
python-version: "3.12"
|
|
18
|
+
- run: pip install build
|
|
19
|
+
- run: python -m build
|
|
20
|
+
- uses: actions/upload-artifact@v6
|
|
21
|
+
with:
|
|
22
|
+
name: dist
|
|
23
|
+
path: dist/
|
|
24
|
+
|
|
25
|
+
publish-pypi:
|
|
26
|
+
needs: build
|
|
27
|
+
runs-on: ubuntu-latest
|
|
28
|
+
environment:
|
|
29
|
+
name: pypi
|
|
30
|
+
url: https://pypi.org/project/py-context-graph/
|
|
31
|
+
permissions:
|
|
32
|
+
id-token: write
|
|
33
|
+
steps:
|
|
34
|
+
- uses: actions/download-artifact@v7
|
|
35
|
+
with:
|
|
36
|
+
name: dist
|
|
37
|
+
path: dist/
|
|
38
|
+
- uses: pypa/gh-action-pypi-publish@release/v1
|
|
39
|
+
|
|
40
|
+
github-release:
|
|
41
|
+
needs: publish-pypi
|
|
42
|
+
runs-on: ubuntu-latest
|
|
43
|
+
permissions:
|
|
44
|
+
contents: write
|
|
45
|
+
steps:
|
|
46
|
+
- uses: actions/checkout@v6
|
|
47
|
+
- uses: actions/download-artifact@v7
|
|
48
|
+
with:
|
|
49
|
+
name: dist
|
|
50
|
+
path: dist/
|
|
51
|
+
- uses: softprops/action-gh-release@v2
|
|
52
|
+
with:
|
|
53
|
+
generate_release_notes: true
|
|
54
|
+
files: dist/*
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
name: Tests
|
|
2
|
+
|
|
3
|
+
on:
|
|
4
|
+
push:
|
|
5
|
+
branches: [main]
|
|
6
|
+
pull_request:
|
|
7
|
+
branches: [main]
|
|
8
|
+
|
|
9
|
+
jobs:
|
|
10
|
+
test:
|
|
11
|
+
runs-on: ubuntu-latest
|
|
12
|
+
strategy:
|
|
13
|
+
matrix:
|
|
14
|
+
        python-version: ["3.10", "3.11", "3.12", "3.13"]
|
|
15
|
+
steps:
|
|
16
|
+
- uses: actions/checkout@v4
|
|
17
|
+
- name: Set up Python ${{ matrix.python-version }}
|
|
18
|
+
uses: actions/setup-python@v5
|
|
19
|
+
with:
|
|
20
|
+
python-version: ${{ matrix.python-version }}
|
|
21
|
+
- name: Install dependencies
|
|
22
|
+
run: pip install -e ".[dev]"
|
|
23
|
+
- name: Run tests
|
|
24
|
+
run: PYTHONPATH=src:tests python -m unittest discover -s tests -p 'test_*.py'
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
# Changelog
|
|
2
|
+
|
|
3
|
+
All notable changes to this project will be documented in this file.
|
|
4
|
+
|
|
5
|
+
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
|
|
6
|
+
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
|
7
|
+
|
|
8
|
+
## [Unreleased]
|
|
9
|
+
|
|
10
|
+
## [0.1.0] - 2026-04-02
|
|
11
|
+
|
|
12
|
+
### Added
|
|
13
|
+
- Decision extraction from unstructured conversation text via LLMs
|
|
14
|
+
- Decision deduplication with configurable similarity scoring
|
|
15
|
+
- LLM-based enrichment (topics, entities, constraints, key facts)
|
|
16
|
+
- Cross-conversation decision clustering
|
|
17
|
+
- Graph materialization with hydrated cluster output
|
|
18
|
+
- Interactive HTML viewer with Insights dashboard, Cluster Board, Timeline, Person x Cluster matrix, and Explore (force-directed graph) views
|
|
19
|
+
- Live pipeline progress in the viewer when running `python run.py`
|
|
20
|
+
- In-memory backend (stores, TF-IDF vector index, graph store)
|
|
21
|
+
- Google Cloud Firestore backend
|
|
22
|
+
- LiteLLM adapter supporting OpenAI, Anthropic, and any LiteLLM provider
|
|
23
|
+
- Context Graph query layer for natural language graph queries
|
|
24
|
+
- Pluggable interfaces: `StorageBackend`, `LLMAdapter`, `VectorIndex`, `GraphStore`
|
|
25
|
+
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
# Contributing to py-context-graph
|
|
2
|
+
|
|
3
|
+
Thanks for your interest in contributing! Here's how to get started.
|
|
4
|
+
|
|
5
|
+
## Getting started
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
git clone https://github.com/ResearchifyLabs/py-context-graph.git
|
|
9
|
+
cd py-context-graph
|
|
10
|
+
python -m venv .venv && source .venv/bin/activate
|
|
11
|
+
pip install -e ".[dev]"
|
|
12
|
+
```
|
|
13
|
+
|
|
14
|
+
## Running tests
|
|
15
|
+
|
|
16
|
+
```bash
|
|
17
|
+
PYTHONPATH=src:tests python -m unittest discover -s tests -p 'test_*.py'
|
|
18
|
+
```
|
|
19
|
+
|
|
20
|
+
To run a specific test file:
|
|
21
|
+
|
|
22
|
+
```bash
|
|
23
|
+
PYTHONPATH=src:tests python -m unittest tests/test_example.py
|
|
24
|
+
```
|
|
25
|
+
|
|
26
|
+
## Project layout
|
|
27
|
+
|
|
28
|
+
- `src/decision_graph/` — the library source
|
|
29
|
+
- `tests/` — unit tests (stdlib `unittest`, not pytest)
|
|
30
|
+
- `examples/` — runnable demos
|
|
31
|
+
|
|
32
|
+
## How to contribute
|
|
33
|
+
|
|
34
|
+
1. **Open an issue first** — describe the bug or feature so we can discuss before you write code.
|
|
35
|
+
2. **Fork and branch** — create a feature branch from `main`.
|
|
36
|
+
3. **Keep changes focused** — one PR per concern. Small PRs get reviewed faster.
|
|
37
|
+
4. **Add tests** — if you're adding a feature or fixing a bug, add a test that covers it.
|
|
38
|
+
5. **Make sure tests pass** — run the full test suite before opening a PR.
|
|
39
|
+
|
|
40
|
+
## Code style
|
|
41
|
+
|
|
42
|
+
- Keep it simple. No speculative abstractions.
|
|
43
|
+
- Match the style of the surrounding code.
|
|
44
|
+
- Avoid unnecessary comments — code should be self-documenting.
|
|
45
|
+
|
|
46
|
+
## License
|
|
47
|
+
|
|
48
|
+
By contributing, you agree that your contributions will be licensed under the [MIT License](LICENSE).
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Researchify Labs
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1,271 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: py-context-graph
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Extract, enrich, cluster, and query decisions from unstructured conversations using LLMs.
|
|
5
|
+
Project-URL: Homepage, https://github.com/ResearchifyLabs/py-context-graph
|
|
6
|
+
Project-URL: Repository, https://github.com/ResearchifyLabs/py-context-graph
|
|
7
|
+
Project-URL: Issues, https://github.com/ResearchifyLabs/py-context-graph/issues
|
|
8
|
+
Project-URL: Changelog, https://github.com/ResearchifyLabs/py-context-graph/blob/main/CHANGELOG.md
|
|
9
|
+
Author-email: ResearchifyLabs <care@researchify.io>
|
|
10
|
+
License-Expression: MIT
|
|
11
|
+
License-File: LICENSE
|
|
12
|
+
Keywords: clustering,context,conversations,decisions,knowledge-graph,llm
|
|
13
|
+
Classifier: Development Status :: 3 - Alpha
|
|
14
|
+
Classifier: Intended Audience :: Developers
|
|
15
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
16
|
+
Classifier: Programming Language :: Python :: 3
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
20
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
21
|
+
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
|
|
22
|
+
Requires-Python: >=3.10
|
|
23
|
+
Requires-Dist: pydantic>=2.0
|
|
24
|
+
Provides-Extra: all
|
|
25
|
+
Requires-Dist: google-cloud-firestore; extra == 'all'
|
|
26
|
+
Requires-Dist: litellm; extra == 'all'
|
|
27
|
+
Requires-Dist: pandas; extra == 'all'
|
|
28
|
+
Provides-Extra: dev
|
|
29
|
+
Requires-Dist: google-cloud-firestore; extra == 'dev'
|
|
30
|
+
Requires-Dist: litellm; extra == 'dev'
|
|
31
|
+
Requires-Dist: mock-firestore; extra == 'dev'
|
|
32
|
+
Requires-Dist: pandas; extra == 'dev'
|
|
33
|
+
Provides-Extra: firestore
|
|
34
|
+
Requires-Dist: google-cloud-firestore; extra == 'firestore'
|
|
35
|
+
Provides-Extra: llm
|
|
36
|
+
Requires-Dist: litellm; extra == 'llm'
|
|
37
|
+
Provides-Extra: memory
|
|
38
|
+
Requires-Dist: pandas; extra == 'memory'
|
|
39
|
+
Description-Content-Type: text/markdown
|
|
40
|
+
|
|
41
|
+
# py-context-graph
|
|
42
|
+
|
|
43
|
+
Extract, enrich, cluster, and query decisions from unstructured conversations using LLMs.
|
|
44
|
+
|
|
45
|
+
[](https://github.com/ResearchifyLabs/py-context-graph/actions/workflows/test.yml)
|
|
46
|
+
[](https://pypi.org/project/py-context-graph/)
|
|
47
|
+
[](https://opensource.org/licenses/MIT)
|
|
48
|
+
|
|
49
|
+
## What is this?
|
|
50
|
+
|
|
51
|
+
**py-context-graph** turns messy conversation text (meeting notes, Slack threads, standups) into a structured decision graph. It uses LLMs to:
|
|
52
|
+
|
|
53
|
+
1. **Extract** decision items from text (what was decided, by whom, about what)
|
|
54
|
+
2. **Deduplicate** near-identical decisions across conversations
|
|
55
|
+
3. **Enrich** each decision with structured metadata (topics, entities, constraints, key facts)
|
|
56
|
+
4. **Cluster** related decisions across conversations into coherent themes
|
|
57
|
+
5. **Materialize** the result into a queryable graph
|
|
58
|
+
|
|
59
|
+
```
|
|
60
|
+
Text → Extract (LLM) → Persist → Deduplicate → Enrich (LLM) → Cluster → Graph
|
|
61
|
+
```
|
|
62
|
+
|
|
63
|
+
## Install
|
|
64
|
+
|
|
65
|
+
```bash
|
|
66
|
+
pip install py-context-graph
|
|
67
|
+
```
|
|
68
|
+
|
|
69
|
+
With optional backends:
|
|
70
|
+
|
|
71
|
+
```bash
|
|
72
|
+
pip install "py-context-graph[all]"       # LiteLLM + Firestore + in-memory vector index
|
|
73
|
+
pip install "py-context-graph[llm]"       # LiteLLM adapter only
|
|
74
|
+
pip install "py-context-graph[firestore]" # Google Cloud Firestore backend
|
|
75
|
+
pip install "py-context-graph[memory]"    # In-memory TF-IDF vector index (pandas)
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
## Quick start
|
|
79
|
+
|
|
80
|
+
```python
|
|
81
|
+
import asyncio
|
|
82
|
+
from decision_graph import DecisionGraph, LiteLLMAdapter
|
|
83
|
+
from decision_graph.backends.memory import InMemoryBackend
|
|
84
|
+
from decision_graph.backends.memory.stores import InMemoryGraphStore, InMemoryVectorIndex
|
|
85
|
+
from decision_graph.decision_trace_pipeline import DecisionTracePipeline
|
|
86
|
+
|
|
87
|
+
backend = InMemoryBackend()
|
|
88
|
+
pipeline = DecisionTracePipeline(
|
|
89
|
+
backend=backend,
|
|
90
|
+
executor=LiteLLMAdapter(),
|
|
91
|
+
vector_index=InMemoryVectorIndex(),
|
|
92
|
+
graph_store=InMemoryGraphStore(),
|
|
93
|
+
)
|
|
94
|
+
|
|
95
|
+
async def main():
|
|
96
|
+
# Process a conversation
|
|
97
|
+
decisions = await pipeline.run_from_text(
|
|
98
|
+
conv_text="Alice: We decided to switch from REST to GraphQL for the new API...",
|
|
99
|
+
conv_id="standup-2024-01-15",
|
|
100
|
+
gid="engineering-team",
|
|
101
|
+
updated_at=1705334400.0,
|
|
102
|
+
summary_pid="summary_standup-2024-01-15",
|
|
103
|
+
query_gids=["engineering-team"],
|
|
104
|
+
)
|
|
105
|
+
|
|
106
|
+
# Query the results
|
|
107
|
+
dg = DecisionGraph(backend=backend, executor=LiteLLMAdapter())
|
|
108
|
+
service = dg.graph_service()
|
|
109
|
+
result = await service.get_enrichments_and_projections_joined(
|
|
110
|
+
group_ids=["engineering-team"]
|
|
111
|
+
)
|
|
112
|
+
print(f"Found {result['total_joined']} enriched decisions")
|
|
113
|
+
|
|
114
|
+
asyncio.run(main())
|
|
115
|
+
```
|
|
116
|
+
|
|
117
|
+
## Key concepts
|
|
118
|
+
|
|
119
|
+
### The four protocols
|
|
120
|
+
|
|
121
|
+
py-context-graph is built around pluggable interfaces. You only implement what you need:
|
|
122
|
+
|
|
123
|
+
| Protocol | Purpose | Bundled implementations |
|
|
124
|
+
|----------|---------|------------------------|
|
|
125
|
+
| **`StorageBackend`** | Groups 4 document stores (enrichments, projections, clusters, links) | `InMemoryBackend`, `FirestoreBackend` |
|
|
126
|
+
| **`LLMAdapter`** | Executes LLM calls for extraction and enrichment | `LiteLLMAdapter` (supports OpenAI, Anthropic, and any LiteLLM provider) |
|
|
127
|
+
| **`VectorIndex`** | Similarity search for cross-conversation clustering | `InMemoryVectorIndex` (TF-IDF + cosine) |
|
|
128
|
+
| **`GraphStore`** | Write-only sync of hydrated clusters to a graph DB | `InMemoryGraphStore`, `NullGraphStore` |
|
|
129
|
+
|
|
130
|
+
### DecisionGraph facade
|
|
131
|
+
|
|
132
|
+
The main entry point. Wire a backend and LLM adapter, then access services:
|
|
133
|
+
|
|
134
|
+
```python
|
|
135
|
+
from decision_graph import DecisionGraph
|
|
136
|
+
|
|
137
|
+
dg = DecisionGraph(backend=my_backend, executor=my_llm)
|
|
138
|
+
service = dg.graph_service() # query enrichments, projections, clusters
|
|
139
|
+
retrieval = dg.retrieval() # filtered queries over enrichments
|
|
140
|
+
clusterer = dg.cluster_service() # cluster management
|
|
141
|
+
```
|
|
142
|
+
|
|
143
|
+
### DecisionTracePipeline
|
|
144
|
+
|
|
145
|
+
The end-to-end processing pipeline. Feed it text, get structured decisions:
|
|
146
|
+
|
|
147
|
+
```python
|
|
148
|
+
from decision_graph.decision_trace_pipeline import DecisionTracePipeline
|
|
149
|
+
|
|
150
|
+
pipeline = DecisionTracePipeline(
|
|
151
|
+
backend=backend,
|
|
152
|
+
executor=llm_adapter,
|
|
153
|
+
vector_index=vector_index, # optional, enables cross-conversation clustering
|
|
154
|
+
graph_store=graph_store, # use NullGraphStore() to skip graph materialization
|
|
155
|
+
)
|
|
156
|
+
|
|
157
|
+
# From raw text
|
|
158
|
+
decisions = await pipeline.run_from_text(conv_text=text, conv_id="c1", gid="g1", ...)
|
|
159
|
+
|
|
160
|
+
# From pre-extracted decision items
|
|
161
|
+
decisions = await pipeline.run(decision_items=[...], conv_id="c1", gid="g1", ...)
|
|
162
|
+
```
|
|
163
|
+
|
|
164
|
+
### Context Graph (query layer)
|
|
165
|
+
|
|
166
|
+
For querying the materialized graph (requires a `GraphReader` implementation, e.g. Neo4j):
|
|
167
|
+
|
|
168
|
+
```python
|
|
169
|
+
from decision_graph.context_graph.service import ContextGraphService
|
|
170
|
+
|
|
171
|
+
ctx = ContextGraphService(reader=my_graph_reader)
|
|
172
|
+
result = await ctx.query(text="What decisions were made about the API?", mode="chat")
|
|
173
|
+
```
|
|
174
|
+
|
|
175
|
+
## Bring your own backend
|
|
176
|
+
|
|
177
|
+
Implement `StorageBackend` to use any database:
|
|
178
|
+
|
|
179
|
+
```python
|
|
180
|
+
from decision_graph.core.registry import StorageBackend
|
|
181
|
+
from decision_graph.core.interfaces import EnrichmentStore, ProjectionStore, ClusterStore, LinkStore
|
|
182
|
+
|
|
183
|
+
class PostgresBackend(StorageBackend):
|
|
184
|
+
def enrichment_store(self) -> EnrichmentStore: ...
|
|
185
|
+
def projection_store(self) -> ProjectionStore: ...
|
|
186
|
+
def cluster_store(self) -> ClusterStore: ...
|
|
187
|
+
def link_store(self) -> LinkStore: ...
|
|
188
|
+
```
|
|
189
|
+
|
|
190
|
+
Each store protocol is defined in `decision_graph.core.interfaces` with clear method signatures.
|
|
191
|
+
|
|
192
|
+
## Bring your own LLM
|
|
193
|
+
|
|
194
|
+
Implement the `LLMAdapter` protocol:
|
|
195
|
+
|
|
196
|
+
```python
|
|
197
|
+
from decision_graph.core.interfaces import LLMAdapter
|
|
198
|
+
|
|
199
|
+
class MyLLMAdapter(LLMAdapter):
|
|
200
|
+
async def execute_async(self, model_config, data, additional_data=None):
|
|
201
|
+
# Call your LLM, return parsed result
|
|
202
|
+
...
|
|
203
|
+
```
|
|
204
|
+
|
|
205
|
+
## Examples
|
|
206
|
+
|
|
207
|
+
See the [`examples/`](examples/) directory for a complete demo that:
|
|
208
|
+
- Processes sample conversation files through the full pipeline
|
|
209
|
+
- Shows live pipeline progress in the browser as conversations are processed
|
|
210
|
+
- Generates an interactive HTML viewer with Insights dashboard, Cluster Board, Timeline, Person x Cluster matrix, and Explore (force-directed graph) views
|
|
211
|
+
|
|
212
|
+
```bash
|
|
213
|
+
cd examples
|
|
214
|
+
pip install "py-context-graph[all]"
|
|
215
|
+
export OPENAI_API_KEY=sk-... # or any LiteLLM-supported provider
|
|
216
|
+
python run.py # opens browser automatically
|
|
217
|
+
```
|
|
218
|
+
|
|
219
|
+
The viewer opens immediately and shows pipeline progress in real time. When processing completes, the dashboard appears with all visualizations.
|
|
220
|
+
|
|
221
|
+
Options:
|
|
222
|
+
- `python run.py --port 9000` — use a different port
|
|
223
|
+
- `python run.py --no-browser` — don't auto-open the browser
|
|
224
|
+
- `python run.py my_notes.txt` — process your own conversation files
|
|
225
|
+
|
|
226
|
+
## Project structure
|
|
227
|
+
|
|
228
|
+
```
|
|
229
|
+
src/decision_graph/
|
|
230
|
+
├── __init__.py # Public API: DecisionGraph, LLMAdapter, LLMConfig, LiteLLMAdapter
|
|
231
|
+
├── graph.py # DecisionGraph facade
|
|
232
|
+
├── decision_trace_pipeline.py # End-to-end pipeline
|
|
233
|
+
├── extraction_service.py # LLM-based decision extraction
|
|
234
|
+
├── enrichment_service.py # LLM-based decision enrichment
|
|
235
|
+
├── clustering_service.py # Decision clustering
|
|
236
|
+
├── retrieval.py # Query/filter over enrichments
|
|
237
|
+
├── context_retrieval.py # Vector-based context retrieval
|
|
238
|
+
├── services.py # DecisionGraphService (joins, hydration)
|
|
239
|
+
├── ingestion.py # Graph materialization helpers
|
|
240
|
+
├── visualization.py # vis.js graph builder
|
|
241
|
+
├── markdown_chunker.py # Split markdown by headings
|
|
242
|
+
├── core/
|
|
243
|
+
│ ├── interfaces.py # Protocol definitions
|
|
244
|
+
│ ├── registry.py # StorageBackend ABC
|
|
245
|
+
│ ├── domain.py # Pydantic models
|
|
246
|
+
│ ├── config.py # LLMConfig
|
|
247
|
+
│ └── matching.py # Dedup, scoring, similarity
|
|
248
|
+
├── llm/
|
|
249
|
+
│ └── litellm_adapter.py # LiteLLM-based LLMAdapter
|
|
250
|
+
├── backends/
|
|
251
|
+
│ ├── memory/ # In-memory stores + TF-IDF vector index
|
|
252
|
+
│ └── firestore/ # Google Cloud Firestore stores
|
|
253
|
+
├── context_graph/ # Graph query layer (planner, templates, post-processing)
|
|
254
|
+
└── prompts/ # LLM prompt templates
|
|
255
|
+
```
|
|
256
|
+
|
|
257
|
+
## Contributing
|
|
258
|
+
|
|
259
|
+
Contributions are welcome. Please open an issue first to discuss what you'd like to change.
|
|
260
|
+
|
|
261
|
+
```bash
|
|
262
|
+
git clone https://github.com/ResearchifyLabs/py-context-graph.git
|
|
263
|
+
cd py-context-graph
|
|
264
|
+
python -m venv .venv && source .venv/bin/activate
|
|
265
|
+
pip install -e ".[dev]"
|
|
266
|
+
PYTHONPATH=src:tests python -m unittest discover -s tests -p 'test_*.py'
|
|
267
|
+
```
|
|
268
|
+
|
|
269
|
+
## License
|
|
270
|
+
|
|
271
|
+
[MIT](LICENSE)
|