hebbmem 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,21 @@
1
+ name: CI
2
+
3
+ on:
4
+ push:
5
+ branches: [main]
6
+ pull_request:
7
+ branches: [main]
8
+
9
+ jobs:
10
+ test:
11
+ runs-on: ubuntu-latest
12
+ strategy:
13
+ matrix:
14
+ python-version: ["3.10", "3.11", "3.12"]
15
+ steps:
16
+ - uses: actions/checkout@v4
17
+ - uses: actions/setup-python@v5
18
+ with:
19
+ python-version: ${{ matrix.python-version }}
20
+ - run: pip install -e ".[dev]"
21
+ - run: pytest tests/ -v --cov=hebbmem --cov-report=term-missing --cov-fail-under=88
@@ -0,0 +1,34 @@
1
+ # Publishes to PyPI when a version tag (v*) is pushed.
2
+ #
3
+ # Uses Trusted Publishing (OIDC) — no API token needed in secrets.
4
+ # One-time setup required on PyPI:
5
+ # 1. Go to https://pypi.org/manage/project/hebbmem/settings/publishing/
6
+ # (or create the project first via manual upload / `twine upload`)
7
+ # 2. Add a new "GitHub Actions" publisher:
8
+ # - Owner: codepawl
9
+ # - Repository: hebbmem
10
+ # - Workflow: publish.yml
11
+ # - Environment: pypi
12
+
13
+ name: Publish to PyPI
14
+
15
+ on:
16
+ push:
17
+ tags: ["v*"]
18
+
19
+ jobs:
20
+ publish:
21
+ runs-on: ubuntu-latest
22
+ permissions:
23
+ id-token: write
24
+ environment:
25
+ name: pypi
26
+ url: https://pypi.org/p/hebbmem
27
+ steps:
28
+ - uses: actions/checkout@v4
29
+ - uses: actions/setup-python@v5
30
+ with:
31
+ python-version: "3.12"
32
+ - run: pip install build
33
+ - run: python -m build
34
+ - uses: pypa/gh-action-pypi-publish@release/v1
@@ -0,0 +1,10 @@
1
+ __pycache__/
2
+ *.pyc
3
+ *.pyo
4
+ dist/
5
+ build/
6
+ *.egg-info/
7
+ .coverage
8
+ .pytest_cache/
9
+ .venv/
10
+ .claude/
hebbmem-0.1.0/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Codepawl
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
hebbmem-0.1.0/PKG-INFO ADDED
@@ -0,0 +1,79 @@
1
+ Metadata-Version: 2.4
2
+ Name: hebbmem
3
+ Version: 0.1.0
4
+ Summary: Hebbian memory for AI agents — memories that fire together wire together.
5
+ Project-URL: Homepage, https://github.com/codepawl/hebbmem
6
+ Project-URL: Repository, https://github.com/codepawl/hebbmem
7
+ Project-URL: Issues, https://github.com/codepawl/hebbmem/issues
8
+ Author-email: An <an@codepawl.dev>
9
+ License-Expression: MIT
10
+ License-File: LICENSE
11
+ Keywords: agent,ai,hebbian,memory,neuroscience
12
+ Classifier: Development Status :: 3 - Alpha
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: License :: OSI Approved :: MIT License
15
+ Classifier: Programming Language :: Python :: 3
16
+ Classifier: Programming Language :: Python :: 3.10
17
+ Classifier: Programming Language :: Python :: 3.11
18
+ Classifier: Programming Language :: Python :: 3.12
19
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
20
+ Requires-Python: >=3.10
21
+ Requires-Dist: numpy>=1.24
22
+ Provides-Extra: dev
23
+ Requires-Dist: pytest-cov>=4.0; extra == 'dev'
24
+ Requires-Dist: pytest>=7.0; extra == 'dev'
25
+ Provides-Extra: ml
26
+ Requires-Dist: sentence-transformers>=2.2; extra == 'ml'
27
+ Description-Content-Type: text/markdown
28
+
29
+ # hebbmem
30
+
31
+ Hebbian memory for AI agents — memories that fire together wire together.
32
+
33
+ ## Install
34
+
35
+ ```bash
36
+ pip install hebbmem
37
+ ```
38
+
39
+ For higher-quality embeddings (recommended):
40
+
41
+ ```bash
42
+ pip install "hebbmem[ml]"
43
+ ```
44
+
45
+ ## Quick Start
46
+
47
+ ```python
48
+ from hebbmem import HebbMem
49
+
50
+ mem = HebbMem()
51
+
52
+ # Store memories
53
+ mem.store("Python is great for data science", importance=0.8)
54
+ mem.store("JavaScript runs in the browser", importance=0.5)
55
+ mem.store("Neural networks learn from data", importance=0.7)
56
+
57
+ # Time passes, memories decay
58
+ mem.step(5)
59
+
60
+ # Recall activates related memories through the graph
61
+ results = mem.recall("machine learning with Python", top_k=3)
62
+ for r in results:
63
+ print(f"{r.content} (score={r.score:.3f})")
64
+ ```
65
+
66
+ ## How It Works
67
+
68
+ hebbmem replaces flat vector storage with three neuroscience mechanisms:
69
+
70
+ **Decay** — Memories fade over time unless reinforced, following the Ebbinghaus forgetting curve. Recent and frequently accessed memories stay strong.
71
+
72
+ **Hebbian Learning** — Memories recalled together strengthen their connections. "Neurons that fire together wire together." Over time, the graph learns which memories are related through usage, not just embedding similarity.
73
+
74
+ **Spreading Activation** — Recalling one memory activates related ones through the graph, surfacing connections that keyword or vector search alone would miss.
75
+
76
+ ## Links
77
+
78
+ - [GitHub](https://github.com/codepawl/hebbmem)
79
+ - [Codepawl](https://github.com/codepawl)
@@ -0,0 +1,51 @@
1
+ # hebbmem
2
+
3
+ Hebbian memory for AI agents — memories that fire together wire together.
4
+
5
+ ## Install
6
+
7
+ ```bash
8
+ pip install hebbmem
9
+ ```
10
+
11
+ For higher-quality embeddings (recommended):
12
+
13
+ ```bash
14
+ pip install "hebbmem[ml]"
15
+ ```
16
+
17
+ ## Quick Start
18
+
19
+ ```python
20
+ from hebbmem import HebbMem
21
+
22
+ mem = HebbMem()
23
+
24
+ # Store memories
25
+ mem.store("Python is great for data science", importance=0.8)
26
+ mem.store("JavaScript runs in the browser", importance=0.5)
27
+ mem.store("Neural networks learn from data", importance=0.7)
28
+
29
+ # Time passes, memories decay
30
+ mem.step(5)
31
+
32
+ # Recall activates related memories through the graph
33
+ results = mem.recall("machine learning with Python", top_k=3)
34
+ for r in results:
35
+ print(f"{r.content} (score={r.score:.3f})")
36
+ ```
37
+
38
+ ## How It Works
39
+
40
+ hebbmem replaces flat vector storage with three neuroscience mechanisms:
41
+
42
+ **Decay** — Memories fade over time unless reinforced, following the Ebbinghaus forgetting curve. Recent and frequently accessed memories stay strong.
43
+
44
+ **Hebbian Learning** — Memories recalled together strengthen their connections. "Neurons that fire together wire together." Over time, the graph learns which memories are related through usage, not just embedding similarity.
45
+
46
+ **Spreading Activation** — Recalling one memory activates related ones through the graph, surfacing connections that keyword or vector search alone would miss.
47
+
48
+ ## Links
49
+
50
+ - [GitHub](https://github.com/codepawl/hebbmem)
51
+ - [Codepawl](https://github.com/codepawl)
@@ -0,0 +1,45 @@
1
+ [build-system]
2
+ requires = ["hatchling"]
3
+ build-backend = "hatchling.build"
4
+
5
+ [project]
6
+ name = "hebbmem"
7
+ version = "0.1.0"
8
+ description = "Hebbian memory for AI agents — memories that fire together wire together."
9
+ readme = "README.md"
10
+ license = "MIT"
11
+ requires-python = ">=3.10"
12
+ authors = [{ name = "An", email = "an@codepawl.dev" }]
13
+ keywords = ["ai", "agent", "memory", "hebbian", "neuroscience"]
14
+ classifiers = [
15
+ "Development Status :: 3 - Alpha",
16
+ "Intended Audience :: Developers",
17
+ "License :: OSI Approved :: MIT License",
18
+ "Programming Language :: Python :: 3",
19
+ "Programming Language :: Python :: 3.10",
20
+ "Programming Language :: Python :: 3.11",
21
+ "Programming Language :: Python :: 3.12",
22
+ "Topic :: Scientific/Engineering :: Artificial Intelligence",
23
+ ]
24
+ dependencies = ["numpy>=1.24"]
25
+
26
+ [project.optional-dependencies]
27
+ ml = ["sentence-transformers>=2.2"]
28
+ dev = ["pytest>=7.0", "pytest-cov>=4.0"]
29
+
30
+ [project.urls]
31
+ Homepage = "https://github.com/codepawl/hebbmem"
32
+ Repository = "https://github.com/codepawl/hebbmem"
33
+ Issues = "https://github.com/codepawl/hebbmem/issues"
34
+
35
+ [tool.hatch.build]
36
+ exclude = [".claude/"]
37
+
38
+ [tool.hatch.build.targets.wheel]
39
+ packages = ["src/hebbmem"]
40
+
41
+ [tool.pytest.ini_options]
42
+ testpaths = ["tests"]
43
+
44
+ [tool.coverage.run]
45
+ source = ["hebbmem"]
@@ -0,0 +1,21 @@
1
+ """hebbmem - Bio-inspired memory for AI agents."""
2
+
3
+ # TODO v0.2: py.typed — PEP 561 marker
4
+ # TODO v0.2: hebbmem/integrations/ — framework adapters (LangChain, OpenPawl, OpenCode)
5
+
6
+ from hebbmem.encoders import EncoderBackend, HashEncoder, SentenceTransformerEncoder
7
+ from hebbmem.memory import HebbMem
8
+ from hebbmem.node import MemoryNode
9
+ from hebbmem.types import Config, Edge, RecallResult
10
+
11
+ __version__ = "0.1.0"
12
+ __all__ = [
13
+ "HebbMem",
14
+ "Config",
15
+ "Edge",
16
+ "RecallResult",
17
+ "MemoryNode",
18
+ "EncoderBackend",
19
+ "HashEncoder",
20
+ "SentenceTransformerEncoder",
21
+ ]
@@ -0,0 +1,89 @@
1
+ """Pluggable encoder backends for hebbmem."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import hashlib
6
+ from abc import ABC, abstractmethod
7
+
8
+ import numpy as np
9
+
10
+
11
class EncoderBackend(ABC):
    """Abstract interface for text-to-embedding encoders.

    A concrete backend turns a string into a fixed-dimension float32
    vector; vectors from the same backend are comparable via cosine
    similarity.
    """

    @property
    @abstractmethod
    def dimension(self) -> int:
        """Length of the vectors this encoder produces."""
        ...

    @abstractmethod
    def encode(self, text: str) -> np.ndarray:
        """Encode one text into a float32 vector of length ``dimension``."""
        ...

    @abstractmethod
    def encode_batch(self, texts: list[str]) -> np.ndarray:
        """Encode several texts at once; returns an array of shape (n, dimension)."""
        ...
28
+
29
+
30
class HashEncoder(EncoderBackend):
    """Zero-dependency encoder using the hashing trick.

    Deterministic: the same input always produces the same vector.
    Each whitespace token is hashed with ``num_hashes`` different seeds;
    each hash selects a bucket and a sign, and the accumulated vector is
    L2-normalized so cosine similarity works correctly.
    """

    def __init__(self, dimension: int = 256, num_hashes: int = 4) -> None:
        """Create an encoder.

        Args:
            dimension: Output vector length. Must be >= 1.
            num_hashes: Hash seeds per token. Must be >= 1.

        Raises:
            ValueError: If either argument is less than 1.
        """
        if dimension < 1:
            raise ValueError(f"dimension must be >= 1, got {dimension}")
        if num_hashes < 1:
            raise ValueError(f"num_hashes must be >= 1, got {num_hashes}")
        self._dimension = dimension
        self._num_hashes = num_hashes

    @property
    def dimension(self) -> int:
        """Length of the vectors this encoder produces."""
        return self._dimension

    def encode(self, text: str) -> np.ndarray:
        """Encode text to a unit-norm float32 vector.

        Empty or whitespace-only text yields the all-zero vector
        (norm 0, so it is similar to nothing).
        """
        vec = np.zeros(self._dimension, dtype=np.float32)
        tokens = text.lower().split()
        for token in tokens:
            for seed in range(self._num_hashes):
                # usedforsecurity=False: md5 serves only as a fast, stable
                # hash here; the flag also keeps it usable on FIPS-restricted
                # systems where security-grade md5 is disabled.
                h = int(
                    hashlib.md5(
                        f"{seed}:{token}".encode(), usedforsecurity=False
                    ).hexdigest(),
                    16,
                )
                idx = h % self._dimension
                # Derive a pseudo-random sign from higher bits of the hash.
                sign = 1.0 if (h // self._dimension) % 2 == 0 else -1.0
                vec[idx] += sign
        norm = np.linalg.norm(vec)
        if norm > 0:
            vec /= norm
        return vec

    def encode_batch(self, texts: list[str]) -> np.ndarray:
        """Encode multiple texts. Returns shape (n, dimension)."""
        if not texts:
            # np.stack raises ValueError on an empty sequence; return an
            # empty (0, dimension) array instead.
            return np.zeros((0, self._dimension), dtype=np.float32)
        return np.stack([self.encode(t) for t in texts])
62
+
63
+
64
class SentenceTransformerEncoder(EncoderBackend):
    """Quality encoder backed by sentence-transformers (optional dependency).

    The import happens lazily inside ``__init__`` so that the package
    remains importable when sentence-transformers is not installed.
    """

    def __init__(self, model_name: str = "all-MiniLM-L6-v2") -> None:
        # Deferred import: an ImportError surfaces here, at construction
        # time, rather than when this module is first imported.
        from sentence_transformers import SentenceTransformer

        self._model = SentenceTransformer(model_name)
        self._dimension: int = self._model.get_sentence_embedding_dimension()

    @property
    def dimension(self) -> int:
        """Embedding size reported by the underlying model."""
        return self._dimension

    def encode(self, text: str) -> np.ndarray:
        """Encode one text to a float32 vector."""
        vector = self._model.encode(text, convert_to_numpy=True)
        return vector.astype(np.float32)

    def encode_batch(self, texts: list[str]) -> np.ndarray:
        """Encode multiple texts. Returns shape (n, dimension)."""
        vectors = self._model.encode(texts, convert_to_numpy=True)
        return vectors.astype(np.float32)
82
+
83
+
84
def auto_select_encoder() -> EncoderBackend:
    """Pick the best available backend.

    Prefers SentenceTransformerEncoder; falls back to the dependency-free
    HashEncoder when sentence-transformers cannot be imported.
    """
    try:
        encoder: EncoderBackend = SentenceTransformerEncoder()
    except ImportError:
        encoder = HashEncoder()
    return encoder
@@ -0,0 +1,182 @@
1
+ """MemoryGraph — weighted graph with spreading activation and Hebbian learning."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import uuid
6
+ from collections import deque
7
+
8
+ import numpy as np
9
+
10
+ from hebbmem.node import MemoryNode
11
+ from hebbmem.types import Config, Edge
12
+
13
+
14
class MemoryGraph:
    """Graph of memory nodes connected by weighted edges (synapses).

    Implements spreading activation, Hebbian reinforcement, and temporal decay.

    Edges are logically undirected but stored once per direction: every
    synapse between A and B exists as both ``(A, B)`` and ``(B, A)`` keys
    in ``_edges`` (see ``_set_edge`` and ``edge_count``).
    """

    def __init__(self, config: Config | None = None) -> None:
        """Create an empty graph; ``config`` defaults to ``Config()``."""
        self.config = config or Config()
        # All nodes, keyed by their memory_id.
        self._nodes: dict[uuid.UUID, MemoryNode] = {}
        # Directional edge store: each undirected synapse occupies two keys.
        self._edges: dict[tuple[uuid.UUID, uuid.UUID], Edge] = {}
        # Embedding cache for vectorized cosine similarity.
        # _embedding_matrix rows are row-normalized embeddings aligned with
        # _embedding_ids; _cache_dirty marks it stale after node changes.
        self._embedding_matrix: np.ndarray | None = None
        self._embedding_ids: list[uuid.UUID] = []
        self._cache_dirty: bool = True

    # --- Node operations ---

    def add_node(self, node: MemoryNode) -> None:
        """Add a node and auto-connect to similar existing nodes."""
        self._nodes[node.memory_id] = node
        # Mark the cache stale BEFORE auto-connect so _auto_connect rebuilds
        # it with the new node included.
        self._cache_dirty = True
        self._auto_connect(node)

    def remove_node(self, memory_id: uuid.UUID) -> None:
        """Remove a node and all its edges. No-op for unknown ids."""
        self._nodes.pop(memory_id, None)
        # Drop every edge touching this node, in either direction.
        dead = [k for k in self._edges if memory_id in k]
        for k in dead:
            del self._edges[k]
        self._cache_dirty = True

    def get_node(self, memory_id: uuid.UUID) -> MemoryNode | None:
        """Return the node for ``memory_id``, or None if absent."""
        return self._nodes.get(memory_id)

    @property
    def node_count(self) -> int:
        """Number of nodes currently in the graph."""
        return len(self._nodes)

    @property
    def edge_count(self) -> int:
        """Number of logical (undirected) edges."""
        return len(self._edges) // 2  # undirected, stored both directions

    # --- Embedding cache ---

    def _rebuild_cache(self) -> None:
        """Recompute the row-normalized embedding matrix from all nodes."""
        if not self._nodes:
            self._embedding_matrix = None
            self._embedding_ids = []
            self._cache_dirty = False
            return
        self._embedding_ids = list(self._nodes.keys())
        self._embedding_matrix = np.stack(
            [self._nodes[nid].embedding for nid in self._embedding_ids]
        )
        # Row-normalize so a dot product with a normalized query equals
        # cosine similarity; clamp avoids division by zero for zero vectors.
        norms = np.linalg.norm(self._embedding_matrix, axis=1, keepdims=True)
        norms = np.maximum(norms, 1e-10)
        self._embedding_matrix /= norms
        self._cache_dirty = False

    def cosine_similarity(
        self, query_embedding: np.ndarray, top_k: int = 5
    ) -> list[tuple[uuid.UUID, float]]:
        """Vectorized cosine similarity against all nodes.

        Returns list of (memory_id, similarity) sorted descending.
        Returns [] when the graph is empty.
        """
        if self._cache_dirty:
            self._rebuild_cache()
        if self._embedding_matrix is None:
            return []
        # max(..., 1e-10) guards against a zero query vector.
        query_norm = query_embedding / max(float(np.linalg.norm(query_embedding)), 1e-10)
        scores = self._embedding_matrix @ query_norm
        top_indices = np.argsort(scores)[::-1][:top_k]
        return [(self._embedding_ids[i], float(scores[i])) for i in top_indices]

    # --- Auto-connect ---

    def _auto_connect(self, node: MemoryNode) -> None:
        """Connect new node to existing nodes above similarity threshold."""
        if self._cache_dirty:
            self._rebuild_cache()
        # <= 1: the rebuilt cache contains only the new node itself.
        if self._embedding_matrix is None or len(self._embedding_ids) <= 1:
            return
        query_norm = node.embedding / max(float(np.linalg.norm(node.embedding)), 1e-10)
        scores = self._embedding_matrix @ query_norm
        threshold = self.config.auto_connect_threshold
        for i, sim in enumerate(scores):
            nid = self._embedding_ids[i]
            # Skip self-similarity; initial edge weight is the similarity.
            if nid != node.memory_id and sim >= threshold:
                self._set_edge(node.memory_id, nid, float(sim))

    def _set_edge(self, a: uuid.UUID, b: uuid.UUID, weight: float) -> None:
        """Set undirected edge weight (stored as two directional Edge objects)."""
        self._edges[(a, b)] = Edge(weight=weight)
        self._edges[(b, a)] = Edge(weight=weight)

    def get_neighbors(self, memory_id: uuid.UUID) -> list[tuple[uuid.UUID, float]]:
        """Return [(neighbor_id, edge_weight), ...] for a node.

        NOTE(review): scans every edge, so this is O(E) per call; called
        per dequeued node inside spread_activation. Fine for small graphs,
        a candidate for an adjacency index if graphs grow large.
        """
        result = []
        for (src, dst), edge in self._edges.items():
            if src == memory_id:
                result.append((dst, edge.weight))
        return result

    # --- Spreading Activation (BFS) ---

    def spread_activation(
        self, seeds: list[tuple[uuid.UUID, float]]
    ) -> list[uuid.UUID]:
        """BFS spread from seed nodes. Returns all activated node IDs.

        seeds: [(memory_id, initial_activation), ...]

        Spread stops when hop count reaches config.max_hops or the
        propagated amount falls below config.activation_threshold.
        """
        activated: set[uuid.UUID] = set()
        queue: deque[tuple[uuid.UUID, int]] = deque()
        threshold = self.config.activation_threshold

        for nid, act in seeds:
            node = self._nodes.get(nid)
            # NOTE(review): truthiness test — assumes MemoryNode has no
            # falsy instances (no __bool__/__len__); `is not None` would be
            # stricter. Confirm against MemoryNode's definition.
            if node:
                node.activate(act)
                activated.add(nid)
                queue.append((nid, 0))

        while queue:
            current_id, hop = queue.popleft()
            if hop >= self.config.max_hops:
                continue
            current_node = self._nodes[current_id]
            for neighbor_id, edge_weight in self.get_neighbors(current_id):
                # Activation passed along an edge is attenuated by the edge
                # weight and the global spread factor.
                spread_amount = (
                    current_node.activation * edge_weight * self.config.spread_factor
                )
                if spread_amount < threshold:
                    continue
                neighbor = self._nodes.get(neighbor_id)
                if neighbor:
                    # Already-activated neighbors still receive activation,
                    # but are not re-enqueued (each node is expanded once).
                    neighbor.activate(spread_amount)
                    if neighbor_id not in activated:
                        activated.add(neighbor_id)
                        queue.append((neighbor_id, hop + 1))

        return list(activated)

    # --- Hebbian Reinforcement ---

    def hebbian_update(self, activated_ids: list[uuid.UUID]) -> None:
        """Strengthen edges between co-activated nodes.

        Weight moves toward 1.0 by a fraction ``hebbian_lr`` of the remaining
        headroom, so it asymptotically saturates at 1.0. Both directional
        Edge objects of a synapse match the (a in ids and b in ids) test,
        so each is updated once per call, keeping the pair symmetric.
        """
        lr = self.config.hebbian_lr
        ids = set(activated_ids)
        for (a, b), edge in self._edges.items():
            if a in ids and b in ids:
                edge.weight = min(1.0, edge.weight + lr * (1.0 - edge.weight))
                edge.co_activations += 1

    # --- Temporal Decay ---

    def decay_all(self) -> None:
        """Apply one time-step of decay to all nodes and edges.

        Edges whose weight falls below 0.01 are pruned; both directions of
        a synapse decay identically, so they are pruned together.
        """
        for node in self._nodes.values():
            node.decay(self.config.activation_decay, self.config.strength_decay)

        # Collect dead keys first — deleting while iterating a dict is invalid.
        dead_edges = []
        for key, edge in self._edges.items():
            edge.weight *= self.config.edge_decay
            if edge.weight < 0.01:
                dead_edges.append(key)
        for key in dead_edges:
            del self._edges[key]