hebbmem 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hebbmem/__init__.py ADDED
@@ -0,0 +1,21 @@
1
+ """hebbmem - Bio-inspired memory for AI agents."""
2
+
3
+ # TODO v0.2: py.typed — PEP 561 marker
4
+ # TODO v0.2: hebbmem/integrations/ — framework adapters (LangChain, OpenPawl, OpenCode)
5
+
6
+ from hebbmem.encoders import EncoderBackend, HashEncoder, SentenceTransformerEncoder
7
+ from hebbmem.memory import HebbMem
8
+ from hebbmem.node import MemoryNode
9
+ from hebbmem.types import Config, Edge, RecallResult
10
+
11
+ __version__ = "0.1.0"
12
+ __all__ = [
13
+ "HebbMem",
14
+ "Config",
15
+ "Edge",
16
+ "RecallResult",
17
+ "MemoryNode",
18
+ "EncoderBackend",
19
+ "HashEncoder",
20
+ "SentenceTransformerEncoder",
21
+ ]
hebbmem/encoders.py ADDED
@@ -0,0 +1,89 @@
1
+ """Pluggable encoder backends for hebbmem."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import hashlib
6
+ from abc import ABC, abstractmethod
7
+
8
+ import numpy as np
9
+
10
+
11
class EncoderBackend(ABC):
    """Abstract interface for turning text into embedding vectors.

    Concrete backends produce fixed-dimension float32 vectors; the
    dimension is exposed so callers can pre-allocate and validate.
    """

    @property
    @abstractmethod
    def dimension(self) -> int:
        """Length of the vectors this encoder produces."""
        ...

    @abstractmethod
    def encode(self, text: str) -> np.ndarray:
        """Encode text to a fixed-dimension float32 vector."""
        ...

    @abstractmethod
    def encode_batch(self, texts: list[str]) -> np.ndarray:
        """Encode multiple texts. Returns shape (n, dim)."""
        ...
28
+
29
+
30
class HashEncoder(EncoderBackend):
    """Zero-dependency encoder using the hashing trick.

    Deterministic: same input always produces same vector.
    Uses multiple hash seeds to fill a fixed-dimension vector,
    then L2-normalizes so cosine similarity works correctly.

    MD5 is used purely as a fast, stable hash of tokens — not for
    security purposes.
    """

    def __init__(self, dimension: int = 256, num_hashes: int = 4) -> None:
        """Create an encoder.

        Args:
            dimension: length of the output vectors (>= 1).
            num_hashes: hash seeds per token (>= 1); more seeds spread
                each token over more vector positions.

        Raises:
            ValueError: if dimension or num_hashes is less than 1.
        """
        # Validate eagerly: a dimension of 0 would otherwise surface as a
        # ZeroDivisionError deep inside encode(), far from the cause.
        if dimension < 1:
            raise ValueError(f"dimension must be >= 1, got {dimension}")
        if num_hashes < 1:
            raise ValueError(f"num_hashes must be >= 1, got {num_hashes}")
        self._dimension = dimension
        self._num_hashes = num_hashes

    @property
    def dimension(self) -> int:
        """Length of the vectors this encoder produces."""
        return self._dimension

    def encode(self, text: str) -> np.ndarray:
        """Encode text to an L2-normalized float32 vector.

        Empty or whitespace-only text returns the zero vector.
        """
        vec = np.zeros(self._dimension, dtype=np.float32)
        tokens = text.lower().split()
        for token in tokens:
            for seed in range(self._num_hashes):
                h = int(hashlib.md5(f"{seed}:{token}".encode()).hexdigest(), 16)
                idx = h % self._dimension
                # Derive a pseudo-random sign from higher hash bits so
                # collisions partially cancel instead of always adding.
                sign = 1.0 if (h // self._dimension) % 2 == 0 else -1.0
                vec[idx] += sign
        norm = np.linalg.norm(vec)
        if norm > 0:
            vec /= norm
        return vec

    def encode_batch(self, texts: list[str]) -> np.ndarray:
        """Encode multiple texts. Returns shape (n, dim); n may be 0."""
        if not texts:
            # np.stack raises on an empty sequence; return an empty matrix.
            return np.zeros((0, self._dimension), dtype=np.float32)
        return np.stack([self.encode(t) for t in texts])
62
+
63
+
64
class SentenceTransformerEncoder(EncoderBackend):
    """Quality encoder using sentence-transformers (optional dependency)."""

    def __init__(self, model_name: str = "all-MiniLM-L6-v2") -> None:
        # Imported lazily so the package works without the `ml` extra;
        # raises ImportError here if sentence-transformers is absent.
        from sentence_transformers import SentenceTransformer

        self._model = SentenceTransformer(model_name)
        self._dimension: int = self._model.get_sentence_embedding_dimension()

    @property
    def dimension(self) -> int:
        """Embedding size reported by the underlying model."""
        return self._dimension

    def encode(self, text: str) -> np.ndarray:
        """Encode a single text; result is cast to float32."""
        embedding = self._model.encode(text, convert_to_numpy=True)
        return embedding.astype(np.float32)

    def encode_batch(self, texts: list[str]) -> np.ndarray:
        """Encode a batch of texts; result is cast to float32."""
        embeddings = self._model.encode(texts, convert_to_numpy=True)
        return embeddings.astype(np.float32)
82
+
83
+
84
def auto_select_encoder() -> EncoderBackend:
    """Pick the best available backend.

    Prefers the sentence-transformers model when the optional ``ml``
    extra is installed; otherwise falls back to the zero-dependency
    hashing encoder.
    """
    try:
        backend: EncoderBackend = SentenceTransformerEncoder()
    except ImportError:
        backend = HashEncoder()
    return backend
hebbmem/graph.py ADDED
@@ -0,0 +1,182 @@
1
+ """MemoryGraph — weighted graph with spreading activation and Hebbian learning."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import uuid
6
+ from collections import deque
7
+
8
+ import numpy as np
9
+
10
+ from hebbmem.node import MemoryNode
11
+ from hebbmem.types import Config, Edge
12
+
13
+
14
class MemoryGraph:
    """Graph of memory nodes connected by weighted edges (synapses).

    Implements spreading activation, Hebbian reinforcement, and temporal decay.

    Edges are undirected but stored once per direction in ``_edges``; the
    two Edge records of a pair are created and updated symmetrically, so
    their weights always match. ``_adjacency`` is a secondary index
    (node id -> {neighbor id: outgoing Edge}) kept in sync with
    ``_edges`` so neighbor lookups cost O(degree) instead of scanning
    every edge — previously spread_activation was O(V * E).
    """

    def __init__(self, config: Config | None = None) -> None:
        self.config = config or Config()
        self._nodes: dict[uuid.UUID, MemoryNode] = {}
        self._edges: dict[tuple[uuid.UUID, uuid.UUID], Edge] = {}
        # Per-node adjacency index; insertion-ordered dicts preserve the
        # same neighbor order as iterating _edges did.
        self._adjacency: dict[uuid.UUID, dict[uuid.UUID, Edge]] = {}
        # Embedding cache for vectorized cosine similarity
        self._embedding_matrix: np.ndarray | None = None
        self._embedding_ids: list[uuid.UUID] = []
        self._cache_dirty: bool = True

    # --- Node operations ---

    def add_node(self, node: MemoryNode) -> None:
        """Add a node and auto-connect to similar existing nodes."""
        self._nodes[node.memory_id] = node
        self._cache_dirty = True
        self._auto_connect(node)

    def remove_node(self, memory_id: uuid.UUID) -> None:
        """Remove a node and all its edges (both stored directions)."""
        self._nodes.pop(memory_id, None)
        for neighbor_id in self._adjacency.pop(memory_id, {}):
            self._edges.pop((memory_id, neighbor_id), None)
            self._edges.pop((neighbor_id, memory_id), None)
            neighbor_adj = self._adjacency.get(neighbor_id)
            if neighbor_adj is not None:
                neighbor_adj.pop(memory_id, None)
        self._cache_dirty = True

    def get_node(self, memory_id: uuid.UUID) -> MemoryNode | None:
        """Return the node with this id, or None if absent."""
        return self._nodes.get(memory_id)

    @property
    def node_count(self) -> int:
        """Number of nodes currently in the graph."""
        return len(self._nodes)

    @property
    def edge_count(self) -> int:
        """Number of undirected edges."""
        return len(self._edges) // 2  # undirected, stored both directions

    # --- Embedding cache ---

    def _rebuild_cache(self) -> None:
        """Re-stack and L2-normalize all node embeddings into a matrix."""
        if not self._nodes:
            self._embedding_matrix = None
            self._embedding_ids = []
            self._cache_dirty = False
            return
        self._embedding_ids = list(self._nodes.keys())
        self._embedding_matrix = np.stack(
            [self._nodes[nid].embedding for nid in self._embedding_ids]
        )
        norms = np.linalg.norm(self._embedding_matrix, axis=1, keepdims=True)
        # Guard against zero vectors (e.g. empty-text embeddings).
        norms = np.maximum(norms, 1e-10)
        self._embedding_matrix /= norms
        self._cache_dirty = False

    def cosine_similarity(
        self, query_embedding: np.ndarray, top_k: int = 5
    ) -> list[tuple[uuid.UUID, float]]:
        """Vectorized cosine similarity against all nodes.

        Returns list of (memory_id, similarity) sorted descending.
        """
        if self._cache_dirty:
            self._rebuild_cache()
        if self._embedding_matrix is None:
            return []
        query_norm = query_embedding / max(float(np.linalg.norm(query_embedding)), 1e-10)
        scores = self._embedding_matrix @ query_norm
        top_indices = np.argsort(scores)[::-1][:top_k]
        return [(self._embedding_ids[i], float(scores[i])) for i in top_indices]

    # --- Auto-connect ---

    def _auto_connect(self, node: MemoryNode) -> None:
        """Connect new node to existing nodes above similarity threshold."""
        if self._cache_dirty:
            self._rebuild_cache()
        if self._embedding_matrix is None or len(self._embedding_ids) <= 1:
            return
        query_norm = node.embedding / max(float(np.linalg.norm(node.embedding)), 1e-10)
        scores = self._embedding_matrix @ query_norm
        threshold = self.config.auto_connect_threshold
        for i, sim in enumerate(scores):
            nid = self._embedding_ids[i]
            if nid != node.memory_id and sim >= threshold:
                self._set_edge(node.memory_id, nid, float(sim))

    def _set_edge(self, a: uuid.UUID, b: uuid.UUID, weight: float) -> None:
        """Set undirected edge weight and update the adjacency index."""
        self._edges[(a, b)] = Edge(weight=weight)
        self._edges[(b, a)] = Edge(weight=weight)
        self._adjacency.setdefault(a, {})[b] = self._edges[(a, b)]
        self._adjacency.setdefault(b, {})[a] = self._edges[(b, a)]

    def get_neighbors(self, memory_id: uuid.UUID) -> list[tuple[uuid.UUID, float]]:
        """Return [(neighbor_id, edge_weight), ...] for a node in O(degree)."""
        return [
            (neighbor_id, edge.weight)
            for neighbor_id, edge in self._adjacency.get(memory_id, {}).items()
        ]

    # --- Spreading Activation (BFS) ---

    def spread_activation(
        self, seeds: list[tuple[uuid.UUID, float]]
    ) -> list[uuid.UUID]:
        """BFS spread from seed nodes. Returns all activated node IDs.

        seeds: [(memory_id, initial_activation), ...]

        Activation propagated across an edge is attenuated by the edge
        weight and the configured spread factor; propagation stops after
        max_hops or when the amount falls below activation_threshold.
        """
        activated: set[uuid.UUID] = set()
        queue: deque[tuple[uuid.UUID, int]] = deque()
        threshold = self.config.activation_threshold

        for nid, act in seeds:
            node = self._nodes.get(nid)
            if node:
                node.activate(act)
                activated.add(nid)
                queue.append((nid, 0))

        while queue:
            current_id, hop = queue.popleft()
            if hop >= self.config.max_hops:
                continue
            current_node = self._nodes[current_id]
            for neighbor_id, edge_weight in self.get_neighbors(current_id):
                spread_amount = (
                    current_node.activation * edge_weight * self.config.spread_factor
                )
                if spread_amount < threshold:
                    continue
                neighbor = self._nodes.get(neighbor_id)
                if neighbor:
                    neighbor.activate(spread_amount)
                    if neighbor_id not in activated:
                        activated.add(neighbor_id)
                        queue.append((neighbor_id, hop + 1))

        return list(activated)

    # --- Hebbian Reinforcement ---

    def hebbian_update(self, activated_ids: list[uuid.UUID]) -> None:
        """Strengthen edges between co-activated nodes.

        Visits only edges incident to activated nodes (O(sum of their
        degrees) rather than O(all edges)); each stored direction of a
        co-activated pair is reinforced exactly once, keeping the two
        symmetric Edge records in step.
        """
        lr = self.config.hebbian_lr
        ids = set(activated_ids)
        for a in ids:
            for b, edge in self._adjacency.get(a, {}).items():
                if b in ids:
                    # Move weight a fraction lr of the way toward 1.0.
                    edge.weight = min(1.0, edge.weight + lr * (1.0 - edge.weight))
                    edge.co_activations += 1

    # --- Temporal Decay ---

    def decay_all(self) -> None:
        """Apply one time-step of decay to all nodes and edges.

        Edges whose weight falls below 0.01 are pruned. Both directions
        of a pair always carry equal weights, so they are pruned in the
        same step.
        """
        for node in self._nodes.values():
            node.decay(self.config.activation_decay, self.config.strength_decay)

        dead_edges = []
        for key, edge in self._edges.items():
            edge.weight *= self.config.edge_decay
            if edge.weight < 0.01:
                dead_edges.append(key)
        for src, dst in dead_edges:
            del self._edges[(src, dst)]
            src_adj = self._adjacency.get(src)
            if src_adj is not None:
                src_adj.pop(dst, None)
hebbmem/memory.py ADDED
@@ -0,0 +1,142 @@
1
+ """HebbMem — public API for bio-inspired memory."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import uuid
6
+ from typing import Any
7
+
8
+ from hebbmem.encoders import (
9
+ EncoderBackend,
10
+ HashEncoder,
11
+ SentenceTransformerEncoder,
12
+ auto_select_encoder,
13
+ )
14
+ from hebbmem.graph import MemoryGraph
15
+ from hebbmem.node import MemoryNode
16
+ from hebbmem.types import Config, RecallResult
17
+
18
+
19
class HebbMem:
    """Bio-inspired memory for AI agents.

    Uses decay, Hebbian learning, and spreading activation
    to model how human memory works.
    """

    # TODO v0.2: save(path) / HebbMem.load(path) — persistence
    # TODO v0.2: store_batch(contents) — batch ingestion
    # TODO v0.2: threading.RLock — thread safety

    def __init__(
        self,
        encoder: str | EncoderBackend = "auto",
        config: Config | None = None,
    ) -> None:
        self.config = config or Config()
        self._encoder = self._resolve_encoder(encoder)
        self._graph = MemoryGraph(self.config)
        self._time_step: int = 0

    @staticmethod
    def _resolve_encoder(encoder: str | EncoderBackend) -> EncoderBackend:
        """Map an encoder name to a backend, or pass an instance through."""
        if isinstance(encoder, EncoderBackend):
            return encoder
        factories = {
            "auto": auto_select_encoder,
            "hash": HashEncoder,
            "sentence-transformer": SentenceTransformerEncoder,
        }
        factory = factories.get(encoder)
        if factory is None:
            raise ValueError(f"Unknown encoder: {encoder}")
        return factory()

    def store(
        self,
        content: str,
        importance: float = 0.5,
        metadata: dict[str, Any] | None = None,
    ) -> uuid.UUID:
        """Encode and store content. Returns memory_id."""
        node = MemoryNode(
            content=content,
            embedding=self._encoder.encode(content),
            importance=importance,
            metadata=metadata or {},
        )
        self._graph.add_node(node)
        return node.memory_id

    def recall(self, query: str, top_k: int = 5) -> list[RecallResult]:
        """Full recall pipeline: encode → similarity → spread → hebbian → rank."""
        query_vec = self._encoder.encode(query)

        # Over-fetch seed candidates so spreading can surface extra nodes.
        candidates = self._graph.cosine_similarity(query_vec, top_k=top_k * 2)
        if not candidates:
            return []

        # Seed activation proportional to similarity, then let it spread.
        seeds = [(nid, sim) for nid, sim in candidates if sim > 0]
        activated_ids = self._graph.spread_activation(seeds)

        # Co-activated nodes wire together.
        self._graph.hebbian_update(activated_ids)

        weights = self.config.scoring_weights
        sim_by_id = dict(candidates)
        results: list[RecallResult] = []

        for nid in activated_ids:
            node = self._graph.get_node(nid)
            if node is None:
                continue
            node.touch()
            sim = sim_by_id.get(nid, 0.0)
            components = {
                "activation": node.activation,
                "similarity": sim,
                "strength": node.base_strength,
                "importance": node.importance,
            }
            # Weighted blend of the four scoring signals.
            score = sum(w * components[name] for name, w in weights.items())
            results.append(
                RecallResult(
                    memory_id=node.memory_id,
                    content=node.content,
                    score=score,
                    activation=node.activation,
                    similarity=sim,
                    strength=node.base_strength,
                    importance=node.importance,
                    metadata=node.metadata,
                )
            )

        return sorted(results, key=lambda r: r.score, reverse=True)[:top_k]

    def step(self, n: int = 1) -> None:
        """Advance time by n steps, applying decay each step."""
        for _ in range(n):
            self._time_step += 1
            self._graph.decay_all()

    def forget(self, memory_id: uuid.UUID) -> bool:
        """Explicitly remove a memory. Returns True if found and removed."""
        if self._graph.get_node(memory_id) is None:
            return False
        self._graph.remove_node(memory_id)
        return True

    def stats(self) -> dict[str, Any]:
        """Return introspection statistics."""
        return {
            "node_count": self._graph.node_count,
            "edge_count": self._graph.edge_count,
            "time_step": self._time_step,
            "encoder": type(self._encoder).__name__,
        }
hebbmem/node.py ADDED
@@ -0,0 +1,41 @@
1
+ """MemoryNode — a single unit of memory in the graph."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import time
6
+ import uuid
7
+ from dataclasses import dataclass, field
8
+ from typing import Any
9
+
10
+ import numpy as np
11
+
12
+
13
@dataclass
class MemoryNode:
    """A single memory node with content, embedding, and activation dynamics."""

    content: str
    embedding: np.ndarray  # fixed-dimension vector from the encoder
    importance: float = 0.5  # user-assigned ranking weight
    metadata: dict[str, Any] = field(default_factory=dict)
    memory_id: uuid.UUID = field(default_factory=uuid.uuid4)
    activation: float = 0.0  # transient energy in [0, 1]
    base_strength: float = 1.0  # long-term strength, decays slowly
    decay_rate: float = 1.0  # per-node multiplier (1.0 = use global defaults)
    created_at: float = field(default_factory=time.time)
    last_accessed: float = field(default_factory=time.time)
    access_count: int = 0

    def activate(self, amount: float) -> None:
        """Add activation energy, clamped to [0, 1].

        Clamped at both bounds: previously only the upper bound was
        enforced, so a negative amount could drive activation below 0
        in violation of this contract.
        """
        self.activation = min(1.0, max(0.0, self.activation + amount))

    def decay(self, activation_rate: float, strength_rate: float) -> None:
        """Apply one step of temporal decay to activation and strength.

        NOTE(review): decay_rate multiplies the retention factor, so a
        per-node value > 1.0 slows decay rather than speeding it —
        confirm this is the intended semantics.
        """
        self.activation *= activation_rate * self.decay_rate
        self.base_strength *= strength_rate

    def touch(self) -> None:
        """Record an access (updates last_accessed, increments count)."""
        self.last_accessed = time.time()
        self.access_count += 1
hebbmem/types.py ADDED
@@ -0,0 +1,49 @@
1
+ """Shared data structures for hebbmem."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import uuid
6
+ from dataclasses import dataclass, field
7
+ from typing import Any
8
+
9
+
10
@dataclass
class Edge:
    """A weighted connection (synapse) between two memory nodes."""

    # Synaptic strength; reinforced toward 1.0 by Hebbian updates,
    # multiplied down each time step and pruned when below 0.01.
    weight: float = 0.1
    # How many times both endpoints were co-activated during recall.
    co_activations: int = 0
16
+
17
+
18
@dataclass
class Config:
    """Configuration for the hebbmem memory system."""

    # Per-step multiplier on node activation (decays toward 0).
    activation_decay: float = 0.95
    # Per-step multiplier on node base strength.
    strength_decay: float = 0.999
    # Per-step multiplier on edge weights.
    edge_decay: float = 0.99
    # Hebbian learning rate: fraction of the gap to 1.0 an edge gains
    # when both endpoints co-activate.
    hebbian_lr: float = 0.1
    # Fraction of a node's activation propagated across an edge per hop.
    spread_factor: float = 0.5
    # Maximum BFS depth for spreading activation.
    max_hops: int = 3
    # Cosine similarity needed to auto-connect a newly stored node.
    auto_connect_threshold: float = 0.5
    # Minimum spread amount that still activates a neighbor.
    activation_threshold: float = 0.1
    # Relative weights of the recall scoring components (defaults sum to 1.0).
    scoring_weights: dict[str, float] = field(default_factory=lambda: {
        "activation": 0.4,
        "similarity": 0.35,
        "strength": 0.15,
        "importance": 0.1,
    })
36
+
37
+
38
@dataclass
class RecallResult:
    """A single result from a recall query, with full score breakdown."""

    memory_id: uuid.UUID
    content: str
    # Weighted combination of the four component signals below.
    score: float
    # Node activation after spreading, in [0, 1].
    activation: float
    # Cosine similarity to the query (0.0 for nodes surfaced only by spreading).
    similarity: float
    # Node base strength at recall time.
    strength: float
    # User-assigned importance provided at store time.
    importance: float
    # Metadata dict supplied when the memory was stored.
    metadata: dict[str, Any] = field(default_factory=dict)
@@ -0,0 +1,79 @@
1
+ Metadata-Version: 2.4
2
+ Name: hebbmem
3
+ Version: 0.1.0
4
+ Summary: Hebbian memory for AI agents — memories that fire together wire together.
5
+ Project-URL: Homepage, https://github.com/codepawl/hebbmem
6
+ Project-URL: Repository, https://github.com/codepawl/hebbmem
7
+ Project-URL: Issues, https://github.com/codepawl/hebbmem/issues
8
+ Author-email: An <an@codepawl.dev>
9
+ License-Expression: MIT
10
+ License-File: LICENSE
11
+ Keywords: agent,ai,hebbian,memory,neuroscience
12
+ Classifier: Development Status :: 3 - Alpha
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: License :: OSI Approved :: MIT License
15
+ Classifier: Programming Language :: Python :: 3
16
+ Classifier: Programming Language :: Python :: 3.10
17
+ Classifier: Programming Language :: Python :: 3.11
18
+ Classifier: Programming Language :: Python :: 3.12
19
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
20
+ Requires-Python: >=3.10
21
+ Requires-Dist: numpy>=1.24
22
+ Provides-Extra: dev
23
+ Requires-Dist: pytest-cov>=4.0; extra == 'dev'
24
+ Requires-Dist: pytest>=7.0; extra == 'dev'
25
+ Provides-Extra: ml
26
+ Requires-Dist: sentence-transformers>=2.2; extra == 'ml'
27
+ Description-Content-Type: text/markdown
28
+
29
+ # hebbmem
30
+
31
+ Hebbian memory for AI agents — memories that fire together wire together.
32
+
33
+ ## Install
34
+
35
+ ```bash
36
+ pip install hebbmem
37
+ ```
38
+
39
+ For higher-quality embeddings (recommended):
40
+
41
+ ```bash
42
+ pip install "hebbmem[ml]"
43
+ ```
44
+
45
+ ## Quick Start
46
+
47
+ ```python
48
+ from hebbmem import HebbMem
49
+
50
+ mem = HebbMem()
51
+
52
+ # Store memories
53
+ mem.store("Python is great for data science", importance=0.8)
54
+ mem.store("JavaScript runs in the browser", importance=0.5)
55
+ mem.store("Neural networks learn from data", importance=0.7)
56
+
57
+ # Time passes, memories decay
58
+ mem.step(5)
59
+
60
+ # Recall activates related memories through the graph
61
+ results = mem.recall("machine learning with Python", top_k=3)
62
+ for r in results:
63
+ print(f"{r.content} (score={r.score:.3f})")
64
+ ```
65
+
66
+ ## How It Works
67
+
68
+ hebbmem replaces flat vector storage with three neuroscience mechanisms:
69
+
70
+ **Decay** — Memories fade over time unless reinforced, following the Ebbinghaus forgetting curve. Recent and frequently accessed memories stay strong.
71
+
72
+ **Hebbian Learning** — Memories recalled together strengthen their connections. "Neurons that fire together wire together." Over time, the graph learns which memories are related through usage, not just embedding similarity.
73
+
74
+ **Spreading Activation** — Recalling one memory activates related ones through the graph, surfacing connections that keyword or vector search alone would miss.
75
+
76
+ ## Links
77
+
78
+ - [GitHub](https://github.com/codepawl/hebbmem)
79
+ - [Codepawl](https://github.com/codepawl)
@@ -0,0 +1,10 @@
1
+ hebbmem/__init__.py,sha256=TSfOcTnaoOvdG7xq_tM_C_Nt7urSDfGCs8vtQt0lggU,584
2
+ hebbmem/encoders.py,sha256=mhT3ifWIqUpMJPkn_TiWWyKoqQmhYeByVNtUZMbkxMI,2768
3
+ hebbmem/graph.py,sha256=Tt9InYdcpsVert8uB4Q1YMZtBdqsEzraR8euHHsxPNQ,6686
4
+ hebbmem/memory.py,sha256=QuzDrB6FOjbrPI_O1fyOLHJLW9irZlBBQu_m80Tm94Y,4692
5
+ hebbmem/node.py,sha256=yLX3-ThW9dT9A4cS4CgxzeTF--yoKeHXQsehFmi75hg,1386
6
+ hebbmem/types.py,sha256=AgDJ4ZuA7SI6FRH35Qm1MkGXy1X07CNIRtFJnP0UDj0,1140
7
+ hebbmem-0.1.0.dist-info/METADATA,sha256=1X024ZD42Tkw0s6bVpmoXZaaqVk92aphXEsuEWjFbOU,2592
8
+ hebbmem-0.1.0.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
9
+ hebbmem-0.1.0.dist-info/licenses/LICENSE,sha256=U3h9vQu24D-tEPiBuuIh-a8BQV1xKzV-D5j1st3qFLc,1065
10
+ hebbmem-0.1.0.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: hatchling 1.29.0
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Codepawl
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.