nvidia-nat-redis 1.3.0a20250828__py3-none-any.whl → 1.3.0a20250829__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions exactly as they appear in their respective public registries.
- nat/plugins/redis/redis_editor.py +35 -35
- nat/plugins/redis/schema.py +19 -19
- {nvidia_nat_redis-1.3.0a20250828.dist-info → nvidia_nat_redis-1.3.0a20250829.dist-info}/METADATA +2 -2
- {nvidia_nat_redis-1.3.0a20250828.dist-info → nvidia_nat_redis-1.3.0a20250829.dist-info}/RECORD +7 -7
- {nvidia_nat_redis-1.3.0a20250828.dist-info → nvidia_nat_redis-1.3.0a20250829.dist-info}/WHEEL +0 -0
- {nvidia_nat_redis-1.3.0a20250828.dist-info → nvidia_nat_redis-1.3.0a20250829.dist-info}/entry_points.txt +0 -0
- {nvidia_nat_redis-1.3.0a20250828.dist-info → nvidia_nat_redis-1.3.0a20250829.dist-info}/top_level.txt +0 -0
nat/plugins/redis/redis_editor.py
CHANGED
@@ -54,7 +54,7 @@ class RedisEditor(MemoryEditor):
         Insert Multiple MemoryItems into Redis.
         Each MemoryItem is stored with its metadata and tags.
         """
-        logger.debug(
+        logger.debug("Attempting to add %d items to Redis", len(items))

         for memory_item in items:
             item_meta = memory_item.metadata
@@ -65,7 +65,7 @@ class RedisEditor(MemoryEditor):

             # Create a unique key for this memory item
             memory_key = f"{self._key_prefix}:memory:{memory_id}"
-            logger.debug(
+            logger.debug("Generated memory key: %s", memory_key)

             # Prepare memory data
             memory_data = {
@@ -75,30 +75,30 @@ class RedisEditor(MemoryEditor):
                 "metadata": item_meta,
                 "memory": memory_item.memory or ""
             }
-            logger.debug(
+            logger.debug("Prepared memory data for key %s", memory_key)

             # If we have memory, compute and store the embedding
             if memory_item.memory:
                 logger.debug("Computing embedding for memory text")
                 search_vector = await self._embedder.aembed_query(memory_item.memory)
-                logger.debug(
+                logger.debug("Generated embedding vector of length: %d", len(search_vector))
                 memory_data["embedding"] = search_vector

             try:
                 # Store as JSON in Redis
-                logger.debug(
+                logger.debug("Attempting to store memory data in Redis for key: %s", memory_key)
                 await self._client.json().set(memory_key, "$", memory_data)
-                logger.debug(
+                logger.debug("Successfully stored memory data for key: %s", memory_key)

                 # Verify the data was stored
                 stored_data = await self._client.json().get(memory_key)
-                logger.debug(
+                logger.debug("Verified data storage for key %s: %s", memory_key, bool(stored_data))

             except redis_exceptions.ResponseError as e:
-                logger.error(
+                logger.error("Failed to store memory item: %s", e)
                 raise
             except redis_exceptions.ConnectionError as e:
-                logger.error(
+                logger.error("Redis connection error while storing memory item: %s", e)
                 raise

     async def search(self, query: str, top_k: int = 5, **kwargs) -> list[MemoryItem]:
@@ -113,64 +113,64 @@ class RedisEditor(MemoryEditor):
         Returns:
             list[MemoryItem]: The most relevant MemoryItems for the given query.
         """
-        logger.debug(
+        logger.debug("Search called with query: %s, top_k: %d, kwargs: %s", query, top_k, kwargs)

         user_id = kwargs.get("user_id", "redis")  # TODO: remove this fallback username
-        logger.debug(
+        logger.debug("Using user_id: %s", user_id)

         # Perform vector search using Redis search
         logger.debug("Using embedder for vector search")
         try:
-            logger.debug(
+            logger.debug("Generating embedding for query: '%s'", query)
             query_vector = await self._embedder.aembed_query(query)
-            logger.debug(
+            logger.debug("Generated embedding vector of length: %d", len(query_vector))
         except Exception as e:
-            logger.error(
+            logger.error("Failed to generate embedding: %s", e)
             raise

         # Create vector search query
         search_query = (
             Query(f"(@user_id:{user_id})=>[KNN {top_k} @embedding $vec AS score]").sort_by("score").return_fields(
                 "conversation", "user_id", "tags", "metadata", "memory", "score").dialect(2))
-        logger.debug(
-        logger.debug(
+        logger.debug("Created search query: %s", search_query)
+        logger.debug("Query string: %s", search_query.query_string())

         # Convert query vector to bytes
         try:
             logger.debug("Converting query vector to bytes")
             query_vector_bytes = np.array(query_vector, dtype=np.float32).tobytes()
-            logger.debug(
+            logger.debug("Converted vector to bytes of length: %d", len(query_vector_bytes))
         except Exception as e:
-            logger.error(
+            logger.error("Failed to convert vector to bytes: %s", e)
             raise

         try:
             # Execute search with vector parameters
             logger.debug("Executing Redis search with vector parameters")
-            logger.debug(
+            logger.debug("Search query parameters: vec length=%d", len(query_vector_bytes))

             # Log the actual query being executed
-            logger.debug(
+            logger.debug("Full search query: %s", search_query.query_string())

             # Check if there are any documents in the index
             try:
                 total_docs = await self._client.ft(INDEX_NAME).info()
-                logger.debug(
+                logger.debug("Total documents in index: %d", total_docs.get('num_docs', 0))
             except Exception as e:
-                logger.
+                logger.exception("Failed to get index info: %s", e)

             # Execute the search
             results = await self._client.ft(INDEX_NAME).search(search_query, query_params={"vec": query_vector_bytes})

             # Log detailed results information
-            logger.debug(
-            logger.debug(
+            logger.debug("Search returned %d results", len(results.docs))
+            logger.debug("Total results found: %d", results.total)

             # Convert results to MemoryItems
             memories = []
             for i, doc in enumerate(results.docs):
                 try:
-                    logger.debug(
+                    logger.debug("Processing result %d/%d", i + 1, len(results.docs))
                     # Get the document data from the correct attribute
                     memory_data = {
                         "conversation": getattr(doc, 'conversation', []),
@@ -179,25 +179,25 @@ class RedisEditor(MemoryEditor):
                         "metadata": getattr(doc, 'metadata', {}),
                         "memory": getattr(doc, 'memory', "")
                     }
-                    logger.debug(
-                    logger.debug(
+                    logger.debug("Similarity score: %d", getattr(doc, 'score', 0))
+                    logger.debug("Extracted data for result %d: %s", i + 1, memory_data)
                     memory_item = self._create_memory_item(memory_data, user_id)
                     memories.append(memory_item)
-                    logger.debug(
+                    logger.debug("Successfully created MemoryItem for result %d", i + 1)
                 except Exception as e:
-                    logger.error(
+                    logger.error("Failed to process result %d: %s", i + 1, e)
                     raise

-            logger.debug(
+            logger.debug("Successfully processed all %d results", len(memories))
             return memories
         except redis_exceptions.ResponseError as e:
-            logger.error(
+            logger.error("Search failed with ResponseError: %s", e)
             raise
         except redis_exceptions.ConnectionError as e:
-            logger.error(
+            logger.error("Search failed with ConnectionError: %s", e)
             raise
         except Exception as e:
-            logger.error(
+            logger.error("Unexpected error during search: %s", e)
             raise

     def _create_memory_item(self, memory_data: dict, user_id: str) -> MemoryItem:
@@ -226,8 +226,8 @@ class RedisEditor(MemoryEditor):
             if keys:
                 await self._client.delete(*keys)
         except redis_exceptions.ResponseError as e:
-            logger.error(
+            logger.error("Failed to remove items: %s", e)
             raise
         except redis_exceptions.ConnectionError as e:
-            logger.error(
+            logger.error("Redis connection error while removing items: %s", e)
             raise
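The changes in redis_editor.py are uniform: every modified logging call now passes a %-style format string with its arguments, so interpolation is deferred to the logging framework and skipped when the level is disabled, and the call at old line 160 becomes logger.exception, which also records the traceback. A minimal standalone sketch of both patterns follows; the function name, messages, and data are illustrative and not taken from the toolkit.

import logging

logger = logging.getLogger(__name__)

def add_items_demo(items: list[str]) -> None:
    # Lazy %-style logging: the format string and its arguments are passed
    # separately, so interpolation only happens if DEBUG is actually enabled.
    logger.debug("Attempting to add %d items", len(items))
    try:
        total = sum(len(item) for item in items)
        logger.debug("Prepared payload of total length: %d", total)
    except Exception as e:
        # logger.exception logs at ERROR level and appends the traceback.
        logger.exception("Failed to prepare items: %s", e)
        raise

if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    add_items_demo(["alpha", "beta"])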
nat/plugins/redis/schema.py
CHANGED
@@ -39,7 +39,7 @@ def create_schema(embedding_dim: int = DEFAULT_DIM):
     Returns:
         tuple: Schema definition for Redis search
     """
-    logger.info(
+    logger.info("Creating schema with embedding dimension: %d", embedding_dim)

     embedding_field = VectorField("$.embedding",
                                   "HNSW",
@@ -53,7 +53,7 @@ def create_schema(embedding_dim: int = DEFAULT_DIM):
                                       "EF_RUNTIME": 10
                                   },
                                   as_name="embedding")
-    logger.info(
+    logger.info("Created embedding field with dimension %d", embedding_dim)

     schema = (
         TextField("$.user_id", as_name="user_id"),
@@ -65,7 +65,7 @@ def create_schema(embedding_dim: int = DEFAULT_DIM):
     # Log the schema details
     logger.info("Schema fields:")
     for field in schema:
-        logger.info(
+        logger.info(" - %s: %s", field.name, type(field).__name__)

     return schema

@@ -81,55 +81,55 @@ async def ensure_index_exists(client: redis.Redis, key_prefix: str, embedding_di
     """
     try:
         # Check if index exists
-        logger.info(
+        logger.info("Checking if index '%s' exists...", INDEX_NAME)
         info = await client.ft(INDEX_NAME).info()
-        logger.info(
+        logger.info("Redis search index '%s' exists.", INDEX_NAME)

         # Verify the schema
         schema = info.get('attributes', [])

         return
-    except redis_exceptions.ResponseError as
-        error_msg = str(
+    except redis_exceptions.ResponseError as ex:
+        error_msg = str(ex)
         if "no such index" not in error_msg.lower() and "Index needs recreation" not in error_msg:
-            logger.error(
+            logger.error("Unexpected Redis error: %s", error_msg)
             raise

         # Index doesn't exist or needs recreation
-        logger.info(
+        logger.info("Creating Redis search index '%s' with prefix '%s'", INDEX_NAME, key_prefix)

         # Drop any existing index
         try:
-            logger.info(
+            logger.info("Attempting to drop existing index '%s' if it exists", INDEX_NAME)
             await client.ft(INDEX_NAME).dropindex()
-            logger.info(
+            logger.info("Successfully dropped existing index '%s'", INDEX_NAME)
         except redis_exceptions.ResponseError as e:
             if "no such index" not in str(e).lower():
-                logger.warning(
+                logger.warning("Error while dropping index: %s", str(e))

         # Create new schema and index
         schema = create_schema(embedding_dim or DEFAULT_DIM)
-        logger.info(
+        logger.info("Created schema with embedding dimension: %d", embedding_dim or DEFAULT_DIM)

         try:
             # Create the index
-            logger.info(
+            logger.info("Creating new index '%s' with schema", INDEX_NAME)
             await client.ft(INDEX_NAME).create_index(schema,
                                                      definition=IndexDefinition(prefix=[f"{key_prefix}:"],
                                                                                 index_type=IndexType.JSON))

             # Verify index was created
             info = await client.ft(INDEX_NAME).info()
-            logger.info(
-            logger.debug(
+            logger.info("Successfully created Redis search index '%s'", INDEX_NAME)
+            logger.debug("Redis search index info: %s", info)

             # Verify the schema
             schema = info.get('attributes', [])
-            logger.debug(
+            logger.debug("New index schema: %s", schema)

         except redis_exceptions.ResponseError as e:
-            logger.error(
+            logger.error("Failed to create index: %s", str(e))
             raise
         except redis_exceptions.ConnectionError as e:
-            logger.error(
+            logger.error("Redis connection error while creating index: %s", str(e))
             raise
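For orientation, the two changed modules implement one pattern between them: schema.py builds an HNSW vector field over JSON documents and binds the index to a key prefix, and redis_editor.py stores JSON documents and runs KNN queries against that index. Below is a minimal, self-contained sketch of the same flow with a synchronous redis-py (~=4.3) client. The index name "memory_idx", key prefix "demo:", 4-dimensional vectors, and localhost connection are illustrative assumptions, not values from the package, and the sketch requires a Redis server with the RediSearch and RedisJSON modules (for example Redis Stack).

import numpy as np
import redis
from redis.commands.search.field import TextField, VectorField
from redis.commands.search.indexDefinition import IndexDefinition, IndexType
from redis.commands.search.query import Query

r = redis.Redis(host="localhost", port=6379)

DIM = 4  # illustrative; the real schema sizes this from DEFAULT_DIM or the embedder

# Vector field over the JSON path "$.embedding", as in create_schema().
schema = (
    TextField("$.user_id", as_name="user_id"),
    VectorField("$.embedding",
                "HNSW",
                {"TYPE": "FLOAT32", "DIM": DIM, "DISTANCE_METRIC": "COSINE"},
                as_name="embedding"),
)

# Index JSON documents whose keys start with the "demo:" prefix.
r.ft("memory_idx").create_index(
    schema,
    definition=IndexDefinition(prefix=["demo:"], index_type=IndexType.JSON))

# Store one memory as JSON, the same general shape redis_editor.py writes.
r.json().set("demo:memory:1", "$", {
    "user_id": "alice",
    "memory": "example text",
    "embedding": [0.1, 0.2, 0.3, 0.4],
})

# KNN query scoped to one user, mirroring the
# "(@user_id:...)=>[KNN k @embedding $vec AS score]" query in search().
query = (Query("(@user_id:alice)=>[KNN 5 @embedding $vec AS score]")
         .sort_by("score")
         .return_fields("user_id", "memory", "score")
         .dialect(2))
vec_bytes = np.array([0.1, 0.2, 0.3, 0.4], dtype=np.float32).tobytes()
results = r.ft("memory_idx").search(query, query_params={"vec": vec_bytes})
for doc in results.docs:
    print(doc.user_id, doc.memory, doc.score)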
{nvidia_nat_redis-1.3.0a20250828.dist-info → nvidia_nat_redis-1.3.0a20250829.dist-info}/METADATA
RENAMED
@@ -1,12 +1,12 @@
 Metadata-Version: 2.4
 Name: nvidia-nat-redis
-Version: 1.3.0a20250828
+Version: 1.3.0a20250829
 Summary: Subpackage for Redis integration in NeMo Agent toolkit
 Keywords: ai,agents,memory
 Classifier: Programming Language :: Python
 Requires-Python: <3.13,>=3.11
 Description-Content-Type: text/markdown
-Requires-Dist: nvidia-nat==v1.3.0a20250828
+Requires-Dist: nvidia-nat==v1.3.0a20250829
 Requires-Dist: redis~=4.3.4

 <!--
{nvidia_nat_redis-1.3.0a20250828.dist-info → nvidia_nat_redis-1.3.0a20250829.dist-info}/RECORD
RENAMED
@@ -2,12 +2,12 @@ nat/meta/pypi.md,sha256=TpeNbVZJxzvEf0Gh3BGvLHPYsKnXjgM_KQVCayBPXso,1090
 nat/plugins/redis/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 nat/plugins/redis/memory.py,sha256=wqj_UYqnllc4kTCtFDz3eu-OZnyPWXxNZ3e6G992OGQ,2546
 nat/plugins/redis/object_store.py,sha256=f_GtCTZ3KHfxE4f0lAzKaoAoInAZZki4Dfo7hrKCAHA,1681
-nat/plugins/redis/redis_editor.py,sha256=
+nat/plugins/redis/redis_editor.py,sha256=OjdVaP1OBBR6CoIZ2PIqCrRfsD7l8U6Pn59oq_paGw4,10084
 nat/plugins/redis/redis_object_store.py,sha256=DX46GEQl4H1Ivf2wrRaSimNcq6EfvRsm-xhyiECudoQ,4302
 nat/plugins/redis/register.py,sha256=dJBKi-7W72ipkmZTOIo1E3ETffmJIlYhQTOlrkiFH3A,834
-nat/plugins/redis/schema.py,sha256=
-nvidia_nat_redis-1.3.
-nvidia_nat_redis-1.3.
-nvidia_nat_redis-1.3.
-nvidia_nat_redis-1.3.
-nvidia_nat_redis-1.3.
+nat/plugins/redis/schema.py,sha256=4Or-rrkdj6h4okGsnEm6cKMNGLCeSSFwqecQNmu6T9g,5517
+nvidia_nat_redis-1.3.0a20250829.dist-info/METADATA,sha256=sAqE8PbKjx7j4r21CPWNcWgylqY96BMAU96yH5ehaDA,1435
+nvidia_nat_redis-1.3.0a20250829.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+nvidia_nat_redis-1.3.0a20250829.dist-info/entry_points.txt,sha256=nyS8t8L9CbRFIMlE70RQBtJXrflBP4Ltl5zAkIl44So,56
+nvidia_nat_redis-1.3.0a20250829.dist-info/top_level.txt,sha256=8-CJ2cP6-f0ZReXe5Hzqp-5pvzzHz-5Ds5H2bGqh1-U,4
+nvidia_nat_redis-1.3.0a20250829.dist-info/RECORD,,
{nvidia_nat_redis-1.3.0a20250828.dist-info → nvidia_nat_redis-1.3.0a20250829.dist-info}/WHEEL
RENAMED
File without changes

{nvidia_nat_redis-1.3.0a20250828.dist-info → nvidia_nat_redis-1.3.0a20250829.dist-info}/entry_points.txt
RENAMED
File without changes

{nvidia_nat_redis-1.3.0a20250828.dist-info → nvidia_nat_redis-1.3.0a20250829.dist-info}/top_level.txt
RENAMED
File without changes