agentrun_mem0ai-0.0.11-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
Files changed (150)
  1. agentrun_mem0/__init__.py +6 -0
  2. agentrun_mem0/client/__init__.py +0 -0
  3. agentrun_mem0/client/main.py +1747 -0
  4. agentrun_mem0/client/project.py +931 -0
  5. agentrun_mem0/client/utils.py +115 -0
  6. agentrun_mem0/configs/__init__.py +0 -0
  7. agentrun_mem0/configs/base.py +90 -0
  8. agentrun_mem0/configs/embeddings/__init__.py +0 -0
  9. agentrun_mem0/configs/embeddings/base.py +110 -0
  10. agentrun_mem0/configs/enums.py +7 -0
  11. agentrun_mem0/configs/llms/__init__.py +0 -0
  12. agentrun_mem0/configs/llms/anthropic.py +56 -0
  13. agentrun_mem0/configs/llms/aws_bedrock.py +192 -0
  14. agentrun_mem0/configs/llms/azure.py +57 -0
  15. agentrun_mem0/configs/llms/base.py +62 -0
  16. agentrun_mem0/configs/llms/deepseek.py +56 -0
  17. agentrun_mem0/configs/llms/lmstudio.py +59 -0
  18. agentrun_mem0/configs/llms/ollama.py +56 -0
  19. agentrun_mem0/configs/llms/openai.py +79 -0
  20. agentrun_mem0/configs/llms/vllm.py +56 -0
  21. agentrun_mem0/configs/prompts.py +459 -0
  22. agentrun_mem0/configs/rerankers/__init__.py +0 -0
  23. agentrun_mem0/configs/rerankers/base.py +17 -0
  24. agentrun_mem0/configs/rerankers/cohere.py +15 -0
  25. agentrun_mem0/configs/rerankers/config.py +12 -0
  26. agentrun_mem0/configs/rerankers/huggingface.py +17 -0
  27. agentrun_mem0/configs/rerankers/llm.py +48 -0
  28. agentrun_mem0/configs/rerankers/sentence_transformer.py +16 -0
  29. agentrun_mem0/configs/rerankers/zero_entropy.py +28 -0
  30. agentrun_mem0/configs/vector_stores/__init__.py +0 -0
  31. agentrun_mem0/configs/vector_stores/alibabacloud_mysql.py +64 -0
  32. agentrun_mem0/configs/vector_stores/aliyun_tablestore.py +32 -0
  33. agentrun_mem0/configs/vector_stores/azure_ai_search.py +57 -0
  34. agentrun_mem0/configs/vector_stores/azure_mysql.py +84 -0
  35. agentrun_mem0/configs/vector_stores/baidu.py +27 -0
  36. agentrun_mem0/configs/vector_stores/chroma.py +58 -0
  37. agentrun_mem0/configs/vector_stores/databricks.py +61 -0
  38. agentrun_mem0/configs/vector_stores/elasticsearch.py +65 -0
  39. agentrun_mem0/configs/vector_stores/faiss.py +37 -0
  40. agentrun_mem0/configs/vector_stores/langchain.py +30 -0
  41. agentrun_mem0/configs/vector_stores/milvus.py +42 -0
  42. agentrun_mem0/configs/vector_stores/mongodb.py +25 -0
  43. agentrun_mem0/configs/vector_stores/neptune.py +27 -0
  44. agentrun_mem0/configs/vector_stores/opensearch.py +41 -0
  45. agentrun_mem0/configs/vector_stores/pgvector.py +52 -0
  46. agentrun_mem0/configs/vector_stores/pinecone.py +55 -0
  47. agentrun_mem0/configs/vector_stores/qdrant.py +47 -0
  48. agentrun_mem0/configs/vector_stores/redis.py +24 -0
  49. agentrun_mem0/configs/vector_stores/s3_vectors.py +28 -0
  50. agentrun_mem0/configs/vector_stores/supabase.py +44 -0
  51. agentrun_mem0/configs/vector_stores/upstash_vector.py +34 -0
  52. agentrun_mem0/configs/vector_stores/valkey.py +15 -0
  53. agentrun_mem0/configs/vector_stores/vertex_ai_vector_search.py +28 -0
  54. agentrun_mem0/configs/vector_stores/weaviate.py +41 -0
  55. agentrun_mem0/embeddings/__init__.py +0 -0
  56. agentrun_mem0/embeddings/aws_bedrock.py +100 -0
  57. agentrun_mem0/embeddings/azure_openai.py +55 -0
  58. agentrun_mem0/embeddings/base.py +31 -0
  59. agentrun_mem0/embeddings/configs.py +30 -0
  60. agentrun_mem0/embeddings/gemini.py +39 -0
  61. agentrun_mem0/embeddings/huggingface.py +44 -0
  62. agentrun_mem0/embeddings/langchain.py +35 -0
  63. agentrun_mem0/embeddings/lmstudio.py +29 -0
  64. agentrun_mem0/embeddings/mock.py +11 -0
  65. agentrun_mem0/embeddings/ollama.py +53 -0
  66. agentrun_mem0/embeddings/openai.py +49 -0
  67. agentrun_mem0/embeddings/together.py +31 -0
  68. agentrun_mem0/embeddings/vertexai.py +64 -0
  69. agentrun_mem0/exceptions.py +503 -0
  70. agentrun_mem0/graphs/__init__.py +0 -0
  71. agentrun_mem0/graphs/configs.py +105 -0
  72. agentrun_mem0/graphs/neptune/__init__.py +0 -0
  73. agentrun_mem0/graphs/neptune/base.py +497 -0
  74. agentrun_mem0/graphs/neptune/neptunedb.py +511 -0
  75. agentrun_mem0/graphs/neptune/neptunegraph.py +474 -0
  76. agentrun_mem0/graphs/tools.py +371 -0
  77. agentrun_mem0/graphs/utils.py +97 -0
  78. agentrun_mem0/llms/__init__.py +0 -0
  79. agentrun_mem0/llms/anthropic.py +87 -0
  80. agentrun_mem0/llms/aws_bedrock.py +665 -0
  81. agentrun_mem0/llms/azure_openai.py +141 -0
  82. agentrun_mem0/llms/azure_openai_structured.py +91 -0
  83. agentrun_mem0/llms/base.py +131 -0
  84. agentrun_mem0/llms/configs.py +34 -0
  85. agentrun_mem0/llms/deepseek.py +107 -0
  86. agentrun_mem0/llms/gemini.py +201 -0
  87. agentrun_mem0/llms/groq.py +88 -0
  88. agentrun_mem0/llms/langchain.py +94 -0
  89. agentrun_mem0/llms/litellm.py +87 -0
  90. agentrun_mem0/llms/lmstudio.py +114 -0
  91. agentrun_mem0/llms/ollama.py +117 -0
  92. agentrun_mem0/llms/openai.py +147 -0
  93. agentrun_mem0/llms/openai_structured.py +52 -0
  94. agentrun_mem0/llms/sarvam.py +89 -0
  95. agentrun_mem0/llms/together.py +88 -0
  96. agentrun_mem0/llms/vllm.py +107 -0
  97. agentrun_mem0/llms/xai.py +52 -0
  98. agentrun_mem0/memory/__init__.py +0 -0
  99. agentrun_mem0/memory/base.py +63 -0
  100. agentrun_mem0/memory/graph_memory.py +698 -0
  101. agentrun_mem0/memory/kuzu_memory.py +713 -0
  102. agentrun_mem0/memory/main.py +2229 -0
  103. agentrun_mem0/memory/memgraph_memory.py +689 -0
  104. agentrun_mem0/memory/setup.py +56 -0
  105. agentrun_mem0/memory/storage.py +218 -0
  106. agentrun_mem0/memory/telemetry.py +90 -0
  107. agentrun_mem0/memory/utils.py +208 -0
  108. agentrun_mem0/proxy/__init__.py +0 -0
  109. agentrun_mem0/proxy/main.py +189 -0
  110. agentrun_mem0/reranker/__init__.py +9 -0
  111. agentrun_mem0/reranker/base.py +20 -0
  112. agentrun_mem0/reranker/cohere_reranker.py +85 -0
  113. agentrun_mem0/reranker/huggingface_reranker.py +147 -0
  114. agentrun_mem0/reranker/llm_reranker.py +142 -0
  115. agentrun_mem0/reranker/sentence_transformer_reranker.py +107 -0
  116. agentrun_mem0/reranker/zero_entropy_reranker.py +96 -0
  117. agentrun_mem0/utils/factory.py +283 -0
  118. agentrun_mem0/utils/gcp_auth.py +167 -0
  119. agentrun_mem0/vector_stores/__init__.py +0 -0
  120. agentrun_mem0/vector_stores/alibabacloud_mysql.py +547 -0
  121. agentrun_mem0/vector_stores/aliyun_tablestore.py +252 -0
  122. agentrun_mem0/vector_stores/azure_ai_search.py +396 -0
  123. agentrun_mem0/vector_stores/azure_mysql.py +463 -0
  124. agentrun_mem0/vector_stores/baidu.py +368 -0
  125. agentrun_mem0/vector_stores/base.py +58 -0
  126. agentrun_mem0/vector_stores/chroma.py +332 -0
  127. agentrun_mem0/vector_stores/configs.py +67 -0
  128. agentrun_mem0/vector_stores/databricks.py +761 -0
  129. agentrun_mem0/vector_stores/elasticsearch.py +237 -0
  130. agentrun_mem0/vector_stores/faiss.py +479 -0
  131. agentrun_mem0/vector_stores/langchain.py +180 -0
  132. agentrun_mem0/vector_stores/milvus.py +250 -0
  133. agentrun_mem0/vector_stores/mongodb.py +310 -0
  134. agentrun_mem0/vector_stores/neptune_analytics.py +467 -0
  135. agentrun_mem0/vector_stores/opensearch.py +292 -0
  136. agentrun_mem0/vector_stores/pgvector.py +404 -0
  137. agentrun_mem0/vector_stores/pinecone.py +382 -0
  138. agentrun_mem0/vector_stores/qdrant.py +270 -0
  139. agentrun_mem0/vector_stores/redis.py +295 -0
  140. agentrun_mem0/vector_stores/s3_vectors.py +176 -0
  141. agentrun_mem0/vector_stores/supabase.py +237 -0
  142. agentrun_mem0/vector_stores/upstash_vector.py +293 -0
  143. agentrun_mem0/vector_stores/valkey.py +824 -0
  144. agentrun_mem0/vector_stores/vertex_ai_vector_search.py +635 -0
  145. agentrun_mem0/vector_stores/weaviate.py +343 -0
  146. agentrun_mem0ai-0.0.11.data/data/README.md +205 -0
  147. agentrun_mem0ai-0.0.11.dist-info/METADATA +277 -0
  148. agentrun_mem0ai-0.0.11.dist-info/RECORD +150 -0
  149. agentrun_mem0ai-0.0.11.dist-info/WHEEL +4 -0
  150. agentrun_mem0ai-0.0.11.dist-info/licenses/LICENSE +201 -0
@@ -0,0 +1,56 @@
+ import json
+ import os
+ import uuid
+
+ # Set up the directory path
+ VECTOR_ID = str(uuid.uuid4())
+ home_dir = os.path.expanduser("~")
+ mem0_dir = os.environ.get("MEM0_DIR") or os.path.join(home_dir, ".mem0")
+ os.makedirs(mem0_dir, exist_ok=True)
+
+
+ def setup_config():
+     config_path = os.path.join(mem0_dir, "config.json")
+     if not os.path.exists(config_path):
+         user_id = str(uuid.uuid4())
+         config = {"user_id": user_id}
+         with open(config_path, "w") as config_file:
+             json.dump(config, config_file, indent=4)
+
+
+ def get_user_id():
+     config_path = os.path.join(mem0_dir, "config.json")
+     if not os.path.exists(config_path):
+         return "anonymous_user"
+
+     try:
+         with open(config_path, "r") as config_file:
+             config = json.load(config_file)
+             user_id = config.get("user_id")
+             return user_id
+     except Exception:
+         return "anonymous_user"
+
+
+ def get_or_create_user_id(vector_store):
+     """Store user_id in vector store and return it."""
+     user_id = get_user_id()
+
+     # Try to get existing user_id from vector store
+     try:
+         existing = vector_store.get(vector_id=user_id)
+         if existing and hasattr(existing, "payload") and existing.payload and "user_id" in existing.payload:
+             return existing.payload["user_id"]
+     except Exception:
+         pass
+
+     # If we get here, we need to insert the user_id
+     try:
+         dims = getattr(vector_store, "embedding_model_dims", 1536)
+         vector_store.insert(
+             vectors=[[0.1] * dims], payloads=[{"user_id": user_id, "type": "user_identity"}], ids=[user_id]
+         )
+     except Exception:
+         pass
+
+     return user_id
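The 56-line hunk above matches the +56 count of agentrun_mem0/memory/setup.py in the file list: it keeps an anonymous user id in ~/.mem0/config.json and can mirror it into a vector store. A minimal usage sketch, assuming only the functions shown above (not part of the package; every vector-store failure is silently swallowed, so passing None is tolerated):

# Hedged sketch exercising the setup helpers shown above; not part of the package.
from agentrun_mem0.memory.setup import setup_config, get_user_id, get_or_create_user_id

setup_config()                      # writes ~/.mem0/config.json with a fresh uuid4 if it does not exist
print(get_user_id())                # the stored id, or "anonymous_user" when the file is missing or unreadable
print(get_or_create_user_id(None))  # both try blocks swallow errors, so a missing vector store still returns an id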
@@ -0,0 +1,218 @@
+ import logging
+ import sqlite3
+ import threading
+ import uuid
+ from typing import Any, Dict, List, Optional
+
+ logger = logging.getLogger(__name__)
+
+
+ class SQLiteManager:
+     def __init__(self, db_path: str = ":memory:"):
+         self.db_path = db_path
+         self.connection = sqlite3.connect(self.db_path, check_same_thread=False)
+         self._lock = threading.Lock()
+         self._migrate_history_table()
+         self._create_history_table()
+
+     def _migrate_history_table(self) -> None:
+         """
+         If a pre-existing history table had the old group-chat columns,
+         rename it, create the new schema, copy the intersecting data, then
+         drop the old table.
+         """
+         with self._lock:
+             try:
+                 # Start a transaction
+                 self.connection.execute("BEGIN")
+                 cur = self.connection.cursor()
+
+                 cur.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='history'")
+                 if cur.fetchone() is None:
+                     self.connection.execute("COMMIT")
+                     return  # nothing to migrate
+
+                 cur.execute("PRAGMA table_info(history)")
+                 old_cols = {row[1] for row in cur.fetchall()}
+
+                 expected_cols = {
+                     "id",
+                     "memory_id",
+                     "old_memory",
+                     "new_memory",
+                     "event",
+                     "created_at",
+                     "updated_at",
+                     "is_deleted",
+                     "actor_id",
+                     "role",
+                 }
+
+                 if old_cols == expected_cols:
+                     self.connection.execute("COMMIT")
+                     return
+
+                 logger.info("Migrating history table to new schema (no convo columns).")
+
+                 # Clean up any existing history_old table from previous failed migration
+                 cur.execute("DROP TABLE IF EXISTS history_old")
+
+                 # Rename the current history table
+                 cur.execute("ALTER TABLE history RENAME TO history_old")
+
+                 # Create the new history table with updated schema
+                 cur.execute(
+                     """
+                     CREATE TABLE history (
+                         id TEXT PRIMARY KEY,
+                         memory_id TEXT,
+                         old_memory TEXT,
+                         new_memory TEXT,
+                         event TEXT,
+                         created_at DATETIME,
+                         updated_at DATETIME,
+                         is_deleted INTEGER,
+                         actor_id TEXT,
+                         role TEXT
+                     )
+                     """
+                 )
+
+                 # Copy data from old table to new table
+                 intersecting = list(expected_cols & old_cols)
+                 if intersecting:
+                     cols_csv = ", ".join(intersecting)
+                     cur.execute(f"INSERT INTO history ({cols_csv}) SELECT {cols_csv} FROM history_old")
+
+                 # Drop the old table
+                 cur.execute("DROP TABLE history_old")
+
+                 # Commit the transaction
+                 self.connection.execute("COMMIT")
+                 logger.info("History table migration completed successfully.")
+
+             except Exception as e:
+                 # Rollback the transaction on any error
+                 self.connection.execute("ROLLBACK")
+                 logger.error(f"History table migration failed: {e}")
+                 raise
+
+     def _create_history_table(self) -> None:
+         with self._lock:
+             try:
+                 self.connection.execute("BEGIN")
+                 self.connection.execute(
+                     """
+                     CREATE TABLE IF NOT EXISTS history (
+                         id TEXT PRIMARY KEY,
+                         memory_id TEXT,
+                         old_memory TEXT,
+                         new_memory TEXT,
+                         event TEXT,
+                         created_at DATETIME,
+                         updated_at DATETIME,
+                         is_deleted INTEGER,
+                         actor_id TEXT,
+                         role TEXT
+                     )
+                     """
+                 )
+                 self.connection.execute("COMMIT")
+             except Exception as e:
+                 self.connection.execute("ROLLBACK")
+                 logger.error(f"Failed to create history table: {e}")
+                 raise
+
+     def add_history(
+         self,
+         memory_id: str,
+         old_memory: Optional[str],
+         new_memory: Optional[str],
+         event: str,
+         *,
+         created_at: Optional[str] = None,
+         updated_at: Optional[str] = None,
+         is_deleted: int = 0,
+         actor_id: Optional[str] = None,
+         role: Optional[str] = None,
+     ) -> None:
+         with self._lock:
+             try:
+                 self.connection.execute("BEGIN")
+                 self.connection.execute(
+                     """
+                     INSERT INTO history (
+                         id, memory_id, old_memory, new_memory, event,
+                         created_at, updated_at, is_deleted, actor_id, role
+                     )
+                     VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                     """,
+                     (
+                         str(uuid.uuid4()),
+                         memory_id,
+                         old_memory,
+                         new_memory,
+                         event,
+                         created_at,
+                         updated_at,
+                         is_deleted,
+                         actor_id,
+                         role,
+                     ),
+                 )
+                 self.connection.execute("COMMIT")
+             except Exception as e:
+                 self.connection.execute("ROLLBACK")
+                 logger.error(f"Failed to add history record: {e}")
+                 raise
+
+     def get_history(self, memory_id: str) -> List[Dict[str, Any]]:
+         with self._lock:
+             cur = self.connection.execute(
+                 """
+                 SELECT id, memory_id, old_memory, new_memory, event,
+                        created_at, updated_at, is_deleted, actor_id, role
+                 FROM history
+                 WHERE memory_id = ?
+                 ORDER BY created_at ASC, DATETIME(updated_at) ASC
+                 """,
+                 (memory_id,),
+             )
+             rows = cur.fetchall()
+
+             return [
+                 {
+                     "id": r[0],
+                     "memory_id": r[1],
+                     "old_memory": r[2],
+                     "new_memory": r[3],
+                     "event": r[4],
+                     "created_at": r[5],
+                     "updated_at": r[6],
+                     "is_deleted": bool(r[7]),
+                     "actor_id": r[8],
+                     "role": r[9],
+                 }
+                 for r in rows
+             ]
+
+     def reset(self) -> None:
+         """Drop and recreate the history table."""
+         with self._lock:
+             try:
+                 self.connection.execute("BEGIN")
+                 self.connection.execute("DROP TABLE IF EXISTS history")
+                 self.connection.execute("COMMIT")
+                 self._create_history_table()
+             except Exception as e:
+                 self.connection.execute("ROLLBACK")
+                 logger.error(f"Failed to reset history table: {e}")
+                 raise
+
+     def close(self) -> None:
+         if self.connection:
+             self.connection.close()
+             self.connection = None
+
+     def __del__(self):
+         self.close()
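This 218-line hunk lines up with agentrun_mem0/memory/storage.py: SQLiteManager serializes all access to one SQLite connection behind a lock and migrates any pre-existing history table to the new schema at construction time. A hedged sketch of standalone use, with illustrative ids and timestamps only:

# Hedged sketch for the SQLiteManager shown above; all values are illustrative.
from agentrun_mem0.memory.storage import SQLiteManager

db = SQLiteManager()                      # defaults to an in-memory database (":memory:")
db.add_history(
    memory_id="mem-1",
    old_memory=None,
    new_memory="User prefers green tea",
    event="ADD",
    created_at="2024-01-01T00:00:00",
    actor_id="user-1",
    role="user",
)
for record in db.get_history("mem-1"):    # rows come back as dicts, is_deleted coerced to bool
    print(record["event"], record["new_memory"], record["is_deleted"])
db.reset()                                # drops and recreates the history table
db.close()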
@@ -0,0 +1,90 @@
+ import logging
+ import os
+ import platform
+ import sys
+
+ from posthog import Posthog
+
+ import agentrun_mem0
+ from agentrun_mem0.memory.setup import get_or_create_user_id
+
+ MEM0_TELEMETRY = os.environ.get("MEM0_TELEMETRY", "False")
+ PROJECT_API_KEY = "phc_hgJkUVJFYtmaJqrvf6CYN67TIQ8yhXAkWzUn9AMU4yX"
+ HOST = "https://us.i.posthog.com"
+
+ if isinstance(MEM0_TELEMETRY, str):
+     MEM0_TELEMETRY = MEM0_TELEMETRY.lower() in ("true", "1", "yes")
+
+ if not isinstance(MEM0_TELEMETRY, bool):
+     raise ValueError("MEM0_TELEMETRY must be a boolean value.")
+
+ logging.getLogger("posthog").setLevel(logging.CRITICAL + 1)
+ logging.getLogger("urllib3").setLevel(logging.CRITICAL + 1)
+
+
+ class AnonymousTelemetry:
+     def __init__(self, vector_store=None):
+         self.posthog = Posthog(project_api_key=PROJECT_API_KEY, host=HOST)
+
+         self.user_id = get_or_create_user_id(vector_store)
+
+         if not MEM0_TELEMETRY:
+             self.posthog.disabled = True
+
+     def capture_event(self, event_name, properties=None, user_email=None):
+         if properties is None:
+             properties = {}
+         properties = {
+             "client_source": "python",
+             "client_version": agentrun_mem0.__version__,
+             "python_version": sys.version,
+             "os": sys.platform,
+             "os_version": platform.version(),
+             "os_release": platform.release(),
+             "processor": platform.processor(),
+             "machine": platform.machine(),
+             **properties,
+         }
+         distinct_id = self.user_id if user_email is None else user_email
+         self.posthog.capture(distinct_id=distinct_id, event=event_name, properties=properties)
+
+     def close(self):
+         self.posthog.shutdown()
+
+
+ client_telemetry = AnonymousTelemetry()
+
+
+ def capture_event(event_name, memory_instance, additional_data=None):
+     oss_telemetry = AnonymousTelemetry(
+         vector_store=memory_instance._telemetry_vector_store
+         if hasattr(memory_instance, "_telemetry_vector_store")
+         else None,
+     )
+
+     event_data = {
+         "collection": memory_instance.collection_name,
+         "vector_size": memory_instance.embedding_model.config.embedding_dims,
+         "history_store": "sqlite",
+         "graph_store": f"{memory_instance.graph.__class__.__module__}.{memory_instance.graph.__class__.__name__}"
+         if memory_instance.config.graph_store.config
+         else None,
+         "vector_store": f"{memory_instance.vector_store.__class__.__module__}.{memory_instance.vector_store.__class__.__name__}",
+         "llm": f"{memory_instance.llm.__class__.__module__}.{memory_instance.llm.__class__.__name__}",
+         "embedding_model": f"{memory_instance.embedding_model.__class__.__module__}.{memory_instance.embedding_model.__class__.__name__}",
+         "function": f"{memory_instance.__class__.__module__}.{memory_instance.__class__.__name__}.{memory_instance.api_version}",
+     }
+     if additional_data:
+         event_data.update(additional_data)
+
+     oss_telemetry.capture_event(event_name, event_data)
+
+
+ def capture_client_event(event_name, instance, additional_data=None):
+     event_data = {
+         "function": f"{instance.__class__.__module__}.{instance.__class__.__name__}",
+     }
+     if additional_data:
+         event_data.update(additional_data)
+
+     client_telemetry.capture_event(event_name, event_data, instance.user_email)
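The 90-line hunk above corresponds to agentrun_mem0/memory/telemetry.py. MEM0_TELEMETRY is read once at import time, so opting out has to happen before the module is imported; a hedged sketch of that order of operations, using only names defined above:

# Hedged sketch: the MEM0_TELEMETRY flag is read at import time, so set it first.
import os

os.environ["MEM0_TELEMETRY"] = "false"    # anything other than "true"/"1"/"yes" disables capture

from agentrun_mem0.memory.telemetry import AnonymousTelemetry

telemetry = AnonymousTelemetry()          # the PostHog client is still constructed, but flagged as disabled
telemetry.capture_event("docs_example", {"source": "illustration"})  # suppressed while disabled
telemetry.close()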
@@ -0,0 +1,208 @@
+ import hashlib
+ import re
+
+ from agentrun_mem0.configs.prompts import (
+     FACT_RETRIEVAL_PROMPT,
+     USER_MEMORY_EXTRACTION_PROMPT,
+     AGENT_MEMORY_EXTRACTION_PROMPT,
+ )
+
+
+ def get_fact_retrieval_messages(message, is_agent_memory=False):
+     """Get fact retrieval messages based on the memory type.
+
+     Args:
+         message: The message content to extract facts from
+         is_agent_memory: If True, use agent memory extraction prompt, else use user memory extraction prompt
+
+     Returns:
+         tuple: (system_prompt, user_prompt)
+     """
+     if is_agent_memory:
+         return AGENT_MEMORY_EXTRACTION_PROMPT, f"Input:\n{message}"
+     else:
+         return USER_MEMORY_EXTRACTION_PROMPT, f"Input:\n{message}"
+
+
+ def get_fact_retrieval_messages_legacy(message):
+     """Legacy function for backward compatibility."""
+     return FACT_RETRIEVAL_PROMPT, f"Input:\n{message}"
+
+
+ def parse_messages(messages):
+     response = ""
+     for msg in messages:
+         if msg["role"] == "system":
+             response += f"system: {msg['content']}\n"
+         if msg["role"] == "user":
+             response += f"user: {msg['content']}\n"
+         if msg["role"] == "assistant":
+             response += f"assistant: {msg['content']}\n"
+     return response
+
+
+ def format_entities(entities):
+     if not entities:
+         return ""
+
+     formatted_lines = []
+     for entity in entities:
+         simplified = f"{entity['source']} -- {entity['relationship']} -- {entity['destination']}"
+         formatted_lines.append(simplified)
+
+     return "\n".join(formatted_lines)
+
+
+ def remove_code_blocks(content: str) -> str:
+     """
+     Removes enclosing code block markers ```[language] and ``` from a given string.
+
+     Remarks:
+     - The function uses a regex pattern to match code blocks that may start with ``` followed by an optional language tag (letters or numbers) and end with ```.
+     - If a code block is detected, it returns only the inner content, stripping out the markers.
+     - If no code block markers are found, the original content is returned as-is.
+     """
+     pattern = r"^```[a-zA-Z0-9]*\n([\s\S]*?)\n```$"
+     match = re.match(pattern, content.strip())
+     match_res = match.group(1).strip() if match else content.strip()
+     return re.sub(r"<think>.*?</think>", "", match_res, flags=re.DOTALL).strip()
+
+
+
+ def extract_json(text):
+     """
+     Extracts JSON content from a string, removing enclosing triple backticks and optional 'json' tag if present.
+     If no code block is found, returns the text as-is.
+     """
+     text = text.strip()
+     match = re.search(r"```(?:json)?\s*(.*?)\s*```", text, re.DOTALL)
+     if match:
+         json_str = match.group(1)
+     else:
+         json_str = text  # assume it's raw JSON
+     return json_str
+
+
+ def get_image_description(image_obj, llm, vision_details):
+     """
+     Get the description of the image
+     """
+
+     if isinstance(image_obj, str):
+         messages = [
+             {
+                 "role": "user",
+                 "content": [
+                     {
+                         "type": "text",
+                         "text": "A user is providing an image. Provide a high level description of the image and do not include any additional text.",
+                     },
+                     {"type": "image_url", "image_url": {"url": image_obj, "detail": vision_details}},
+                 ],
+             },
+         ]
+     else:
+         messages = [image_obj]
+
+     response = llm.generate_response(messages=messages)
+     return response
+
+
+ def parse_vision_messages(messages, llm=None, vision_details="auto"):
+     """
+     Parse the vision messages from the messages
+     """
+     returned_messages = []
+     for msg in messages:
+         if msg["role"] == "system":
+             returned_messages.append(msg)
+             continue
+
+         # Handle message content
+         if isinstance(msg["content"], list):
+             # Multiple image URLs in content
+             description = get_image_description(msg, llm, vision_details)
+             returned_messages.append({"role": msg["role"], "content": description})
+         elif isinstance(msg["content"], dict) and msg["content"].get("type") == "image_url":
+             # Single image content
+             image_url = msg["content"]["image_url"]["url"]
+             try:
+                 description = get_image_description(image_url, llm, vision_details)
+                 returned_messages.append({"role": msg["role"], "content": description})
+             except Exception:
+                 raise Exception(f"Error while downloading {image_url}.")
+         else:
+             # Regular text content
+             returned_messages.append(msg)
+
+     return returned_messages
+
+
+ def process_telemetry_filters(filters):
+     """
+     Process the telemetry filters
+     """
+     if filters is None:
+         return {}
+
+     encoded_ids = {}
+     if "user_id" in filters:
+         encoded_ids["user_id"] = hashlib.md5(filters["user_id"].encode()).hexdigest()
+     if "agent_id" in filters:
+         encoded_ids["agent_id"] = hashlib.md5(filters["agent_id"].encode()).hexdigest()
+     if "run_id" in filters:
+         encoded_ids["run_id"] = hashlib.md5(filters["run_id"].encode()).hexdigest()
+
+     return list(filters.keys()), encoded_ids
+
+
+ def sanitize_relationship_for_cypher(relationship) -> str:
+     """Sanitize relationship text for Cypher queries by replacing problematic characters."""
+     char_map = {
+         "...": "_ellipsis_",
+         "…": "_ellipsis_",
+         "。": "_period_",
+         "，": "_comma_",
+         "；": "_semicolon_",
+         "：": "_colon_",
+         "！": "_exclamation_",
+         "？": "_question_",
+         "（": "_lparen_",
+         "）": "_rparen_",
+         "【": "_lbracket_",
+         "】": "_rbracket_",
+         "《": "_langle_",
+         "》": "_rangle_",
+         "'": "_apostrophe_",
+         '"': "_quote_",
+         "\\": "_backslash_",
+         "/": "_slash_",
+         "|": "_pipe_",
+         "&": "_ampersand_",
+         "=": "_equals_",
+         "+": "_plus_",
+         "*": "_asterisk_",
+         "^": "_caret_",
+         "%": "_percent_",
+         "$": "_dollar_",
+         "#": "_hash_",
+         "@": "_at_",
+         "!": "_bang_",
+         "?": "_question_",
+         "(": "_lparen_",
+         ")": "_rparen_",
+         "[": "_lbracket_",
+         "]": "_rbracket_",
+         "{": "_lbrace_",
+         "}": "_rbrace_",
+         "<": "_langle_",
+         ">": "_rangle_",
+     }
+
+     # Apply replacements and clean up
+     sanitized = relationship
+     for old, new in char_map.items():
+         sanitized = sanitized.replace(old, new)
+
+     return re.sub(r"_+", "_", sanitized).strip("_")
+
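Finally, the 208-line hunk above matches agentrun_mem0/memory/utils.py, which is mostly string plumbing around LLM output. A hedged sketch of a few of these helpers in isolation, with illustrative inputs:

# Hedged sketch exercising the helpers shown above; inputs are illustrative.
from agentrun_mem0.memory.utils import (
    extract_json,
    parse_messages,
    remove_code_blocks,
    sanitize_relationship_for_cypher,
)

llm_output = "```json\n{\"facts\": [\"Prefers green tea\"]}\n```"
print(remove_code_blocks(llm_output))   # inner JSON, with any <think>...</think> spans stripped
print(extract_json(llm_output))         # same inner JSON, located with re.search instead of re.match

messages = [
    {"role": "system", "content": "You extract facts."},
    {"role": "user", "content": "I drink green tea every morning."},
]
print(parse_messages(messages))         # flattens to "system: ...\nuser: ...\n"

print(sanitize_relationship_for_cypher("works_at (ACME)!"))  # punctuation becomes underscore tokens, runs of "_" collapsed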