npcpy-1.0.26-py3-none-any.whl → npcpy-1.2.32-py3-none-any.whl

This diff compares publicly released package versions as they appear in their respective public registries; it is provided for informational purposes only.
Files changed (148)
  1. npcpy/__init__.py +0 -7
  2. npcpy/data/audio.py +16 -99
  3. npcpy/data/image.py +43 -42
  4. npcpy/data/load.py +83 -124
  5. npcpy/data/text.py +28 -28
  6. npcpy/data/video.py +8 -32
  7. npcpy/data/web.py +51 -23
  8. npcpy/ft/diff.py +110 -0
  9. npcpy/ft/ge.py +115 -0
  10. npcpy/ft/memory_trainer.py +171 -0
  11. npcpy/ft/model_ensembler.py +357 -0
  12. npcpy/ft/rl.py +360 -0
  13. npcpy/ft/sft.py +248 -0
  14. npcpy/ft/usft.py +128 -0
  15. npcpy/gen/audio_gen.py +24 -0
  16. npcpy/gen/embeddings.py +13 -13
  17. npcpy/gen/image_gen.py +262 -117
  18. npcpy/gen/response.py +615 -415
  19. npcpy/gen/video_gen.py +53 -7
  20. npcpy/llm_funcs.py +1869 -437
  21. npcpy/main.py +1 -1
  22. npcpy/memory/command_history.py +844 -510
  23. npcpy/memory/kg_vis.py +833 -0
  24. npcpy/memory/knowledge_graph.py +892 -1845
  25. npcpy/memory/memory_processor.py +81 -0
  26. npcpy/memory/search.py +188 -90
  27. npcpy/mix/debate.py +192 -3
  28. npcpy/npc_compiler.py +1672 -801
  29. npcpy/npc_sysenv.py +593 -1266
  30. npcpy/serve.py +3120 -0
  31. npcpy/sql/ai_function_tools.py +257 -0
  32. npcpy/sql/database_ai_adapters.py +186 -0
  33. npcpy/sql/database_ai_functions.py +163 -0
  34. npcpy/sql/model_runner.py +19 -19
  35. npcpy/sql/npcsql.py +706 -507
  36. npcpy/sql/sql_model_compiler.py +156 -0
  37. npcpy/tools.py +183 -0
  38. npcpy/work/plan.py +13 -279
  39. npcpy/work/trigger.py +3 -3
  40. npcpy-1.2.32.dist-info/METADATA +803 -0
  41. npcpy-1.2.32.dist-info/RECORD +54 -0
  42. npcpy/data/dataframes.py +0 -171
  43. npcpy/memory/deep_research.py +0 -125
  44. npcpy/memory/sleep.py +0 -557
  45. npcpy/modes/_state.py +0 -78
  46. npcpy/modes/alicanto.py +0 -1075
  47. npcpy/modes/guac.py +0 -785
  48. npcpy/modes/mcp_npcsh.py +0 -822
  49. npcpy/modes/npc.py +0 -213
  50. npcpy/modes/npcsh.py +0 -1158
  51. npcpy/modes/plonk.py +0 -409
  52. npcpy/modes/pti.py +0 -234
  53. npcpy/modes/serve.py +0 -1637
  54. npcpy/modes/spool.py +0 -312
  55. npcpy/modes/wander.py +0 -549
  56. npcpy/modes/yap.py +0 -572
  57. npcpy/npc_team/alicanto.npc +0 -2
  58. npcpy/npc_team/alicanto.png +0 -0
  59. npcpy/npc_team/assembly_lines/test_pipeline.py +0 -181
  60. npcpy/npc_team/corca.npc +0 -13
  61. npcpy/npc_team/foreman.npc +0 -7
  62. npcpy/npc_team/frederic.npc +0 -6
  63. npcpy/npc_team/frederic4.png +0 -0
  64. npcpy/npc_team/guac.png +0 -0
  65. npcpy/npc_team/jinxs/automator.jinx +0 -18
  66. npcpy/npc_team/jinxs/bash_executer.jinx +0 -31
  67. npcpy/npc_team/jinxs/calculator.jinx +0 -11
  68. npcpy/npc_team/jinxs/edit_file.jinx +0 -96
  69. npcpy/npc_team/jinxs/file_chat.jinx +0 -14
  70. npcpy/npc_team/jinxs/gui_controller.jinx +0 -28
  71. npcpy/npc_team/jinxs/image_generation.jinx +0 -29
  72. npcpy/npc_team/jinxs/internet_search.jinx +0 -30
  73. npcpy/npc_team/jinxs/local_search.jinx +0 -152
  74. npcpy/npc_team/jinxs/npcsh_executor.jinx +0 -31
  75. npcpy/npc_team/jinxs/python_executor.jinx +0 -8
  76. npcpy/npc_team/jinxs/screen_cap.jinx +0 -25
  77. npcpy/npc_team/jinxs/sql_executor.jinx +0 -33
  78. npcpy/npc_team/kadiefa.npc +0 -3
  79. npcpy/npc_team/kadiefa.png +0 -0
  80. npcpy/npc_team/npcsh.ctx +0 -9
  81. npcpy/npc_team/npcsh_sibiji.png +0 -0
  82. npcpy/npc_team/plonk.npc +0 -2
  83. npcpy/npc_team/plonk.png +0 -0
  84. npcpy/npc_team/plonkjr.npc +0 -2
  85. npcpy/npc_team/plonkjr.png +0 -0
  86. npcpy/npc_team/sibiji.npc +0 -5
  87. npcpy/npc_team/sibiji.png +0 -0
  88. npcpy/npc_team/spool.png +0 -0
  89. npcpy/npc_team/templates/analytics/celona.npc +0 -0
  90. npcpy/npc_team/templates/hr_support/raone.npc +0 -0
  91. npcpy/npc_team/templates/humanities/eriane.npc +0 -4
  92. npcpy/npc_team/templates/it_support/lineru.npc +0 -0
  93. npcpy/npc_team/templates/marketing/slean.npc +0 -4
  94. npcpy/npc_team/templates/philosophy/maurawa.npc +0 -0
  95. npcpy/npc_team/templates/sales/turnic.npc +0 -4
  96. npcpy/npc_team/templates/software/welxor.npc +0 -0
  97. npcpy/npc_team/yap.png +0 -0
  98. npcpy/routes.py +0 -958
  99. npcpy/work/mcp_helpers.py +0 -357
  100. npcpy/work/mcp_server.py +0 -194
  101. npcpy-1.0.26.data/data/npcpy/npc_team/alicanto.npc +0 -2
  102. npcpy-1.0.26.data/data/npcpy/npc_team/alicanto.png +0 -0
  103. npcpy-1.0.26.data/data/npcpy/npc_team/automator.jinx +0 -18
  104. npcpy-1.0.26.data/data/npcpy/npc_team/bash_executer.jinx +0 -31
  105. npcpy-1.0.26.data/data/npcpy/npc_team/calculator.jinx +0 -11
  106. npcpy-1.0.26.data/data/npcpy/npc_team/celona.npc +0 -0
  107. npcpy-1.0.26.data/data/npcpy/npc_team/corca.npc +0 -13
  108. npcpy-1.0.26.data/data/npcpy/npc_team/edit_file.jinx +0 -96
  109. npcpy-1.0.26.data/data/npcpy/npc_team/eriane.npc +0 -4
  110. npcpy-1.0.26.data/data/npcpy/npc_team/file_chat.jinx +0 -14
  111. npcpy-1.0.26.data/data/npcpy/npc_team/foreman.npc +0 -7
  112. npcpy-1.0.26.data/data/npcpy/npc_team/frederic.npc +0 -6
  113. npcpy-1.0.26.data/data/npcpy/npc_team/frederic4.png +0 -0
  114. npcpy-1.0.26.data/data/npcpy/npc_team/guac.png +0 -0
  115. npcpy-1.0.26.data/data/npcpy/npc_team/gui_controller.jinx +0 -28
  116. npcpy-1.0.26.data/data/npcpy/npc_team/image_generation.jinx +0 -29
  117. npcpy-1.0.26.data/data/npcpy/npc_team/internet_search.jinx +0 -30
  118. npcpy-1.0.26.data/data/npcpy/npc_team/kadiefa.npc +0 -3
  119. npcpy-1.0.26.data/data/npcpy/npc_team/kadiefa.png +0 -0
  120. npcpy-1.0.26.data/data/npcpy/npc_team/lineru.npc +0 -0
  121. npcpy-1.0.26.data/data/npcpy/npc_team/local_search.jinx +0 -152
  122. npcpy-1.0.26.data/data/npcpy/npc_team/maurawa.npc +0 -0
  123. npcpy-1.0.26.data/data/npcpy/npc_team/npcsh.ctx +0 -9
  124. npcpy-1.0.26.data/data/npcpy/npc_team/npcsh_executor.jinx +0 -31
  125. npcpy-1.0.26.data/data/npcpy/npc_team/npcsh_sibiji.png +0 -0
  126. npcpy-1.0.26.data/data/npcpy/npc_team/plonk.npc +0 -2
  127. npcpy-1.0.26.data/data/npcpy/npc_team/plonk.png +0 -0
  128. npcpy-1.0.26.data/data/npcpy/npc_team/plonkjr.npc +0 -2
  129. npcpy-1.0.26.data/data/npcpy/npc_team/plonkjr.png +0 -0
  130. npcpy-1.0.26.data/data/npcpy/npc_team/python_executor.jinx +0 -8
  131. npcpy-1.0.26.data/data/npcpy/npc_team/raone.npc +0 -0
  132. npcpy-1.0.26.data/data/npcpy/npc_team/screen_cap.jinx +0 -25
  133. npcpy-1.0.26.data/data/npcpy/npc_team/sibiji.npc +0 -5
  134. npcpy-1.0.26.data/data/npcpy/npc_team/sibiji.png +0 -0
  135. npcpy-1.0.26.data/data/npcpy/npc_team/slean.npc +0 -4
  136. npcpy-1.0.26.data/data/npcpy/npc_team/spool.png +0 -0
  137. npcpy-1.0.26.data/data/npcpy/npc_team/sql_executor.jinx +0 -33
  138. npcpy-1.0.26.data/data/npcpy/npc_team/test_pipeline.py +0 -181
  139. npcpy-1.0.26.data/data/npcpy/npc_team/turnic.npc +0 -4
  140. npcpy-1.0.26.data/data/npcpy/npc_team/welxor.npc +0 -0
  141. npcpy-1.0.26.data/data/npcpy/npc_team/yap.png +0 -0
  142. npcpy-1.0.26.dist-info/METADATA +0 -827
  143. npcpy-1.0.26.dist-info/RECORD +0 -139
  144. npcpy-1.0.26.dist-info/entry_points.txt +0 -11
  145. /npcpy/{modes → ft}/__init__.py +0 -0
  146. {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/WHEEL +0 -0
  147. {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/licenses/LICENSE +0 -0
  148. {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/top_level.txt +0 -0
npcpy/memory/sleep.py DELETED
@@ -1,557 +0,0 @@
- """
- Sleep module for NPC agents.
-
- This module provides functions for the "sleep" process of NPC agents, which includes:
- 1. Breathing: extracting facts, lessons, and mistakes from conversation history
- 2. Processing: organizing and storing these extractions in knowledge graphs
- 3. Consolidation: merging similar memories and updating belief structures
- 4. Integration: connecting new knowledge with existing knowledge
- """
-
- import os
- import json
- import time
- import datetime
- import sqlite3
- from typing import Dict, List, Tuple, Any, Optional
-
-
- def initialize_sleep_db(db_path: str) -> sqlite3.Connection:
-     """
-     Initialize the database for sleep operations.
-
-     Args:
-         db_path: Path to the SQLite database
-
-     Returns:
-         SQLite connection object
-     """
-     conn = sqlite3.connect(db_path)
-     cursor = conn.cursor()
-
-     # Create sleep_sessions table to track sleeping sessions
-     cursor.execute('''
-         CREATE TABLE IF NOT EXISTS sleep_sessions (
-             id INTEGER PRIMARY KEY AUTOINCREMENT,
-             start_time TIMESTAMP NOT NULL,
-             end_time TIMESTAMP,
-             conversation_source TEXT NOT NULL,
-             status TEXT NOT NULL,
-             extraction_count INTEGER DEFAULT 0,
-             consolidation_count INTEGER DEFAULT 0
-         )
-     ''')
-
-     # Create sleep_logs table for detailed logging
-     cursor.execute('''
-         CREATE TABLE IF NOT EXISTS sleep_logs (
-             id INTEGER PRIMARY KEY AUTOINCREMENT,
-             session_id INTEGER NOT NULL,
-             timestamp TIMESTAMP NOT NULL,
-             event_type TEXT NOT NULL,
-             details TEXT,
-             FOREIGN KEY (session_id) REFERENCES sleep_sessions(id)
-         )
-     ''')
-
-     conn.commit()
-     return conn
-
- def sleep(conversation_history: List[Dict],
-           db_path: str,
-           chroma_path: str,
-           agent_name: str = "default_agent",
-           sleep_duration: int = 5,
-           verbose: bool = False) -> Dict:
-     """
-     Main sleep function for NPC agents. This processes conversation history,
-     extracts knowledge, and integrates it into the agent's memory.
-
-     Args:
-         conversation_history: List of conversation turns in dict format
-         db_path: Path to the SQLite database
-         chroma_path: Path to the ChromaDB directory
-         agent_name: Name of the agent
-         sleep_duration: Duration to simulate sleep in seconds
-         verbose: Whether to print verbose output
-
-     Returns:
-         Dict with results of the sleep operation
-     """
-     # Initialize results dictionary
-     results = {
-         "success": False,
-         "extraction_count": 0,
-         "facts": [],
-         "mistakes": [],
-         "lessons": [],
-         "actions": [],
-         "decisions": [],
-         "start_time": datetime.datetime.now().isoformat(),
-         "end_time": None,
-         "duration": 0,
-         "errors": []
-     }
-
-     try:
-         # Connect to database
-         conn = initialize_sleep_db(db_path)
-         cursor = conn.cursor()
-
-         # Log start of sleep session
-         cursor.execute(
-             "INSERT INTO sleep_sessions (start_time, conversation_source, status) VALUES (datetime('now'), ?, ?)",
-             (agent_name, "started")
-         )
-         session_id = cursor.lastrowid
-         conn.commit()
-
-         if verbose:
-             print(f"🌙 {agent_name} is sleeping and processing memories...")
-
-         # Simulate sleep processing time
-         time.sleep(sleep_duration)
-
-         # Process conversation history
-         conversation_text = ""
-         for turn in conversation_history:
-             role = turn.get("role", "unknown")
-             content = turn.get("content", "")
-             conversation_text += f"{role}: {content}\n\n"
-
-         # Extract knowledge using breathe
-         if verbose:
-             print("🧠 Extracting knowledge from conversation...")
-
-         extraction_results = breathe(conversation_text)
-
-         # Store results
-         results["facts"] = extraction_results.get("facts", [])
-         results["mistakes"] = extraction_results.get("mistakes", [])
-         results["lessons"] = extraction_results.get("lessons", [])
-         results["actions"] = extraction_results.get("actions", [])
-         results["decisions"] = extraction_results.get("decisions", [])
-
-         # Calculate extraction count
-         extraction_count = sum(len(items) for items in extraction_results.values() if isinstance(items, list))
-         results["extraction_count"] = extraction_count
-
-         # Store in SQLite
-         if verbose:
-             print("💾 Storing knowledge in database...")
-
-         # Store facts
-         for fact in results["facts"]:
-             add_fact(conn, fact, agent_name)
-             cursor.execute(
-                 "INSERT INTO sleep_logs (session_id, timestamp, event_type, details) VALUES (?, datetime('now'), ?, ?)",
-                 (session_id, "fact_added", fact)
-             )
-
-         # Store mistakes
-         for mistake in results["mistakes"]:
-             add_mistake(conn, mistake, agent_name)
-             cursor.execute(
-                 "INSERT INTO sleep_logs (session_id, timestamp, event_type, details) VALUES (?, datetime('now'), ?, ?)",
-                 (session_id, "mistake_added", mistake)
-             )
-
-         # Store lessons
-         for lesson in results["lessons"]:
-             add_lesson(conn, lesson, agent_name)
-             cursor.execute(
-                 "INSERT INTO sleep_logs (session_id, timestamp, event_type, details) VALUES (?, datetime('now'), ?, ?)",
-                 (session_id, "lesson_added", lesson)
-             )
-
-         # Store actions
-         for action in results["actions"]:
-             add_action(conn, action, agent_name)
-             cursor.execute(
-                 "INSERT INTO sleep_logs (session_id, timestamp, event_type, details) VALUES (?, datetime('now'), ?, ?)",
-                 (session_id, "action_added", action)
-             )
-
-         # Store decisions
-         for decision in results["decisions"]:
-             add_decision(conn, decision, agent_name)
-             cursor.execute(
-                 "INSERT INTO sleep_logs (session_id, timestamp, event_type, details) VALUES (?, datetime('now'), ?, ?)",
-                 (session_id, "decision_added", decision)
-             )
-
-         # Store in vector database
-         store_in_vector_db(chroma_path, conversation_text, extraction_results, agent_name)
-
-         # Update session information
-         end_time = datetime.datetime.now()
-         results["end_time"] = end_time.isoformat()
-         results["duration"] = (end_time - datetime.datetime.fromisoformat(results["start_time"])).total_seconds()
-
-         cursor.execute(
-             "UPDATE sleep_sessions SET end_time = datetime('now'), status = ?, extraction_count = ? WHERE id = ?",
-             ("completed", extraction_count, session_id)
-         )
-         conn.commit()
-
-         if verbose:
-             print(f"✅ Sleep completed. Extracted {extraction_count} items.")
-             print(f"⏱️ Duration: {results['duration']:.2f} seconds")
-
-         results["success"] = True
-     except Exception as e:
-         results["errors"].append(str(e))
-         if verbose:
-             print(f"❌ Error during sleep: {str(e)}")
-
-         # Log error in database if session_id exists
-         try:
-             if 'session_id' in locals():
-                 cursor.execute(
-                     "UPDATE sleep_sessions SET status = ? WHERE id = ?",
-                     ("error", session_id)
-                 )
-                 cursor.execute(
-                     "INSERT INTO sleep_logs (session_id, timestamp, event_type, details) VALUES (?, datetime('now'), ?, ?)",
-                     (session_id, "error", str(e))
-                 )
-                 conn.commit()
-         except Exception as inner_e:
-             results["errors"].append(f"Failed to log error: {str(inner_e)}")
-     finally:
-         # Close database connection
-         if 'conn' in locals():
-             conn.close()
-
-     return results
-
- def recall(query: str,
-            db_path: str,
-            chroma_path: str,
-            top_k: int = 5,
-            include_vector_search: bool = True,
-            verbose: bool = False) -> Dict:
-     """
-     Recall information from the agent's memory.
-
-     Args:
-         query: The query to search for
-         db_path: Path to the SQLite database
-         chroma_path: Path to the ChromaDB directory
-         top_k: Number of results to return
-         include_vector_search: Whether to include vector search results
-         verbose: Whether to print verbose output
-
-     Returns:
-         Dict with search results
-     """
-     results = {
-         "facts": [],
-         "mistakes": [],
-         "lessons": [],
-         "actions": [],
-         "decisions": [],
-         "vector_results": [],
-         "success": False,
-         "errors": []
-     }
-
-     try:
-         # Connect to database
-         conn = sqlite3.connect(db_path)
-
-         # Search in SQL database
-         if verbose:
-             print(f"🔍 Searching for: '{query}'")
-
-         # Search for facts
-         facts = search_facts(conn, query)
-         results["facts"] = [{"id": f[0], "content": f[1], "source": f[2]} for f in facts]
-
-         # Search for mistakes
-         mistakes = search_mistakes(conn, query)
-         results["mistakes"] = [{"id": m[0], "content": m[1], "source": m[2]} for m in mistakes]
-
-         # Search for lessons
-         lessons = search_lessons(conn, query)
-         results["lessons"] = [{"id": l[0], "content": l[1], "source": l[2]} for l in lessons]
-
-         # Search for actions
-         actions = search_actions(conn, query)
-         results["actions"] = [{"id": a[0], "content": a[1], "source": a[2]} for a in actions]
-
-         # Search for decisions
-         decisions = search_decisions(conn, query)
-         results["decisions"] = [{"id": d[0], "content": d[1], "source": d[2]} for d in decisions]
-
-         # Vector search
-         if include_vector_search:
-             vector_results = retrieve_relevant_memory(query, chroma_path, top_k)
-             results["vector_results"] = vector_results
-
-         if verbose:
-             total_results = sum(len(results[key]) for key in ["facts", "mistakes", "lessons", "actions", "decisions"])
-             vector_count = len(results["vector_results"])
-             print(f"✅ Found {total_results} direct matches and {vector_count} semantic matches.")
-
-         results["success"] = True
-     except Exception as e:
-         results["errors"].append(str(e))
-         if verbose:
-             print(f"❌ Error during recall: {str(e)}")
-     finally:
-         # Close database connection
-         if 'conn' in locals():
-             conn.close()
-
-     return results
-
- def list_sleep_sessions(db_path: str, limit: int = 10) -> List[Dict]:
-     """
-     List recent sleep sessions.
-
-     Args:
-         db_path: Path to the SQLite database
-         limit: Maximum number of sessions to return
-
-     Returns:
-         List of session information dictionaries
-     """
-     sessions = []
-
-     try:
-         # Connect to database
-         conn = sqlite3.connect(db_path)
-         cursor = conn.cursor()
-
-         # Query sessions
-         cursor.execute("""
-             SELECT id, start_time, end_time, conversation_source, status, extraction_count, consolidation_count
-             FROM sleep_sessions
-             ORDER BY start_time DESC
-             LIMIT ?
-         """, (limit,))
-
-         for row in cursor.fetchall():
-             session_id, start_time, end_time, source, status, extraction_count, consolidation_count = row
-
-             # Calculate duration if available
-             duration = None
-             if start_time and end_time:
-                 try:
-                     start_dt = datetime.datetime.fromisoformat(start_time.replace('Z', '+00:00'))
-                     end_dt = datetime.datetime.fromisoformat(end_time.replace('Z', '+00:00'))
-                     duration = (end_dt - start_dt).total_seconds()
-                 except:
-                     pass
-
-             sessions.append({
-                 "id": session_id,
-                 "start_time": start_time,
-                 "end_time": end_time,
-                 "source": source,
-                 "status": status,
-                 "extraction_count": extraction_count,
-                 "consolidation_count": consolidation_count,
-                 "duration": duration
-             })
-
-         conn.close()
-     except Exception as e:
-         print(f"Error listing sleep sessions: {str(e)}")
-
-     return sessions
-
- def get_session_details(db_path: str, session_id: int) -> Dict:
-     """
-     Get detailed information about a sleep session.
-
-     Args:
-         db_path: Path to the SQLite database
-         session_id: ID of the session to retrieve
-
-     Returns:
-         Dictionary with session details and logs
-     """
-     details = {
-         "session": None,
-         "logs": [],
-         "success": False,
-         "error": None
-     }
-
-     try:
-         # Connect to database
-         conn = sqlite3.connect(db_path)
-         cursor = conn.cursor()
-
-         # Query session information
-         cursor.execute("""
-             SELECT id, start_time, end_time, conversation_source, status, extraction_count, consolidation_count
-             FROM sleep_sessions
-             WHERE id = ?
-         """, (session_id,))
-
-         row = cursor.fetchone()
-         if not row:
-             details["error"] = f"Session {session_id} not found"
-             return details
-
-         session_id, start_time, end_time, source, status, extraction_count, consolidation_count = row
-
-         # Calculate duration if available
-         duration = None
-         if start_time and end_time:
-             try:
-                 start_dt = datetime.datetime.fromisoformat(start_time.replace('Z', '+00:00'))
-                 end_dt = datetime.datetime.fromisoformat(end_time.replace('Z', '+00:00'))
-                 duration = (end_dt - start_dt).total_seconds()
-             except:
-                 pass
-
-         details["session"] = {
-             "id": session_id,
-             "start_time": start_time,
-             "end_time": end_time,
-             "source": source,
-             "status": status,
-             "extraction_count": extraction_count,
-             "consolidation_count": consolidation_count,
-             "duration": duration
-         }
-
-         # Query session logs
-         cursor.execute("""
-             SELECT id, timestamp, event_type, details
-             FROM sleep_logs
-             WHERE session_id = ?
-             ORDER BY timestamp ASC
-         """, (session_id,))
-
-         for log_row in cursor.fetchall():
-             log_id, timestamp, event_type, log_details = log_row
-             details["logs"].append({
-                 "id": log_id,
-                 "timestamp": timestamp,
-                 "event_type": event_type,
-                 "details": log_details
-             })
-
-         details["success"] = True
-         conn.close()
-     except Exception as e:
-         details["error"] = str(e)
-
-     return details
-
- def forget(query: str,
-            db_path: str,
-            chroma_path: str,
-            source: Optional[str] = None,
-            verbose: bool = False) -> Dict:
-     """
-     Forget/remove specific memories from the agent's memory based on query.
-
-     Args:
-         query: The query to identify memories to forget
-         db_path: Path to the SQLite database
-         chroma_path: Path to the ChromaDB directory
-         source: Optional source filter (e.g., agent name)
-         verbose: Whether to print verbose output
-
-     Returns:
-         Dict with deletion results
-     """
-     results = {
-         "success": False,
-         "removed": {
-             "facts": 0,
-             "mistakes": 0,
-             "lessons": 0,
-             "actions": 0,
-             "decisions": 0,
-             "vector_items": 0
-         },
-         "errors": []
-     }
-
-     try:
-         # Connect to database
-         conn = sqlite3.connect(db_path)
-         cursor = conn.cursor()
-
-         if verbose:
-             print(f"🗑️ Forgetting memories matching: '{query}'")
-
-         # Build source filter if provided
-         source_filter = "AND source = ?" if source else ""
-         params = (f"%{query}%", source) if source else (f"%{query}%",)
-
-         # Delete facts
-         cursor.execute(
-             f"DELETE FROM facts WHERE content LIKE ? {source_filter}",
-             params
-         )
-         results["removed"]["facts"] = cursor.rowcount
-
-         # Delete mistakes
-         cursor.execute(
-             f"DELETE FROM mistakes WHERE content LIKE ? {source_filter}",
-             params
-         )
-         results["removed"]["mistakes"] = cursor.rowcount
-
-         # Delete lessons
-         cursor.execute(
-             f"DELETE FROM lessons WHERE content LIKE ? {source_filter}",
-             params
-         )
-         results["removed"]["lessons"] = cursor.rowcount
-
-         # Delete actions
-         cursor.execute(
-             f"DELETE FROM actions WHERE content LIKE ? {source_filter}",
-             params
-         )
-         results["removed"]["actions"] = cursor.rowcount
-
-         # Delete decisions
-         cursor.execute(
-             f"DELETE FROM decisions WHERE content LIKE ? {source_filter}",
-             params
-         )
-         results["removed"]["decisions"] = cursor.rowcount
-
-         conn.commit()
-
-         # Delete from vector database
-         try:
-             from npcpy.memory.knowledge_graph import remove_from_vector_db
-             vector_count = remove_from_vector_db(chroma_path, query, source)
-             results["removed"]["vector_items"] = vector_count
-         except ImportError:
-             results["errors"].append("Vector database removal function not available")
-
-         # Calculate total removed items
-         total_removed = sum(results["removed"].values())
-
-         if verbose:
-             print(f"✅ Removed {total_removed} memories matching the query.")
-             print(f" - Facts: {results['removed']['facts']}")
-             print(f" - Mistakes: {results['removed']['mistakes']}")
-             print(f" - Lessons: {results['removed']['lessons']}")
-             print(f" - Actions: {results['removed']['actions']}")
-             print(f" - Decisions: {results['removed']['decisions']}")
-             print(f" - Vector items: {results['removed']['vector_items']}")
-
-         results["success"] = True
-     except Exception as e:
-         results["errors"].append(str(e))
-         if verbose:
-             print(f"❌ Error during forget operation: {str(e)}")
-     finally:
-         # Close database connection
-         if 'conn' in locals():
-             conn.close()
-
-     return results
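Note: the module removed above exposed a small standalone memory API (sleep, recall, list_sleep_sessions, get_session_details, forget); the 1.2.32 tree instead ships npcpy/memory/memory_processor.py and a heavily reworked knowledge_graph.py, as listed in the file summary. A minimal usage sketch against the 1.0.26 signatures shown above, with hypothetical database paths and agent name, would have looked roughly like this:

# Illustrative sketch only: npcpy.memory.sleep exists in 1.0.26 and earlier, not in 1.2.32.
# The paths, agent name, and conversation content below are hypothetical placeholders.
from npcpy.memory.sleep import sleep, recall

history = [
    {"role": "user", "content": "The deploy script lives in scripts/deploy.sh."},
    {"role": "assistant", "content": "Noted: deploys run from scripts/deploy.sh."},
]

# Extract facts/mistakes/lessons from the conversation and persist them
# to SQLite (sleep_sessions, sleep_logs, ...) and to the ChromaDB store.
report = sleep(
    conversation_history=history,
    db_path="npcsh_history.db",
    chroma_path="npcsh_chroma",
    agent_name="sibiji",
    verbose=True,
)
print(report["extraction_count"], report["facts"])

# Later, query both the SQL tables and the vector store for related memories.
hits = recall(
    "deploy script",
    db_path="npcsh_history.db",
    chroma_path="npcsh_chroma",
    top_k=3,
)
print(len(hits["facts"]), len(hits["vector_results"]))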
npcpy/modes/_state.py DELETED
@@ -1,78 +0,0 @@
- from npcpy.npc_sysenv import (
-     print_and_process_stream_with_markdown,
-     NPCSH_STREAM_OUTPUT,
-     NPCSH_CHAT_MODEL, NPCSH_CHAT_PROVIDER,
-     NPCSH_VISION_MODEL, NPCSH_VISION_PROVIDER,
-     NPCSH_EMBEDDING_MODEL, NPCSH_EMBEDDING_PROVIDER,
-     NPCSH_REASONING_MODEL, NPCSH_REASONING_PROVIDER,
-     NPCSH_IMAGE_GEN_MODEL, NPCSH_IMAGE_GEN_PROVIDER,
-     NPCSH_VIDEO_GEN_MODEL, NPCSH_VIDEO_GEN_PROVIDER,
-     NPCSH_API_URL,
-     NPCSH_DEFAULT_MODE,
-
- )
- from npcpy.memory.command_history import (
-     start_new_conversation,
- )
- from dataclasses import dataclass, field
- from typing import Optional, List, Dict, Any, Tuple, Union
- from npcpy.npc_compiler import NPC, Team
- import os
- @dataclass
- class ShellState:
-     npc: Optional[Union[NPC, str]] = None
-     team: Optional[Team] = None
-     messages: List[Dict[str, Any]] = field(default_factory=list)
-     mcp_client: Optional[Any] = None
-     conversation_id: Optional[int] = None
-     chat_model: str = NPCSH_CHAT_MODEL
-     chat_provider: str = NPCSH_CHAT_PROVIDER
-     vision_model: str = NPCSH_VISION_MODEL
-     vision_provider: str = NPCSH_VISION_PROVIDER
-     embedding_model: str = NPCSH_EMBEDDING_MODEL
-     embedding_provider: str = NPCSH_EMBEDDING_PROVIDER
-     reasoning_model: str = NPCSH_REASONING_MODEL
-     reasoning_provider: str = NPCSH_REASONING_PROVIDER
-     image_gen_model: str = NPCSH_IMAGE_GEN_MODEL
-     image_gen_provider: str = NPCSH_IMAGE_GEN_PROVIDER
-     video_gen_model: str = NPCSH_VIDEO_GEN_MODEL
-     video_gen_provider: str = NPCSH_VIDEO_GEN_PROVIDER
-     current_mode: str = NPCSH_DEFAULT_MODE
-     api_key: Optional[str] = None
-     api_url: Optional[str] = NPCSH_API_URL
-     current_path: str = field(default_factory=os.getcwd)
-     stream_output: bool = NPCSH_STREAM_OUTPUT
-     attachments: Optional[List[Any]] = None
-     def get_model_for_command(self, model_type: str = "chat"):
-         if model_type == "chat":
-             return self.chat_model, self.chat_provider
-         elif model_type == "vision":
-             return self.vision_model, self.vision_provider
-         elif model_type == "embedding":
-             return self.embedding_model, self.embedding_provider
-         elif model_type == "reasoning":
-             return self.reasoning_model, self.reasoning_provider
-         elif model_type == "image_gen":
-             return self.image_gen_model, self.image_gen_provider
-         elif model_type == "video_gen":
-             return self.video_gen_model, self.video_gen_provider
-         else:
-             return self.chat_model, self.chat_provider # Default fallback
- initial_state = ShellState(
-     conversation_id=start_new_conversation(),
-     stream_output=NPCSH_STREAM_OUTPUT,
-     current_mode=NPCSH_DEFAULT_MODE,
-     chat_model=NPCSH_CHAT_MODEL,
-     chat_provider=NPCSH_CHAT_PROVIDER,
-     vision_model=NPCSH_VISION_MODEL,
-     vision_provider=NPCSH_VISION_PROVIDER,
-     embedding_model=NPCSH_EMBEDDING_MODEL,
-     embedding_provider=NPCSH_EMBEDDING_PROVIDER,
-     reasoning_model=NPCSH_REASONING_MODEL,
-     reasoning_provider=NPCSH_REASONING_PROVIDER,
-     image_gen_model=NPCSH_IMAGE_GEN_MODEL,
-     image_gen_provider=NPCSH_IMAGE_GEN_PROVIDER,
-     video_gen_model=NPCSH_VIDEO_GEN_MODEL,
-     video_gen_provider=NPCSH_VIDEO_GEN_PROVIDER,
-     api_url=NPCSH_API_URL,
- )
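Note: this module defined the modes package's per-session ShellState container and its initial_state instance; the interactive modes removed in the same release (npcsh.py, guac.py, spool.py, yap.py, and the rest of npcpy/modes/) were dropped alongside it. A minimal sketch of how the 1.0.26 dataclass above was constructed and queried, with hypothetical model and provider strings, would look like this:

# Illustrative sketch only: npcpy.modes._state exists in 1.0.26 and earlier, not in 1.2.32.
# The model/provider values are hypothetical placeholders; defaults come from npcpy.npc_sysenv.
from npcpy.memory.command_history import start_new_conversation
from npcpy.modes._state import ShellState

state = ShellState(
    conversation_id=start_new_conversation(),
    chat_model="llama3.2",
    chat_provider="ollama",
    vision_model="llava",
    vision_provider="ollama",
)

# get_model_for_command() maps a task type to its (model, provider) pair,
# falling back to the chat pair for unrecognized types.
model, provider = state.get_model_for_command("vision")    # -> ("llava", "ollama")
model, provider = state.get_model_for_command("anything")  # -> chat model/provider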