npcpy: 1.0.26-py3-none-any.whl → 1.2.32-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (148)
  1. npcpy/__init__.py +0 -7
  2. npcpy/data/audio.py +16 -99
  3. npcpy/data/image.py +43 -42
  4. npcpy/data/load.py +83 -124
  5. npcpy/data/text.py +28 -28
  6. npcpy/data/video.py +8 -32
  7. npcpy/data/web.py +51 -23
  8. npcpy/ft/diff.py +110 -0
  9. npcpy/ft/ge.py +115 -0
  10. npcpy/ft/memory_trainer.py +171 -0
  11. npcpy/ft/model_ensembler.py +357 -0
  12. npcpy/ft/rl.py +360 -0
  13. npcpy/ft/sft.py +248 -0
  14. npcpy/ft/usft.py +128 -0
  15. npcpy/gen/audio_gen.py +24 -0
  16. npcpy/gen/embeddings.py +13 -13
  17. npcpy/gen/image_gen.py +262 -117
  18. npcpy/gen/response.py +615 -415
  19. npcpy/gen/video_gen.py +53 -7
  20. npcpy/llm_funcs.py +1869 -437
  21. npcpy/main.py +1 -1
  22. npcpy/memory/command_history.py +844 -510
  23. npcpy/memory/kg_vis.py +833 -0
  24. npcpy/memory/knowledge_graph.py +892 -1845
  25. npcpy/memory/memory_processor.py +81 -0
  26. npcpy/memory/search.py +188 -90
  27. npcpy/mix/debate.py +192 -3
  28. npcpy/npc_compiler.py +1672 -801
  29. npcpy/npc_sysenv.py +593 -1266
  30. npcpy/serve.py +3120 -0
  31. npcpy/sql/ai_function_tools.py +257 -0
  32. npcpy/sql/database_ai_adapters.py +186 -0
  33. npcpy/sql/database_ai_functions.py +163 -0
  34. npcpy/sql/model_runner.py +19 -19
  35. npcpy/sql/npcsql.py +706 -507
  36. npcpy/sql/sql_model_compiler.py +156 -0
  37. npcpy/tools.py +183 -0
  38. npcpy/work/plan.py +13 -279
  39. npcpy/work/trigger.py +3 -3
  40. npcpy-1.2.32.dist-info/METADATA +803 -0
  41. npcpy-1.2.32.dist-info/RECORD +54 -0
  42. npcpy/data/dataframes.py +0 -171
  43. npcpy/memory/deep_research.py +0 -125
  44. npcpy/memory/sleep.py +0 -557
  45. npcpy/modes/_state.py +0 -78
  46. npcpy/modes/alicanto.py +0 -1075
  47. npcpy/modes/guac.py +0 -785
  48. npcpy/modes/mcp_npcsh.py +0 -822
  49. npcpy/modes/npc.py +0 -213
  50. npcpy/modes/npcsh.py +0 -1158
  51. npcpy/modes/plonk.py +0 -409
  52. npcpy/modes/pti.py +0 -234
  53. npcpy/modes/serve.py +0 -1637
  54. npcpy/modes/spool.py +0 -312
  55. npcpy/modes/wander.py +0 -549
  56. npcpy/modes/yap.py +0 -572
  57. npcpy/npc_team/alicanto.npc +0 -2
  58. npcpy/npc_team/alicanto.png +0 -0
  59. npcpy/npc_team/assembly_lines/test_pipeline.py +0 -181
  60. npcpy/npc_team/corca.npc +0 -13
  61. npcpy/npc_team/foreman.npc +0 -7
  62. npcpy/npc_team/frederic.npc +0 -6
  63. npcpy/npc_team/frederic4.png +0 -0
  64. npcpy/npc_team/guac.png +0 -0
  65. npcpy/npc_team/jinxs/automator.jinx +0 -18
  66. npcpy/npc_team/jinxs/bash_executer.jinx +0 -31
  67. npcpy/npc_team/jinxs/calculator.jinx +0 -11
  68. npcpy/npc_team/jinxs/edit_file.jinx +0 -96
  69. npcpy/npc_team/jinxs/file_chat.jinx +0 -14
  70. npcpy/npc_team/jinxs/gui_controller.jinx +0 -28
  71. npcpy/npc_team/jinxs/image_generation.jinx +0 -29
  72. npcpy/npc_team/jinxs/internet_search.jinx +0 -30
  73. npcpy/npc_team/jinxs/local_search.jinx +0 -152
  74. npcpy/npc_team/jinxs/npcsh_executor.jinx +0 -31
  75. npcpy/npc_team/jinxs/python_executor.jinx +0 -8
  76. npcpy/npc_team/jinxs/screen_cap.jinx +0 -25
  77. npcpy/npc_team/jinxs/sql_executor.jinx +0 -33
  78. npcpy/npc_team/kadiefa.npc +0 -3
  79. npcpy/npc_team/kadiefa.png +0 -0
  80. npcpy/npc_team/npcsh.ctx +0 -9
  81. npcpy/npc_team/npcsh_sibiji.png +0 -0
  82. npcpy/npc_team/plonk.npc +0 -2
  83. npcpy/npc_team/plonk.png +0 -0
  84. npcpy/npc_team/plonkjr.npc +0 -2
  85. npcpy/npc_team/plonkjr.png +0 -0
  86. npcpy/npc_team/sibiji.npc +0 -5
  87. npcpy/npc_team/sibiji.png +0 -0
  88. npcpy/npc_team/spool.png +0 -0
  89. npcpy/npc_team/templates/analytics/celona.npc +0 -0
  90. npcpy/npc_team/templates/hr_support/raone.npc +0 -0
  91. npcpy/npc_team/templates/humanities/eriane.npc +0 -4
  92. npcpy/npc_team/templates/it_support/lineru.npc +0 -0
  93. npcpy/npc_team/templates/marketing/slean.npc +0 -4
  94. npcpy/npc_team/templates/philosophy/maurawa.npc +0 -0
  95. npcpy/npc_team/templates/sales/turnic.npc +0 -4
  96. npcpy/npc_team/templates/software/welxor.npc +0 -0
  97. npcpy/npc_team/yap.png +0 -0
  98. npcpy/routes.py +0 -958
  99. npcpy/work/mcp_helpers.py +0 -357
  100. npcpy/work/mcp_server.py +0 -194
  101. npcpy-1.0.26.data/data/npcpy/npc_team/alicanto.npc +0 -2
  102. npcpy-1.0.26.data/data/npcpy/npc_team/alicanto.png +0 -0
  103. npcpy-1.0.26.data/data/npcpy/npc_team/automator.jinx +0 -18
  104. npcpy-1.0.26.data/data/npcpy/npc_team/bash_executer.jinx +0 -31
  105. npcpy-1.0.26.data/data/npcpy/npc_team/calculator.jinx +0 -11
  106. npcpy-1.0.26.data/data/npcpy/npc_team/celona.npc +0 -0
  107. npcpy-1.0.26.data/data/npcpy/npc_team/corca.npc +0 -13
  108. npcpy-1.0.26.data/data/npcpy/npc_team/edit_file.jinx +0 -96
  109. npcpy-1.0.26.data/data/npcpy/npc_team/eriane.npc +0 -4
  110. npcpy-1.0.26.data/data/npcpy/npc_team/file_chat.jinx +0 -14
  111. npcpy-1.0.26.data/data/npcpy/npc_team/foreman.npc +0 -7
  112. npcpy-1.0.26.data/data/npcpy/npc_team/frederic.npc +0 -6
  113. npcpy-1.0.26.data/data/npcpy/npc_team/frederic4.png +0 -0
  114. npcpy-1.0.26.data/data/npcpy/npc_team/guac.png +0 -0
  115. npcpy-1.0.26.data/data/npcpy/npc_team/gui_controller.jinx +0 -28
  116. npcpy-1.0.26.data/data/npcpy/npc_team/image_generation.jinx +0 -29
  117. npcpy-1.0.26.data/data/npcpy/npc_team/internet_search.jinx +0 -30
  118. npcpy-1.0.26.data/data/npcpy/npc_team/kadiefa.npc +0 -3
  119. npcpy-1.0.26.data/data/npcpy/npc_team/kadiefa.png +0 -0
  120. npcpy-1.0.26.data/data/npcpy/npc_team/lineru.npc +0 -0
  121. npcpy-1.0.26.data/data/npcpy/npc_team/local_search.jinx +0 -152
  122. npcpy-1.0.26.data/data/npcpy/npc_team/maurawa.npc +0 -0
  123. npcpy-1.0.26.data/data/npcpy/npc_team/npcsh.ctx +0 -9
  124. npcpy-1.0.26.data/data/npcpy/npc_team/npcsh_executor.jinx +0 -31
  125. npcpy-1.0.26.data/data/npcpy/npc_team/npcsh_sibiji.png +0 -0
  126. npcpy-1.0.26.data/data/npcpy/npc_team/plonk.npc +0 -2
  127. npcpy-1.0.26.data/data/npcpy/npc_team/plonk.png +0 -0
  128. npcpy-1.0.26.data/data/npcpy/npc_team/plonkjr.npc +0 -2
  129. npcpy-1.0.26.data/data/npcpy/npc_team/plonkjr.png +0 -0
  130. npcpy-1.0.26.data/data/npcpy/npc_team/python_executor.jinx +0 -8
  131. npcpy-1.0.26.data/data/npcpy/npc_team/raone.npc +0 -0
  132. npcpy-1.0.26.data/data/npcpy/npc_team/screen_cap.jinx +0 -25
  133. npcpy-1.0.26.data/data/npcpy/npc_team/sibiji.npc +0 -5
  134. npcpy-1.0.26.data/data/npcpy/npc_team/sibiji.png +0 -0
  135. npcpy-1.0.26.data/data/npcpy/npc_team/slean.npc +0 -4
  136. npcpy-1.0.26.data/data/npcpy/npc_team/spool.png +0 -0
  137. npcpy-1.0.26.data/data/npcpy/npc_team/sql_executor.jinx +0 -33
  138. npcpy-1.0.26.data/data/npcpy/npc_team/test_pipeline.py +0 -181
  139. npcpy-1.0.26.data/data/npcpy/npc_team/turnic.npc +0 -4
  140. npcpy-1.0.26.data/data/npcpy/npc_team/welxor.npc +0 -0
  141. npcpy-1.0.26.data/data/npcpy/npc_team/yap.png +0 -0
  142. npcpy-1.0.26.dist-info/METADATA +0 -827
  143. npcpy-1.0.26.dist-info/RECORD +0 -139
  144. npcpy-1.0.26.dist-info/entry_points.txt +0 -11
  145. /npcpy/{modes → ft}/__init__.py +0 -0
  146. {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/WHEEL +0 -0
  147. {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/licenses/LICENSE +0 -0
  148. {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/top_level.txt +0 -0
@@ -1,5 +1,4 @@
 import os
-import sqlite3
 import json
 from datetime import datetime
 import uuid
@@ -9,16 +8,29 @@ import numpy as np
 
 try:
     import sqlalchemy
-    from sqlalchemy import create_engine, text
+    from sqlalchemy import create_engine, text, MetaData, Table, Column, Integer, String, Text, DateTime, LargeBinary, ForeignKey, Boolean, func
     from sqlalchemy.engine import Engine, Connection as SQLAlchemyConnection
     from sqlalchemy.exc import SQLAlchemyError
+    from sqlalchemy.sql import select, insert, update, delete
+    from sqlalchemy.dialects import sqlite, postgresql
     _HAS_SQLALCHEMY = True
 except ImportError:
     _HAS_SQLALCHEMY = False
-    Engine = type(None) # Define dummy types if sqlalchemy not installed
-    SQLAlchemyConnection = type(None)
-    create_engine = None
-    text = None
+    print("SQLAlchemy not available - this module requires SQLAlchemy")
+    raise
+
+try:
+    import chromadb
+except ModuleNotFoundError:
+    print("chromadb not installed")
+except OSError as e:
+    print('os error importing chromadb:', e)
+except NameError as e:
+    print('name error importing chromadb:', e)
+    chromadb = None
+
+
+import logging
 
 def flush_messages(n: int, messages: list) -> dict:
     if n <= 0:
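Note on the hunk above: SQLAlchemy moves from optional to required (the ImportError is re-raised), while chromadb stays a soft dependency. A minimal sketch of what that means for callers (illustrative handling only):

    # Importing the module now fails fast when SQLAlchemy is missing.
    try:
        from npcpy.memory import command_history
    except ImportError as err:
        print("command_history needs SQLAlchemy installed:", err)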
@@ -27,45 +39,46 @@ def flush_messages(n: int, messages: list) -> dict:
             "output": "Error: 'n' must be a positive integer.",
         }
 
-    removed_count = min(n, len(messages)) # Calculate how many to remove
-    del messages[-removed_count:] # Remove the last n messages
+    removed_count = min(n, len(messages))
+    del messages[-removed_count:]
 
     return {
         "messages": messages,
         "output": f"Flushed {removed_count} message(s). Context count is now {len(messages)} messages.",
     }
 
+def create_engine_from_path(db_path: str) -> Engine:
+    """Create SQLAlchemy engine from database path, detecting type"""
+    if db_path.startswith('postgresql://') or db_path.startswith('postgres://'):
+        return create_engine(db_path)
+    else:
+
+        if db_path.startswith('~/'):
+            db_path = os.path.expanduser(db_path)
+        return create_engine(f'sqlite:///{db_path}')
 
-def get_db_connection():
-    conn = sqlite3.connect(db_path)
-    conn.row_factory = sqlite3.Row
-    return conn
-
-
-def fetch_messages_for_conversation(conversation_id):
-    conn = get_db_connection()
-    cursor = conn.cursor()
+def get_db_connection(db_path: str = "~/npcsh_history.db") -> Engine:
+    """Get SQLAlchemy engine"""
+    return create_engine_from_path(db_path)
 
-    query = """
+def fetch_messages_for_conversation(engine: Engine, conversation_id: str):
+    query = text("""
     SELECT role, content, timestamp
     FROM conversation_history
-    WHERE conversation_id = ?
+    WHERE conversation_id = :conversation_id
     ORDER BY timestamp ASC
-    """
-    cursor.execute(query, (conversation_id,))
-    messages = cursor.fetchall()
-    conn.close()
-
-    return [
-        {
-            "role": message["role"],
-            "content": message["content"],
-            "timestamp": message["timestamp"],
-        }
-        for message in messages
-    ]
-
-
+    """)
+
+    with engine.connect() as conn:
+        result = conn.execute(query, {"conversation_id": conversation_id})
+        return [
+            {
+                "role": row.role,
+                "content": row.content,
+                "timestamp": row.timestamp,
+            }
+            for row in result
+        ]
 
 def deep_to_dict(obj):
     """
@@ -84,8 +97,7 @@ def deep_to_dict(obj):
     if isinstance(obj, (int, float, str, bool, type(None))):
         return obj
 
-    return None # Drop objects that don't have a known conversion
-
+    return None
 
 class CustomJSONEncoder(json.JSONEncoder):
     def default(self, obj):
@@ -94,59 +106,38 @@ class CustomJSONEncoder(json.JSONEncoder):
         except TypeError:
             return super().default(obj)
 
-
 def show_history(command_history, args):
     if args:
-        search_results = command_history.search(args[0])
+        search_results = command_history.search_commands(args[0])
         if search_results:
             return "\n".join(
-                [f"{item[0]}. [{item[1]}] {item[2]}" for item in search_results]
+                [f"{item['id']}. [{item['timestamp']}] {item['command']}" for item in search_results]
             )
         else:
             return f"No commands found matching '{args[0]}'"
     else:
-        all_history = command_history.get_all()
-        return "\n".join([f"{item[0]}. [{item[1]}] {item[2]}" for item in all_history])
-
+        all_history = command_history.get_all_commands()
+        return "\n".join([f"{item['id']}. [{item['timestamp']}] {item['command']}" for item in all_history])
 
 def query_history_for_llm(command_history, query):
-    results = command_history.search(query)
+    results = command_history.search_commands(query)
     formatted_results = [
-        f"Command: {r[2]}\nOutput: {r[4]}\nLocation: {r[5]}" for r in results
+        f"Command: {r['command']}\nOutput: {r['output']}\nLocation: {r['location']}" for r in results
     ]
     return "\n\n".join(formatted_results)
 
-
-try:
-    import chromadb
-except ModuleNotFoundError:
-    print("chromadb not installed")
-except OSError as e:
-    print('os error importing chromadb:', e)
-except NameError as e:
-    print('name error importing chromadb:', e)
-    chromadb = None
-import numpy as np
-import os
-from typing import Optional, Dict, List, Union, Tuple
-
-
-def setup_chroma_db(collection, description='', db_path: str= ''):
+def setup_chroma_db(collection, description='', db_path: str = ''):
     """Initialize Chroma vector database without a default embedding function"""
     if db_path == '':
         db_path = os.path.expanduser('~/npcsh_chroma_db')
 
     try:
-        # Create or connect to Chroma client with persistent storage
         client = chromadb.PersistentClient(path=db_path)
 
-        # Check if collection exists, create if not
         try:
             collection = client.get_collection(collection)
             print("Connected to existing facts collection")
         except ValueError:
-            # Create new collection without an embedding function
-            # We'll provide embeddings manually using get_embeddings
             collection = client.create_collection(
                 name=collection,
                 metadata={"description": description},
@@ -158,48 +149,271 @@ def setup_chroma_db(collection, description='', db_path: str= ''):
         print(f"Error setting up Chroma DB: {e}")
         raise
 
+def init_kg_schema(engine: Engine):
+    """Creates the multi-scoped, path-aware KG tables using SQLAlchemy"""
+
+
+    metadata = MetaData()
+
+    kg_facts = Table('kg_facts', metadata,
+        Column('statement', Text, nullable=False),
+        Column('team_name', String(255), nullable=False),
+        Column('npc_name', String(255), nullable=False),
+        Column('directory_path', Text, nullable=False),
+        Column('source_text', Text),
+        Column('type', String(100)),
+        Column('generation', Integer),
+        Column('origin', String(100)),
+
+        schema=None
+    )
+
+    kg_concepts = Table('kg_concepts', metadata,
+        Column('name', Text, nullable=False),
+        Column('team_name', String(255), nullable=False),
+        Column('npc_name', String(255), nullable=False),
+        Column('directory_path', Text, nullable=False),
+        Column('generation', Integer),
+        Column('origin', String(100)),
+        schema=None
+    )
+
+    kg_links = Table('kg_links', metadata,
+        Column('source', Text, nullable=False),
+        Column('target', Text, nullable=False),
+        Column('team_name', String(255), nullable=False),
+        Column('npc_name', String(255), nullable=False),
+        Column('directory_path', Text, nullable=False),
+        Column('type', String(100), nullable=False),
+        schema=None
+    )
+
+    kg_metadata = Table('kg_metadata', metadata,
+        Column('key', String(255), nullable=False),
+        Column('team_name', String(255), nullable=False),
+        Column('npc_name', String(255), nullable=False),
+        Column('directory_path', Text, nullable=False),
+        Column('value', Text),
+        schema=None
+    )
+
+
+    metadata.create_all(engine, checkfirst=True)
+
+def load_kg_from_db(engine: Engine, team_name: str, npc_name: str, directory_path: str) -> Dict[str, Any]:
+    """Loads the KG for a specific scope (team, npc, path) from database."""
+    kg = {
+        "generation": 0,
+        "facts": [],
+        "concepts": [],
+        "concept_links": [],
+        "fact_to_concept_links": {},
+        "fact_to_fact_links": []
+    }
+
+    with engine.connect() as conn:
+        try:
+
+            result = conn.execute(text("""
+                SELECT value FROM kg_metadata
+                WHERE team_name = :team AND npc_name = :npc AND directory_path = :path AND key = 'generation'
+            """), {"team": team_name, "npc": npc_name, "path": directory_path})
+
+            row = result.fetchone()
+            if row:
+                kg['generation'] = int(row.value)
+
+
+            result = conn.execute(text("""
+                SELECT statement, source_text, type, generation, origin FROM kg_facts
+                WHERE team_name = :team AND npc_name = :npc AND directory_path = :path
+            """), {"team": team_name, "npc": npc_name, "path": directory_path})
+
+            kg['facts'] = [
+                {
+                    "statement": row.statement,
+                    "source_text": row.source_text,
+                    "type": row.type,
+                    "generation": row.generation,
+                    "origin": row.origin
+                }
+                for row in result
+            ]
+
+
+            result = conn.execute(text("""
+                SELECT name, generation, origin FROM kg_concepts
+                WHERE team_name = :team AND npc_name = :npc AND directory_path = :path
+            """), {"team": team_name, "npc": npc_name, "path": directory_path})
+
+            kg['concepts'] = [
+                {"name": row.name, "generation": row.generation, "origin": row.origin}
+                for row in result
+            ]
+
+
+            links = {}
+            result = conn.execute(text("""
+                SELECT source, target, type FROM kg_links
+                WHERE team_name = :team AND npc_name = :npc AND directory_path = :path
+            """), {"team": team_name, "npc": npc_name, "path": directory_path})
+
+            for row in result:
+                if row.type == 'fact_to_concept':
+                    if row.source not in links:
+                        links[row.source] = []
+                    links[row.source].append(row.target)
+                elif row.type == 'concept_to_concept':
+                    kg['concept_links'].append((row.source, row.target))
+                elif row.type == 'fact_to_fact':
+                    kg['fact_to_fact_links'].append((row.source, row.target))
+
+            kg['fact_to_concept_links'] = links
+
+        except SQLAlchemyError:
+
+            init_kg_schema(engine)
+
+    return kg
 
-class CommandHistory:
-    def __init__(self, db: Union[str, sqlite3.Connection, Engine] = "~/npcsh_history.db"):
+def save_kg_to_db(engine: Engine, kg_data: Dict[str, Any], team_name: str, npc_name: str, directory_path: str):
+    """Saves a knowledge graph dictionary to the database, ignoring duplicates."""
+    try:
+        with engine.begin() as conn:
+
+            facts_to_save = [
+                {
+                    "statement": fact['statement'],
+                    "team_name": team_name,
+                    "npc_name": npc_name,
+                    "directory_path": directory_path,
+                    "generation": fact.get('generation', 0),
+                    "origin": fact.get('origin', 'organic')
+                }
+                for fact in kg_data.get("facts", [])
+            ]
+
+            if facts_to_save:
+
+                if 'sqlite' in str(engine.url):
+                    stmt = text("""
+                        INSERT OR IGNORE INTO kg_facts
+                        (statement, team_name, npc_name, directory_path, generation, origin)
+                        VALUES (:statement, :team_name, :npc_name, :directory_path, :generation, :origin)
+                    """)
+                else:
+                    stmt = text("""
+                        INSERT INTO kg_facts
+                        (statement, team_name, npc_name, directory_path, generation, origin)
+                        VALUES (:statement, :team_name, :npc_name, :directory_path, :generation, :origin)
+                        ON CONFLICT (statement, team_name, npc_name, directory_path) DO NOTHING
+                    """)
+
+                for fact in facts_to_save:
+                    conn.execute(stmt, fact)
+
+
+            concepts_to_save = [
+                {
+                    "name": concept['name'],
+                    "team_name": team_name,
+                    "npc_name": npc_name,
+                    "directory_path": directory_path,
+                    "generation": concept.get('generation', 0),
+                    "origin": concept.get('origin', 'organic')
+                }
+                for concept in kg_data.get("concepts", [])
+            ]
+
+            if concepts_to_save:
+                if 'sqlite' in str(engine.url):
+                    stmt = text("""
+                        INSERT OR IGNORE INTO kg_concepts
+                        (name, team_name, npc_name, directory_path, generation, origin)
+                        VALUES (:name, :team_name, :npc_name, :directory_path, :generation, :origin)
+                    """)
+                else:
+                    stmt = text("""
+                        INSERT INTO kg_concepts
+                        (name, team_name, npc_name, directory_path, generation, origin)
+                        VALUES (:name, :team_name, :npc_name, :directory_path, :generation, :origin)
+                        ON CONFLICT (name, team_name, npc_name, directory_path) DO NOTHING
+                    """)
+
+                for concept in concepts_to_save:
+                    conn.execute(stmt, concept)
+
+
+            if 'sqlite' in str(engine.url):
+                stmt = text("""
+                    INSERT OR REPLACE INTO kg_metadata (key, value, team_name, npc_name, directory_path)
+                    VALUES ('generation', :generation, :team_name, :npc_name, :directory_path)
+                """)
+            else:
+                stmt = text("""
+                    INSERT INTO kg_metadata (key, value, team_name, npc_name, directory_path)
+                    VALUES ('generation', :generation, :team_name, :npc_name, :directory_path)
+                    ON CONFLICT (key, team_name, npc_name, directory_path)
+                    DO UPDATE SET value = EXCLUDED.value
+                """)
+
+            conn.execute(stmt, {
+                "generation": str(kg_data.get('generation', 0)),
+                "team_name": team_name,
+                "npc_name": npc_name,
+                "directory_path": directory_path
+            })
+
+
+            conn.execute(text("""
+                DELETE FROM kg_links
+                WHERE team_name = :team_name AND npc_name = :npc_name AND directory_path = :directory_path
+            """), {"team_name": team_name, "npc_name": npc_name, "directory_path": directory_path})
+
+
+            for fact, concepts in kg_data.get("fact_to_concept_links", {}).items():
+                for concept in concepts:
+                    conn.execute(text("""
+                        INSERT INTO kg_links (source, target, type, team_name, npc_name, directory_path)
+                        VALUES (:source, :target, 'fact_to_concept', :team_name, :npc_name, :directory_path)
+                    """), {
+                        "source": fact, "target": concept,
+                        "team_name": team_name, "npc_name": npc_name, "directory_path": directory_path
+                    })
+
+            for c1, c2 in kg_data.get("concept_links", []):
+                conn.execute(text("""
+                    INSERT INTO kg_links (source, target, type, team_name, npc_name, directory_path)
+                    VALUES (:source, :target, 'concept_to_concept', :team_name, :npc_name, :directory_path)
+                """), {
+                    "source": c1, "target": c2,
+                    "team_name": team_name, "npc_name": npc_name, "directory_path": directory_path
+                })
+
+            for f1, f2 in kg_data.get("fact_to_fact_links", []):
+                conn.execute(text("""
+                    INSERT INTO kg_links (source, target, type, team_name, npc_name, directory_path)
+                    VALUES (:source, :target, 'fact_to_fact', :team_name, :npc_name, :directory_path)
+                """), {
+                    "source": f1, "target": f2,
+                    "team_name": team_name, "npc_name": npc_name, "directory_path": directory_path
+                })
+
+    except Exception as e:
+        print(f"Failed to save KG for scope '({team_name}, {npc_name}, {directory_path})': {e}")
 
-        self._is_sqlalchemy = False
-        self.cursor = None
-        self.db_path = None # Store the determined path if available
+def generate_message_id() -> str:
+    return str(uuid.uuid4())
 
+class CommandHistory:
+    def __init__(self, db: Union[str, Engine] = "~/npcsh_history.db"):
+
         if isinstance(db, str):
-            self.db_path = os.path.expanduser(db)
-            try:
-                self.conn = sqlite3.connect(self.db_path, check_same_thread=False) # Allow multithread access if needed
-                self.conn.row_factory = sqlite3.Row
-                self.cursor = self.conn.cursor()
-                self.cursor.execute("PRAGMA foreign_keys = ON")
-                self.conn.commit()
-
-            except sqlite3.Error as e:
-                print(f"FATAL: Error connecting to sqlite3 DB at {self.db_path}: {e}")
-                raise
-
-        elif isinstance(db, sqlite3.Connection):
-            self.conn = db
-            if not hasattr(self.conn, 'row_factory') or self.conn.row_factory is None:
-                # Set row_factory if not already set on provided connection
-                try: self.conn.row_factory = sqlite3.Row
-                except Exception as e: print(f"Warning: Could not set row_factory on provided sqlite3 connection: {e}")
-
-            self.cursor = self.conn.cursor()
-            try:
-                self.cursor.execute("PRAGMA foreign_keys = ON")
-                self.conn.commit()
-            except sqlite3.Error as e:
-                print(f"Warning: Could not set PRAGMA foreign_keys on provided sqlite3 connection: {e}")
-
-
-        elif _HAS_SQLALCHEMY and isinstance(db, Engine):
+            self.engine = create_engine_from_path(db)
+            self.db_path = db
+        elif isinstance(db, Engine):
+            self.engine = db
             self.db_path = str(db.url)
-            self.conn = db
-            self._is_sqlalchemy = True
-
-
         else:
             raise TypeError(f"Unsupported type for CommandHistory db parameter: {type(db)}")
 
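The rewritten constructor accepts either a path string or a ready SQLAlchemy Engine, so SQLite and Postgres share one code path. An illustrative construction (URLs and paths are assumptions, and the Postgres form needs the relevant driver installed):

    from sqlalchemy import create_engine
    from npcpy.memory.command_history import CommandHistory

    history = CommandHistory("~/npcsh_history.db")  # becomes sqlite:///... via create_engine_from_path
    pg_history = CommandHistory(create_engine("postgresql://user:pw@localhost/npcsh"))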
@@ -207,318 +421,468 @@ class CommandHistory:
 
     def _initialize_schema(self):
         """Creates all necessary tables."""
-        print("Initializing database schema...")
-        self.create_command_table()
-        self.create_conversation_table()
-        self.create_attachment_table()
-        self.create_jinx_call_table()
-        print("Database schema initialization complete.")
-
-    def _execute(self, sql: str, params: Optional[Union[tuple, Dict]] = None, script: bool = False, requires_fk: bool = False) -> Optional[int]:
-        """Executes SQL, handling transactions and FK pragma for SQLAlchemy. Returns lastrowid for INSERTs."""
-        last_row_id = None
-        try:
-            if self._is_sqlalchemy:
-                with self.conn.connect() as connection:
-                    with connection.begin():
-                        if requires_fk and self.conn.url.drivername == 'sqlite':
-                            try: connection.execute(text("PRAGMA foreign_keys=ON"))
-                            except SQLAlchemyError as e: print(f"Warning: SQLAlchemy PRAGMA foreign_keys=ON failed: {e}")
-
-                        if script:
-                            statements = [s.strip() for s in sql.split(';') if s.strip()]
-                            for statement in statements:
-                                result_proxy = connection.execute(text(statement))
-                                if result_proxy.lastrowid is not None: last_row_id = result_proxy.lastrowid
-                        else:
-                            result_proxy = connection.execute(text(sql), params or {})
-                            if result_proxy.lastrowid is not None: last_row_id = result_proxy.lastrowid
-            else:
-                # Existing sqlite3 logic
-                if script:
-                    self.cursor.executescript(sql)
-                else:
-                    self.cursor.execute(sql, params or ())
-                    last_row_id = self.cursor.lastrowid # Get lastrowid for sqlite3
-                self.conn.commit()
-
-            return last_row_id
-
-        except (sqlite3.Error, SQLAlchemyError) as e:
-            error_type = "SQLAlchemy" if self._is_sqlalchemy else "SQLite"
-            print(f"{error_type} Error executing: {sql[:100]}... Error: {e}")
-            if not self._is_sqlalchemy:
-                try: self.conn.rollback()
-                except Exception as rb_err: print(f"SQLite rollback failed: {rb_err}")
-            # Decide whether to raise the error or just return None/False
-            raise # Re-raise the error to indicate failure
-        except Exception as e:
-            print(f"Unexpected error in _execute: {e}")
-            raise # Re-raise unexpected errors
-
-
-    def _fetch_one(self, sql: str, params: Optional[Union[tuple, Dict]] = None) -> Optional[Dict]:
-        """Fetches a single row, adapting to connection type."""
-        try:
-            if self._is_sqlalchemy:
-                with self.conn.connect() as connection:
-                    # No need for transaction for SELECT
-                    result = connection.execute(text(sql), params or {})
-                    row = result.fetchone()
-                    return dict(row._mapping) if row else None
-            else:
-                self.cursor.execute(sql, params or ())
-                row = self.cursor.fetchone()
-                return dict(row) if row else None
-        except (sqlite3.Error, SQLAlchemyError) as e:
-            error_type = "SQLAlchemy" if self._is_sqlalchemy else "SQLite"
-            print(f"{error_type} Error fetching one: {sql[:100]}... Error: {e}")
-            return None # Return None on error
-        except Exception as e:
-            print(f"Unexpected error in _fetch_one: {e}")
-            return None
-
-    def _fetch_all(self, sql: str, params: Optional[Union[tuple, Dict]] = None) -> List[Dict]:
-        """Fetches all rows, adapting to connection type."""
-        try:
-            if self._is_sqlalchemy:
-                with self.conn.connect() as connection:
-                    # Convert tuple params to a dictionary format for SQLAlchemy
-                    if params and isinstance(params, tuple):
-                        # Extract parameter placeholders from SQL
-                        placeholders = []
-                        for i, char in enumerate(sql):
-                            if char == '?':
-                                placeholders.append(i)
-
-                        # Convert tuple params to dict format
-                        dict_params = {}
-                        for i, value in enumerate(params):
-                            param_name = f"param_{i}"
-                            # Replace ? with :param_name in SQL
-                            sql = sql.replace('?', f":{param_name}", 1)
-                            dict_params[param_name] = value
-
-                        params = dict_params
+        metadata = MetaData()
+
+
+        Table('command_history', metadata,
+            Column('id', Integer, primary_key=True, autoincrement=True),
+            Column('timestamp', String(50)),
+            Column('command', Text),
+            Column('subcommands', Text),
+            Column('output', Text),
+            Column('location', Text)
+        )
+
+
+        Table('conversation_history', metadata,
+            Column('id', Integer, primary_key=True, autoincrement=True),
+            Column('message_id', String(50), unique=True, nullable=False),
+            Column('timestamp', String(50)),
+            Column('role', String(20)),
+            Column('content', Text),
+            Column('conversation_id', String(100)),
+            Column('directory_path', Text),
+            Column('model', String(100)),
+            Column('provider', String(100)),
+            Column('npc', String(100)),
+            Column('team', String(100))
+        )
+
+
+        Table('message_attachments', metadata,
+            Column('id', Integer, primary_key=True, autoincrement=True),
+            Column('message_id', String(50), ForeignKey('conversation_history.message_id', ondelete='CASCADE'), nullable=False),
+            Column('attachment_name', String(255)),
+            Column('attachment_type', String(100)),
+            Column('attachment_data', LargeBinary),
+            Column('attachment_size', Integer),
+            Column('upload_timestamp', String(50)),
+            Column('file_path', Text)
+        )
+
+
+        Table('jinx_execution_log', metadata,
+            Column('execution_id', Integer, primary_key=True, autoincrement=True),
+            Column('triggering_message_id', String(50), ForeignKey('conversation_history.message_id', ondelete='CASCADE'), nullable=False),
+            Column('response_message_id', String(50), ForeignKey('conversation_history.message_id', ondelete='SET NULL')),
+            Column('conversation_id', String(100), nullable=False),
+            Column('timestamp', String(50), nullable=False),
+            Column('npc_name', String(100)),
+            Column('team_name', String(100)),
+            Column('jinx_name', String(100), nullable=False),
+            Column('jinx_inputs', Text),
+            Column('jinx_output', Text),
+            Column('status', String(50), nullable=False),
+            Column('error_message', Text),
+            Column('duration_ms', Integer)
+        )
+
+        Table('memory_lifecycle', metadata,
+            Column('id', Integer, primary_key=True, autoincrement=True),
+            Column('message_id', String(50), nullable=False),
+            Column('conversation_id', String(100), nullable=False),
+            Column('npc', String(100), nullable=False),
+            Column('team', String(100), nullable=False),
+            Column('directory_path', Text, nullable=False),
+            Column('timestamp', String(50), nullable=False),
+            Column('initial_memory', Text, nullable=False),
+            Column('final_memory', Text),
+            Column('status', String(50), nullable=False),
+            Column('model', String(100)),
+            Column('provider', String(100)),
+            Column('created_at', DateTime, default=func.now())
+        )
+
+
+        metadata.create_all(self.engine, checkfirst=True)
+
+
+        with self.engine.begin() as conn:
+
+            index_queries = [
+                "CREATE INDEX IF NOT EXISTS idx_jinx_log_trigger_msg ON jinx_execution_log (triggering_message_id)",
+                "CREATE INDEX IF NOT EXISTS idx_jinx_log_convo_id ON jinx_execution_log (conversation_id)",
+                "CREATE INDEX IF NOT EXISTS idx_jinx_log_jinx_name ON jinx_execution_log (jinx_name)",
+                "CREATE INDEX IF NOT EXISTS idx_jinx_log_timestamp ON jinx_execution_log (timestamp)"
+            ]
+
+            for idx_query in index_queries:
+                try:
+                    conn.execute(text(idx_query))
+                except SQLAlchemyError:
 
-                    result = connection.execute(text(sql), params or {})
-                    rows = result.fetchall()
-                    return [dict(row._mapping) for row in rows]
-            else:
-                self.cursor.execute(sql, params or ())
-                rows = self.cursor.fetchall()
-                return [dict(row) for row in rows]
-        except (sqlite3.Error, SQLAlchemyError) as e:
-            error_type = "SQLAlchemy" if self._is_sqlalchemy else "SQLite"
-            print(f"{error_type} Error fetching all: {sql[:100]}... Error: {e}")
-            return [] # Return empty list on error
-        except Exception as e:
-            print(f"Unexpected error in _fetch_all: {e}")
-            return []
-
-    def create_command_table(self):
-        query = """
-        CREATE TABLE IF NOT EXISTS command_history (
-            id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp TEXT, command TEXT,
-            subcommands TEXT, output TEXT, location TEXT
-        )"""
-        self._execute(query)
-
-    def create_conversation_table(self):
-        query = """
-        CREATE TABLE IF NOT EXISTS conversation_history (
-            id INTEGER PRIMARY KEY AUTOINCREMENT, message_id TEXT UNIQUE NOT NULL,
-            timestamp TEXT, role TEXT, content TEXT, conversation_id TEXT,
-            directory_path TEXT, model TEXT, provider TEXT, npc TEXT, team TEXT
-        )"""
-        self._execute(query)
-
-    def create_attachment_table(self):
-        query = """
-        CREATE TABLE IF NOT EXISTS message_attachments (
-            id INTEGER PRIMARY KEY AUTOINCREMENT, message_id TEXT NOT NULL,
-            attachment_name TEXT, attachment_type TEXT, attachment_data BLOB,
-            attachment_size INTEGER, upload_timestamp TEXT,
-            FOREIGN KEY (message_id) REFERENCES conversation_history(message_id) ON DELETE CASCADE
-        )"""
-        self._execute(query, requires_fk=True)
-
-    def create_jinx_call_table(self):
-        table_query = '''
-        CREATE TABLE IF NOT EXISTS jinx_execution_log (
-            execution_id INTEGER PRIMARY KEY AUTOINCREMENT, triggering_message_id TEXT NOT NULL,
-            response_message_id TEXT, conversation_id TEXT NOT NULL, timestamp TEXT NOT NULL,
-            npc_name TEXT, team_name TEXT, jinx_name TEXT NOT NULL, jinx_inputs TEXT,
-            jinx_output TEXT, status TEXT NOT NULL, error_message TEXT, duration_ms INTEGER,
-            FOREIGN KEY (triggering_message_id) REFERENCES conversation_history(message_id) ON DELETE CASCADE,
-            FOREIGN KEY (response_message_id) REFERENCES conversation_history(message_id) ON DELETE SET NULL
-        );
-        '''
-        self._execute(table_query, requires_fk=True)
-
-        index_queries = [
-            "CREATE INDEX IF NOT EXISTS idx_jinx_log_trigger_msg ON jinx_execution_log (triggering_message_id);",
-            "CREATE INDEX IF NOT EXISTS idx_jinx_log_convo_id ON jinx_execution_log (conversation_id);",
-            "CREATE INDEX IF NOT EXISTS idx_jinx_log_jinx_name ON jinx_execution_log (jinx_name);",
-            "CREATE INDEX IF NOT EXISTS idx_jinx_log_timestamp ON jinx_execution_log (timestamp);"
-        ]
-        for idx_query in index_queries:
-            self._execute(idx_query)
+                    pass
+
+
+        init_kg_schema(self.engine)
+
+    def _execute_returning_id(self, stmt: str, params: Dict = None) -> Optional[int]:
+        """Execute INSERT and return the generated ID"""
+        with self.engine.begin() as conn:
+            result = conn.execute(text(stmt), params or {})
+            return result.lastrowid if hasattr(result, 'lastrowid') else None
+
+    def _fetch_one(self, stmt: str, params: Dict = None) -> Optional[Dict]:
+        """Fetch a single row"""
+        with self.engine.connect() as conn:
+            result = conn.execute(text(stmt), params or {})
+            row = result.fetchone()
+            return dict(row._mapping) if row else None
+
+    def _fetch_all(self, stmt: str, params: Dict = None) -> List[Dict]:
+        """Fetch all rows"""
+        with self.engine.connect() as conn:
+            result = conn.execute(text(stmt), params or {})
+            return [dict(row._mapping) for row in result]
 
     def add_command(self, command, subcommands, output, location):
         timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-        safe_subcommands = str(subcommands)
-        safe_output = str(output)
-        sql = """
+        stmt = """
         INSERT INTO command_history (timestamp, command, subcommands, output, location)
-        VALUES (?, ?, ?, ?, ?)
-        """
-        params = (timestamp, command, safe_subcommands, safe_output, location)
-        self._execute(sql, params)
+        VALUES (:timestamp, :command, :subcommands, :output, :location)
+        """
+        params = {
+            "timestamp": timestamp,
+            "command": command,
+            "subcommands": str(subcommands),
+            "output": str(output),
+            "location": location
+        }
+
+        with self.engine.begin() as conn:
+            conn.execute(text(stmt), params)
 
-    def generate_message_id(self) -> str:
-        return str(uuid.uuid4())
 
     def add_conversation(
-        self, role, content, conversation_id, directory_path,
-        model=None, provider=None, npc=None, team=None,
-        attachments=None, message_id=None,
+        self,
+        message_id,
+        timestamp,
+        role,
+        content,
+        conversation_id,
+        directory_path,
+        model=None,
+        provider=None,
+        npc=None,
+        team=None,
+        attachments=None,
     ):
-        timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-        if message_id is None: message_id = self.generate_message_id()
-        if isinstance(content, dict): content = json.dumps(content, cls=CustomJSONEncoder)
-
-        existing_row = self._fetch_one(
-            "SELECT content FROM conversation_history WHERE message_id = ?", (message_id,)
-        )
+        if isinstance(content, (dict, list)):
+            content = json.dumps(content, cls=CustomJSONEncoder)
 
-        if existing_row:
-            sql = "UPDATE conversation_history SET content = ?, timestamp = ? WHERE message_id = ?"
-            params = (content, timestamp, message_id)
-            self._execute(sql, params)
-        else:
-            sql = """INSERT INTO conversation_history
-                     (message_id, timestamp, role, content, conversation_id, directory_path, model, provider, npc, team)
-                     VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"""
-            params = (message_id, timestamp, role, content, conversation_id, directory_path, model, provider, npc, team,)
-            self._execute(sql, params)
+        stmt = """
+        INSERT INTO conversation_history
+        (message_id, timestamp, role, content, conversation_id, directory_path, model, provider, npc, team)
+        VALUES (:message_id, :timestamp, :role, :content, :conversation_id, :directory_path, :model, :provider, :npc, :team)
+        """
+        params = {
+            "message_id": message_id, "timestamp": timestamp, "role": role, "content": content,
+            "conversation_id": conversation_id, "directory_path": directory_path, "model": model,
+            "provider": provider, "npc": npc, "team": team
+        }
+        with self.engine.begin() as conn:
+            conn.execute(text(stmt), params)
 
         if attachments:
             for attachment in attachments:
                 self.add_attachment(
-                    message_id, attachment["name"], attachment["type"],
-                    attachment["data"], attachment_size=attachment.get("size"),
+                    message_id=message_id,
+                    name=attachment.get("name"),
+                    attachment_type=attachment.get("type"),
+                    data=attachment.get("data"),
+                    size=attachment.get("size"),
+                    file_path=attachment.get("path")
                 )
+
         return message_id
 
-    def add_attachment(
-        self, message_id, attachment_name, attachment_type, attachment_data, attachment_size=None,
-    ):
+    def add_memory_to_database(self, message_id: str, conversation_id: str, npc: str, team: str,
+                               directory_path: str, initial_memory: str, status: str,
+                               model: str = None, provider: str = None, final_memory: str = None):
+        """Store a memory entry in the database"""
+        timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+
+        stmt = """
+        INSERT INTO memory_lifecycle
+        (message_id, conversation_id, npc, team, directory_path, timestamp,
+         initial_memory, final_memory, status, model, provider)
+        VALUES (:message_id, :conversation_id, :npc, :team, :directory_path,
+                :timestamp, :initial_memory, :final_memory, :status, :model, :provider)
+        """
+
+        params = {
+            "message_id": message_id, "conversation_id": conversation_id,
+            "npc": npc, "team": team, "directory_path": directory_path,
+            "timestamp": timestamp, "initial_memory": initial_memory,
+            "final_memory": final_memory, "status": status,
+            "model": model, "provider": provider
+        }
+
+        return self._execute_returning_id(stmt, params)
+    def get_memories_for_scope(
+        self,
+        npc: str,
+        team: str,
+        directory_path: str,
+        status: Optional[str] = None
+    ) -> List[Dict]:
+
+        query = """
+        SELECT id, initial_memory, final_memory,
+               status, timestamp, created_at
+        FROM memory_lifecycle
+        WHERE npc = :npc AND team = :team AND directory_path = :path
+        """
+        params = {"npc": npc, "team": team, "path": directory_path}
+
+        if status:
+            query += " AND status = :status"
+            params["status"] = status
+
+        query += " ORDER BY created_at DESC"
+        data = self._fetch_all(query, params)
+        return data
+
+    def search_memory(self, query: str, npc: str = None, team: str = None,
+                      directory_path: str = None, status_filter: str = None, limit: int = 10):
+        """Search memories with hierarchical scope"""
+        conditions = ["LOWER(initial_memory) LIKE LOWER(:query) OR LOWER(final_memory) LIKE LOWER(:query)"]
+        params = {"query": f"%{query}%"}
+
+        if status_filter:
+            conditions.append("status = :status")
+            params["status"] = status_filter
+
+
+        order_parts = []
+        if npc:
+            order_parts.append(f"CASE WHEN npc = '{npc}' THEN 1 ELSE 2 END")
+        if team:
+            order_parts.append(f"CASE WHEN team = '{team}' THEN 1 ELSE 2 END")
+        if directory_path:
+            order_parts.append(f"CASE WHEN directory_path = '{directory_path}' THEN 1 ELSE 2 END")
+
+        order_clause = ", ".join(order_parts) + ", created_at DESC" if order_parts else "created_at DESC"
+
+        stmt = f"""
+        SELECT * FROM memory_lifecycle
+        WHERE {' AND '.join(conditions)}
+        ORDER BY {order_clause}
+        LIMIT :limit
+        """
+        params["limit"] = limit
+
+        return self._fetch_all(stmt, params)
+
+    def get_memory_examples_for_context(self, npc: str, team: str, directory_path: str,
+                                        n_approved: int = 10, n_rejected: int = 10):
+        """Get recent approved and rejected memories for learning context"""
+
+        approved_stmt = """
+        SELECT initial_memory, final_memory, status FROM memory_lifecycle
+        WHERE status IN ('human-approved', 'model-approved')
+        ORDER BY
+            CASE WHEN npc = :npc AND team = :team AND directory_path = :path THEN 1
+                 WHEN npc = :npc AND team = :team THEN 2
+                 WHEN team = :team THEN 3
+                 ELSE 4 END,
+            created_at DESC
+        LIMIT :n_approved
+        """
+
+        rejected_stmt = """
+        SELECT initial_memory, status FROM memory_lifecycle
+        WHERE status IN ('human-rejected', 'model-rejected')
+        ORDER BY
+            CASE WHEN npc = :npc AND team = :team AND directory_path = :path THEN 1
+                 WHEN npc = :npc AND team = :team THEN 2
+                 WHEN team = :team THEN 3
+                 ELSE 4 END,
+            created_at DESC
+        LIMIT :n_rejected
+        """
+
+        params = {"npc": npc, "team": team, "path": directory_path,
+                  "n_approved": n_approved, "n_rejected": n_rejected}
+
+        approved = self._fetch_all(approved_stmt, params)
+        rejected = self._fetch_all(rejected_stmt, params)
+
+        return {"approved": approved, "rejected": rejected}
+
+    def get_pending_memories(self, limit: int = 50):
+        """Get memories pending human approval"""
+        stmt = """
+        SELECT * FROM memory_lifecycle
+        WHERE status = 'pending_approval'
+        ORDER BY created_at ASC
+        LIMIT :limit
+        """
+        return self._fetch_all(stmt, {"limit": limit})
+
+    def update_memory_status(self, memory_id: int, new_status: str, final_memory: str = None):
+        """Update memory status and optionally final_memory"""
+        stmt = """
+        UPDATE memory_lifecycle
+        SET status = :status, final_memory = :final_memory
+        WHERE id = :memory_id
+        """
+        params = {"status": new_status, "final_memory": final_memory, "memory_id": memory_id}
+
+        with self.engine.begin() as conn:
+            conn.execute(text(stmt), params)
+
+    def add_attachment(self, message_id, name, attachment_type, data, size, file_path=None):
         timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-        if attachment_size is None and attachment_data is not None:
-            attachment_size = len(attachment_data)
-        sql = """INSERT INTO message_attachments
-                 (message_id, attachment_name, attachment_type, attachment_data, attachment_size, upload_timestamp)
-                 VALUES (?, ?, ?, ?, ?, ?)"""
-        params = (message_id, attachment_name, attachment_type, attachment_data, attachment_size, timestamp,)
-        self._execute(sql, params)
+        stmt = """
+        INSERT INTO message_attachments
+        (message_id, attachment_name, attachment_type, attachment_data, attachment_size, upload_timestamp, file_path)
+        VALUES (:message_id, :name, :type, :data, :size, :timestamp, :file_path)
+        """
+        params = {
+            "message_id": message_id,
+            "name": name,
+            "type": attachment_type,
+            "data": data,
+            "size": size,
+            "timestamp": timestamp,
+            "file_path": file_path
+        }
+        with self.engine.begin() as conn:
+            conn.execute(text(stmt), params)
 
     def save_jinx_execution(
-        self, triggering_message_id: str, conversation_id: str, npc_name: Optional[str],
-        jinx_name: str, jinx_inputs: Dict, jinx_output: Any, status: str,
-        team_name: Optional[str] = None, error_message: Optional[str] = None,
-        response_message_id: Optional[str] = None, duration_ms: Optional[int] = None
+        self,
+        triggering_message_id: str,
+        conversation_id: str,
+        npc_name: Optional[str],
+        jinx_name: str,
+        jinx_inputs: Dict,
+        jinx_output: Any, status: str,
+        team_name: Optional[str] = None,
+        error_message: Optional[str] = None,
+        response_message_id: Optional[str] = None,
+        duration_ms: Optional[int] = None
     ):
         timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-        try: inputs_json = json.dumps(jinx_inputs, cls=CustomJSONEncoder)
-        except TypeError: inputs_json = json.dumps(str(jinx_inputs))
+
+        try:
+            inputs_json = json.dumps(jinx_inputs, cls=CustomJSONEncoder)
+        except TypeError:
+            inputs_json = json.dumps(str(jinx_inputs))
+
         try:
             if isinstance(jinx_output, (str, int, float, bool, list, dict, type(None))):
-                outputs_json = json.dumps(jinx_output, cls=CustomJSONEncoder)
-            else: outputs_json = json.dumps(str(jinx_output))
-        except TypeError: outputs_json = json.dumps(f"Non-serializable output: {type(jinx_output)}")
+                outputs_json = json.dumps(jinx_output, cls=CustomJSONEncoder)
+            else:
+                outputs_json = json.dumps(str(jinx_output))
+        except TypeError:
+            outputs_json = json.dumps(f"Non-serializable output: {type(jinx_output)}")
 
-        sql = """INSERT INTO jinx_execution_log
+        stmt = """
+        INSERT INTO jinx_execution_log
         (triggering_message_id, conversation_id, timestamp, npc_name, team_name,
          jinx_name, jinx_inputs, jinx_output, status, error_message, response_message_id, duration_ms)
-        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"""
-        params = (triggering_message_id, conversation_id, timestamp, npc_name, team_name,
-                  jinx_name, inputs_json, outputs_json, status, error_message, response_message_id, duration_ms)
-        try:
-            return self._execute(sql, params) # Return lastrowid if available
-        except Exception as e:
-            print(f"CRITICAL: Failed to save tool execution via _execute: {e}")
-            return None
+        VALUES (:triggering_message_id, :conversation_id, :timestamp, :npc_name, :team_name,
+                :jinx_name, :jinx_inputs, :jinx_output, :status, :error_message, :response_message_id, :duration_ms)
+        """
+        params = {
+            "triggering_message_id": triggering_message_id,
+            "conversation_id": conversation_id,
+            "timestamp": timestamp,
+            "npc_name": npc_name,
+            "team_name": team_name,
+            "jinx_name": jinx_name,
+            "jinx_inputs": inputs_json,
+            "jinx_output": outputs_json,
+            "status": status,
+            "error_message": error_message,
+            "response_message_id": response_message_id,
+            "duration_ms": duration_ms
+        }
+
+        return self._execute_returning_id(stmt, params)
 
     def get_full_message_content(self, message_id):
-        sql = "SELECT content FROM conversation_history WHERE message_id = ? ORDER BY timestamp ASC"
-        rows = self._fetch_all(sql, (message_id,))
+        stmt = "SELECT content FROM conversation_history WHERE message_id = :message_id ORDER BY timestamp ASC"
+        rows = self._fetch_all(stmt, {"message_id": message_id})
         return "".join(row['content'] for row in rows)
 
     def update_message_content(self, message_id, full_content):
-        sql = "UPDATE conversation_history SET content = ? WHERE message_id = ?"
-        params = (full_content, message_id)
-        self._execute(sql, params)
+        stmt = "UPDATE conversation_history SET content = :content WHERE message_id = :message_id"
+        with self.engine.begin() as conn:
+            conn.execute(text(stmt), {"content": full_content, "message_id": message_id})
 
     def get_message_attachments(self, message_id) -> List[Dict]:
-        sql = """SELECT id, message_id, attachment_name, attachment_type, attachment_size, upload_timestamp
-                 FROM message_attachments WHERE message_id = ?"""
-        return self._fetch_all(sql, (message_id,))
+        stmt = """
+        SELECT
+            id,
+            message_id,
+            attachment_name,
+            attachment_type,
+            attachment_size,
+            upload_timestamp
+        FROM message_attachments WHERE message_id = :message_id
+        """
+        return self._fetch_all(stmt, {"message_id": message_id})
 
     def get_attachment_data(self, attachment_id) -> Optional[Tuple[bytes, str, str]]:
-        sql = "SELECT attachment_data, attachment_name, attachment_type FROM message_attachments WHERE id = ?"
-        row = self._fetch_one(sql, (attachment_id,))
+        stmt = "SELECT attachment_data, attachment_name, attachment_type FROM message_attachments WHERE id = :attachment_id"
+        row = self._fetch_one(stmt, {"attachment_id": attachment_id})
         if row:
             return row['attachment_data'], row['attachment_name'], row['attachment_type']
         return None, None, None
 
     def delete_attachment(self, attachment_id) -> bool:
-        sql = "DELETE FROM message_attachments WHERE id = ?"
+        stmt = "DELETE FROM message_attachments WHERE id = :attachment_id"
         try:
-            # _execute might not return rowcount reliably across drivers
-            self._execute(sql, (attachment_id,))
-            # We assume success if no exception was raised.
-            # A more robust check might involve trying to fetch the deleted row.
+            with self.engine.begin() as conn:
+                conn.execute(text(stmt), {"attachment_id": attachment_id})
             return True
         except Exception as e:
             print(f"Error deleting attachment {attachment_id}: {e}")
             return False
 
     def get_last_command(self) -> Optional[Dict]:
-        sql = "SELECT * FROM command_history ORDER BY id DESC LIMIT 1"
-        return self._fetch_one(sql)
+        stmt = "SELECT * FROM command_history ORDER BY id DESC LIMIT 1"
+        return self._fetch_one(stmt)
 
     def get_most_recent_conversation_id(self) -> Optional[Dict]:
-        sql = "SELECT conversation_id FROM conversation_history ORDER BY id DESC LIMIT 1"
-        # Returns dict like {'conversation_id': '...'} or None
-        return self._fetch_one(sql)
+        stmt = "SELECT conversation_id FROM conversation_history ORDER BY id DESC LIMIT 1"
+        return self._fetch_one(stmt)
 
     def get_last_conversation(self, conversation_id) -> Optional[Dict]:
-        sql = """SELECT * FROM conversation_history WHERE conversation_id = ? and role = 'user'
-                 ORDER BY id DESC LIMIT 1"""
-        return self._fetch_one(sql, (conversation_id,))
+        stmt = """
+        SELECT * FROM conversation_history
+        WHERE conversation_id = :conversation_id and role = 'user'
+        ORDER BY id DESC LIMIT 1
+        """
+        return self._fetch_one(stmt, {"conversation_id": conversation_id})
 
     def get_messages_by_npc(self, npc, n_last=20) -> List[Dict]:
-        sql = """SELECT * FROM conversation_history WHERE npc = ?
-                 ORDER BY timestamp DESC LIMIT ?"""
-        params = (npc, n_last)
+        stmt = """
+        SELECT * FROM conversation_history WHERE npc = :npc
+        ORDER BY timestamp DESC LIMIT :n_last
+        """
+        return self._fetch_all(stmt, {"npc": npc, "n_last": n_last})
 
-        return self._fetch_all(sql, params)
     def get_messages_by_team(self, team, n_last=20) -> List[Dict]:
-        sql = """SELECT * FROM conversation_history WHERE team = ?
-                 ORDER BY timestamp DESC LIMIT ?"""
-        params = (team, n_last)
+        stmt = """
+        SELECT * FROM conversation_history WHERE team = :team
+        ORDER BY timestamp DESC LIMIT :n_last
+        """
+        return self._fetch_all(stmt, {"team": team, "n_last": n_last})
 
-        return self._fetch_all(sql, params)
     def get_message_by_id(self, message_id) -> Optional[Dict]:
-        sql = "SELECT * FROM conversation_history WHERE message_id = ?"
-        return self._fetch_one(sql, (message_id,))
+        stmt = "SELECT * FROM conversation_history WHERE message_id = :message_id"
+        return self._fetch_one(stmt, {"message_id": message_id})
 
     def get_most_recent_conversation_id_by_path(self, path) -> Optional[Dict]:
-        sql = """SELECT conversation_id FROM conversation_history WHERE directory_path = ?
-                 ORDER BY timestamp DESC LIMIT 1"""
-        # Returns dict like {'conversation_id': '...'} or None
-        return self._fetch_one(sql, (path,))
+        stmt = """
+        SELECT conversation_id FROM conversation_history WHERE directory_path = :path
+        ORDER BY timestamp DESC LIMIT 1
+        """
+        return self._fetch_one(stmt, {"path": path})
 
     def get_last_conversation_by_path(self, directory_path) -> Optional[List[Dict]]:
         result_dict = self.get_most_recent_conversation_id_by_path(directory_path)
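All writes in the hunk above now run inside engine.begin() transactions and all reads go through the new _fetch_one/_fetch_all helpers. A sketch of the memory lifecycle round trip on a CommandHistory instance (all values are invented examples):

    memory_id = history.add_memory_to_database(
        message_id="m-1", conversation_id="c-1", npc="sibiji", team="npc_team",
        directory_path="/home/user/project",
        initial_memory="user prefers dark mode", status="pending_approval",
    )
    for mem in history.get_pending_memories(limit=10):
        print(mem["id"], mem["initial_memory"])
    history.update_memory_status(memory_id, "human-approved")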
@@ -528,18 +892,24 @@ class CommandHistory:
             return None
 
     def get_conversations_by_id(self, conversation_id: str) -> List[Dict[str, Any]]:
-        sql = """SELECT id, message_id, timestamp, role, content, conversation_id,
-                 directory_path, model, provider, npc, team
-                 FROM conversation_history WHERE conversation_id = ? ORDER BY timestamp ASC"""
-        results = self._fetch_all(sql, (conversation_id,))
+        stmt = """
+        SELECT id, message_id, timestamp, role, content, conversation_id,
+               directory_path, model, provider, npc, team
+        FROM conversation_history WHERE conversation_id = :conversation_id
+        ORDER BY timestamp ASC
+        """
+        results = self._fetch_all(stmt, {"conversation_id": conversation_id})
+
         for message_dict in results:
-            attachments = self.get_message_attachments(message_dict["message_id"])
-            if attachments: message_dict["attachments"] = attachments
+            attachments = self.get_message_attachments(message_dict["message_id"])
+            if attachments:
+                message_dict["attachments"] = attachments
         return results
 
     def get_npc_conversation_stats(self, start_date=None, end_date=None) -> pd.DataFrame:
         date_filter = ""
-        params = {} # Use dict for named parameters with SQLAlchemy/read_sql
+        params = {}
+
         if start_date and end_date:
             date_filter = "WHERE timestamp BETWEEN :start_date AND :end_date"
             params = {"start_date": start_date, "end_date": end_date}
@@ -550,6 +920,14 @@ class CommandHistory:
             date_filter = "WHERE timestamp <= :end_date"
             params = {"end_date": end_date}
 
+
+        if 'sqlite' in str(self.engine.url):
+            group_concat_models = "GROUP_CONCAT(DISTINCT model)"
+            group_concat_providers = "GROUP_CONCAT(DISTINCT provider)"
+        else:
+
+            group_concat_models = "STRING_AGG(DISTINCT model, ',')"
+            group_concat_providers = "STRING_AGG(DISTINCT provider, ',')"
 
         query = f"""
             SELECT
@@ -559,8 +937,8 @@ class CommandHistory:
                 COUNT(DISTINCT conversation_id) as total_conversations,
                 COUNT(DISTINCT model) as models_used,
                 COUNT(DISTINCT provider) as providers_used,
-                GROUP_CONCAT(DISTINCT model) as model_list,
-                GROUP_CONCAT(DISTINCT provider) as provider_list,
+                {group_concat_models} as model_list,
+                {group_concat_providers} as provider_list,
                 MIN(timestamp) as first_conversation,
                 MAX(timestamp) as last_conversation
             FROM conversation_history
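GROUP_CONCAT is SQLite's spelling of the list aggregation that PostgreSQL calls STRING_AGG, which is why the query now interpolates a dialect-specific expression. A quick standalone check of the SQLite branch; the table contents are invented for the demo.

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE conversation_history (npc TEXT, model TEXT, provider TEXT)")
conn.executemany(
    "INSERT INTO conversation_history VALUES (?, ?, ?)",
    [("sibiji", "llama3", "ollama"), ("sibiji", "gpt-4o", "openai")],
)
print(conn.execute(
    "SELECT npc, GROUP_CONCAT(DISTINCT model) FROM conversation_history GROUP BY npc"
).fetchone())  # ('sibiji', 'llama3,gpt-4o') -- concatenation order may vary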
@@ -568,122 +946,127 @@ class CommandHistory:
             GROUP BY npc
             ORDER BY total_messages DESC
         """
+
         try:
-            # Use pd.read_sql with the appropriate connection object
-            if self._is_sqlalchemy:
-                # read_sql works directly with SQLAlchemy Engine
-                df = pd.read_sql(sql=text(query), con=self.conn, params=params)
-            else:
-                # read_sql works directly with sqlite3 Connection
-                df = pd.read_sql(sql=query, con=self.conn, params=params)
+            df = pd.read_sql(sql=text(query), con=self.engine, params=params)
             return df
         except Exception as e:
-            print(f"Error fetching conversation stats with pandas: {e}")
-            # Fallback or return empty DataFrame
-            return pd.DataFrame(columns=[
-                'npc', 'total_messages', 'avg_message_length', 'total_conversations',
-                'models_used', 'providers_used', 'model_list', 'provider_list',
-                'first_conversation', 'last_conversation'
-            ])
-
+            print(f"Error fetching conversation stats with pandas: {e}")
+            return pd.DataFrame(columns=[
+                'npc', 'total_messages', 'avg_message_length', 'total_conversations',
+                'models_used', 'providers_used', 'model_list', 'provider_list',
+                'first_conversation', 'last_conversation'
+            ])
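With the engine stored on the instance, pd.read_sql can take the Engine directly as `con` together with a `text()` clause and a dict of named parameters, replacing the old two-branch logic. A minimal sketch; the URL and cutoff date are assumptions.

import pandas as pd
from sqlalchemy import create_engine, text

engine = create_engine("sqlite:///history.db")  # assumed URL
df = pd.read_sql(
    sql=text("SELECT npc, COUNT(*) AS total_messages FROM conversation_history "
             "WHERE timestamp <= :end_date GROUP BY npc"),
    con=engine,
    params={"end_date": "2025-12-31"},
)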
 
     def get_command_patterns(self, timeframe='day') -> pd.DataFrame:
-        time_group_formats = {
-            'hour': "strftime('%Y-%m-%d %H', timestamp)",
-            'day': "strftime('%Y-%m-%d', timestamp)",
-            'week': "strftime('%Y-%W', timestamp)",
-            'month': "strftime('%Y-%m', timestamp)"
-        }
-        time_group = time_group_formats.get(timeframe, "strftime('%Y-%m-%d', timestamp)")  # Default to day
+
+        if 'sqlite' in str(self.engine.url):
+            time_group_formats = {
+                'hour': "strftime('%Y-%m-%d %H', timestamp)",
+                'day': "strftime('%Y-%m-%d', timestamp)",
+                'week': "strftime('%Y-%W', timestamp)",
+                'month': "strftime('%Y-%m', timestamp)"
+            }
+        else:
+
+            time_group_formats = {
+                'hour': "TO_CHAR(timestamp::timestamp, 'YYYY-MM-DD HH24')",
+                'day': "TO_CHAR(timestamp::timestamp, 'YYYY-MM-DD')",
+                'week': "TO_CHAR(timestamp::timestamp, 'YYYY-WW')",
+                'month': "TO_CHAR(timestamp::timestamp, 'YYYY-MM')"
+            }
+
+        time_group = time_group_formats.get(timeframe, time_group_formats['day'])
+
+
+        if 'sqlite' in str(self.engine.url):
+            substr_func = "SUBSTR"
+            instr_func = "INSTR"
+        else:
+            substr_func = "SUBSTRING"
+            instr_func = "POSITION"
 
         query = f"""
             WITH parsed_commands AS (
                 SELECT
                     {time_group} as time_bucket,
                     CASE
-                        WHEN command LIKE '/%%' THEN SUBSTR(command, 2, INSTR(SUBSTR(command, 2), ' ') - 1)
-                        WHEN command LIKE 'npc %%' THEN SUBSTR(command, 5, INSTR(SUBSTR(command, 5), ' ') - 1)
+                        WHEN command LIKE '/%%' THEN {substr_func}(command, 2, {instr_func}({substr_func}(command, 2), ' ') - 1)
+                        WHEN command LIKE 'npc %%' THEN {substr_func}(command, 5, {instr_func}({substr_func}(command, 5), ' ') - 1)
                         ELSE command
                     END as base_command
                 FROM command_history
-                WHERE timestamp IS NOT NULL -- Added check for null timestamps
+                WHERE timestamp IS NOT NULL
             )
             SELECT
                 time_bucket,
                 base_command,
                 COUNT(*) as usage_count
             FROM parsed_commands
-            WHERE base_command IS NOT NULL AND base_command != '' -- Filter out potential null/empty commands
+            WHERE base_command IS NOT NULL AND base_command != ''
             GROUP BY time_bucket, base_command
             ORDER BY time_bucket DESC, usage_count DESC
         """
+
         try:
-            # Use pd.read_sql
-            if self._is_sqlalchemy:
-                df = pd.read_sql(sql=text(query), con=self.conn)
-            else:
-                df = pd.read_sql(sql=query, con=self.conn)
-            return df
+            df = pd.read_sql(sql=text(query), con=self.engine)
+            return df
         except Exception as e:
-            print(f"Error fetching command patterns with pandas: {e}")
-            return pd.DataFrame(columns=['time_period', 'command', 'count'])
+            print(f"Error fetching command patterns with pandas: {e}")
+            return pd.DataFrame(columns=['time_period', 'command', 'count'])
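strftime and TO_CHAR both bucket timestamps into comparable strings, though SQLite's %W and PostgreSQL's WW do not count weeks identically, so weekly buckets are only comparable within one backend. A runnable check of the SQLite 'day' bucket used above; the data is made up for the demo.

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE command_history (timestamp TEXT, command TEXT)")
conn.executemany("INSERT INTO command_history VALUES (?, ?)",
                 [("2025-05-01 09:15:00", "/help"), ("2025-05-01 17:40:00", "npc run")])
print(conn.execute(
    "SELECT strftime('%Y-%m-%d', timestamp) AS time_bucket, COUNT(*) "
    "FROM command_history GROUP BY time_bucket"
).fetchall())  # [('2025-05-01', 2)]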
 
     def search_commands(self, search_term: str) -> List[Dict]:
         """Searches command history table for a term."""
-        # Use LOWER() for case-insensitive search
-        sql = """
+        stmt = """
         SELECT id, timestamp, command, subcommands, output, location
         FROM command_history
-        WHERE LOWER(command) LIKE LOWER(?) OR LOWER(output) LIKE LOWER(?)
+        WHERE LOWER(command) LIKE LOWER(:search_term) OR LOWER(output) LIKE LOWER(:search_term)
         ORDER BY timestamp DESC
         LIMIT 5
         """
         like_term = f"%{search_term}%"
-        return self._fetch_all(sql, (like_term, like_term))
-    def search_conversations(self, search_term:str) -> List[Dict]:
+        return self._fetch_all(stmt, {"search_term": like_term})
+
+    def search_conversations(self, search_term: str) -> List[Dict]:
         """Searches conversation history table for a term."""
-        # Use LOWER() for case-insensitive search
-        sql = """
+        stmt = """
         SELECT id, message_id, timestamp, role, content, conversation_id, directory_path, model, provider, npc, team
         FROM conversation_history
-        WHERE LOWER(content) LIKE LOWER(?)
+        WHERE LOWER(content) LIKE LOWER(:search_term)
         ORDER BY timestamp DESC
         LIMIT 5
         """
         like_term = f"%{search_term}%"
-        return self._fetch_all(sql, (like_term,))
+        return self._fetch_all(stmt, {"search_term": like_term})
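One practical win of named binds is visible in search_commands: `:search_term` appears twice in the statement but only once in the parameter dict, where the positional version had to pass `(like_term, like_term)`. A self-contained check with an in-memory database and invented rows:

from sqlalchemy import create_engine, text

engine = create_engine("sqlite://")  # in-memory database for the demo
with engine.begin() as conn:
    conn.execute(text("CREATE TABLE command_history (command TEXT, output TEXT)"))
    conn.execute(text("INSERT INTO command_history VALUES ('npc Search docs', 'no match here')"))
    rows = conn.execute(
        text("SELECT command FROM command_history "
             "WHERE LOWER(command) LIKE LOWER(:search_term) "
             "OR LOWER(output) LIKE LOWER(:search_term)"),
        {"search_term": "%search%"},
    ).fetchall()
print(rows)  # [('npc Search docs',)]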
 
     def get_all_commands(self, limit: int = 100) -> List[Dict]:
         """Gets the most recent commands."""
-        sql = """
+        stmt = """
         SELECT id, timestamp, command, subcommands, output, location
         FROM command_history
         ORDER BY id DESC
-        LIMIT ?
+        LIMIT :limit
         """
-        return self._fetch_all(sql, (limit,))
-
+        return self._fetch_all(stmt, {"limit": limit})
 
     def close(self):
-        """Closes the connection if it's a direct sqlite3 connection."""
-        if not self._is_sqlalchemy and self.conn:
+        """Dispose of the SQLAlchemy engine."""
+        if self.engine:
             try:
-                self.conn.close()
-                print("Closed sqlite3 connection.")
+                self.engine.dispose()
+                logging.info("Disposed SQLAlchemy engine.")
             except Exception as e:
-                print(f"Error closing sqlite3 connection: {e}")
-        elif self._is_sqlalchemy:
-            print("SQLAlchemy Engine pool managed automatically.")
-        self.conn = None
-def start_new_conversation(prepend = '') -> str:
+                print(f"Error disposing SQLAlchemy engine: {e}")
+        self.engine = None
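Engine.dispose() closes the pool's checked-in connections and discards the pool; a disposed Engine would lazily create a fresh pool if reused, which is presumably why close() also nulls out self.engine. A minimal sketch of the shutdown pattern, with an assumed URL:

from sqlalchemy import create_engine

engine = create_engine("sqlite:///history.db")  # assumed URL
try:
    pass  # ... run queries through the engine ...
finally:
    engine.dispose()  # close pooled connections once at shutdown
    engine = None     # drop the reference so nothing reuses it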
+
+def start_new_conversation(prepend: str = None) -> str:
     """
     Starts a new conversation and returns a unique conversation ID.
     """
-    if prepend =='':
+    if prepend is None:
         prepend = 'npcsh'
     return f"{prepend}_{datetime.now().strftime('%Y%m%d%H%M%S')}"
 
-
 def save_conversation_message(
     command_history: CommandHistory,
     conversation_id: str,
@@ -696,94 +1079,62 @@ def save_conversation_message(
     team: str = None,
     attachments: List[Dict] = None,
     message_id: str = None,
-    ):
+    ):
     """
     Saves a conversation message linked to a conversation ID with optional attachments.
-
-    Args:
-        command_history: The CommandHistory instance
-        conversation_id: The conversation identifier
-        role: The message sender role ('user', 'assistant', etc.)
-        content: The message content
-        wd: Working directory (defaults to current directory)
-        model: The model identifier (optional)
-        provider: The provider identifier (optional)
-        npc: The NPC identifier (optional)
-        attachments: List of attachment dictionaries (optional)
-            Each attachment dict should have:
-            - name: Filename/title
-            - type: MIME type or extension
-            - data: Binary blob data
-            - size: Size in bytes (optional)
-
-    Returns:
-        The message ID
     """
     if wd is None:
         wd = os.getcwd()
-
-    return command_history.add_conversation(
-        role=role,
-        content=content,
-        conversation_id=conversation_id,
-        directory_path=wd,
-        model=model,
-        provider=provider,
-        npc=npc,
-        team=team,
-        attachments=attachments,
-        message_id=message_id,
-    )
+    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+    if message_id is None:
+        message_id = generate_message_id()
 
 
+    return command_history.add_conversation(
+        message_id,
+        timestamp,
+        role,
+        content,
+        conversation_id,
+        wd,
+        model=model,
+        provider=provider,
+        npc=npc,
+        team=team,
+        attachments=attachments)
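A hedged usage sketch for the rewritten helper: generate_message_id is assumed to be defined elsewhere in this module, and role and content are assumed to follow conversation_id positionally, as the removed docstring described. The conversation ID format comes from start_new_conversation above.

from npcpy.memory.command_history import (
    CommandHistory, save_conversation_message, start_new_conversation,
)

history = CommandHistory()
conversation_id = start_new_conversation()  # e.g. 'npcsh_20250501091500'
save_conversation_message(history, conversation_id, 'user', 'hello there')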
 def retrieve_last_conversation(
     command_history: CommandHistory, conversation_id: str
-    ) -> str:
+    ) -> str:
     """
     Retrieves and formats all messages from the last conversation.
     """
     last_message = command_history.get_last_conversation(conversation_id)
     if last_message:
-        return last_message[3] # content
+        return last_message['content']
     return "No previous conversation messages found."
 
-
 def save_attachment_to_message(
     command_history: CommandHistory,
     message_id: str,
     file_path: str,
     attachment_name: str = None,
     attachment_type: str = None,
-    ):
+    ):
     """
     Helper function to save a file from disk as an attachment.
-
-    Args:
-        command_history: The CommandHistory instance
-        message_id: The message ID to attach to
-        file_path: Path to the file on disk
-        attachment_name: Name to save (defaults to basename)
-        attachment_type: MIME type (defaults to guessing from extension)
-
-    Returns:
-        Boolean indicating success
     """
     try:
-        # Get file name if not specified
         if not attachment_name:
             attachment_name = os.path.basename(file_path)
 
-        # Try to guess MIME type if not specified
         if not attachment_type:
             _, ext = os.path.splitext(file_path)
             if ext:
-                attachment_type = ext.lower()[1:] # Remove the dot
+                attachment_type = ext.lower()[1:]
 
-        # Read file data
         with open(file_path, "rb") as f:
             data = f.read()
 
-        # Add attachment
         command_history.add_attachment(
             message_id=message_id,
             attachment_name=attachment_name,
@@ -796,46 +1147,29 @@ def save_attachment_to_message(
         print(f"Error saving attachment: {str(e)}")
         return False
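save_attachment_to_message falls back to the bare file extension when no type is supplied; the stdlib mimetypes module is a heavier alternative (not what this code does) if a real MIME type is ever needed:

import mimetypes

mime_type, _ = mimetypes.guess_type("report.pdf")
print(mime_type)  # 'application/pdf'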
 
-def get_available_tables(db_path: str) -> str:
+def get_available_tables(db_path_or_engine: Union[str, Engine]) -> List[Tuple[str]]:
     """
-    Function Description:
-        This function gets the available tables in the database.
-    Args:
-        db_path (str): The database path.
-    Keyword Args:
-        None
-    Returns:
-        str: The available tables in the database.
+    Gets the available tables in the database.
     """
-    if '~' in db_path:
-        db_path = os.path.expanduser(db_path)
+    if isinstance(db_path_or_engine, str):
+        engine = create_engine_from_path(db_path_or_engine)
+    else:
+        engine = db_path_or_engine
+
     try:
-        with sqlite3.connect(db_path) as conn:
-            cursor = conn.cursor()
-            cursor.execute(
-                "SELECT name FROM sqlite_master WHERE type='table' AND name != 'command_history'"
-            )
-            tables = cursor.fetchall()
-
-            return tables
+        with engine.connect() as conn:
+            if 'sqlite' in str(engine.url):
+                result = conn.execute(text(
+                    "SELECT name FROM sqlite_master WHERE type='table' AND name != 'command_history'"
+                ))
+            else:
+
+                result = conn.execute(text("""
+                    SELECT table_name FROM information_schema.tables
+                    WHERE table_schema = 'public' AND table_name != 'command_history'
+                """))
+
+            return [row[0] for row in result]
     except Exception as e:
         print(f"Error getting available tables: {e}")
-        return ""
-
-
-
-    '''
-    from npcpy.memory.command_history import CommandHistory
-    command_history = CommandHistory()
-
-    sibiji_messages = command_history.get_messages_by_npc('sibiji', n_last=10)
-
-    stats = command_history.get_npc_conversation_stats()
-
-
-
-
-    from npcpy.memory.command_history import CommandHistory
-    command_history = CommandHistory()
-    command_history.create_tool_call_table()
-    '''
+        return []
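The rewritten helper accepts either a path, routed through create_engine_from_path (defined elsewhere in this module), or a ready Engine. For reference, SQLAlchemy's inspector offers a dialect-agnostic way to get the same list; this is sketched as an alternative, not what the package does, and the path is an assumption.

from sqlalchemy import create_engine, inspect

engine = create_engine("sqlite:///history.db")  # assumed path
tables = [t for t in inspect(engine).get_table_names() if t != 'command_history']
print(tables)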