npcpy 1.1.28__py3-none-any.whl → 1.2.32__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. npcpy/data/audio.py +16 -38
  2. npcpy/data/image.py +29 -29
  3. npcpy/data/load.py +4 -3
  4. npcpy/data/text.py +28 -28
  5. npcpy/data/video.py +6 -6
  6. npcpy/data/web.py +49 -21
  7. npcpy/ft/__init__.py +0 -0
  8. npcpy/ft/diff.py +110 -0
  9. npcpy/ft/ge.py +115 -0
  10. npcpy/ft/memory_trainer.py +171 -0
  11. npcpy/ft/model_ensembler.py +357 -0
  12. npcpy/ft/rl.py +360 -0
  13. npcpy/ft/sft.py +248 -0
  14. npcpy/ft/usft.py +128 -0
  15. npcpy/gen/audio_gen.py +24 -0
  16. npcpy/gen/embeddings.py +13 -13
  17. npcpy/gen/image_gen.py +37 -15
  18. npcpy/gen/response.py +287 -111
  19. npcpy/gen/video_gen.py +10 -9
  20. npcpy/llm_funcs.py +447 -79
  21. npcpy/memory/command_history.py +201 -48
  22. npcpy/memory/kg_vis.py +74 -74
  23. npcpy/memory/knowledge_graph.py +482 -115
  24. npcpy/memory/memory_processor.py +81 -0
  25. npcpy/memory/search.py +70 -70
  26. npcpy/mix/debate.py +192 -3
  27. npcpy/npc_compiler.py +1541 -879
  28. npcpy/npc_sysenv.py +250 -78
  29. npcpy/serve.py +1036 -321
  30. npcpy/sql/ai_function_tools.py +257 -0
  31. npcpy/sql/database_ai_adapters.py +186 -0
  32. npcpy/sql/database_ai_functions.py +163 -0
  33. npcpy/sql/model_runner.py +19 -19
  34. npcpy/sql/npcsql.py +706 -507
  35. npcpy/sql/sql_model_compiler.py +156 -0
  36. npcpy/tools.py +20 -20
  37. npcpy/work/plan.py +8 -8
  38. npcpy/work/trigger.py +3 -3
  39. {npcpy-1.1.28.dist-info → npcpy-1.2.32.dist-info}/METADATA +169 -9
  40. npcpy-1.2.32.dist-info/RECORD +54 -0
  41. npcpy-1.1.28.dist-info/RECORD +0 -40
  42. {npcpy-1.1.28.dist-info → npcpy-1.2.32.dist-info}/WHEEL +0 -0
  43. {npcpy-1.1.28.dist-info → npcpy-1.2.32.dist-info}/licenses/LICENSE +0 -0
  44. {npcpy-1.1.28.dist-info → npcpy-1.2.32.dist-info}/top_level.txt +0 -0
@@ -8,7 +8,7 @@ import numpy as np
8
8
 
9
9
  try:
10
10
  import sqlalchemy
11
- from sqlalchemy import create_engine, text, MetaData, Table, Column, Integer, String, Text, DateTime, LargeBinary, ForeignKey, Boolean
11
+ from sqlalchemy import create_engine, text, MetaData, Table, Column, Integer, String, Text, DateTime, LargeBinary, ForeignKey, Boolean, func
12
12
  from sqlalchemy.engine import Engine, Connection as SQLAlchemyConnection
13
13
  from sqlalchemy.exc import SQLAlchemyError
14
14
  from sqlalchemy.sql import select, insert, update, delete
@@ -52,7 +52,7 @@ def create_engine_from_path(db_path: str) -> Engine:
52
52
  if db_path.startswith('postgresql://') or db_path.startswith('postgres://'):
53
53
  return create_engine(db_path)
54
54
  else:
55
- # Treat as SQLite file path
55
+
56
56
  if db_path.startswith('~/'):
57
57
  db_path = os.path.expanduser(db_path)
58
58
  return create_engine(f'sqlite:///{db_path}')
@@ -152,7 +152,7 @@ def setup_chroma_db(collection, description='', db_path: str = ''):
152
152
  def init_kg_schema(engine: Engine):
153
153
  """Creates the multi-scoped, path-aware KG tables using SQLAlchemy"""
154
154
 
155
- # Create tables using SQLAlchemy DDL
155
+
156
156
  metadata = MetaData()
157
157
 
158
158
  kg_facts = Table('kg_facts', metadata,
@@ -164,7 +164,7 @@ def init_kg_schema(engine: Engine):
164
164
  Column('type', String(100)),
165
165
  Column('generation', Integer),
166
166
  Column('origin', String(100)),
167
- # Composite primary key
167
+
168
168
  schema=None
169
169
  )
170
170
 
@@ -197,7 +197,7 @@ def init_kg_schema(engine: Engine):
197
197
  schema=None
198
198
  )
199
199
 
200
- # Create all tables
200
+
201
201
  metadata.create_all(engine, checkfirst=True)
202
202
 
203
203
  def load_kg_from_db(engine: Engine, team_name: str, npc_name: str, directory_path: str) -> Dict[str, Any]:
@@ -213,7 +213,7 @@ def load_kg_from_db(engine: Engine, team_name: str, npc_name: str, directory_pat
213
213
 
214
214
  with engine.connect() as conn:
215
215
  try:
216
- # Get generation
216
+
217
217
  result = conn.execute(text("""
218
218
  SELECT value FROM kg_metadata
219
219
  WHERE team_name = :team AND npc_name = :npc AND directory_path = :path AND key = 'generation'
@@ -223,7 +223,7 @@ def load_kg_from_db(engine: Engine, team_name: str, npc_name: str, directory_pat
223
223
  if row:
224
224
  kg['generation'] = int(row.value)
225
225
 
226
- # Get facts
226
+
227
227
  result = conn.execute(text("""
228
228
  SELECT statement, source_text, type, generation, origin FROM kg_facts
229
229
  WHERE team_name = :team AND npc_name = :npc AND directory_path = :path
@@ -240,7 +240,7 @@ def load_kg_from_db(engine: Engine, team_name: str, npc_name: str, directory_pat
240
240
  for row in result
241
241
  ]
242
242
 
243
- # Get concepts
243
+
244
244
  result = conn.execute(text("""
245
245
  SELECT name, generation, origin FROM kg_concepts
246
246
  WHERE team_name = :team AND npc_name = :npc AND directory_path = :path
@@ -251,7 +251,7 @@ def load_kg_from_db(engine: Engine, team_name: str, npc_name: str, directory_pat
251
251
  for row in result
252
252
  ]
253
253
 
254
- # Get links
254
+
255
255
  links = {}
256
256
  result = conn.execute(text("""
257
257
  SELECT source, target, type FROM kg_links
@@ -271,7 +271,7 @@ def load_kg_from_db(engine: Engine, team_name: str, npc_name: str, directory_pat
271
271
  kg['fact_to_concept_links'] = links
272
272
 
273
273
  except SQLAlchemyError:
274
- # Initialize schema if it doesn't exist
274
+
275
275
  init_kg_schema(engine)
276
276
 
277
277
  return kg
@@ -280,7 +280,7 @@ def save_kg_to_db(engine: Engine, kg_data: Dict[str, Any], team_name: str, npc_n
280
280
  """Saves a knowledge graph dictionary to the database, ignoring duplicates."""
281
281
  try:
282
282
  with engine.begin() as conn:
283
- # Save facts
283
+
284
284
  facts_to_save = [
285
285
  {
286
286
  "statement": fact['statement'],
@@ -294,7 +294,7 @@ def save_kg_to_db(engine: Engine, kg_data: Dict[str, Any], team_name: str, npc_n
294
294
  ]
295
295
 
296
296
  if facts_to_save:
297
- # Use INSERT OR IGNORE for SQLite, ON CONFLICT DO NOTHING for PostgreSQL
297
+
298
298
  if 'sqlite' in str(engine.url):
299
299
  stmt = text("""
300
300
  INSERT OR IGNORE INTO kg_facts
@@ -312,7 +312,7 @@ def save_kg_to_db(engine: Engine, kg_data: Dict[str, Any], team_name: str, npc_n
312
312
  for fact in facts_to_save:
313
313
  conn.execute(stmt, fact)
314
314
 
315
- # Save concepts
315
+
316
316
  concepts_to_save = [
317
317
  {
318
318
  "name": concept['name'],
@@ -343,7 +343,7 @@ def save_kg_to_db(engine: Engine, kg_data: Dict[str, Any], team_name: str, npc_n
343
343
  for concept in concepts_to_save:
344
344
  conn.execute(stmt, concept)
345
345
 
346
- # Update metadata (generation number)
346
+
347
347
  if 'sqlite' in str(engine.url):
348
348
  stmt = text("""
349
349
  INSERT OR REPLACE INTO kg_metadata (key, value, team_name, npc_name, directory_path)
@@ -364,13 +364,13 @@ def save_kg_to_db(engine: Engine, kg_data: Dict[str, Any], team_name: str, npc_n
364
364
  "directory_path": directory_path
365
365
  })
366
366
 
367
- # Rebuild links from scratch to ensure consistency
367
+
368
368
  conn.execute(text("""
369
369
  DELETE FROM kg_links
370
370
  WHERE team_name = :team_name AND npc_name = :npc_name AND directory_path = :directory_path
371
371
  """), {"team_name": team_name, "npc_name": npc_name, "directory_path": directory_path})
372
372
 
373
- # Insert links
373
+
374
374
  for fact, concepts in kg_data.get("fact_to_concept_links", {}).items():
375
375
  for concept in concepts:
376
376
  conn.execute(text("""
@@ -402,6 +402,9 @@ def save_kg_to_db(engine: Engine, kg_data: Dict[str, Any], team_name: str, npc_n
402
402
  except Exception as e:
403
403
  print(f"Failed to save KG for scope '({team_name}, {npc_name}, {directory_path})': {e}")
404
404
 
405
def generate_message_id() -> str:
    """Create a fresh unique message identifier (a UUID4 rendered as a string)."""
    return f"{uuid.uuid4()}"
407
+
405
408
  class CommandHistory:
406
409
  def __init__(self, db: Union[str, Engine] = "~/npcsh_history.db"):
407
410
 
@@ -420,7 +423,7 @@ class CommandHistory:
420
423
  """Creates all necessary tables."""
421
424
  metadata = MetaData()
422
425
 
423
- # Command history table
426
+
424
427
  Table('command_history', metadata,
425
428
  Column('id', Integer, primary_key=True, autoincrement=True),
426
429
  Column('timestamp', String(50)),
@@ -430,7 +433,7 @@ class CommandHistory:
430
433
  Column('location', Text)
431
434
  )
432
435
 
433
- # Conversation history table
436
+
434
437
  Table('conversation_history', metadata,
435
438
  Column('id', Integer, primary_key=True, autoincrement=True),
436
439
  Column('message_id', String(50), unique=True, nullable=False),
@@ -445,7 +448,7 @@ class CommandHistory:
445
448
  Column('team', String(100))
446
449
  )
447
450
 
448
- # Message attachments table
451
+
449
452
  Table('message_attachments', metadata,
450
453
  Column('id', Integer, primary_key=True, autoincrement=True),
451
454
  Column('message_id', String(50), ForeignKey('conversation_history.message_id', ondelete='CASCADE'), nullable=False),
@@ -457,7 +460,7 @@ class CommandHistory:
457
460
  Column('file_path', Text)
458
461
  )
459
462
 
460
- # Jinx execution log table
463
+
461
464
  Table('jinx_execution_log', metadata,
462
465
  Column('execution_id', Integer, primary_key=True, autoincrement=True),
463
466
  Column('triggering_message_id', String(50), ForeignKey('conversation_history.message_id', ondelete='CASCADE'), nullable=False),
@@ -474,12 +477,28 @@ class CommandHistory:
474
477
  Column('duration_ms', Integer)
475
478
  )
476
479
 
477
- # Create all tables
480
+ Table('memory_lifecycle', metadata,
481
+ Column('id', Integer, primary_key=True, autoincrement=True),
482
+ Column('message_id', String(50), nullable=False),
483
+ Column('conversation_id', String(100), nullable=False),
484
+ Column('npc', String(100), nullable=False),
485
+ Column('team', String(100), nullable=False),
486
+ Column('directory_path', Text, nullable=False),
487
+ Column('timestamp', String(50), nullable=False),
488
+ Column('initial_memory', Text, nullable=False),
489
+ Column('final_memory', Text),
490
+ Column('status', String(50), nullable=False),
491
+ Column('model', String(100)),
492
+ Column('provider', String(100)),
493
+ Column('created_at', DateTime, default=func.now())
494
+ )
495
+
496
+
478
497
  metadata.create_all(self.engine, checkfirst=True)
479
498
 
480
- # Create indexes for jinx table
499
+
481
500
  with self.engine.begin() as conn:
482
- # Check if indexes exist before creating (database-agnostic approach)
501
+
483
502
  index_queries = [
484
503
  "CREATE INDEX IF NOT EXISTS idx_jinx_log_trigger_msg ON jinx_execution_log (triggering_message_id)",
485
504
  "CREATE INDEX IF NOT EXISTS idx_jinx_log_convo_id ON jinx_execution_log (conversation_id)",
@@ -491,10 +510,10 @@ class CommandHistory:
491
510
  try:
492
511
  conn.execute(text(idx_query))
493
512
  except SQLAlchemyError:
494
- # Index might already exist or syntax might be different for PostgreSQL
513
+
495
514
  pass
496
515
 
497
- # Initialize KG schema
516
+
498
517
  init_kg_schema(self.engine)
499
518
 
500
519
  def _execute_returning_id(self, stmt: str, params: Dict = None) -> Optional[int]:
@@ -533,8 +552,6 @@ class CommandHistory:
533
552
  with self.engine.begin() as conn:
534
553
  conn.execute(text(stmt), params)
535
554
 
536
- def generate_message_id(self) -> str:
537
- return str(uuid.uuid4())
538
555
 
539
556
  def add_conversation(
540
557
  self,
@@ -574,11 +591,147 @@ class CommandHistory:
574
591
  attachment_type=attachment.get("type"),
575
592
  data=attachment.get("data"),
576
593
  size=attachment.get("size"),
577
- file_path=attachment.get("path") # PASS THE PATH
594
+ file_path=attachment.get("path")
578
595
  )
579
596
 
580
597
  return message_id
581
598
 
599
+ def add_memory_to_database(self, message_id: str, conversation_id: str, npc: str, team: str,
600
+ directory_path: str, initial_memory: str, status: str,
601
+ model: str = None, provider: str = None, final_memory: str = None):
602
+ """Store a memory entry in the database"""
603
+ timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
604
+
605
+ stmt = """
606
+ INSERT INTO memory_lifecycle
607
+ (message_id, conversation_id, npc, team, directory_path, timestamp,
608
+ initial_memory, final_memory, status, model, provider)
609
+ VALUES (:message_id, :conversation_id, :npc, :team, :directory_path,
610
+ :timestamp, :initial_memory, :final_memory, :status, :model, :provider)
611
+ """
612
+
613
+ params = {
614
+ "message_id": message_id, "conversation_id": conversation_id,
615
+ "npc": npc, "team": team, "directory_path": directory_path,
616
+ "timestamp": timestamp, "initial_memory": initial_memory,
617
+ "final_memory": final_memory, "status": status,
618
+ "model": model, "provider": provider
619
+ }
620
+
621
+ return self._execute_returning_id(stmt, params)
622
+ def get_memories_for_scope(
623
+ self,
624
+ npc: str,
625
+ team: str,
626
+ directory_path: str,
627
+ status: Optional[str] = None
628
+ ) -> List[Dict]:
629
+
630
+ query = """
631
+ SELECT id, initial_memory, final_memory,
632
+ status, timestamp, created_at
633
+ FROM memory_lifecycle
634
+ WHERE npc = :npc AND team = :team AND directory_path = :path
635
+ """
636
+ params = {"npc": npc, "team": team, "path": directory_path}
637
+
638
+ if status:
639
+ query += " AND status = :status"
640
+ params["status"] = status
641
+
642
+ query += " ORDER BY created_at DESC"
643
+ data =self._fetch_all(query, params)
644
+ return data
645
+
646
+ def search_memory(self, query: str, npc: str = None, team: str = None,
647
+ directory_path: str = None, status_filter: str = None, limit: int = 10):
648
+ """Search memories with hierarchical scope"""
649
+ conditions = ["LOWER(initial_memory) LIKE LOWER(:query) OR LOWER(final_memory) LIKE LOWER(:query)"]
650
+ params = {"query": f"%{query}%"}
651
+
652
+ if status_filter:
653
+ conditions.append("status = :status")
654
+ params["status"] = status_filter
655
+
656
+
657
+ order_parts = []
658
+ if npc:
659
+ order_parts.append(f"CASE WHEN npc = '{npc}' THEN 1 ELSE 2 END")
660
+ if team:
661
+ order_parts.append(f"CASE WHEN team = '{team}' THEN 1 ELSE 2 END")
662
+ if directory_path:
663
+ order_parts.append(f"CASE WHEN directory_path = '{directory_path}' THEN 1 ELSE 2 END")
664
+
665
+ order_clause = ", ".join(order_parts) + ", created_at DESC" if order_parts else "created_at DESC"
666
+
667
+ stmt = f"""
668
+ SELECT * FROM memory_lifecycle
669
+ WHERE {' AND '.join(conditions)}
670
+ ORDER BY {order_clause}
671
+ LIMIT :limit
672
+ """
673
+ params["limit"] = limit
674
+
675
+ return self._fetch_all(stmt, params)
676
+
677
+ def get_memory_examples_for_context(self, npc: str, team: str, directory_path: str,
678
+ n_approved: int = 10, n_rejected: int = 10):
679
+ """Get recent approved and rejected memories for learning context"""
680
+
681
+ approved_stmt = """
682
+ SELECT initial_memory, final_memory, status FROM memory_lifecycle
683
+ WHERE status IN ('human-approved', 'model-approved')
684
+ ORDER BY
685
+ CASE WHEN npc = :npc AND team = :team AND directory_path = :path THEN 1
686
+ WHEN npc = :npc AND team = :team THEN 2
687
+ WHEN team = :team THEN 3
688
+ ELSE 4 END,
689
+ created_at DESC
690
+ LIMIT :n_approved
691
+ """
692
+
693
+ rejected_stmt = """
694
+ SELECT initial_memory, status FROM memory_lifecycle
695
+ WHERE status IN ('human-rejected', 'model-rejected')
696
+ ORDER BY
697
+ CASE WHEN npc = :npc AND team = :team AND directory_path = :path THEN 1
698
+ WHEN npc = :npc AND team = :team THEN 2
699
+ WHEN team = :team THEN 3
700
+ ELSE 4 END,
701
+ created_at DESC
702
+ LIMIT :n_rejected
703
+ """
704
+
705
+ params = {"npc": npc, "team": team, "path": directory_path,
706
+ "n_approved": n_approved, "n_rejected": n_rejected}
707
+
708
+ approved = self._fetch_all(approved_stmt, params)
709
+ rejected = self._fetch_all(rejected_stmt, params)
710
+
711
+ return {"approved": approved, "rejected": rejected}
712
+
713
+ def get_pending_memories(self, limit: int = 50):
714
+ """Get memories pending human approval"""
715
+ stmt = """
716
+ SELECT * FROM memory_lifecycle
717
+ WHERE status = 'pending_approval'
718
+ ORDER BY created_at ASC
719
+ LIMIT :limit
720
+ """
721
+ return self._fetch_all(stmt, {"limit": limit})
722
+
723
+ def update_memory_status(self, memory_id: int, new_status: str, final_memory: str = None):
724
+ """Update memory status and optionally final_memory"""
725
+ stmt = """
726
+ UPDATE memory_lifecycle
727
+ SET status = :status, final_memory = :final_memory
728
+ WHERE id = :memory_id
729
+ """
730
+ params = {"status": new_status, "final_memory": final_memory, "memory_id": memory_id}
731
+
732
+ with self.engine.begin() as conn:
733
+ conn.execute(text(stmt), params)
734
+
582
735
  def add_attachment(self, message_id, name, attachment_type, data, size, file_path=None):
583
736
  timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
584
737
  stmt = """
@@ -767,12 +920,12 @@ class CommandHistory:
767
920
  date_filter = "WHERE timestamp <= :end_date"
768
921
  params = {"end_date": end_date}
769
922
 
770
- # Use different GROUP_CONCAT for different databases
923
+
771
924
  if 'sqlite' in str(self.engine.url):
772
925
  group_concat_models = "GROUP_CONCAT(DISTINCT model)"
773
926
  group_concat_providers = "GROUP_CONCAT(DISTINCT provider)"
774
927
  else:
775
- # PostgreSQL uses STRING_AGG
928
+
776
929
  group_concat_models = "STRING_AGG(DISTINCT model, ',')"
777
930
  group_concat_providers = "STRING_AGG(DISTINCT provider, ',')"
778
931
 
@@ -806,7 +959,7 @@ class CommandHistory:
806
959
  ])
807
960
 
808
961
  def get_command_patterns(self, timeframe='day') -> pd.DataFrame:
809
- # Use different date formatting for different databases
962
+
810
963
  if 'sqlite' in str(self.engine.url):
811
964
  time_group_formats = {
812
965
  'hour': "strftime('%Y-%m-%d %H', timestamp)",
@@ -815,7 +968,7 @@ class CommandHistory:
815
968
  'month': "strftime('%Y-%m', timestamp)"
816
969
  }
817
970
  else:
818
- # PostgreSQL date formatting
971
+
819
972
  time_group_formats = {
820
973
  'hour': "TO_CHAR(timestamp::timestamp, 'YYYY-MM-DD HH24')",
821
974
  'day': "TO_CHAR(timestamp::timestamp, 'YYYY-MM-DD')",
@@ -825,7 +978,7 @@ class CommandHistory:
825
978
 
826
979
  time_group = time_group_formats.get(timeframe, time_group_formats['day'])
827
980
 
828
- # Use different SUBSTR functions
981
+
829
982
  if 'sqlite' in str(self.engine.url):
830
983
  substr_func = "SUBSTR"
831
984
  instr_func = "INSTR"
@@ -933,22 +1086,22 @@ def save_conversation_message(
933
1086
  if wd is None:
934
1087
  wd = os.getcwd()
935
1088
  timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
1089
+ if message_id is None:
1090
+ message_id = generate_message_id()
936
1091
 
937
1092
 
938
1093
  return command_history.add_conversation(
939
- role=role,
940
- timestamp=timestamp,
941
- content=content,
942
- conversation_id=conversation_id,
943
- directory_path=wd,
944
- model=model,
945
- provider=provider,
946
- npc=npc,
947
- team=team,
948
- attachments=attachments,
949
- message_id=message_id,
950
- )
951
-
1094
+ message_id,
1095
+ timestamp,
1096
+ role,
1097
+ content,
1098
+ conversation_id,
1099
+ wd,
1100
+ model=model,
1101
+ provider=provider,
1102
+ npc=npc,
1103
+ team=team,
1104
+ attachments=attachments)
952
1105
  def retrieve_last_conversation(
953
1106
  command_history: CommandHistory, conversation_id: str
954
1107
  ) -> str:
@@ -957,7 +1110,7 @@ def retrieve_last_conversation(
957
1110
  """
958
1111
  last_message = command_history.get_last_conversation(conversation_id)
959
1112
  if last_message:
960
- return last_message['content'] # Use dict key access
1113
+ return last_message['content']
961
1114
  return "No previous conversation messages found."
962
1115
 
963
1116
  def save_attachment_to_message(
@@ -1010,7 +1163,7 @@ def get_available_tables(db_path_or_engine: Union[str, Engine]) -> List[Tuple[st
1010
1163
  "SELECT name FROM sqlite_master WHERE type='table' AND name != 'command_history'"
1011
1164
  ))
1012
1165
  else:
1013
- # PostgreSQL
1166
+
1014
1167
  result = conn.execute(text("""
1015
1168
  SELECT table_name FROM information_schema.tables
1016
1169
  WHERE table_schema = 'public' AND table_name != 'command_history'