npcsh 1.1.5__py3-none-any.whl → 1.1.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. npcsh/_state.py +483 -336
  2. npcsh/npc_team/jinxs/code/sh.jinx +0 -1
  3. npcsh/npc_team/jinxs/code/sql.jinx +1 -3
  4. npcsh/npc_team/jinxs/utils/npc-studio.jinx +33 -38
  5. npcsh/npc_team/jinxs/utils/ots.jinx +34 -65
  6. npcsh/npc_team/jinxs/utils/search.jinx +130 -0
  7. npcsh/npc_team/jinxs/utils/vixynt.jinx +33 -45
  8. npcsh/routes.py +32 -14
  9. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/npc-studio.jinx +33 -38
  10. npcsh-1.1.7.data/data/npcsh/npc_team/ots.jinx +61 -0
  11. npcsh-1.1.7.data/data/npcsh/npc_team/search.jinx +130 -0
  12. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/sh.jinx +0 -1
  13. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/sql.jinx +1 -3
  14. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/vixynt.jinx +33 -45
  15. {npcsh-1.1.5.dist-info → npcsh-1.1.7.dist-info}/METADATA +1 -10
  16. {npcsh-1.1.5.dist-info → npcsh-1.1.7.dist-info}/RECORD +65 -73
  17. npcsh/npc_team/jinxs/utils/search/brainblast.jinx +0 -51
  18. npcsh/npc_team/jinxs/utils/search/kg_search.jinx +0 -43
  19. npcsh/npc_team/jinxs/utils/search/memory_search.jinx +0 -36
  20. npcsh/npc_team/jinxs/utils/search/rag.jinx +0 -70
  21. npcsh/npc_team/jinxs/utils/search/search.jinx +0 -192
  22. npcsh-1.1.5.data/data/npcsh/npc_team/brainblast.jinx +0 -51
  23. npcsh-1.1.5.data/data/npcsh/npc_team/kg_search.jinx +0 -43
  24. npcsh-1.1.5.data/data/npcsh/npc_team/memory_search.jinx +0 -36
  25. npcsh-1.1.5.data/data/npcsh/npc_team/ots.jinx +0 -92
  26. npcsh-1.1.5.data/data/npcsh/npc_team/rag.jinx +0 -70
  27. npcsh-1.1.5.data/data/npcsh/npc_team/search.jinx +0 -192
  28. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/alicanto.jinx +0 -0
  29. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/alicanto.npc +0 -0
  30. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/alicanto.png +0 -0
  31. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/breathe.jinx +0 -0
  32. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/build.jinx +0 -0
  33. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/compile.jinx +0 -0
  34. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/corca.jinx +0 -0
  35. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/corca.npc +0 -0
  36. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/corca.png +0 -0
  37. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/corca_example.png +0 -0
  38. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/edit_file.jinx +0 -0
  39. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/flush.jinx +0 -0
  40. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/foreman.npc +0 -0
  41. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/frederic.npc +0 -0
  42. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/frederic4.png +0 -0
  43. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/guac.jinx +0 -0
  44. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/guac.png +0 -0
  45. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/help.jinx +0 -0
  46. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/init.jinx +0 -0
  47. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/jinxs.jinx +0 -0
  48. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/kadiefa.npc +0 -0
  49. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/kadiefa.png +0 -0
  50. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/npcsh.ctx +0 -0
  51. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/npcsh_sibiji.png +0 -0
  52. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/plan.jinx +0 -0
  53. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/plonk.jinx +0 -0
  54. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/plonk.npc +0 -0
  55. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/plonk.png +0 -0
  56. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/plonkjr.npc +0 -0
  57. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/plonkjr.png +0 -0
  58. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/pti.jinx +0 -0
  59. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/python.jinx +0 -0
  60. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/roll.jinx +0 -0
  61. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/sample.jinx +0 -0
  62. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/serve.jinx +0 -0
  63. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/set.jinx +0 -0
  64. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/sibiji.npc +0 -0
  65. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/sibiji.png +0 -0
  66. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/sleep.jinx +0 -0
  67. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/spool.jinx +0 -0
  68. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/spool.png +0 -0
  69. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/trigger.jinx +0 -0
  70. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/wander.jinx +0 -0
  71. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/yap.jinx +0 -0
  72. {npcsh-1.1.5.data → npcsh-1.1.7.data}/data/npcsh/npc_team/yap.png +0 -0
  73. {npcsh-1.1.5.dist-info → npcsh-1.1.7.dist-info}/WHEEL +0 -0
  74. {npcsh-1.1.5.dist-info → npcsh-1.1.7.dist-info}/entry_points.txt +0 -0
  75. {npcsh-1.1.5.dist-info → npcsh-1.1.7.dist-info}/licenses/LICENSE +0 -0
  76. {npcsh-1.1.5.dist-info → npcsh-1.1.7.dist-info}/top_level.txt +0 -0
npcsh/_state.py CHANGED
@@ -88,12 +88,18 @@ from npcpy.llm_funcs import (
     breathe,
 
 )
+
 from npcpy.memory.knowledge_graph import (
     kg_evolve_incremental,
 
 )
 from npcpy.gen.embeddings import get_embeddings
 
+import inspect
+import sys
+from npcpy.memory.search import execute_rag_command, execute_brainblast_command
+from npcpy.data.load import load_file_contents
+from npcpy.data.web import search_web
 try:
     import readline
 except:
@@ -311,7 +317,6 @@ def get_npc_path(npc_name: str, db_path: str) -> str:
 
     raise ValueError(f"NPC file not found: {npc_name}")
 
-
 def initialize_base_npcs_if_needed(db_path: str) -> None:
     """
     Function Description:
@@ -331,7 +336,7 @@ def initialize_base_npcs_if_needed(db_path: str) -> None:
     conn = sqlite3.connect(db_path)
     cursor = conn.cursor()
 
-
+    # Create table
     cursor.execute(
         """
         CREATE TABLE IF NOT EXISTS compiled_npcs (
@@ -342,7 +347,7 @@ def initialize_base_npcs_if_needed(db_path: str) -> None:
         """
     )
 
-
+    # Package directories
     package_dir = os.path.dirname(__file__)
     package_npc_team_dir = os.path.join(package_dir, "npc_team")
 
@@ -354,6 +359,7 @@ def initialize_base_npcs_if_needed(db_path: str) -> None:
     os.makedirs(user_jinxs_dir, exist_ok=True)
     os.makedirs(user_templates_dir, exist_ok=True)
 
+    # Copy .npc and .ctx files
     for filename in os.listdir(package_npc_team_dir):
        if filename.endswith(".npc"):
            source_path = os.path.join(package_npc_team_dir, filename)
@@ -372,19 +378,33 @@ def initialize_base_npcs_if_needed(db_path: str) -> None:
                 shutil.copy2(source_path, destination_path)
                 print(f"Copied ctx {filename} to {destination_path}")
 
-
+    # Copy jinxs directory RECURSIVELY
     package_jinxs_dir = os.path.join(package_npc_team_dir, "jinxs")
     if os.path.exists(package_jinxs_dir):
-        for filename in os.listdir(package_jinxs_dir):
-            if filename.endswith(".jinx"):
-                source_jinx_path = os.path.join(package_jinxs_dir, filename)
-                destination_jinx_path = os.path.join(user_jinxs_dir, filename)
-                if (not os.path.exists(destination_jinx_path)) or file_has_changed(
-                    source_jinx_path, destination_jinx_path
-                ):
-                    shutil.copy2(source_jinx_path, destination_jinx_path)
-                    print(f"Copied jinx {filename} to {destination_jinx_path}")
+        for root, dirs, files in os.walk(package_jinxs_dir):
+            # Calculate relative path from package_jinxs_dir
+            rel_path = os.path.relpath(root, package_jinxs_dir)
+
+            # Create corresponding directory in user jinxs
+            if rel_path == '.':
+                dest_dir = user_jinxs_dir
+            else:
+                dest_dir = os.path.join(user_jinxs_dir, rel_path)
+            os.makedirs(dest_dir, exist_ok=True)
+
+            # Copy all .jinx files in this directory
+            for filename in files:
+                if filename.endswith(".jinx"):
+                    source_jinx_path = os.path.join(root, filename)
+                    destination_jinx_path = os.path.join(dest_dir, filename)
+
+                    if not os.path.exists(destination_jinx_path) or file_has_changed(
+                        source_jinx_path, destination_jinx_path
+                    ):
+                        shutil.copy2(source_jinx_path, destination_jinx_path)
+                        print(f"Copied jinx {os.path.join(rel_path, filename)} to {destination_jinx_path}")
 
+    # Copy templates directory
     templates = os.path.join(package_npc_team_dir, "templates")
     if os.path.exists(templates):
         for folder in os.listdir(templates):
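Note on the hunk above: the new loop mirrors the packaged jinxs tree into the user's jinxs directory with os.walk, so nested folders (utils/, code/, and so on) now survive installation. A minimal standalone sketch of the same pattern, with hypothetical paths and a size comparison standing in for file_has_changed, would be:

    import os
    import shutil

    def mirror_jinx_tree(src_root, dest_root):
        # Walk the packaged tree and recreate each subdirectory under dest_root.
        for root, dirs, files in os.walk(src_root):
            rel_path = os.path.relpath(root, src_root)
            dest_dir = dest_root if rel_path == "." else os.path.join(dest_root, rel_path)
            os.makedirs(dest_dir, exist_ok=True)
            for filename in files:
                if filename.endswith(".jinx"):
                    src = os.path.join(root, filename)
                    dest = os.path.join(dest_dir, filename)
                    # Copy only when the destination is missing or differs in size.
                    if not os.path.exists(dest) or os.path.getsize(src) != os.path.getsize(dest):
                        shutil.copy2(src, dest)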
@@ -408,6 +428,7 @@ def initialize_base_npcs_if_needed(db_path: str) -> None:
     set_npcsh_initialized()
     add_npcshrc_to_shell_config()
 
+
 def get_shell_config_file() -> str:
     """
 
@@ -443,6 +464,138 @@ def get_team_ctx_path(team_path: str) -> Optional[str]:
     return str(ctx_files[0]) if ctx_files else None
 
 
+from npcpy.memory.memory_processor import memory_approval_ui
+from npcpy.ft.memory_trainer import MemoryTrainer
+from npcpy.llm_funcs import get_facts
+
+def get_relevant_memories(
+    command_history: CommandHistory,
+    npc_name: str,
+    team_name: str,
+    path: str,
+    query: Optional[str] = None,
+    max_memories: int = 10,
+    state: Optional[ShellState] = None
+) -> List[Dict]:
+
+    engine = command_history.engine
+
+    all_memories = command_history.get_memories_for_scope(
+        npc=npc_name,
+        team=team_name,
+        directory_path=path,
+    )
+
+    if not all_memories:
+        return []
+
+    if len(all_memories) <= max_memories and not query:
+        return all_memories
+
+    if query:
+        query_lower = query.lower()
+        keyword_matches = [
+            m for m in all_memories
+            if query_lower in (m.get('final_memory') or m.get('initial_memory') or '').lower()
+        ]
+
+        if keyword_matches:
+            return keyword_matches[:max_memories]
+
+    if state and state.embedding_model and state.embedding_provider:
+        try:
+            from npcpy.gen.embeddings import get_embeddings
+
+            search_text = query if query else "recent context"
+            query_embedding = get_embeddings(
+                [search_text],
+                state.embedding_model,
+                state.embedding_provider
+            )[0]
+
+            memory_texts = [
+                m.get('final_memory', '') for m in all_memories
+            ]
+            memory_embeddings = get_embeddings(
+                memory_texts,
+                state.embedding_model,
+                state.embedding_provider
+            )
+
+            import numpy as np
+            similarities = []
+            for mem_emb in memory_embeddings:
+                similarity = np.dot(query_embedding, mem_emb) / (
+                    np.linalg.norm(query_embedding) *
+                    np.linalg.norm(mem_emb)
+                )
+                similarities.append(similarity)
+
+            sorted_indices = np.argsort(similarities)[::-1]
+            return [all_memories[i] for i in sorted_indices[:max_memories]]
+
+        except Exception as e:
+            print(colored(
+                f"RAG search failed, using recent: {e}",
+                "yellow"
+            ))
+
+    return all_memories[-max_memories:]
+
+
+def search_kg_facts(
+    self,
+    npc: str,
+    team: str,
+    directory_path: str,
+    query: str
+) -> List[Dict]:
+
+    kg = load_kg_from_db(
+        self.engine,
+        team,
+        npc,
+        directory_path
+    )
+
+    if not kg or 'facts' not in kg:
+        return []
+
+    query_lower = query.lower()
+    matching_facts = []
+
+    for fact in kg['facts']:
+        statement = fact.get('statement', '').lower()
+        if query_lower in statement:
+            matching_facts.append(fact)
+
+    return matching_facts
+
+def format_memory_context(memory_examples):
+    if not memory_examples:
+        return ""
+
+    context_parts = []
+
+    approved_examples = memory_examples.get("approved", [])
+    rejected_examples = memory_examples.get("rejected", [])
+
+    if approved_examples:
+        context_parts.append("EXAMPLES OF GOOD MEMORIES:")
+        for ex in approved_examples[:5]:
+            final = ex.get("final_memory") or ex.get("initial_memory")
+            context_parts.append(f"- {final}")
+
+    if rejected_examples:
+        context_parts.append("\nEXAMPLES OF POOR MEMORIES TO AVOID:")
+        for ex in rejected_examples[:3]:
+            context_parts.append(f"- {ex.get('initial_memory')}")
+
+    if context_parts:
+        context_parts.append("\nLearn from these examples to generate similar high-quality memories.")
+        return "\n".join(context_parts)
+
+    return ""
 def add_npcshrc_to_shell_config() -> None:
     """
     Function Description:
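The get_relevant_memories addition above falls back from keyword matching to embedding search: it embeds the query and every candidate memory, scores each pair by cosine similarity, and keeps the top max_memories. A self-contained sketch of just that ranking step, with plain lists standing in for the get_embeddings output, would be:

    import numpy as np

    def rank_by_cosine(query_embedding, memory_embeddings, memories, top_k=10):
        # Cosine similarity between the query vector and each memory vector.
        q = np.asarray(query_embedding, dtype=float)
        m = np.asarray(memory_embeddings, dtype=float)
        sims = m @ q / (np.linalg.norm(m, axis=1) * np.linalg.norm(q))
        # Highest-similarity memories first, truncated to top_k.
        order = np.argsort(sims)[::-1][:top_k]
        return [memories[i] for i in order]

    # Toy 3-dimensional embeddings for illustration only.
    memories = ["likes tmux", "prefers dark mode", "works in ~/projects"]
    ranked = rank_by_cosine(
        [1.0, 0.0, 0.1],
        [[0.9, 0.1, 0.0], [0.0, 1.0, 0.0], [0.8, 0.0, 0.6]],
        memories,
        top_k=2,
    )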
@@ -1863,18 +2016,19 @@ def should_skip_kg_processing(user_input: str, assistant_output: str) -> bool:
 
     return False
 
-
-
-
 def execute_slash_command(command: str,
                           stdin_input: Optional[str],
                           state: ShellState,
                           stream: bool,
                           router) -> Tuple[ShellState, Any]:
-    """Executes slash commands using the router or checking NPC/Team jinxs."""
-    all_command_parts = shlex.split(command)
+    """Executes slash commands using the router."""
+    try:
+        all_command_parts = shlex.split(command)
+    except ValueError:
+        all_command_parts = command.split()
     command_name = all_command_parts[0].lstrip('/')
 
+    # --- NPC SWITCHING LOGIC ---
     if command_name in ['n', 'npc']:
         npc_to_switch_to = all_command_parts[1] if len(all_command_parts) > 1 else None
         if npc_to_switch_to and state.team and npc_to_switch_to in state.team.npcs:
@@ -1884,129 +2038,35 @@ def execute_slash_command(command: str,
             available_npcs = list(state.team.npcs.keys()) if state.team else []
             return state, {"output": colored(f"NPC '{npc_to_switch_to}' not found. Available NPCs: {', '.join(available_npcs)}", "red"), "messages": state.messages}
 
+    # --- ROUTER LOGIC ---
     handler = router.get_route(command_name)
     if handler:
-        parsed_flags, positional_args = parse_generic_command_flags(all_command_parts[1:])
-        normalized_flags = normalize_and_expand_flags(parsed_flags)
-
         handler_kwargs = {
-            'stream': stream,
-            'team': state.team,
-            'messages': state.messages,
-            'api_url': state.api_url,
-            'api_key': state.api_key,
-            'stdin_input': stdin_input,
-            'positional_args': positional_args,
-            'plonk_context': state.team.shared_context.get('PLONK_CONTEXT') if state.team and hasattr(state.team, 'shared_context') else None,
-
+            'stream': stream, 'team': state.team, 'messages': state.messages, 'api_url': state.api_url,
+            'api_key': state.api_key, 'stdin_input': stdin_input,
             'model': state.npc.model if isinstance(state.npc, NPC) and state.npc.model else state.chat_model,
             'provider': state.npc.provider if isinstance(state.npc, NPC) and state.npc.provider else state.chat_provider,
-            'npc': state.npc,
-
-            'sprovider': state.search_provider,
-            'emodel': state.embedding_model,
-            'eprovider': state.embedding_provider,
-            'igmodel': state.image_gen_model,
-            'igprovider': state.image_gen_provider,
-            'vgmodel': state.video_gen_model,
-            'vgprovider': state.video_gen_provider,
-            'vmodel': state.vision_model,
-            'vprovider': state.vision_provider,
-            'rmodel': state.reasoning_model,
-            'rprovider': state.reasoning_provider,
+            'npc': state.npc, 'sprovider': state.search_provider, 'emodel': state.embedding_model,
+            'eprovider': state.embedding_provider, 'igmodel': state.image_gen_model, 'igprovider': state.image_gen_provider,
+            'vmodel': state.vision_model, 'vprovider': state.vision_provider, 'rmodel': state.reasoning_model,
+            'rprovider': state.reasoning_provider, 'state': state
         }
-
-        if len(normalized_flags) > 0:
-            kwarg_part = 'with kwargs: \n -' + '\n -'.join(f'{key}={item}' for key, item in normalized_flags.items())
-        else:
-            kwarg_part = ''
-
-        render_markdown(f'- Calling {command_name} handler {kwarg_part} ')
-
-        if 'model' in normalized_flags and 'provider' not in normalized_flags:
-            inferred_provider = lookup_provider(normalized_flags['model'])
-            if inferred_provider:
-                handler_kwargs['provider'] = inferred_provider
-                print(colored(f"Info: Inferred provider '{inferred_provider}' for model '{normalized_flags['model']}'.", "cyan"))
-
-        if 'provider' in normalized_flags and 'model' not in normalized_flags:
-            current_provider = lookup_provider(handler_kwargs['model'])
-            if current_provider != normalized_flags['provider']:
-                prov = normalized_flags['provider']
-                print(f'Please specify a model for the provider: {prov}')
-
-        handler_kwargs.update(normalized_flags)
-
         try:
             result = handler(command=command, **handler_kwargs)
-
-            if isinstance(result, dict):
+            if isinstance(result, dict):
                 state.messages = result.get("messages", state.messages)
-                return state, result
-            elif isinstance(result, str):
-                return state, {"output": result, "messages": state.messages}
-            else:
-                return state, {"output": str(result), "messages": state.messages}
-
+            return state, result
         except Exception as e:
             import traceback
-            print(f"Error executing slash command '{command_name}':", file=sys.stderr)
             traceback.print_exc()
             return state, {"output": colored(f"Error executing slash command '{command_name}': {e}", "red"), "messages": state.messages}
-
-    active_npc = state.npc if isinstance(state.npc, NPC) else None
-    jinx_to_execute = None
-    executor = None
-
-    if active_npc and hasattr(active_npc, 'jinxs_dict') and command_name in active_npc.jinxs_dict:
-        jinx_to_execute = active_npc.jinxs_dict[command_name]
-        executor = active_npc
-    elif state.team and hasattr(state.team, 'jinxs_dict') and command_name in state.team.jinxs_dict:
-        jinx_to_execute = state.team.jinxs_dict[command_name]
-        executor = state.team
-    if jinx_to_execute:
-        args = all_command_parts[1:]
-        try:
-            input_values = {}
-            if hasattr(jinx_to_execute, 'inputs') and jinx_to_execute.inputs:
-                for i, input_name in enumerate(jinx_to_execute.inputs):
-                    if i < len(args):
-                        input_values[input_name] = args[i]
-
-            if isinstance(executor, NPC):
-                jinx_output = jinx_to_execute.execute(
-                    input_values=input_values,
-                    jinxs_dict=executor.jinxs_dict if hasattr(executor, 'jinxs_dict') else {},
-                    npc=executor,
-                    messages=state.messages
-                )
-            else:
-                jinx_output = jinx_to_execute.execute(
-                    input_values=input_values,
-                    jinxs_dict=executor.jinxs_dict if hasattr(executor, 'jinxs_dict') else {},
-                    npc=active_npc or state.npc,
-                    messages=state.messages
-                )
-            if isinstance(jinx_output, dict) and 'messages' in jinx_output:
-                state.messages = jinx_output['messages']
-                return state, jinx_output
-            elif isinstance(jinx_output, dict):
-                return state, jinx_output
-            else:
-                return state, {"output": str(jinx_output), "messages": state.messages}
-
-        except Exception as e:
-            import traceback
-            print(f"Error executing jinx '{command_name}':", file=sys.stderr)
-            traceback.print_exc()
-            return state, {"output": colored(f"Error executing jinx '{command_name}': {e}", "red"), "messages": state.messages}
 
+    # Fallback for switching NPC by name
     if state.team and command_name in state.team.npcs:
-        new_npc = state.team.npcs[command_name]
-        state.npc = new_npc
-        return state, {"output": f"Switched to NPC: {new_npc.name}", "messages": state.messages}
+        state.npc = state.team.npcs[command_name]
+        return state, {"output": f"Switched to NPC: {state.npc.name}", "messages": state.messages}
 
-    return state, {"output": colored(f"Unknown slash command, jinx, or NPC: {command_name}", "red"), "messages": state.messages}
+    return state, {"output": colored(f"Unknown slash command or NPC: {command_name}", "red"), "messages": state.messages}
 
 
 def process_pipeline_command(
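The reworked execute_slash_command above now guards shlex.split with a ValueError fallback, so a slash command containing an unbalanced quote no longer raises before routing. A small sketch of that parsing behaviour, using a hypothetical helper name, would be:

    import shlex

    def split_command(command):
        # shlex handles quoted arguments; fall back to whitespace splitting
        # when the input has unbalanced quotes (shlex raises ValueError).
        try:
            return shlex.split(command)
        except ValueError:
            return command.split()

    split_command('/vixynt "a red fox"')   # ['/vixynt', 'a red fox']
    split_command("/search it's broken")   # fallback: ['/search', "it's", 'broken']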
@@ -2017,15 +2077,14 @@ def process_pipeline_command(
     review = False,
     router = None,
 ) -> Tuple[ShellState, Any]:
-    '''
-    Processing command
-    '''
 
     if not cmd_segment:
         return state, stdin_input
 
     available_models_all = get_locally_available_models(state.current_path)
-    available_models_all_list = [item for key, item in available_models_all.items()]
+    available_models_all_list = [
+        item for key, item in available_models_all.items()
+    ]
     model_override, provider_override, cmd_cleaned = get_model_and_provider(
         cmd_segment, available_models_all_list
     )
@@ -2034,18 +2093,33 @@ def process_pipeline_command(
     if not cmd_to_process:
         return state, stdin_input
 
-    npc_model = state.npc.model if isinstance(state.npc, NPC) and state.npc.model else None
-    npc_provider = state.npc.provider if isinstance(state.npc, NPC) and state.npc.provider else None
+    npc_model = (
+        state.npc.model
+        if isinstance(state.npc, NPC) and state.npc.model
+        else None
+    )
+    npc_provider = (
+        state.npc.provider
+        if isinstance(state.npc, NPC) and state.npc.provider
+        else None
+    )
 
     exec_model = model_override or npc_model or state.chat_model
     exec_provider = provider_override or npc_provider or state.chat_provider
 
     if cmd_to_process.startswith("/"):
-        return execute_slash_command(cmd_to_process,
-                                     stdin_input,
-                                     state,
-                                     stream_final,
-                                     router)
+        with SpinnerContext(
+            f"Routing to {cmd_to_process.split()[0]}",
+            style="arrow"
+        ):
+            result = execute_slash_command(
+                cmd_to_process,
+                stdin_input,
+                state,
+                stream_final,
+                router
+            )
+        return result
 
     cmd_parts = parse_command_safely(cmd_to_process)
     if not cmd_parts:
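This hunk wraps slash-command dispatch in the new SpinnerContext helper (defined later in this diff, next to check_mode_switch): a background thread animates a frame set until the with-block exits, then the line is cleared. A trimmed, self-contained sketch of that pattern, under the assumption that the real class behaves the same way, would be:

    import itertools, sys, threading, time

    class Spinner:
        # Minimal stand-in for SpinnerContext: animate frames until the block exits.
        def __init__(self, message, frames="|/-\\"):
            self.message = message
            self.frames = itertools.cycle(frames)
            self.running = False

        def _spin(self):
            while self.running:
                sys.stdout.write(f"\r{next(self.frames)} {self.message}...")
                sys.stdout.flush()
                time.sleep(0.1)

        def __enter__(self):
            self.running = True
            self.thread = threading.Thread(target=self._spin)
            self.thread.start()
            return self

        def __exit__(self, *exc):
            self.running = False
            self.thread.join()
            sys.stdout.write("\r" + " " * 60 + "\r")

    with Spinner("Routing to /search"):
        time.sleep(1)  # stands in for execute_slash_command(...)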
@@ -2058,6 +2132,7 @@ def process_pipeline_command(
 
     if command_name in interactive_commands:
         return handle_interactive_command(cmd_parts, state)
+
     if command_name in TERMINAL_EDITORS:
         print(f"Starting interactive editor: {command_name}...")
         full_command_str = " ".join(cmd_parts)
@@ -2065,46 +2140,107 @@ def process_pipeline_command(
         return state, output
 
     if validate_bash_command(cmd_parts):
-        success, result = handle_bash_command(cmd_parts, cmd_to_process, stdin_input, state)
+        with SpinnerContext(f"Executing {command_name}", style="line"):
+            success, result = handle_bash_command(
+                cmd_parts,
+                cmd_to_process,
+                stdin_input,
+                state
+            )
+
         if success:
             return state, result
         else:
-            print(colored(f"Bash command failed: {result}. Asking LLM for a fix...", "yellow"), file=sys.stderr)
-            fixer_prompt = f"The command '{cmd_to_process}' failed with the error: '{result}'. Provide the correct command."
-            response = execute_llm_command(
-                fixer_prompt,
-                model=exec_model,
-                provider=exec_provider,
-                npc=state.npc,
-                stream=stream_final,
-                messages=state.messages
+            print(
+                colored(
+                    f"Command failed. Consulting {exec_model}...",
+                    "yellow"
+                ),
+                file=sys.stderr
             )
+            fixer_prompt = (
+                f"The command '{cmd_to_process}' failed with error: "
+                f"'{result}'. Provide the correct command."
+            )
+
+            with SpinnerContext(
+                f"{exec_model} analyzing error",
+                style="brain"
+            ):
+                response = execute_llm_command(
+                    fixer_prompt,
+                    model=exec_model,
+                    provider=exec_provider,
+                    npc=state.npc,
+                    stream=stream_final,
+                    messages=state.messages
+                )
+
             state.messages = response['messages']
             return state, response['response']
     else:
-        full_llm_cmd = f"{cmd_to_process} {stdin_input}" if stdin_input else cmd_to_process
+        full_llm_cmd = (
+            f"{cmd_to_process} {stdin_input}"
+            if stdin_input
+            else cmd_to_process
+        )
         path_cmd = 'The current working directory is: ' + state.current_path
-        ls_files = 'Files in the current directory (full paths):\n' + "\n".join([os.path.join(state.current_path, f) for f in os.listdir(state.current_path)]) if os.path.exists(state.current_path) else 'No files found in the current directory.'
-        platform_info = f"Platform: {platform.system()} {platform.release()} ({platform.machine()})"
+        ls_files = (
+            'Files in the current directory (full paths):\n' +
+            "\n".join([
+                os.path.join(state.current_path, f)
+                for f in os.listdir(state.current_path)
+            ])
+            if os.path.exists(state.current_path)
+            else 'No files found in the current directory.'
+        )
+        platform_info = (
+            f"Platform: {platform.system()} {platform.release()} "
+            f"({platform.machine()})"
+        )
         info = path_cmd + '\n' + ls_files + '\n' + platform_info + '\n'
         state.messages.append({'role':'user', 'content':full_llm_cmd})
 
-
-        llm_result = check_llm_command(
-            full_llm_cmd,
-            model=exec_model,
-            provider=exec_provider,
-            api_url=state.api_url,
-            api_key=state.api_key,
-            npc=state.npc,
-            team=state.team,
-            messages=state.messages,
-            images=state.attachments,
-            stream=stream_final,
-            context=info,
+        npc_name = (
+            state.npc.name
+            if isinstance(state.npc, NPC)
+            else "Assistant"
         )
-
 
+        with SpinnerContext(
+            f"{npc_name} processing with {exec_model}",
+            style="dots_pulse"
+        ):
+            # Build extra_globals for jinx execution
+            application_globals_for_jinx = {
+                "CommandHistory": CommandHistory,
+                "load_kg_from_db": load_kg_from_db,
+                "execute_rag_command": execute_rag_command,
+                "execute_brainblast_command": execute_brainblast_command,
+                "load_file_contents": load_file_contents,
+                "search_web": search_web,
+                "get_relevant_memories": get_relevant_memories,
+                "search_kg_facts": search_kg_facts,
+                'state': state
+            }
+            current_module = sys.modules[__name__]
+            for name, func in inspect.getmembers(current_module, inspect.isfunction):
+                application_globals_for_jinx[name] = func
+
+            llm_result = check_llm_command(
+                full_llm_cmd,
+                model=exec_model,
+                provider=exec_provider,
+                api_url=state.api_url,
+                api_key=state.api_key,
+                npc=state.npc,
+                team=state.team,
+                messages=state.messages,
+                images=state.attachments,
+                stream=stream_final,
+                context=info,
+                extra_globals=application_globals_for_jinx # NOW PASS IT
+            )
     if not review:
         if isinstance(llm_result, dict):
             state.messages = llm_result.get("messages", state.messages)
@@ -2112,7 +2248,6 @@ def process_pipeline_command(
             return state, output
         else:
             return state, llm_result
-
     else:
         return review_and_iterate_command(
             original_command=full_llm_cmd,
@@ -2123,6 +2258,8 @@ def process_pipeline_command(
             stream_final=stream_final,
             info=info
         )
+
+
 def review_and_iterate_command(
     original_command: str,
     initial_result: Any,
@@ -2181,6 +2318,71 @@ def check_mode_switch(command:str , state: ShellState):
         return True, state
     return False, state
 
+import sys
+import time
+import threading
+from itertools import cycle
+
+class SpinnerContext:
+    def __init__(self, message="Processing", style="dots"):
+        self.message = message
+        self.spinning = False
+        self.thread = None
+
+        styles = {
+            "dots": ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"],
+            "line": ["-", "\\", "|", "/"],
+            "arrow": ["←", "↖", "↑", "↗", "→", "↘", "↓", "↙"],
+            "box": ["◰", "◳", "◲", "◱"],
+            "dots_pulse": ["⣾", "⣽", "⣻", "⢿", "⡿", "⣟", "⣯", "⣷"],
+            "brain": ["🧠", "💭", "🤔", "💡"],
+        }
+        self.frames = cycle(styles.get(style, styles["dots"]))
+
+    def _spin(self):
+        while self.spinning:
+            sys.stdout.write(
+                f"\r{colored(next(self.frames), 'cyan')} "
+                f"{colored(self.message, 'yellow')}..."
+            )
+            sys.stdout.flush()
+            time.sleep(0.1)
+
+    def __enter__(self):
+        self.spinning = True
+        self.thread = threading.Thread(target=self._spin)
+        self.thread.start()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.spinning = False
+        if self.thread:
+            self.thread.join()
+        sys.stdout.write("\r" + " " * 80 + "\r")
+        sys.stdout.flush()
+
+def show_thinking_animation(message="Thinking", duration=None):
+    frames = ["🤔", "💭", "🧠", "💡", "✨"]
+    colors = ["cyan", "blue", "magenta", "yellow", "green"]
+
+    start = time.time()
+    i = 0
+    while duration is None or (time.time() - start) < duration:
+        frame = frames[i % len(frames)]
+        color = colors[i % len(colors)]
+        sys.stdout.write(
+            f"\r{colored(frame, color)} "
+            f"{colored(message, 'yellow')}..."
+        )
+        sys.stdout.flush()
+        time.sleep(0.3)
+        i += 1
+        if duration and (time.time() - start) >= duration:
+            break
+
+    sys.stdout.write("\r" + " " * 80 + "\r")
+    sys.stdout.flush()
+
 def execute_command(
     command: str,
     state: ShellState,
@@ -2194,29 +2396,51 @@ def execute_command(
 
     mode_change, state = check_mode_switch(command, state)
     if mode_change:
+        print(colored(f"⚡ Switched to {state.current_mode} mode", "green"))
        return state, 'Mode changed.'
 
-    npc_name = state.npc.name if isinstance(state.npc, NPC) else "__none__"
+    npc_name = (
+        state.npc.name
+        if isinstance(state.npc, NPC)
+        else "__none__"
+    )
     team_name = state.team.name if state.team else "__none__"
 
-
     original_command_for_embedding = command
     commands = split_by_pipes(command)
 
     stdin_for_next = None
     final_output = None
     current_state = state
-    npc_model = state.npc.model if isinstance(state.npc, NPC) and state.npc.model else None
-    npc_provider = state.npc.provider if isinstance(state.npc, NPC) and state.npc.provider else None
+    npc_model = (
+        state.npc.model
+        if isinstance(state.npc, NPC) and state.npc.model
+        else None
+    )
+    npc_provider = (
+        state.npc.provider
+        if isinstance(state.npc, NPC) and state.npc.provider
+        else None
+    )
     active_model = npc_model or state.chat_model
     active_provider = npc_provider or state.chat_provider
+
     if state.current_mode == 'agent':
-
-
+        total_stages = len(commands)
+
         for i, cmd_segment in enumerate(commands):
-            render_markdown(f'- Executing command {i+1}/{len(commands)}')
+            stage_num = i + 1
+            stage_emoji = ["🎯", "⚙️", "🔧", "✨", "🚀"][i % 5]
+
+            print(colored(
+                f"\n{stage_emoji} Pipeline Stage {stage_num}/{total_stages}",
+                "cyan",
+                attrs=["bold"]
+            ))
+
             is_last_command = (i == len(commands) - 1)
             stream_this_segment = state.stream_output and not is_last_command
+
             try:
                 current_state, output = process_pipeline_command(
                     cmd_segment.strip(),
@@ -2224,19 +2448,26 @@ def execute_command(
                     current_state,
                     stream_final=stream_this_segment,
                     review=review,
-                    router= router
+                    router=router
                 )
+
                 if is_last_command:
+                    print(colored("✅ Pipeline complete", "green"))
                     return current_state, output
+
                 if isinstance(output, str):
                     stdin_for_next = output
                 elif not isinstance(output, str):
                     try:
                         if stream_this_segment:
-                            full_stream_output = print_and_process_stream_with_markdown(output,
-                                                                                         state.npc.model,
-                                                                                         state.npc.provider,
-                                                                                         show=True)
+                            full_stream_output = (
+                                print_and_process_stream_with_markdown(
+                                    output,
+                                    state.npc.model,
+                                    state.npc.provider,
+                                    show=True
+                                )
+                            )
                             stdin_for_next = full_stream_output
                             if is_last_command:
                                 final_output = full_stream_output
@@ -2245,24 +2476,40 @@ def execute_command(
                             try:
                                 stdin_for_next = str(output)
                             except Exception:
-                                print(f"Warning: Cannot convert output to string for piping: {type(output)}", file=sys.stderr)
+                                print(
+                                    f"Warning: Cannot convert output to "
+                                    f"string for piping: {type(output)}",
+                                    file=sys.stderr
+                                )
                                 stdin_for_next = None
                 else:
                     stdin_for_next = None
+
+                print(colored(
+                    f" → Passing to stage {stage_num + 1}",
+                    "blue"
+                ))
+
             except Exception as pipeline_error:
                 import traceback
                 traceback.print_exc()
-                error_msg = colored(f"Error in pipeline stage {i+1} ('{cmd_segment[:50]}...'): {pipeline_error}", "red")
+                error_msg = colored(
+                    f"❌ Error in stage {stage_num} "
+                    f"('{cmd_segment[:50]}...'): {pipeline_error}",
+                    "red"
+                )
                 return current_state, error_msg
 
         if final_output is not None and isinstance(final_output,str):
-            store_command_embeddings(original_command_for_embedding, final_output, current_state)
+            store_command_embeddings(
+                original_command_for_embedding,
+                final_output,
+                current_state
+            )
 
         return current_state, final_output
 
-
     elif state.current_mode == 'chat':
-
         cmd_parts = parse_command_safely(command)
         is_probably_bash = (
             cmd_parts
@@ -2273,6 +2520,7 @@ def execute_command(
                 or command.strip().startswith("/")
             )
         )
+
         if is_probably_bash:
             try:
                 command_name = cmd_parts[0]
@@ -2282,33 +2530,51 @@ def execute_command(
                     return handle_cd_command(cmd_parts, state)
                 else:
                     try:
-                        bash_state, bash_output = handle_bash_command(cmd_parts, command, None, state)
+                        bash_state, bash_output = handle_bash_command(
+                            cmd_parts,
+                            command,
+                            None,
+                            state
+                        )
                         return state, bash_output
                     except Exception as bash_err:
-                        return state, colored(f"Bash execution failed: {bash_err}", "red")
+                        return state, colored(
+                            f"Bash execution failed: {bash_err}",
+                            "red"
+                        )
            except Exception:
                pass
 
-
-        response = get_llm_response(
-            command,
-            model=active_model,
-            provider=active_provider,
-            npc=state.npc,
-            stream=state.stream_output,
-            messages=state.messages
-        )
+        with SpinnerContext(
+            f"Chatting with {active_model}",
+            style="brain"
+        ):
+            response = get_llm_response(
+                command,
+                model=active_model,
+                provider=active_provider,
+                npc=state.npc,
+                stream=state.stream_output,
+                messages=state.messages
+            )
+
         state.messages = response['messages']
        return state, response['response']
 
     elif state.current_mode == 'cmd':
-
-        response = execute_llm_command(command,
-                                       model=active_model,
-                                       provider=active_provider,
-                                       npc = state.npc,
-                                       stream = state.stream_output,
-                                       messages = state.messages)
+        with SpinnerContext(
+            f"Executing with {active_model}",
+            style="dots_pulse"
+        ):
+            response = execute_llm_command(
+                command,
+                model=active_model,
+                provider=active_provider,
+                npc=state.npc,
+                stream=state.stream_output,
+                messages=state.messages
+            )
+
         state.messages = response['messages']
         return state, response['response']
 
@@ -2334,9 +2600,13 @@ def setup_shell() -> Tuple[CommandHistory, Team, Optional[NPC]]:
 
     project_team_path = os.path.abspath(PROJECT_NPC_TEAM_PATH)
     global_team_path = os.path.expanduser(DEFAULT_NPC_TEAM_PATH)
+
     team_dir = None
     default_forenpc_name = None
-
+    global_team_path = os.path.expanduser(DEFAULT_NPC_TEAM_PATH)
+    if not os.path.exists(global_team_path):
+        print(f"Global NPC team directory doesn't exist. Initializing...")
+        initialize_base_npcs_if_needed(db_path)
     if os.path.exists(project_team_path):
         team_dir = project_team_path
         default_forenpc_name = "forenpc"
@@ -2383,9 +2653,17 @@ def setup_shell() -> Tuple[CommandHistory, Team, Optional[NPC]]:
            pass
        team_dir = global_team_path
        default_forenpc_name = "sibiji"
-    elif os.path.exists(global_team_path):
+    else:
        team_dir = global_team_path
-        default_forenpc_name = "sibiji"
+        default_forenpc_name = "sibiji"
+
+    if team_dir is None:
+        team_dir = global_team_path
+        default_forenpc_name = "sibiji"
+
+    if not os.path.exists(team_dir):
+        print(f"Creating team directory: {team_dir}")
+        os.makedirs(team_dir, exist_ok=True)
 
     team_ctx = {}
     team_ctx_path = get_team_ctx_path(team_dir)
@@ -2395,7 +2673,10 @@ def setup_shell() -> Tuple[CommandHistory, Team, Optional[NPC]]:
                team_ctx = yaml.safe_load(f) or {}
        except Exception as e:
            print(f"Warning: Could not load context file {os.path.basename(team_ctx_path)}: {e}")
+
    forenpc_name = team_ctx.get("forenpc", default_forenpc_name)
+    if forenpc_name is None:
+        forenpc_name = "sibiji"
 
    print('forenpc_name:', forenpc_name)
 
@@ -2432,7 +2713,6 @@ def setup_shell() -> Tuple[CommandHistory, Team, Optional[NPC]]:
        team.name = "npcsh"
 
    return command_history, team, forenpc_obj
-
 def initialize_router_with_jinxs(team, router):
    """Load global and team Jinxs into router"""
    global_jinxs_dir = os.path.expanduser("~/.npcsh/npc_team/jinxs")
@@ -2445,139 +2725,6 @@ def initialize_router_with_jinxs(team, router):
 
    return router
 
-from npcpy.memory.memory_processor import memory_approval_ui
-from npcpy.ft.memory_trainer import MemoryTrainer
-from npcpy.llm_funcs import get_facts
-
-def get_relevant_memories(
-    command_history: CommandHistory,
-    npc_name: str,
-    team_name: str,
-    path: str,
-    query: Optional[str] = None,
-    max_memories: int = 10,
-    state: Optional[ShellState] = None
-) -> List[Dict]:
-
-    engine = command_history.engine
-
-    all_memories = command_history.get_memories_for_scope(
-        npc=npc_name,
-        team=team_name,
-        directory_path=path,
-        status='human-approved'
-    )
-
-    if not all_memories:
-        return []
-
-    if len(all_memories) <= max_memories and not query:
-        return all_memories
-
-    if query:
-        query_lower = query.lower()
-        keyword_matches = [
-            m for m in all_memories
-            if query_lower in (m.get('final_memory') or m.get('initial_memory') or '').lower()
-        ]
-
-        if keyword_matches:
-            return keyword_matches[:max_memories]
-
-    if state and state.embedding_model and state.embedding_provider:
-        try:
-            from npcpy.gen.embeddings import get_embeddings
-
-            search_text = query if query else "recent context"
-            query_embedding = get_embeddings(
-                [search_text],
-                state.embedding_model,
-                state.embedding_provider
-            )[0]
-
-            memory_texts = [
-                m.get('final_memory', '') for m in all_memories
-            ]
-            memory_embeddings = get_embeddings(
-                memory_texts,
-                state.embedding_model,
-                state.embedding_provider
-            )
-
-            import numpy as np
-            similarities = []
-            for mem_emb in memory_embeddings:
-                similarity = np.dot(query_embedding, mem_emb) / (
-                    np.linalg.norm(query_embedding) *
-                    np.linalg.norm(mem_emb)
-                )
-                similarities.append(similarity)
-
-            sorted_indices = np.argsort(similarities)[::-1]
-            return [all_memories[i] for i in sorted_indices[:max_memories]]
-
-        except Exception as e:
-            print(colored(
-                f"RAG search failed, using recent: {e}",
-                "yellow"
-            ))
-
-    return all_memories[-max_memories:]
-
-
-def search_kg_facts(
-    self,
-    npc: str,
-    team: str,
-    directory_path: str,
-    query: str
-) -> List[Dict]:
-
-    kg = load_kg_from_db(
-        self.engine,
-        team,
-        npc,
-        directory_path
-    )
-
-    if not kg or 'facts' not in kg:
-        return []
-
-    query_lower = query.lower()
-    matching_facts = []
-
-    for fact in kg['facts']:
-        statement = fact.get('statement', '').lower()
-        if query_lower in statement:
-            matching_facts.append(fact)
-
-    return matching_facts
-
-def format_memory_context(memory_examples):
-    if not memory_examples:
-        return ""
-
-    context_parts = []
-
-    approved_examples = memory_examples.get("approved", [])
-    rejected_examples = memory_examples.get("rejected", [])
-
-    if approved_examples:
-        context_parts.append("EXAMPLES OF GOOD MEMORIES:")
-        for ex in approved_examples[:5]:
-            final = ex.get("final_memory") or ex.get("initial_memory")
-            context_parts.append(f"- {final}")
-
-    if rejected_examples:
-        context_parts.append("\nEXAMPLES OF POOR MEMORIES TO AVOID:")
-        for ex in rejected_examples[:3]:
-            context_parts.append(f"- {ex.get('initial_memory')}")
-
-    if context_parts:
-        context_parts.append("\nLearn from these examples to generate similar high-quality memories.")
-        return "\n".join(context_parts)
-
-    return ""
 
 def process_memory_approvals(command_history, memory_queue):
    pending_memories = memory_queue.get_approval_batch(max_items=5)