npcpy 1.2.17__py3-none-any.whl → 1.2.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- npcpy/ft/memory_trainer.py +7 -4
- npcpy/memory/memory_processor.py +0 -90
- npcpy/npc_compiler.py +187 -18
- {npcpy-1.2.17.dist-info → npcpy-1.2.19.dist-info}/METADATA +1 -1
- {npcpy-1.2.17.dist-info → npcpy-1.2.19.dist-info}/RECORD +8 -8
- {npcpy-1.2.17.dist-info → npcpy-1.2.19.dist-info}/WHEEL +0 -0
- {npcpy-1.2.17.dist-info → npcpy-1.2.19.dist-info}/licenses/LICENSE +0 -0
- {npcpy-1.2.17.dist-info → npcpy-1.2.19.dist-info}/top_level.txt +0 -0
npcpy/ft/memory_trainer.py
CHANGED
@@ -1,7 +1,10 @@
-
-
-
-
+try:
+    from torch.utils.data import Dataset
+    import torch
+    import torch.nn as nn
+    from transformers import AutoTokenizer, AutoModelForSequenceClassification, Trainer, TrainingArguments
+except:
+    pass
 import json
 from typing import List, Dict, Tuple
 import random
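The new guard means npcpy/ft/memory_trainer.py can be imported on systems without torch or transformers installed; the heavy dependencies fail silently instead of raising ImportError at import time. A minimal sketch of the same optional-dependency pattern, not npcpy's actual code: it narrows the bare except to ImportError and adds a hypothetical TORCH_AVAILABLE flag (an assumption, not part of npcpy) that callers can check before using the training path.

# optional_torch.py -- illustrative only; the flag name is an assumption, not npcpy API
try:
    import torch
    from transformers import AutoTokenizer  # heavy, optional dependency
    TORCH_AVAILABLE = True
except ImportError:
    TORCH_AVAILABLE = False

def train_or_skip():
    # Degrade gracefully when the optional stack is missing.
    if not TORCH_AVAILABLE:
        print("torch/transformers not installed; skipping fine-tuning")
        return None
    return torch.zeros(1)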
npcpy/memory/memory_processor.py
CHANGED
@@ -17,96 +17,6 @@ class MemoryItem:
     model: str
     provider: str

-class MemoryApprovalQueue:
-    def __init__(self, command_history):
-        self.command_history = command_history
-        self.pending_queue = queue.Queue()
-        self.approval_results = queue.Queue()
-        self.processing_thread = None
-        self.running = False
-
-    def add_memory(self, memory_item: MemoryItem):
-        """Add memory to processing queue (non-blocking)"""
-        self.pending_queue.put(memory_item)
-
-    def start_background_processing(self):
-        """Start background thread for memory processing"""
-        if self.processing_thread and self.processing_thread.is_alive():
-            return
-
-        self.running = True
-        self.processing_thread = threading.Thread(target=self._process_queue)
-        self.processing_thread.daemon = True
-        self.processing_thread.start()
-
-    def _process_queue(self):
-        """Background processing of memory queue"""
-        while self.running:
-            try:
-
-                batch = []
-                try:
-
-                    memory = self.pending_queue.get(timeout=1.0)
-                    batch.append(memory)
-
-
-                    while len(batch) < 10:
-                        try:
-                            memory = self.pending_queue.get_nowait()
-                            batch.append(memory)
-                        except queue.Empty:
-                            break
-
-                    self._process_memory_batch(batch)
-
-                except queue.Empty:
-                    continue
-
-            except Exception as e:
-                print(f"Error in memory processing: {e}")
-                time.sleep(1)
-
-    def _process_memory_batch(self, memories: List[MemoryItem]):
-        """Process a batch of memories"""
-        for memory in memories:
-
-            memory_id = self.command_history.add_memory_to_database(
-                message_id=memory.message_id,
-                conversation_id=memory.conversation_id,
-                npc=memory.npc,
-                team=memory.team,
-                directory_path=memory.directory_path,
-                initial_memory=memory.content,
-                status="pending_approval",
-                model=memory.model,
-                provider=memory.provider
-            )
-
-
-            self.approval_results.put({
-                "memory_id": memory_id,
-                "content": memory.content,
-                "context": memory.context,
-                "npc": memory.npc
-            })
-
-    def get_approval_batch(self, max_items: int = 5) -> List[Dict]:
-        """Get batch of memories ready for approval"""
-        batch = []
-        try:
-            while len(batch) < max_items:
-                item = self.approval_results.get_nowait()
-                batch.append(item)
-        except queue.Empty:
-            pass
-        return batch
-
-    def stop_processing(self):
-        """Stop background processing"""
-        self.running = False
-        if self.processing_thread:
-            self.processing_thread.join(timeout=2.0)

 def memory_approval_ui(memories: List[Dict]) -> List[Dict]:
     """Simple CLI interface for memory approval"""
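The removed MemoryApprovalQueue buffered MemoryItems on a daemon thread and flushed them in batches of up to ten into command_history.add_memory_to_database with status "pending_approval". A hedged sketch of writing a pending memory directly, which is roughly what the removed _process_memory_batch did per item; it assumes add_memory_to_database keeps the keyword signature shown above and is illustrative only, not the replacement npcpy ships.

# Sketch only: assumes command_history.add_memory_to_database keeps the
# keyword arguments used by the removed _process_memory_batch().
def queue_memory_for_approval(command_history, memory):
    """Write one MemoryItem straight to the database as pending approval,
    without the removed background-thread batching."""
    return command_history.add_memory_to_database(
        message_id=memory.message_id,
        conversation_id=memory.conversation_id,
        npc=memory.npc,
        team=memory.team,
        directory_path=memory.directory_path,
        initial_memory=memory.content,
        status="pending_approval",
        model=memory.model,
        provider=memory.provider,
    )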
npcpy/npc_compiler.py
CHANGED
@@ -26,7 +26,7 @@ from npcpy.npc_sysenv import (
     get_system_message,

 )
-from npcpy.memory.command_history import CommandHistory
+from npcpy.memory.command_history import CommandHistory, generate_message_id

 class SilentUndefined(Undefined):
     def _fail_with_undefined_error(self, *args, **kwargs):
@@ -498,7 +498,7 @@ def load_jinxs_from_directory(directory):
     return jinxs

 def get_npc_action_space(npc=None, team=None):
-    """Get action space for NPC including memory
+    """Get action space for NPC including memory CRUD and core capabilities"""
     actions = DEFAULT_ACTION_SPACE.copy()

     if npc:
@@ -510,24 +510,35 @@ def get_npc_action_space(npc=None, team=None):
         if npc.command_history:
             core_tools.extend([
                 npc.search_my_conversations,
-                npc.search_my_memories
+                npc.search_my_memories,
+                npc.create_memory,
+                npc.read_memory,
+                npc.update_memory,
+                npc.delete_memory,
+                npc.search_memories,
+                npc.get_all_memories,
+                npc.archive_old_memories,
+                npc.get_memory_stats
             ])

         if npc.db_conn:
             core_tools.append(npc.query_database)

         if hasattr(npc, 'tools') and npc.tools:
+            core_tools.extend([func for func in npc.tool_map.values() if callable(func)])
+
+        if core_tools:
             tools_schema, tool_map = auto_tools(core_tools)
             actions.update({
                 f"use_{tool.__name__}": {
-                    "description":
+                    "description": f"Use {tool.__name__} capability",
                     "handler": tool,
                     "context": lambda **_: f"Available as automated capability",
                     "output_keys": {"result": {"description": "Tool execution result", "type": "string"}}
                 }
                 for tool in core_tools
             })
-
+
     if team and hasattr(team, 'npcs') and len(team.npcs) > 1:
         available_npcs = [name for name in team.npcs.keys() if name != (npc.name if npc else None)]
@@ -537,13 +548,11 @@ def get_npc_action_space(npc=None, team=None):
             return agent_pass_handler(command, extracted_data, **kwargs)

         actions["pass_to_npc"] = {
-            "description": "Pass
+            "description": "Pass request to another NPC - only when task requires their specific expertise",
             "handler": team_aware_handler,
             "context": lambda npc=npc, team=team, **_: (
-                f"Use this SPARINGLY when the request absolutely requires another team member's expertise. "
                 f"Available NPCs: {', '.join(available_npcs)}. "
-                f"
-                f"Only pass when you genuinely cannot complete the task due to lack of domain expertise."
+                f"Only pass when you genuinely cannot complete the task."
             ),
             "output_keys": {
                 "target_npc": {
@@ -556,8 +565,6 @@ def get_npc_action_space(npc=None, team=None):
     return actions


-
-
 def extract_jinx_inputs(args: List[str], jinx: Jinx) -> Dict[str, Any]:
     inputs = {}

@@ -1074,14 +1081,14 @@ class NPC:

     def get_llm_response(self,
                          request,
-                         jinxs=
+                         jinxs=None,
                          tools=None,
-                         tool_map=
+                         tool_map=None,
                          tool_choice=None,
-                         messages
-                         auto_process_tool_calls
+                         messages=None,
+                         auto_process_tool_calls=True,
                          **kwargs):
-        """Get
+        """Get response from LLM with automatic tool integration including memory CRUD"""

         if tools is None and tool_map is None and tool_choice is None:
             core_tools = [
@@ -1092,7 +1099,15 @@ class NPC:
         if self.command_history:
             core_tools.extend([
                 self.search_my_conversations,
-                self.search_my_memories
+                self.search_my_memories,
+                self.create_memory,
+                self.read_memory,
+                self.update_memory,
+                self.delete_memory,
+                self.search_memories,
+                self.get_all_memories,
+                self.archive_old_memories,
+                self.get_memory_stats
             ])

         if self.db_conn:
@@ -1122,6 +1137,7 @@ class NPC:
         )

         return response
+



@@ -1674,7 +1690,7 @@ class NPC:



-    def execute_jinx_command(
+    def execute_jinx_command(self,
         jinx: Jinx,
         args: List[str],
         messages=None,
@@ -1695,6 +1711,159 @@ class NPC:
         )

         return {"messages": messages, "output": jinx_output}
+    def create_memory(self, content: str, memory_type: str = "observation") -> Optional[int]:
+        """Create a new memory entry"""
+        if not self.command_history:
+            return None
+
+        message_id = generate_message_id()
+        conversation_id = self.command_history.get_most_recent_conversation_id()
+        conversation_id = conversation_id.get('conversation_id') if conversation_id else 'direct_memory'
+
+        team_name = getattr(self.team, 'name', 'default_team') if self.team else 'default_team'
+        directory_path = os.getcwd()
+
+        return self.command_history.add_memory_to_database(
+            message_id=message_id,
+            conversation_id=conversation_id,
+            npc=self.name,
+            team=team_name,
+            directory_path=directory_path,
+            initial_memory=content,
+            status='active',
+            model=self.model,
+            provider=self.provider
+        )
+
+    def read_memory(self, memory_id: int) -> Optional[Dict[str, Any]]:
+        """Read a specific memory by ID"""
+        if not self.command_history:
+            return None
+
+        stmt = "SELECT * FROM memory_lifecycle WHERE id = :memory_id"
+        return self.command_history._fetch_one(stmt, {"memory_id": memory_id})
+
+    def update_memory(self, memory_id: int, new_content: str = None, status: str = None) -> bool:
+        """Update memory content or status"""
+        if not self.command_history:
+            return False
+
+        updates = []
+        params = {"memory_id": memory_id}
+
+        if new_content is not None:
+            updates.append("final_memory = :final_memory")
+            params["final_memory"] = new_content
+
+        if status is not None:
+            updates.append("status = :status")
+            params["status"] = status
+
+        if not updates:
+            return False
+
+        stmt = f"UPDATE memory_lifecycle SET {', '.join(updates)} WHERE id = :memory_id"
+
+        try:
+            with self.command_history.engine.begin() as conn:
+                conn.execute(text(stmt), params)
+            return True
+        except Exception as e:
+            print(f"Error updating memory {memory_id}: {e}")
+            return False
+
+    def delete_memory(self, memory_id: int) -> bool:
+        """Delete a memory by ID"""
+        if not self.command_history:
+            return False
+
+        stmt = "DELETE FROM memory_lifecycle WHERE id = :memory_id AND npc = :npc"
+
+        try:
+            with self.command_history.engine.begin() as conn:
+                result = conn.execute(text(stmt), {"memory_id": memory_id, "npc": self.name})
+            return result.rowcount > 0
+        except Exception as e:
+            print(f"Error deleting memory {memory_id}: {e}")
+            return False
+
+    def search_memories(self, query: str, limit: int = 10, status_filter: str = None) -> List[Dict[str, Any]]:
+        """Search memories with optional status filtering"""
+        if not self.command_history:
+            return []
+
+        team_name = getattr(self.team, 'name', 'default_team') if self.team else 'default_team'
+        directory_path = os.getcwd()
+
+        return self.command_history.search_memory(
+            query=query,
+            npc=self.name,
+            team=team_name,
+            directory_path=directory_path,
+            status_filter=status_filter,
+            limit=limit
+        )
+
+    def get_all_memories(self, limit: int = 50, status_filter: str = None) -> List[Dict[str, Any]]:
+        """Get all memories for this NPC with optional status filtering"""
+        if not self.command_history:
+            return []
+
+        if limit is None:
+            limit = 50
+
+        conditions = ["npc = :npc"]
+        params = {"npc": self.name, "limit": limit}
+
+        if status_filter:
+            conditions.append("status = :status")
+            params["status"] = status_filter
+
+        stmt = f"""
+            SELECT * FROM memory_lifecycle
+            WHERE {' AND '.join(conditions)}
+            ORDER BY created_at DESC
+            LIMIT :limit
+        """
+
+        return self.command_history._fetch_all(stmt, params)
+
+
+    def archive_old_memories(self, days_old: int = 30) -> int:
+        """Archive memories older than specified days"""
+        if not self.command_history:
+            return 0
+
+        stmt = """
+            UPDATE memory_lifecycle
+            SET status = 'archived'
+            WHERE npc = :npc
+            AND status = 'active'
+            AND datetime(created_at) < datetime('now', '-{} days')
+        """.format(days_old)
+
+        try:
+            with self.command_history.engine.begin() as conn:
+                result = conn.execute(text(stmt), {"npc": self.name})
+            return result.rowcount
+        except Exception as e:
+            print(f"Error archiving memories: {e}")
+            return 0
+
+    def get_memory_stats(self) -> Dict[str, int]:
+        """Get memory statistics for this NPC"""
+        if not self.command_history:
+            return {}
+
+        stmt = """
+            SELECT status, COUNT(*) as count
+            FROM memory_lifecycle
+            WHERE npc = :npc
+            GROUP BY status
+        """
+
+        results = self.command_history._fetch_all(stmt, {"npc": self.name})
+        return {row['status']: row['count'] for row in results}


 class Team:
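The bulk of this file's change is the new memory CRUD surface on NPC (create_memory, read_memory, update_memory, delete_memory, search_memories, get_all_memories, archive_old_memories, get_memory_stats), which get_npc_action_space and get_llm_response now also register as callable tools. A hedged usage sketch based only on the method signatures above: the constructor arguments are assumptions, and each method falls back to None, False, or an empty collection when the NPC has no command_history configured.

# Illustrative only -- constructor args are guesses; method calls follow the diffed signatures.
from npcpy.npc_compiler import NPC

npc = NPC(name="sibiji", model="llama3.2", provider="ollama")  # hypothetical init
mem_id = npc.create_memory("User prefers concise answers")     # row id, or None without command_history
if mem_id is not None:
    print(npc.read_memory(mem_id))                              # dict for that memory_lifecycle row
    npc.update_memory(mem_id, status="archived")                # update content and/or status
    print(npc.search_memories("concise", limit=5))              # matching rows for this NPC
    print(npc.get_memory_stats())                               # e.g. {"active": 3, "archived": 1}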
{npcpy-1.2.17.dist-info → npcpy-1.2.19.dist-info}/RECORD
CHANGED
@@ -1,7 +1,7 @@
 npcpy/__init__.py,sha256=9imxFtK74_6Rw9rz0kyMnZYl_voPb569tkTlYLt0Urg,131
 npcpy/llm_funcs.py,sha256=tvcZuQEcIUJClwEJQXBF6ArEVjSuXt1jAcZOcnYWsVQ,85101
 npcpy/main.py,sha256=RWoRIj6VQLxKdOKvdVyaq2kwG35oRpeXPvp1CAAoG-w,81
-npcpy/npc_compiler.py,sha256=
+npcpy/npc_compiler.py,sha256=BpNlrjwkxhERTrFeFtvv9CUqzULoD2JQuEwRtqwQHLY,92107
 npcpy/npc_sysenv.py,sha256=lPYlKM_TeR4l4-Jcgiqq3CCge8b2oFHdfISD4L_G7eo,30308
 npcpy/npcs.py,sha256=eExuVsbTfrRobTRRptRpDm46jCLWUgbvy4_U7IUQo-c,744
 npcpy/serve.py,sha256=RPYT3ZMu-OmO6dg3Ss04wZQNlZQive5cm2UPUbtToV0,100271
@@ -17,7 +17,7 @@ npcpy/data/web.py,sha256=ARGoVKUlQmaiX0zJbSvvFmRCwOv_Z7Pcan9c5GxYObQ,5117
 npcpy/ft/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 npcpy/ft/diff.py,sha256=R3Qo6v0-6M1iI0wiXhUzyuYI2ja0q_0i9bE0z3coxzU,28
 npcpy/ft/ge.py,sha256=my5LtGyVTT40V0i1h9FR-tFFA1FHSga-PeCCgUX1UUI,61
-npcpy/ft/memory_trainer.py,sha256=
+npcpy/ft/memory_trainer.py,sha256=410BtNj308c7V45E809ILbDjCnVRy7n0mdIp2DKOCNY,5904
 npcpy/ft/rl.py,sha256=l3RUkEJe4b2yB6pildveu2LJymtNq0F17COwf_CCq3U,34
 npcpy/ft/sft.py,sha256=i4ENygRPArbLWN4XZZuBnPWaehs8M-J68JB_mewGJHI,62
 npcpy/gen/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -30,7 +30,7 @@ npcpy/memory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 npcpy/memory/command_history.py,sha256=Ww7vZTSjQDuElQXuOjsvu7NTljOLAg07QIFrfKARpVg,45562
 npcpy/memory/kg_vis.py,sha256=TrQQCRh_E7Pyr-GPAHLSsayubAfGyf4HOEFrPB6W86Q,31280
 npcpy/memory/knowledge_graph.py,sha256=2XpIlsyPdAOnzQ6kkwP6MWPGwL3P6V33_3suNJYMMJE,48681
-npcpy/memory/memory_processor.py,sha256=
+npcpy/memory/memory_processor.py,sha256=bLfzT-uDgwNegs1hVBqW3Hl2fYtdmFQbdc5To_f4i5E,2106
 npcpy/memory/search.py,sha256=glN6WYzaixcoDphTEHAXSMX3vKZGjR12Jx9YVL_gYfE,18433
 npcpy/mix/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 npcpy/mix/debate.py,sha256=lQXxC7nl6Rwyf7HIYrsVQILMUmYYx55Tjt2pkTg56qY,9019
@@ -41,8 +41,8 @@ npcpy/work/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 npcpy/work/desktop.py,sha256=F3I8mUtJp6LAkXodsh8hGZIncoads6c_2Utty-0EdDA,2986
 npcpy/work/plan.py,sha256=QyUwg8vElWiHuoS-xK4jXTxxHvkMD3VkaCEsCmrEPQk,8300
 npcpy/work/trigger.py,sha256=P1Y8u1wQRsS2WACims_2IdkBEar-iBQix-2TDWoW0OM,9948
-npcpy-1.2.
-npcpy-1.2.
-npcpy-1.2.
-npcpy-1.2.
-npcpy-1.2.
+npcpy-1.2.19.dist-info/licenses/LICENSE,sha256=j0YPvce7Ng9e32zYOu0EmXjXeJ0Nwawd0RA3uSGGH4E,1070
+npcpy-1.2.19.dist-info/METADATA,sha256=dnJPjMvml-0yrEJXUt9g-Rf-UT9mIXuQR3HJZzYgm18,26084
+npcpy-1.2.19.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+npcpy-1.2.19.dist-info/top_level.txt,sha256=g1pbSvrOOncB74Bg5-J0Olg4V0A5VzDw-Xz5YObq8BU,6
+npcpy-1.2.19.dist-info/RECORD,,

{npcpy-1.2.17.dist-info → npcpy-1.2.19.dist-info}/WHEEL, /licenses/LICENSE, /top_level.txt: files without changes.
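Each RECORD line above has the form "path,sha256=<digest>,<size in bytes>", where the digest is the file's SHA-256 hash encoded as urlsafe base64 with trailing padding stripped (the wheel/PEP 376 convention). A small sketch for recomputing one entry from a locally extracted wheel, to spot-check the hashes shown; the file path is just an example.

import base64, hashlib, os

def record_entry(path):
    # Hash the file and format the digest the way wheel RECORD files do:
    # urlsafe base64 with trailing '=' padding removed.
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).digest()
    b64 = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
    return f"{path},sha256={b64},{os.path.getsize(path)}"

print(record_entry("npcpy/npc_compiler.py"))  # compare against the RECORD line above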