npcpy-1.0.26-py3-none-any.whl → npcpy-1.2.32-py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- npcpy/__init__.py +0 -7
- npcpy/data/audio.py +16 -99
- npcpy/data/image.py +43 -42
- npcpy/data/load.py +83 -124
- npcpy/data/text.py +28 -28
- npcpy/data/video.py +8 -32
- npcpy/data/web.py +51 -23
- npcpy/ft/diff.py +110 -0
- npcpy/ft/ge.py +115 -0
- npcpy/ft/memory_trainer.py +171 -0
- npcpy/ft/model_ensembler.py +357 -0
- npcpy/ft/rl.py +360 -0
- npcpy/ft/sft.py +248 -0
- npcpy/ft/usft.py +128 -0
- npcpy/gen/audio_gen.py +24 -0
- npcpy/gen/embeddings.py +13 -13
- npcpy/gen/image_gen.py +262 -117
- npcpy/gen/response.py +615 -415
- npcpy/gen/video_gen.py +53 -7
- npcpy/llm_funcs.py +1869 -437
- npcpy/main.py +1 -1
- npcpy/memory/command_history.py +844 -510
- npcpy/memory/kg_vis.py +833 -0
- npcpy/memory/knowledge_graph.py +892 -1845
- npcpy/memory/memory_processor.py +81 -0
- npcpy/memory/search.py +188 -90
- npcpy/mix/debate.py +192 -3
- npcpy/npc_compiler.py +1672 -801
- npcpy/npc_sysenv.py +593 -1266
- npcpy/serve.py +3120 -0
- npcpy/sql/ai_function_tools.py +257 -0
- npcpy/sql/database_ai_adapters.py +186 -0
- npcpy/sql/database_ai_functions.py +163 -0
- npcpy/sql/model_runner.py +19 -19
- npcpy/sql/npcsql.py +706 -507
- npcpy/sql/sql_model_compiler.py +156 -0
- npcpy/tools.py +183 -0
- npcpy/work/plan.py +13 -279
- npcpy/work/trigger.py +3 -3
- npcpy-1.2.32.dist-info/METADATA +803 -0
- npcpy-1.2.32.dist-info/RECORD +54 -0
- npcpy/data/dataframes.py +0 -171
- npcpy/memory/deep_research.py +0 -125
- npcpy/memory/sleep.py +0 -557
- npcpy/modes/_state.py +0 -78
- npcpy/modes/alicanto.py +0 -1075
- npcpy/modes/guac.py +0 -785
- npcpy/modes/mcp_npcsh.py +0 -822
- npcpy/modes/npc.py +0 -213
- npcpy/modes/npcsh.py +0 -1158
- npcpy/modes/plonk.py +0 -409
- npcpy/modes/pti.py +0 -234
- npcpy/modes/serve.py +0 -1637
- npcpy/modes/spool.py +0 -312
- npcpy/modes/wander.py +0 -549
- npcpy/modes/yap.py +0 -572
- npcpy/npc_team/alicanto.npc +0 -2
- npcpy/npc_team/alicanto.png +0 -0
- npcpy/npc_team/assembly_lines/test_pipeline.py +0 -181
- npcpy/npc_team/corca.npc +0 -13
- npcpy/npc_team/foreman.npc +0 -7
- npcpy/npc_team/frederic.npc +0 -6
- npcpy/npc_team/frederic4.png +0 -0
- npcpy/npc_team/guac.png +0 -0
- npcpy/npc_team/jinxs/automator.jinx +0 -18
- npcpy/npc_team/jinxs/bash_executer.jinx +0 -31
- npcpy/npc_team/jinxs/calculator.jinx +0 -11
- npcpy/npc_team/jinxs/edit_file.jinx +0 -96
- npcpy/npc_team/jinxs/file_chat.jinx +0 -14
- npcpy/npc_team/jinxs/gui_controller.jinx +0 -28
- npcpy/npc_team/jinxs/image_generation.jinx +0 -29
- npcpy/npc_team/jinxs/internet_search.jinx +0 -30
- npcpy/npc_team/jinxs/local_search.jinx +0 -152
- npcpy/npc_team/jinxs/npcsh_executor.jinx +0 -31
- npcpy/npc_team/jinxs/python_executor.jinx +0 -8
- npcpy/npc_team/jinxs/screen_cap.jinx +0 -25
- npcpy/npc_team/jinxs/sql_executor.jinx +0 -33
- npcpy/npc_team/kadiefa.npc +0 -3
- npcpy/npc_team/kadiefa.png +0 -0
- npcpy/npc_team/npcsh.ctx +0 -9
- npcpy/npc_team/npcsh_sibiji.png +0 -0
- npcpy/npc_team/plonk.npc +0 -2
- npcpy/npc_team/plonk.png +0 -0
- npcpy/npc_team/plonkjr.npc +0 -2
- npcpy/npc_team/plonkjr.png +0 -0
- npcpy/npc_team/sibiji.npc +0 -5
- npcpy/npc_team/sibiji.png +0 -0
- npcpy/npc_team/spool.png +0 -0
- npcpy/npc_team/templates/analytics/celona.npc +0 -0
- npcpy/npc_team/templates/hr_support/raone.npc +0 -0
- npcpy/npc_team/templates/humanities/eriane.npc +0 -4
- npcpy/npc_team/templates/it_support/lineru.npc +0 -0
- npcpy/npc_team/templates/marketing/slean.npc +0 -4
- npcpy/npc_team/templates/philosophy/maurawa.npc +0 -0
- npcpy/npc_team/templates/sales/turnic.npc +0 -4
- npcpy/npc_team/templates/software/welxor.npc +0 -0
- npcpy/npc_team/yap.png +0 -0
- npcpy/routes.py +0 -958
- npcpy/work/mcp_helpers.py +0 -357
- npcpy/work/mcp_server.py +0 -194
- npcpy-1.0.26.data/data/npcpy/npc_team/alicanto.npc +0 -2
- npcpy-1.0.26.data/data/npcpy/npc_team/alicanto.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/automator.jinx +0 -18
- npcpy-1.0.26.data/data/npcpy/npc_team/bash_executer.jinx +0 -31
- npcpy-1.0.26.data/data/npcpy/npc_team/calculator.jinx +0 -11
- npcpy-1.0.26.data/data/npcpy/npc_team/celona.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/corca.npc +0 -13
- npcpy-1.0.26.data/data/npcpy/npc_team/edit_file.jinx +0 -96
- npcpy-1.0.26.data/data/npcpy/npc_team/eriane.npc +0 -4
- npcpy-1.0.26.data/data/npcpy/npc_team/file_chat.jinx +0 -14
- npcpy-1.0.26.data/data/npcpy/npc_team/foreman.npc +0 -7
- npcpy-1.0.26.data/data/npcpy/npc_team/frederic.npc +0 -6
- npcpy-1.0.26.data/data/npcpy/npc_team/frederic4.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/guac.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/gui_controller.jinx +0 -28
- npcpy-1.0.26.data/data/npcpy/npc_team/image_generation.jinx +0 -29
- npcpy-1.0.26.data/data/npcpy/npc_team/internet_search.jinx +0 -30
- npcpy-1.0.26.data/data/npcpy/npc_team/kadiefa.npc +0 -3
- npcpy-1.0.26.data/data/npcpy/npc_team/kadiefa.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/lineru.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/local_search.jinx +0 -152
- npcpy-1.0.26.data/data/npcpy/npc_team/maurawa.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/npcsh.ctx +0 -9
- npcpy-1.0.26.data/data/npcpy/npc_team/npcsh_executor.jinx +0 -31
- npcpy-1.0.26.data/data/npcpy/npc_team/npcsh_sibiji.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/plonk.npc +0 -2
- npcpy-1.0.26.data/data/npcpy/npc_team/plonk.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/plonkjr.npc +0 -2
- npcpy-1.0.26.data/data/npcpy/npc_team/plonkjr.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/python_executor.jinx +0 -8
- npcpy-1.0.26.data/data/npcpy/npc_team/raone.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/screen_cap.jinx +0 -25
- npcpy-1.0.26.data/data/npcpy/npc_team/sibiji.npc +0 -5
- npcpy-1.0.26.data/data/npcpy/npc_team/sibiji.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/slean.npc +0 -4
- npcpy-1.0.26.data/data/npcpy/npc_team/spool.png +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/sql_executor.jinx +0 -33
- npcpy-1.0.26.data/data/npcpy/npc_team/test_pipeline.py +0 -181
- npcpy-1.0.26.data/data/npcpy/npc_team/turnic.npc +0 -4
- npcpy-1.0.26.data/data/npcpy/npc_team/welxor.npc +0 -0
- npcpy-1.0.26.data/data/npcpy/npc_team/yap.png +0 -0
- npcpy-1.0.26.dist-info/METADATA +0 -827
- npcpy-1.0.26.dist-info/RECORD +0 -139
- npcpy-1.0.26.dist-info/entry_points.txt +0 -11
- npcpy/{modes → ft}/__init__.py +0 -0
- {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/WHEEL +0 -0
- {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/licenses/LICENSE +0 -0
- {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/top_level.txt +0 -0
npcpy-1.2.32.dist-info/RECORD
ADDED
@@ -0,0 +1,54 @@
+npcpy/__init__.py,sha256=9imxFtK74_6Rw9rz0kyMnZYl_voPb569tkTlYLt0Urg,131
+npcpy/llm_funcs.py,sha256=qC-WmNvUl3YAS1u-xPXh1YDlr2e9cv80_wXK6wCr7TA,85546
+npcpy/main.py,sha256=RWoRIj6VQLxKdOKvdVyaq2kwG35oRpeXPvp1CAAoG-w,81
+npcpy/npc_compiler.py,sha256=j3JYZPKPLi42HAEA_i3Cp5GBGGUcpzBk8OEzZEvxzY4,89458
+npcpy/npc_sysenv.py,sha256=t9AswM-9_P2NaGsnlzTMc2hUfdSthi9ofbud6F1G7LM,35974
+npcpy/npcs.py,sha256=eExuVsbTfrRobTRRptRpDm46jCLWUgbvy4_U7IUQo-c,744
+npcpy/serve.py,sha256=P01tYsY1ctq408nn-t3sLPGuGJg5KoaApy4gNECDRgo,118007
+npcpy/tools.py,sha256=A5_oVmZkzGnI3BI-NmneuxeXQq-r29PbpAZP4nV4jrc,5303
+npcpy/data/__init__.py,sha256=1tcoChR-Hjn905JDLqaW9ElRmcISCTJdE7BGXPlym2Q,642
+npcpy/data/audio.py,sha256=goon4HfsYgx0bI-n1lhkrzWPrJoejJlycXcB0P62pyk,11280
+npcpy/data/data_models.py,sha256=q7xpI4_nK5HvlOE1XB5u5nFQs4SE5zcgt0kIZJF2dhs,682
+npcpy/data/image.py,sha256=UQcioNPDd5HYMLL_KStf45SuiIPXDcUY-dEFHwSWUeE,6564
+npcpy/data/load.py,sha256=7Ay-TYNhCvjJLwdQ5qAgxXSrGwow9ZrazHFVPqMw_cI,4274
+npcpy/data/text.py,sha256=jP0a1qZZaSJdK-LdZTn2Jjdxqmkd3efxDLEoxflJQeY,5010
+npcpy/data/video.py,sha256=aPUgj0fA_lFQ7Jf94-PutggCF4695FVCh3q5mnVthvI,574
+npcpy/data/web.py,sha256=ARGoVKUlQmaiX0zJbSvvFmRCwOv_Z7Pcan9c5GxYObQ,5117
+npcpy/ft/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+npcpy/ft/diff.py,sha256=wYFRY_2p-B5xVqO7NDyhJbjQsUt4PrwOfgpE1Icghmk,2906
+npcpy/ft/ge.py,sha256=0VzIiXq2wCzGcK1x0Wd-myJ3xRf-FNaPg0GkHEZegUM,3552
+npcpy/ft/memory_trainer.py,sha256=QZPznxEEwXbOGroHdMUMa5xpqlNwgV6nqOazI2xgrnQ,6635
+npcpy/ft/model_ensembler.py,sha256=BRX4hJ_rvF1vKTzjMhlahZqPttUgc3PqmzUJDqIfIps,10038
+npcpy/ft/rl.py,sha256=EcPD8t5MFg0zYWSS-A7KJ9bWd0qCTsL5SSvDxV556Z4,9245
+npcpy/ft/sft.py,sha256=74gRaJTTrZcO4np4DqRMr79ADkGhPcDKutR74rag03E,6659
+npcpy/ft/usft.py,sha256=O025GGYGZQf2ZVLowyAmBwh5bJyuy2dUAM6v03YcboY,3435
+npcpy/gen/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+npcpy/gen/audio_gen.py,sha256=w4toESu7nmli1T5FOwRRCGC_QK9W-SMWknYYkbRv9jE,635
+npcpy/gen/embeddings.py,sha256=QStTJ2ELiC379OEZsLEgGGIIFD267Y8zQchs7HRn2Zg,2089
+npcpy/gen/image_gen.py,sha256=mAlLG9jo9RnuuMU0jJVV0CpIgHqdizU9sfC6A0w5kKE,15599
+npcpy/gen/response.py,sha256=6iAOi4hxUxkTZ1d2suBUASOssT6pQnr3HFwZWrvmATg,31925
+npcpy/gen/video_gen.py,sha256=RFi3Zcq_Hn3HIcfoF3mijQ6G7RYFZaM_9pjPTh-8E64,3239
+npcpy/memory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+npcpy/memory/command_history.py,sha256=2VdmNW5VRpMrOkbdrMsgn5p3mvuJHNnzGHnIUEM8XMI,46279
+npcpy/memory/kg_vis.py,sha256=TrQQCRh_E7Pyr-GPAHLSsayubAfGyf4HOEFrPB6W86Q,31280
+npcpy/memory/knowledge_graph.py,sha256=2XpIlsyPdAOnzQ6kkwP6MWPGwL3P6V33_3suNJYMMJE,48681
+npcpy/memory/memory_processor.py,sha256=6PfVnSBA9ag5EhHJinXoODfEPTlDDoaT0PtCCuZO6HI,2598
+npcpy/memory/search.py,sha256=glN6WYzaixcoDphTEHAXSMX3vKZGjR12Jx9YVL_gYfE,18433
+npcpy/mix/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+npcpy/mix/debate.py,sha256=lQXxC7nl6Rwyf7HIYrsVQILMUmYYx55Tjt2pkTg56qY,9019
+npcpy/sql/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+npcpy/sql/ai_function_tools.py,sha256=ZCpjVHtaMRdL2dXxbQy5NhhjtPrVViGT1wyEl8ADrks,7755
+npcpy/sql/database_ai_adapters.py,sha256=CMlNGOhmJZhGB47RPvLIMqB61m_eYPVg1lwx42_b0jQ,6865
+npcpy/sql/database_ai_functions.py,sha256=XQCmaFOE1lNCnwrLTNpotYOlv6sx41bb8hxZI_sqpy8,6335
+npcpy/sql/model_runner.py,sha256=hJZ7hx2mwI-8DAh47Q6BwOsRjx30-HzebL4ajEUO4HA,5734
+npcpy/sql/npcsql.py,sha256=-PmV7AXSKwRog4gPHTeHzmvPrnDZOiccjgkUGv4DwEU,35614
+npcpy/sql/sql_model_compiler.py,sha256=G-0dpTlgzc-dXy9YEsdWGjO8xaQ3jFNbc6oUja1Ef4M,5364
+npcpy/work/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+npcpy/work/desktop.py,sha256=F3I8mUtJp6LAkXodsh8hGZIncoads6c_2Utty-0EdDA,2986
+npcpy/work/plan.py,sha256=QyUwg8vElWiHuoS-xK4jXTxxHvkMD3VkaCEsCmrEPQk,8300
+npcpy/work/trigger.py,sha256=P1Y8u1wQRsS2WACims_2IdkBEar-iBQix-2TDWoW0OM,9948
+npcpy-1.2.32.dist-info/licenses/LICENSE,sha256=j0YPvce7Ng9e32zYOu0EmXjXeJ0Nwawd0RA3uSGGH4E,1070
+npcpy-1.2.32.dist-info/METADATA,sha256=Bii6xZThq-8YL-AByfstXOd6xYhtzH4deF4QQZ44LAk,29895
+npcpy-1.2.32.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+npcpy-1.2.32.dist-info/top_level.txt,sha256=g1pbSvrOOncB74Bg5-J0Olg4V0A5VzDw-Xz5YObq8BU,6
+npcpy-1.2.32.dist-info/RECORD,,
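For reference, each RECORD entry above pairs a file with the urlsafe-base64 SHA-256 digest used by the wheel format (padding stripped). A minimal sketch of checking one entry against a file extracted from the 1.2.32 wheel; the path and expected digest are copied from the listing above and the snippet is illustrative, not part of npcpy:

import base64
import hashlib

def record_digest(path):
    # Wheel RECORD digests are the urlsafe base64 encoding of the raw
    # SHA-256 digest with trailing '=' padding stripped.
    with open(path, "rb") as f:
        raw = hashlib.sha256(f.read()).digest()
    return base64.urlsafe_b64encode(raw).rstrip(b"=").decode("ascii")

# e.g. record_digest("npcpy/tools.py") should equal
# "A5_oVmZkzGnI3BI-NmneuxeXQq-r29PbpAZP4nV4jrc" for the 1.2.32 wheel.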
npcpy/data/dataframes.py
DELETED
@@ -1,171 +0,0 @@
-## functions for dataframes
-import os
-import sqlite3
-import json
-import pandas as pd
-import numpy as np
-import io
-from PIL import Image
-from typing import Optional
-
-from npcpy.llm_funcs import get_llm_response
-
-# from npcpy.audio import process_audio
-# from npcpy.video import process_video
-
-from npcpy.data.load import (
-    load_pdf,
-    load_csv,
-    load_json,
-    load_excel,
-    load_txt,
-    load_image,
-)
-
-
-def load_data_into_table(
-    file_path: str, table_name: str, cursor: sqlite3.Cursor, conn: sqlite3.Connection
-) -> None:
-    """
-    Function Description:
-        This function is used to load data into a table.
-    Args:
-        file_path : str : The file path.
-        table_name : str : The table name.
-        cursor : sqlite3.Cursor : The SQLite cursor.
-        conn : sqlite3.Connection : The SQLite connection.
-    Keyword Args:
-        None
-    Returns:
-        None
-    """
-    try:
-        if not os.path.exists(file_path):
-            raise FileNotFoundError(f"File not found: {file_path}")
-
-        # Determine file type and load data
-        if file_path.endswith(".csv"):
-            df = pd.read_csv(file_path)
-        elif file_path.endswith(".pdf"):
-            df = load_pdf(file_path)
-        elif file_path.endswith((".txt", ".log", ".md")):
-            df = load_txt(file_path)
-        elif file_path.endswith((".xls", ".xlsx")):
-            df = load_excel(file_path)
-        elif file_path.lower().endswith(
-            (".png", ".jpg", ".jpeg", ".gif", ".bmp", ".tiff")
-        ):
-            # Handle images as NumPy arrays
-            df = load_image(file_path)
-        elif file_path.lower().endswith(
-            (".mp4", ".avi", ".mov", ".mkv")
-        ):  # Video files
-            video_frames, audio_array = process_video(file_path)
-            # Store video frames and audio
-            df = pd.DataFrame(
-                {
-                    "video_frames": [video_frames.tobytes()],
-                    "shape": [video_frames.shape],
-                    "dtype": [video_frames.dtype.str],
-                    "audio_array": (
-                        [audio_array.tobytes()] if audio_array is not None else None
-                    ),
-                    "audio_rate": [sr] if audio_array is not None else None,
-                }
-            )
-
-        elif file_path.lower().endswith((".mp3", ".wav", ".ogg")):  # Audio files
-            audio_array, sr = process_audio(file_path)
-            df = pd.DataFrame(
-                {
-                    "audio_array": [audio_array.tobytes()],
-                    "audio_rate": [sr],
-                }
-            )
-        else:
-            # Attempt to load as text if no other type matches
-            try:
-                df = load_txt(file_path)
-            except Exception as e:
-                print(f"Could not load file: {e}")
-                return
-
-        # Store DataFrame in the database
-        df.to_sql(table_name, conn, if_exists="replace", index=False)
-        print(f"Data from '{file_path}' loaded into table '{table_name}'")
-
-    except Exception as e:
-        raise e  # Re-raise the exception for handling in enter_observation_mode
-
-
-def create_new_table(cursor: sqlite3.Cursor, conn: sqlite3.Connection) -> None:
-    """
-    Function Description:
-        This function is used to create a new table.
-    Args:
-        cursor : sqlite3.Cursor : The SQLite cursor.
-        conn : sqlite3.Connection : The SQLite connection.
-    Keyword Args:
-        None
-    Returns:
-        None
-    """
-
-    table_name = input("Enter new table name: ").strip()
-    columns = input("Enter column names separated by commas: ").strip()
-
-    create_query = (
-        f"CREATE TABLE {table_name} (id INTEGER PRIMARY KEY AUTOINCREMENT, {columns})"
-    )
-    cursor.execute(create_query)
-    conn.commit()
-    print(f"Table '{table_name}' created successfully.")
-
-
-def delete_table(cursor: sqlite3.Cursor, conn: sqlite3.Connection) -> None:
-    """
-    Function Description:
-        This function is used to delete a table.
-    Args:
-        cursor : sqlite3.Cursor : The SQLite cursor.
-        conn : sqlite3.Connection : The SQLite connection.
-    Keyword Args:
-        None
-    Returns:
-        None
-    """
-
-    table_name = input("Enter table name to delete: ").strip()
-    cursor.execute(f"DROP TABLE IF EXISTS {table_name}")
-    conn.commit()
-    print(f"Table '{table_name}' deleted successfully.")
-
-
-def add_observation(
-    cursor: sqlite3.Cursor, conn: sqlite3.Connection, table_name: str
-) -> None:
-    """
-    Function Description:
-        This function is used to add an observation.
-    Args:
-        cursor : sqlite3.Cursor : The SQLite cursor.
-        conn : sqlite3.Connection : The SQLite connection.
-        table_name : str : The table name.
-    Keyword Args:
-        None
-    Returns:
-        None
-    """
-
-    cursor.execute(f"PRAGMA table_info({table_name})")
-    columns = [column[1] for column in cursor.fetchall() if column[1] != "id"]
-
-    values = []
-    for column in columns:
-        value = input(f"Enter value for {column}: ").strip()
-        values.append(value)
-
-    insert_query = f"INSERT INTO {table_name} ({','.join(columns)}) VALUES ({','.join(['?' for _ in columns])})"
-    cursor.execute(insert_query, values)
-    conn.commit()
-    print("Observation added successfully.")
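For context, a minimal sketch of how the removed load_data_into_table helper was called in 1.0.26; the database path, file name, and table name are assumptions, and the module no longer exists in 1.2.32:

import sqlite3

from npcpy.data.dataframes import load_data_into_table  # 1.0.26 only

# Open a local SQLite database and load a CSV into a table named "sales"
# (both names are hypothetical).
conn = sqlite3.connect("npc_data.db")
cursor = conn.cursor()
load_data_into_table("sales.csv", "sales", cursor, conn)
conn.close()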
npcpy/memory/deep_research.py
DELETED
@@ -1,125 +0,0 @@
-#deep_research
-import numpy as np
-from npcpy.npc_compiler import NPC
-
-from npcpy.memory.knowledge_graph import *
-import os
-
-from npcpy.data import sample_primary_directives
-
-def generate_random_npcs(num_npcs, model, provider):
-    """
-    Function Description:
-        This function generates a list of random NPCs.
-    Args:
-        num_npcs (int): The number of NPCs to generate.
-    Returns:
-        List[NPC]: A list of generated NPCs.
-    """
-    # Initialize the list of NPCs
-    npcs = []
-
-    # Generate the NPCs
-    for i, primary_directive in np.random.choice(sample_primary_directives, num_npcs):
-        npc = NPC(primary_directive=primary_directive,
-                  model=model,
-                  provider=provider,)
-
-        npcs.append(npc)
-    return npcs
-
-def generate_research_chain(request, npc, depth, memory=5, context=None):
-    """
-    Function Description:
-        This function generates a research chain for the given NPC.
-    Args:
-        npc (NPC): The NPC for which to generate the research chain.
-        depth (int): The depth of the research chain.
-        context (str, optional): Additional context for the research chain. Defaults to None.
-    Returns:
-        List[str]: A list of generated research chains.
-    """
-    chain = []
-    first_message = f'the user has requested that you research the following: {request}. Please begin providing a single specific question to ask. '
-    if context:
-        first_message += f'The user also provided this context: {context}'
-    summary, question_raised = npc.search_and_ask(first_message)
-    chain.append(first_message)
-    chain.append(summary)
-    chain.append(question_raised)
-
-
-
-    for i in range(depth):
-        memories = chain[-memory:]
-        next_message = "\n".join(memories) + 'Last Search Summary: ' + summary + '. New Question'
-
-        summary, question_raised = npc.search_and_ask(next_message)
-        chain.append(next_message)
-        chain.append(summary)
-        chain.append(question_raised)
-    return chain
-
-
-def prune_chains():
-    return
-
-
-
-# search and ask will have a check llm command more or less.
-def consolidate_research(chains, facts, groups, model, provider):
-    prompt = f'''
-    You are a research advisor reviewing the notes of your research assisitants who have been working on a request.
-    The results from their efforts are contained here:
-
-    {chains}
-
-    Please identify the 3 most common ideas, the 3 most unusual ideas, and the 3 most important ideas.
-
-
-    Provide your response as a json object with a list of json objects for "most_common_ideas", "most_unusual_ideas" and "most_important_ideas".
-
-    Each of those json objects within the sublists should be structured like so:
-    {{
-        'idea': 'the idea',
-        'source_npc': 'the name of the npc chain that provided this idea',
-        'supporting_links': [
-            'link1/to/local/file',
-            'link2/to/web/site',
-        ],
-        'supporting_evidence' : [
-            'script x was run by npc and verified this idea ',
-            'npc found evidence in site x y was run by npc and verified this idea ',
-        ]
-    }}
-
-    The links should be a list of links to the original sources of the information that were contained within the chains themselves.
-    The supporting evidence should be a list of the evidence that was used to support the idea.
-    '''
-    ideas = get_llm_response(prompt, model=model, provider=provider, format='json')
-    # build knowledge graph
-
-    groups = identify_groups(facts, model=model, provider=provider)
-
-    prompt = f'''
-    You are a research advisor reviewing the notes of your research assisitants who have been working on a request.
-    The results from their efforts are contained here:
-
-    {facts}
-
-    Additionally, we have already found some common ideas and have produced the following groups:
-    {groups}
-
-
-    Please identify the 3 most common ideas, the 3 most unusual ideas, and the 3 most important ideas.
-    Provide your response as a json object with 3 lists each containing 3 items.
-
-    '''
-    ideas_summarized = get_llm_response(prompt, model=model, provider=provider)
-
-    return ideas, ideas_summarized
-
-
-
-## ultimately wwell do the vector store in the main db. so when we eventually starti adding new facts well do so by checking similar facts
-# there and then if were doing the rag search well do a rag and then graph