npcpy 1.2.26.tar.gz → 1.2.27.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {npcpy-1.2.26/npcpy.egg-info → npcpy-1.2.27}/PKG-INFO +1 -1
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/gen/response.py +1 -1
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/npc_compiler.py +47 -298
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/npc_sysenv.py +3 -3
- npcpy-1.2.27/npcpy/sql/npcsql.py +804 -0
- {npcpy-1.2.26 → npcpy-1.2.27/npcpy.egg-info}/PKG-INFO +1 -1
- {npcpy-1.2.26 → npcpy-1.2.27}/setup.py +1 -1
- npcpy-1.2.26/npcpy/sql/npcsql.py +0 -377
- {npcpy-1.2.26 → npcpy-1.2.27}/LICENSE +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/MANIFEST.in +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/README.md +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/__init__.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/data/__init__.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/data/audio.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/data/data_models.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/data/image.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/data/load.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/data/text.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/data/video.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/data/web.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/ft/__init__.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/ft/diff.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/ft/ge.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/ft/memory_trainer.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/ft/model_ensembler.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/ft/rl.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/ft/sft.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/ft/usft.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/gen/__init__.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/gen/audio_gen.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/gen/embeddings.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/gen/image_gen.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/gen/video_gen.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/llm_funcs.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/main.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/memory/__init__.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/memory/command_history.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/memory/kg_vis.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/memory/knowledge_graph.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/memory/memory_processor.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/memory/search.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/mix/__init__.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/mix/debate.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/npcs.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/serve.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/sql/__init__.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/sql/ai_function_tools.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/sql/database_ai_adapters.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/sql/database_ai_functions.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/sql/model_runner.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/sql/sql_model_compiler.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/tools.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/work/__init__.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/work/desktop.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/work/plan.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy/work/trigger.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy.egg-info/SOURCES.txt +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy.egg-info/dependency_links.txt +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy.egg-info/requires.txt +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/npcpy.egg-info/top_level.txt +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/setup.cfg +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/tests/test_audio.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/tests/test_command_history.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/tests/test_image.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/tests/test_llm_funcs.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/tests/test_load.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/tests/test_npc_compiler.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/tests/test_npcsql.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/tests/test_response.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/tests/test_serve.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/tests/test_text.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/tests/test_tools.py +0 -0
- {npcpy-1.2.26 → npcpy-1.2.27}/tests/test_web.py +0 -0
npcpy/gen/response.py

@@ -561,7 +561,7 @@ def get_litellm_response(
     if provider =='enpisi' and api_url is None:
         api_params['api_base'] = 'https://api.enpisi.com'
         if api_key is None:
-            api_key = os.environ.get('
+            api_key = os.environ.get('NPC_STUDIO_LICENSE_KEY')
         api_params['api_key'] = api_key
     if '-npc' in model:
         model = model.split('-npc')[0]
npcpy/npc_compiler.py

@@ -1093,26 +1093,32 @@ class NPC:

         self.jinxs_dict = {jinx.jinx_name: jinx for jinx in npc_jinxs}
         return npc_jinxs
-
+
     def get_llm_response(self,
-
-
-
-
-
-
-
-
-
-
-
-
+                         request,
+                         jinxs=None,
+                         tools: Optional[list] = None,
+                         tool_map: Optional[dict] = None,
+                         tool_choice=None,
+                         messages=None,
+                         auto_process_tool_calls=True,
+                         use_core_tools: bool = False,
+                         **kwargs):
+        all_candidate_functions = []
+
+        if tools is not None and tool_map is not None:
+            all_candidate_functions.extend([func for func in tool_map.values() if callable(func)])
+        elif hasattr(self, 'tool_map') and self.tool_map:
+            all_candidate_functions.extend([func for func in self.tool_map.values() if callable(func)])
+
+        if use_core_tools:
+            dynamic_core_tools_list = [
                 self.think_step_by_step,
                 self.write_code
             ]
-
+
             if self.command_history:
-
+                dynamic_core_tools_list.extend([
                     self.search_my_conversations,
                     self.search_my_memories,
                     self.create_memory,

@@ -1124,35 +1130,44 @@ class NPC:
                     self.archive_old_memories,
                     self.get_memory_stats
                 ])
-
+
             if self.db_conn:
-
-
-
-
-
-
-
-
-
-
+                dynamic_core_tools_list.append(self.query_database)
+
+            all_candidate_functions.extend(dynamic_core_tools_list)
+
+        unique_functions = []
+        seen_names = set()
+        for func in all_candidate_functions:
+            if func.__name__ not in seen_names:
+                unique_functions.append(func)
+                seen_names.add(func.__name__)
+
+        final_tools_schema = None
+        final_tool_map_dict = None
+
+        if unique_functions:
+            final_tools_schema, final_tool_map_dict = auto_tools(unique_functions)
+
+        if tool_choice is None:
+            if final_tools_schema:
+                tool_choice = "auto"
+            else:
+                tool_choice = "none"

         response = npy.llm_funcs.get_llm_response(
             request,
-            model=self.model,
-            provider=self.provider,
             npc=self,
             jinxs=jinxs,
-            tools=
-            tool_map=
+            tools=final_tools_schema,
+            tool_map=final_tool_map_dict,
             tool_choice=tool_choice,
             auto_process_tool_calls=auto_process_tool_calls,
             messages=self.memory if messages is None else messages,
             **kwargs
         )
-
-        return response

+        return response


@@ -2383,269 +2398,3 @@ class Team:
         context_parts.append("")

         return "\n".join(context_parts)
-
-class Pipeline:
-    def __init__(self, pipeline_data=None, pipeline_path=None, npc_team=None):
-        """Initialize a pipeline from data or file path"""
-        self.npc_team = npc_team
-        self.steps = []
-
-        if pipeline_path:
-            self._load_from_path(pipeline_path)
-        elif pipeline_data:
-            self.name = pipeline_data.get("name", "unnamed_pipeline")
-            self.steps = pipeline_data.get("steps", [])
-        else:
-            raise ValueError("Either pipeline_data or pipeline_path must be provided")
-
-    def _load_from_path(self, path):
-        """Load pipeline from file"""
-        pipeline_data = load_yaml_file(path)
-        if not pipeline_data:
-            raise ValueError(f"Failed to load pipeline from {path}")
-
-        self.name = os.path.splitext(os.path.basename(path))[0]
-        self.steps = pipeline_data.get("steps", [])
-        self.pipeline_path = path
-
-    def execute(self, initial_context=None):
-        """Execute the pipeline with given context"""
-        context = initial_context or {}
-        results = {}
-
-
-        init_db_tables()
-
-
-        pipeline_hash = self._generate_hash()
-
-
-        results_table = f"{self.name}_results"
-        self._ensure_results_table(results_table)
-
-
-        run_id = self._create_run_entry(pipeline_hash)
-
-
-        context.update({
-            "ref": lambda step_name: results.get(step_name),
-            "source": self._fetch_data_from_source,
-        })
-
-
-        for step in self.steps:
-            step_name = step.get("step_name")
-            if not step_name:
-                raise ValueError(f"Missing step_name in step: {step}")
-
-
-            npc_name = self._render_template(step.get("npc", ""), context)
-            npc = self._get_npc(npc_name)
-            if not npc:
-                raise ValueError(f"NPC {npc_name} not found for step {step_name}")
-
-
-            task = self._render_template(step.get("task", ""), context)
-
-
-            model = step.get("model", npc.model)
-            provider = step.get("provider", npc.provider)
-
-
-            mixa = step.get("mixa", False)
-            if mixa:
-                response = self._execute_mixa_step(step, context, npc, model, provider)
-            else:
-
-                source_matches = re.findall(r"{{\s*source\('([^']+)'\)\s*}}", task)
-                if source_matches:
-                    response = self._execute_data_source_step(step, context, source_matches, npc, model, provider)
-                else:
-
-                    llm_response = npy.llm_funcs.get_llm_response(task, model=model, provider=provider, npc=npc)
-                    response = llm_response.get("response", "")
-
-
-            results[step_name] = response
-            context[step_name] = response
-
-
-            self._store_step_result(run_id, step_name, npc_name, model, provider,
-                                    {"task": task}, response, results_table)
-
-
-        return {
-            "results": results,
-            "run_id": run_id
-        }
-
-    def _render_template(self, template_str, context):
-        """Render a template with the given context"""
-        if not template_str:
-            return ""
-
-        try:
-            template = Template(template_str)
-            return template.render(**context)
-        except Exception as e:
-            print(f"Error rendering template: {e}")
-            return template_str
-
-    def _get_npc(self, npc_name):
-        """Get NPC by name from team"""
-        if not self.npc_team:
-            raise ValueError("No NPC team available")
-
-        return self.npc_team.get_npc(npc_name)
-
-    def _generate_hash(self):
-        """Generate a hash for the pipeline"""
-        if hasattr(self, 'pipeline_path') and self.pipeline_path:
-            with open(self.pipeline_path, 'r') as f:
-                content = f.read()
-            return hashlib.sha256(content.encode()).hexdigest()
-        else:
-
-            content = json.dumps(self.steps)
-            return hashlib.sha256(content.encode()).hexdigest()
-
-    def _ensure_results_table(self, table_name):
-        """Ensure results table exists"""
-        db_path = "~/npcsh_history.db"
-        with sqlite3.connect(os.path.expanduser(db_path)) as conn:
-            conn.execute(f"""
-                CREATE TABLE IF NOT EXISTS {table_name} (
-                    result_id INTEGER PRIMARY KEY AUTOINCREMENT,
-                    run_id INTEGER,
-                    step_name TEXT,
-                    npc_name TEXT,
-                    model TEXT,
-                    provider TEXT,
-                    inputs TEXT,
-                    outputs TEXT,
-                    timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
-                    FOREIGN KEY(run_id) REFERENCES pipeline_runs(run_id)
-                )
-            """)
-            conn.commit()
-
-    def _create_run_entry(self, pipeline_hash):
-        """Create run entry in pipeline_runs table"""
-        db_path = "~/npcsh_history.db"
-        with sqlite3.connect(os.path.expanduser(db_path)) as conn:
-            cursor = conn.execute(
-                "INSERT INTO pipeline_runs (pipeline_name, pipeline_hash, timestamp) VALUES (?, ?, ?)",
-                (self.name, pipeline_hash, datetime.now())
-            )
-            conn.commit()
-            return cursor.lastrowid
-
-    def _store_step_result(self, run_id, step_name, npc_name, model, provider, inputs, outputs, table_name):
-        """Store step result in database"""
-        db_path = "~/npcsh_history.db"
-        with sqlite3.connect(os.path.expanduser(db_path)) as conn:
-            conn.execute(
-                f"""
-                INSERT INTO {table_name}
-                (run_id, step_name, npc_name, model, provider, inputs, outputs)
-                VALUES (?, ?, ?, ?, ?, ?, ?)
-                """,
-                (
-                    run_id,
-                    step_name,
-                    npc_name,
-                    model,
-                    provider,
-                    json.dumps(self._clean_for_json(inputs)),
-                    json.dumps(self._clean_for_json(outputs))
-                )
-            )
-            conn.commit()
-
-    def _clean_for_json(self, obj):
-        """Clean an object for JSON serialization"""
-        if isinstance(obj, dict):
-            return {
-                k: self._clean_for_json(v)
-                for k, v in obj.items()
-                if not k.startswith("_") and not callable(v)
-            }
-        elif isinstance(obj, list):
-            return [self._clean_for_json(i) for i in obj]
-        elif isinstance(obj, (str, int, float, bool, type(None))):
-            return obj
-        else:
-            return str(obj)
-
-    def _fetch_data_from_source(self, table_name):
-        """Fetch data from a database table"""
-        db_path = "~/npcsh_history.db"
-        try:
-            engine = create_engine(f"sqlite:///{os.path.expanduser(db_path)}")
-            df = pd.read_sql(f"SELECT * FROM {table_name}", engine)
-            return df.to_json(orient="records")
-        except Exception as e:
-            print(f"Error fetching data from {table_name}: {e}")
-            return "[]"
-
-    def _execute_mixa_step(self, step, context, npc, model, provider):
-        """Execute a mixture of agents step"""
-
-        task = self._render_template(step.get("task", ""), context)
-
-
-        mixa_turns = step.get("mixa_turns", 5)
-        num_generating_agents = len(step.get("mixa_agents", []))
-        if num_generating_agents == 0:
-            num_generating_agents = 3
-
-        num_voting_agents = len(step.get("mixa_voters", []))
-        if num_voting_agents == 0:
-            num_voting_agents = 3
-
-
-        round_responses = []
-
-
-        return
-
-    def _execute_data_source_step(self, step, context, source_matches, npc, model, provider):
-        """Execute a step with data source"""
-        task_template = step.get("task", "")
-        table_name = source_matches[0]
-
-        try:
-
-            db_path = "~/npcsh_history.db"
-            engine = create_engine(f"sqlite:///{os.path.expanduser(db_path)}")
-            df = pd.read_sql(f"SELECT * FROM {table_name}", engine)
-
-
-            if step.get("batch_mode", False):
-
-                data_str = df.to_json(orient="records")
-                task = task_template.replace(f"{{{{ source('{table_name}') }}}}", data_str)
-                task = self._render_template(task, context)
-
-
-                response = npy.llm_funcs.get_llm_response(task, model=model, provider=provider, npc=npc)
-                return response.get("response", "")
-            else:
-
-                results = []
-                for _, row in df.iterrows():
-
-                    row_data = json.dumps(row.to_dict())
-                    row_task = task_template.replace(f"{{{{ source('{table_name}') }}}}", row_data)
-                    row_task = self._render_template(row_task, context)
-
-
-                    response = npy.llm_funcs.get_llm_response(row_task, model=model, provider=provider, npc=npc)
-                    results.append(response.get("response", ""))
-
-                return results
-        except Exception as e:
-            print(f"Error processing data source {table_name}: {e}")
-            return f"Error: {str(e)}"
-
-
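The reworked NPC.get_llm_response above now accepts tool-related arguments directly and derives the tool schemas and a default tool_choice itself. A minimal usage sketch follows, assuming an already-constructed NPC; the constructor keywords, model/provider names, and the toy tool are illustrative assumptions, not taken from this diff.

# Hypothetical sketch of calling the 1.2.27 NPC.get_llm_response signature.
# Only the keyword arguments mirror the hunks above; everything else here
# (constructor keywords, model/provider names, the toy tool) is assumed.
from npcpy.npc_compiler import NPC

def lookup_weather(city: str) -> str:
    """Toy callable offered as a tool; schemas for it are derived via auto_tools()."""
    return f"Sunny in {city}"

npc = NPC(name="assistant", model="llama3.2", provider="ollama")  # assumed kwargs

result = npc.get_llm_response(
    "What's the weather in Paris?",
    tools=[lookup_weather],                       # both tools and tool_map must be supplied
    tool_map={"lookup_weather": lookup_weather},  # callables are collected from this map
    use_core_tools=True,        # also registers built-ins such as think_step_by_step and write_code
    auto_process_tool_calls=True,
    # tool_choice left unset: it now defaults to "auto" whenever a tool schema exists
)
print(result.get("response", ""))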
npcpy/npc_sysenv.py

@@ -164,18 +164,18 @@ def get_locally_available_models(project_directory, airplane_mode=False):


     with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
-        if '
+        if 'NPC_STUDIO_LICENSE_KEY' in env_vars or os.environ.get('NPC_STUDIO_LICENSE_KEY'):
             try:
                 def fetch_enpisi_models():
                     import requests

                     api_url = 'https://api.enpisi.com'
                     headers = {
-                        'Authorization': f"Bearer {env_vars.get('
+                        'Authorization': f"Bearer {env_vars.get('NPC_STUDIO_LICENSE_KEY') or os.environ.get('NPC_STUDIO_LICENSE_KEY')}",
                         'Content-Type': 'application/json'
                     }
-                    import requests
                     response = requests.get(f"{api_url}/models", headers=headers)
+
                     return [model['id'] for model in response.json().get('data','')]
                 for model in fetch_enpisi_models():
                     available_models[model+'-npc'] = 'enpisi'