npcpy 1.2.25 → 1.2.27 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
npcpy/gen/response.py CHANGED
@@ -561,7 +561,7 @@ def get_litellm_response(
  if provider =='enpisi' and api_url is None:
  api_params['api_base'] = 'https://api.enpisi.com'
  if api_key is None:
- api_key = os.environ.get('NPC_STUDIO_LICENSE')
+ api_key = os.environ.get('NPC_STUDIO_LICENSE_KEY')
  api_params['api_key'] = api_key
  if '-npc' in model:
  model = model.split('-npc')[0]
npcpy/npc_compiler.py CHANGED
@@ -566,7 +566,6 @@ def get_npc_action_space(npc=None, team=None):
  def extract_jinx_inputs(args: List[str], jinx: Jinx) -> Dict[str, Any]:
  inputs = {}
 
- # Create flag mapping for inputs
  flag_mapping = {}
  for input_ in jinx.inputs:
  if isinstance(input_, str):
@@ -577,17 +576,19 @@ def extract_jinx_inputs(args: List[str], jinx: Jinx) -> Dict[str, Any]:
  flag_mapping[f"-{key[0]}"] = key
  flag_mapping[f"--{key}"] = key
 
- # Parse key=value pairs first
- used_args = set()
- for i, arg in enumerate(args):
- if '=' in arg and not arg.startswith('-'):
- key, value = arg.split('=', 1)
- key = key.strip().strip("'\"")
- value = value.strip().strip("'\"")
- inputs[key] = value
- used_args.add(i)
-
- # Parse flags
+ if len(jinx.inputs) > 1:
+ used_args = set()
+ for i, arg in enumerate(args):
+ if '=' in arg and arg != '=' and not arg.startswith('-'):
+ key, value = arg.split('=', 1)
+ key = key.strip().strip("'\"")
+ value = value.strip().strip("'\"")
+ inputs[key] = value
+ used_args.add(i)
+ else:
+ used_args = set()
+
+
  for i, arg in enumerate(args):
  if i in used_args:
  continue
@@ -599,29 +600,28 @@ def extract_jinx_inputs(args: List[str], jinx: Jinx) -> Dict[str, Any]:
  used_args.add(i)
  used_args.add(i + 1)
  else:
- # Boolean flag
  input_name = flag_mapping[arg]
  inputs[input_name] = True
  used_args.add(i)
 
- # Handle remaining positional arguments
  unused_args = [arg for i, arg in enumerate(args) if i not in used_args]
 
- # Map positional args to jinx inputs in order
  jinx_input_names = []
  for input_ in jinx.inputs:
  if isinstance(input_, str):
  jinx_input_names.append(input_)
  elif isinstance(input_, dict):
  jinx_input_names.append(list(input_.keys())[0])
-
- for i, arg in enumerate(unused_args):
- if i < len(jinx_input_names):
- input_name = jinx_input_names[i]
- if input_name not in inputs: # Don't overwrite existing values
- inputs[input_name] = arg
+ if len(jinx_input_names) == 1:
+ inputs[jinx_input_names[0]] = ' '.join(unused_args).strip()
+ else:
+ for i, arg in enumerate(unused_args):
+ if i < len(jinx_input_names):
+ input_name = jinx_input_names[i]
+ if input_name not in inputs:
+ inputs[input_name] = arg
+
 
- # Set default values for missing inputs
  for input_ in jinx.inputs:
  if isinstance(input_, str):
  if input_ not in inputs:
@@ -1093,26 +1093,32 @@ class NPC:
 
  self.jinxs_dict = {jinx.jinx_name: jinx for jinx in npc_jinxs}
  return npc_jinxs
-
+
  def get_llm_response(self,
- request,
- jinxs=None,
- tools=None,
- tool_map=None,
- tool_choice=None,
- messages=None,
- auto_process_tool_calls=True,
- **kwargs):
- """Get response from LLM with automatic tool integration including memory CRUD"""
-
- if tools is None and tool_map is None and tool_choice is None:
- core_tools = [
+ request,
+ jinxs=None,
+ tools: Optional[list] = None,
+ tool_map: Optional[dict] = None,
+ tool_choice=None,
+ messages=None,
+ auto_process_tool_calls=True,
+ use_core_tools: bool = False,
+ **kwargs):
+ all_candidate_functions = []
+
+ if tools is not None and tool_map is not None:
+ all_candidate_functions.extend([func for func in tool_map.values() if callable(func)])
+ elif hasattr(self, 'tool_map') and self.tool_map:
+ all_candidate_functions.extend([func for func in self.tool_map.values() if callable(func)])
+
+ if use_core_tools:
+ dynamic_core_tools_list = [
  self.think_step_by_step,
  self.write_code
  ]
-
+
  if self.command_history:
- core_tools.extend([
+ dynamic_core_tools_list.extend([
  self.search_my_conversations,
  self.search_my_memories,
  self.create_memory,
@@ -1124,35 +1130,44 @@ class NPC:
  self.archive_old_memories,
  self.get_memory_stats
  ])
-
+
  if self.db_conn:
- core_tools.append(self.query_database)
-
- if hasattr(self, 'tools') and self.tools:
- core_tools.extend([func for func in self.tool_map.values() if callable(func)])
-
- if core_tools:
- tools, tool_map = auto_tools(core_tools)
-
- if tool_choice is None and tools:
- tool_choice = "auto"
+ dynamic_core_tools_list.append(self.query_database)
+
+ all_candidate_functions.extend(dynamic_core_tools_list)
+
+ unique_functions = []
+ seen_names = set()
+ for func in all_candidate_functions:
+ if func.__name__ not in seen_names:
+ unique_functions.append(func)
+ seen_names.add(func.__name__)
+
+ final_tools_schema = None
+ final_tool_map_dict = None
+
+ if unique_functions:
+ final_tools_schema, final_tool_map_dict = auto_tools(unique_functions)
+
+ if tool_choice is None:
+ if final_tools_schema:
+ tool_choice = "auto"
+ else:
+ tool_choice = "none"
 
  response = npy.llm_funcs.get_llm_response(
  request,
- model=self.model,
- provider=self.provider,
  npc=self,
  jinxs=jinxs,
- tools=tools,
- tool_map=tool_map,
+ tools=final_tools_schema,
+ tool_map=final_tool_map_dict,
  tool_choice=tool_choice,
  auto_process_tool_calls=auto_process_tool_calls,
  messages=self.memory if messages is None else messages,
  **kwargs
  )
-
- return response
 
+ return response
 
 
 
@@ -2383,269 +2398,3 @@ class Team:
  context_parts.append("")
 
  return "\n".join(context_parts)
-
- class Pipeline:
- def __init__(self, pipeline_data=None, pipeline_path=None, npc_team=None):
- """Initialize a pipeline from data or file path"""
- self.npc_team = npc_team
- self.steps = []
-
- if pipeline_path:
- self._load_from_path(pipeline_path)
- elif pipeline_data:
- self.name = pipeline_data.get("name", "unnamed_pipeline")
- self.steps = pipeline_data.get("steps", [])
- else:
- raise ValueError("Either pipeline_data or pipeline_path must be provided")
-
- def _load_from_path(self, path):
- """Load pipeline from file"""
- pipeline_data = load_yaml_file(path)
- if not pipeline_data:
- raise ValueError(f"Failed to load pipeline from {path}")
-
- self.name = os.path.splitext(os.path.basename(path))[0]
- self.steps = pipeline_data.get("steps", [])
- self.pipeline_path = path
-
- def execute(self, initial_context=None):
- """Execute the pipeline with given context"""
- context = initial_context or {}
- results = {}
-
-
- init_db_tables()
-
-
- pipeline_hash = self._generate_hash()
-
-
- results_table = f"{self.name}_results"
- self._ensure_results_table(results_table)
-
-
- run_id = self._create_run_entry(pipeline_hash)
-
-
- context.update({
- "ref": lambda step_name: results.get(step_name),
- "source": self._fetch_data_from_source,
- })
-
-
- for step in self.steps:
- step_name = step.get("step_name")
- if not step_name:
- raise ValueError(f"Missing step_name in step: {step}")
-
-
- npc_name = self._render_template(step.get("npc", ""), context)
- npc = self._get_npc(npc_name)
- if not npc:
- raise ValueError(f"NPC {npc_name} not found for step {step_name}")
-
-
- task = self._render_template(step.get("task", ""), context)
-
-
- model = step.get("model", npc.model)
- provider = step.get("provider", npc.provider)
-
-
- mixa = step.get("mixa", False)
- if mixa:
- response = self._execute_mixa_step(step, context, npc, model, provider)
- else:
-
- source_matches = re.findall(r"{{\s*source\('([^']+)'\)\s*}}", task)
- if source_matches:
- response = self._execute_data_source_step(step, context, source_matches, npc, model, provider)
- else:
-
- llm_response = npy.llm_funcs.get_llm_response(task, model=model, provider=provider, npc=npc)
- response = llm_response.get("response", "")
-
-
- results[step_name] = response
- context[step_name] = response
-
-
- self._store_step_result(run_id, step_name, npc_name, model, provider,
- {"task": task}, response, results_table)
-
-
- return {
- "results": results,
- "run_id": run_id
- }
-
- def _render_template(self, template_str, context):
- """Render a template with the given context"""
- if not template_str:
- return ""
-
- try:
- template = Template(template_str)
- return template.render(**context)
- except Exception as e:
- print(f"Error rendering template: {e}")
- return template_str
-
- def _get_npc(self, npc_name):
- """Get NPC by name from team"""
- if not self.npc_team:
- raise ValueError("No NPC team available")
-
- return self.npc_team.get_npc(npc_name)
-
- def _generate_hash(self):
- """Generate a hash for the pipeline"""
- if hasattr(self, 'pipeline_path') and self.pipeline_path:
- with open(self.pipeline_path, 'r') as f:
- content = f.read()
- return hashlib.sha256(content.encode()).hexdigest()
- else:
-
- content = json.dumps(self.steps)
- return hashlib.sha256(content.encode()).hexdigest()
-
- def _ensure_results_table(self, table_name):
- """Ensure results table exists"""
- db_path = "~/npcsh_history.db"
- with sqlite3.connect(os.path.expanduser(db_path)) as conn:
- conn.execute(f"""
- CREATE TABLE IF NOT EXISTS {table_name} (
- result_id INTEGER PRIMARY KEY AUTOINCREMENT,
- run_id INTEGER,
- step_name TEXT,
- npc_name TEXT,
- model TEXT,
- provider TEXT,
- inputs TEXT,
- outputs TEXT,
- timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
- FOREIGN KEY(run_id) REFERENCES pipeline_runs(run_id)
- )
- """)
- conn.commit()
-
- def _create_run_entry(self, pipeline_hash):
- """Create run entry in pipeline_runs table"""
- db_path = "~/npcsh_history.db"
- with sqlite3.connect(os.path.expanduser(db_path)) as conn:
- cursor = conn.execute(
- "INSERT INTO pipeline_runs (pipeline_name, pipeline_hash, timestamp) VALUES (?, ?, ?)",
- (self.name, pipeline_hash, datetime.now())
- )
- conn.commit()
- return cursor.lastrowid
-
- def _store_step_result(self, run_id, step_name, npc_name, model, provider, inputs, outputs, table_name):
- """Store step result in database"""
- db_path = "~/npcsh_history.db"
- with sqlite3.connect(os.path.expanduser(db_path)) as conn:
- conn.execute(
- f"""
- INSERT INTO {table_name}
- (run_id, step_name, npc_name, model, provider, inputs, outputs)
- VALUES (?, ?, ?, ?, ?, ?, ?)
- """,
- (
- run_id,
- step_name,
- npc_name,
- model,
- provider,
- json.dumps(self._clean_for_json(inputs)),
- json.dumps(self._clean_for_json(outputs))
- )
- )
- conn.commit()
-
- def _clean_for_json(self, obj):
- """Clean an object for JSON serialization"""
- if isinstance(obj, dict):
- return {
- k: self._clean_for_json(v)
- for k, v in obj.items()
- if not k.startswith("_") and not callable(v)
- }
- elif isinstance(obj, list):
- return [self._clean_for_json(i) for i in obj]
- elif isinstance(obj, (str, int, float, bool, type(None))):
- return obj
- else:
- return str(obj)
-
- def _fetch_data_from_source(self, table_name):
- """Fetch data from a database table"""
- db_path = "~/npcsh_history.db"
- try:
- engine = create_engine(f"sqlite:///{os.path.expanduser(db_path)}")
- df = pd.read_sql(f"SELECT * FROM {table_name}", engine)
- return df.to_json(orient="records")
- except Exception as e:
- print(f"Error fetching data from {table_name}: {e}")
- return "[]"
-
- def _execute_mixa_step(self, step, context, npc, model, provider):
- """Execute a mixture of agents step"""
-
- task = self._render_template(step.get("task", ""), context)
-
-
- mixa_turns = step.get("mixa_turns", 5)
- num_generating_agents = len(step.get("mixa_agents", []))
- if num_generating_agents == 0:
- num_generating_agents = 3
-
- num_voting_agents = len(step.get("mixa_voters", []))
- if num_voting_agents == 0:
- num_voting_agents = 3
-
-
- round_responses = []
-
-
- return
-
- def _execute_data_source_step(self, step, context, source_matches, npc, model, provider):
- """Execute a step with data source"""
- task_template = step.get("task", "")
- table_name = source_matches[0]
-
- try:
-
- db_path = "~/npcsh_history.db"
- engine = create_engine(f"sqlite:///{os.path.expanduser(db_path)}")
- df = pd.read_sql(f"SELECT * FROM {table_name}", engine)
-
-
- if step.get("batch_mode", False):
-
- data_str = df.to_json(orient="records")
- task = task_template.replace(f"{{{{ source('{table_name}') }}}}", data_str)
- task = self._render_template(task, context)
-
-
- response = npy.llm_funcs.get_llm_response(task, model=model, provider=provider, npc=npc)
- return response.get("response", "")
- else:
-
- results = []
- for _, row in df.iterrows():
-
- row_data = json.dumps(row.to_dict())
- row_task = task_template.replace(f"{{{{ source('{table_name}') }}}}", row_data)
- row_task = self._render_template(row_task, context)
-
-
- response = npy.llm_funcs.get_llm_response(row_task, model=model, provider=provider, npc=npc)
- results.append(response.get("response", ""))
-
- return results
- except Exception as e:
- print(f"Error processing data source {table_name}: {e}")
- return f"Error: {str(e)}"
-
-
npcpy/npc_sysenv.py CHANGED
@@ -164,18 +164,18 @@ def get_locally_available_models(project_directory, airplane_mode=False):
 
 
  with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
- if 'NPC_STUDIO_LICENSE' in env_vars or os.environ.get('NPC_STUDIO_LICENSE'):
+ if 'NPC_STUDIO_LICENSE_KEY' in env_vars or os.environ.get('NPC_STUDIO_LICENSE_KEY'):
  try:
  def fetch_enpisi_models():
  import requests
 
  api_url = 'https://api.enpisi.com'
  headers = {
- 'Authorization': f"Bearer {env_vars.get('NPC_STUDIO_LICENSE') or os.environ.get('NPC_STUDIO_LICENSE')}",
+ 'Authorization': f"Bearer {env_vars.get('NPC_STUDIO_LICENSE_KEY') or os.environ.get('NPC_STUDIO_LICENSE_KEY')}",
  'Content-Type': 'application/json'
  }
- import requests
  response = requests.get(f"{api_url}/models", headers=headers)
+
  return [model['id'] for model in response.json().get('data','')]
  for model in fetch_enpisi_models():
  available_models[model+'-npc'] = 'enpisi'