npcpy-1.3.4-py3-none-any.whl → npcpy-1.3.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- npcpy/build_funcs.py +288 -0
- npcpy/data/load.py +1 -1
- npcpy/data/web.py +5 -4
- npcpy/gen/image_gen.py +2 -1
- npcpy/gen/response.py +118 -65
- npcpy/gen/world_gen.py +609 -0
- npcpy/llm_funcs.py +173 -271
- npcpy/memory/command_history.py +107 -2
- npcpy/memory/knowledge_graph.py +1 -1
- npcpy/npc_compiler.py +176 -32
- npcpy/npc_sysenv.py +5 -5
- npcpy/serve.py +311 -2
- npcpy/sql/npcsql.py +272 -59
- npcpy/work/browser.py +30 -0
- {npcpy-1.3.4.dist-info → npcpy-1.3.5.dist-info}/METADATA +1 -1
- {npcpy-1.3.4.dist-info → npcpy-1.3.5.dist-info}/RECORD +19 -16
- {npcpy-1.3.4.dist-info → npcpy-1.3.5.dist-info}/WHEEL +0 -0
- {npcpy-1.3.4.dist-info → npcpy-1.3.5.dist-info}/licenses/LICENSE +0 -0
- {npcpy-1.3.4.dist-info → npcpy-1.3.5.dist-info}/top_level.txt +0 -0
npcpy/serve.py
CHANGED
@@ -2220,6 +2220,10 @@ def read_ctx_file(file_path):
         if 'preferences' in data and isinstance(data['preferences'], list):
             data['preferences'] = [{"value": item} for item in data['preferences']]
 
+        # Normalize websites list
+        if 'websites' in data and isinstance(data['websites'], list):
+            data['websites'] = [{"value": item} for item in data['websites']]
+
         return data
     except yaml.YAMLError as e:
         print(f"YAML parsing error in {file_path}: {e}")
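For orientation, a minimal sketch of the on-disk shape this normalization targets. The preferences and websites keys come from the diff; the values are purely illustrative:

import yaml

# Hypothetical .ctx contents; keys match the diff, values are illustrative.
ctx = {
    "preferences": ["prefers concise answers"],
    "websites": ["https://example.com/docs"],
}
print(yaml.dump(ctx, default_flow_style=False, sort_keys=False))
# read_ctx_file lifts each websites string into {"value": "..."} for API consumers.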
@@ -2246,6 +2250,10 @@ def write_ctx_file(file_path, data):
     if 'preferences' in data_to_save and isinstance(data_to_save['preferences'], list):
         data_to_save['preferences'] = [item.get("value", "") for item in data_to_save['preferences'] if isinstance(item, dict)]
 
+    # Denormalize websites list
+    if 'websites' in data_to_save and isinstance(data_to_save['websites'], list):
+        data_to_save['websites'] = [item.get("value", "") for item in data_to_save['websites'] if isinstance(item, dict)]
+
     os.makedirs(os.path.dirname(file_path), exist_ok=True)
     with open(file_path, 'w') as f:
         yaml.dump(data_to_save, f, default_flow_style=False, sort_keys=False)
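A quick round-trip check of the two hunks above, reusing the exact list comprehensions from the diff: read_ctx_file lifts bare strings into {"value": ...} objects, and write_ctx_file flattens them back before yaml.dump. The URL is illustrative:

# Round-trip sketch using the comprehensions shown in the diff.
api_shape = [{"value": "https://example.com/docs"}]
on_disk = [item.get("value", "") for item in api_shape if isinstance(item, dict)]
assert on_disk == ["https://example.com/docs"]
assert [{"value": item} for item in on_disk] == api_shape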
@@ -2329,6 +2337,80 @@ def init_project_team():
         print(f"Error initializing project team: {e}")
         return jsonify({"error": str(e)}), 500
 
+@app.route("/api/context/websites", methods=["GET"])
+def get_context_websites():
+    """Gets the websites list from a .ctx file."""
+    try:
+        current_path = request.args.get("path")
+        is_global = request.args.get("global", "false").lower() == "true"
+
+        ctx_path = get_ctx_path(is_global=is_global, current_path=current_path)
+        data = read_ctx_file(ctx_path)
+
+        websites = data.get("websites", [])
+        # Normalize to list of objects if needed
+        if isinstance(websites, list):
+            normalized = []
+            for item in websites:
+                if isinstance(item, str):
+                    normalized.append({"value": item})
+                elif isinstance(item, dict):
+                    normalized.append(item)
+            websites = normalized
+
+        return jsonify({
+            "websites": websites,
+            "path": ctx_path,
+            "error": None
+        })
+    except Exception as e:
+        print(f"Error getting websites from context: {e}")
+        return jsonify({"error": str(e)}), 500
+
+
+@app.route("/api/context/websites", methods=["POST"])
+def save_context_websites():
+    """Saves the websites list to a .ctx file."""
+    try:
+        data = request.json
+        websites = data.get("websites", [])
+        current_path = data.get("path")
+        is_global = data.get("global", False)
+
+        ctx_path = get_ctx_path(is_global=is_global, current_path=current_path, create_default=True)
+
+        if not ctx_path:
+            return jsonify({"error": "Could not determine ctx file path. Provide a path or use global=true."}), 400
+
+        # Read existing ctx data
+        existing_data = read_ctx_file(ctx_path) or {}
+
+        # Normalize websites to list of strings for YAML storage
+        normalized_websites = []
+        for item in websites:
+            if isinstance(item, dict) and "value" in item:
+                normalized_websites.append(item["value"])
+            elif isinstance(item, str):
+                normalized_websites.append(item)
+
+        existing_data["websites"] = normalized_websites
+
+        if write_ctx_file(ctx_path, existing_data):
+            return jsonify({
+                "message": "Websites saved to context.",
+                "websites": [{"value": w} for w in normalized_websites],
+                "path": ctx_path,
+                "error": None
+            })
+        else:
+            return jsonify({"error": "Failed to write context file."}), 500
+
+    except Exception as e:
+        print(f"Error saving websites to context: {e}")
+        return jsonify({"error": str(e)}), 500
+
+
+
 
 
 
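Together these two routes give clients a symmetric read/write path for context websites. A hedged usage sketch with requests, assuming the Flask server is running locally on its default port (5337, per start_flask_server further down) and that the example URL stands in for real data:

import requests

BASE = "http://localhost:5337"  # default port from start_flask_server

# Save a website into the global ~/.npcsh context file.
r = requests.post(f"{BASE}/api/context/websites", json={
    "websites": [{"value": "https://example.com/docs"}],  # illustrative URL
    "global": True,
})
print(r.json())  # {"message": "Websites saved to context.", ...}

# Read it back; bare strings come back normalized to {"value": ...} objects.
r = requests.get(f"{BASE}/api/context/websites", params={"global": "true"})
print(r.json()["websites"])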
@@ -4437,6 +4519,233 @@ def health_check():
     return jsonify({"status": "ok", "error": None})
 
 
+# OpenAI-compatible completions API
+@app.route("/v1/chat/completions", methods=["POST"])
+def openai_chat_completions():
+    """
+    OpenAI-compatible chat completions endpoint.
+    Allows using NPC team as a drop-in replacement for OpenAI API.
+
+    Extra parameter:
+    - agent: NPC name to use (optional, uses team's forenpc if not specified)
+    """
+    try:
+        data = request.get_json()
+        messages = data.get("messages", [])
+        model = data.get("model", "gpt-4o-mini")
+        stream = data.get("stream", False)
+        temperature = data.get("temperature", 0.7)
+        max_tokens = data.get("max_tokens", 4096)
+
+        # Extra: agent/npc selection
+        agent_name = data.get("agent") or data.get("npc")
+
+        current_path = request.headers.get("X-Current-Path", os.getcwd())
+
+        # Load team and NPC
+        db_path = app.config.get('DB_PATH') or os.path.expanduser("~/.npcsh/npcsh_history.db")
+        db_conn = create_engine(f'sqlite:///{db_path}')
+
+        npc = None
+        team = None
+
+        # Try to load from project or global
+        project_team_path = os.path.join(current_path, "npc_team")
+        global_team_path = os.path.expanduser("~/.npcsh/npc_team")
+
+        team_path = project_team_path if os.path.exists(project_team_path) else global_team_path
+
+        if os.path.exists(team_path):
+            try:
+                team = Team(team_path, db_conn=db_conn)
+                if agent_name and agent_name in team.npcs:
+                    npc = team.npcs[agent_name]
+                elif team.forenpc:
+                    npc = team.forenpc
+            except Exception as e:
+                print(f"Error loading team: {e}")
+
+        # Extract the prompt from messages
+        prompt = ""
+        conversation_messages = []
+        for msg in messages:
+            role = msg.get("role", "user")
+            content = msg.get("content", "")
+            if isinstance(content, list):
+                # Handle multimodal content
+                content = " ".join([c.get("text", "") for c in content if c.get("type") == "text"])
+            conversation_messages.append({"role": role, "content": content})
+            if role == "user":
+                prompt = content
+
+        # Determine provider from model name
+        provider = data.get("provider")
+        if not provider:
+            if "gpt" in model or "o1" in model or model.startswith("o3"):
+                provider = "openai"
+            elif "claude" in model:
+                provider = "anthropic"
+            elif "gemini" in model:
+                provider = "gemini"
+            else:
+                provider = "openai"  # default
+
+        if stream:
+            def generate_stream():
+                request_id = f"chatcmpl-{uuid.uuid4().hex[:8]}"
+                created = int(time.time())
+
+                try:
+                    response = get_llm_response(
+                        prompt,
+                        model=model,
+                        provider=provider,
+                        npc=npc,
+                        team=team,
+                        messages=conversation_messages[:-1],  # exclude last user message (it's the prompt)
+                        stream=True,
+                        temperature=temperature,
+                        max_tokens=max_tokens,
+                    )
+
+                    for chunk in response:
+                        if isinstance(chunk, str):
+                            delta_content = chunk
+                        elif hasattr(chunk, 'choices') and chunk.choices:
+                            delta = chunk.choices[0].delta
+                            delta_content = getattr(delta, 'content', '') or ''
+                        else:
+                            delta_content = str(chunk)
+
+                        if delta_content:
+                            chunk_data = {
+                                "id": request_id,
+                                "object": "chat.completion.chunk",
+                                "created": created,
+                                "model": model,
+                                "choices": [{
+                                    "index": 0,
+                                    "delta": {"content": delta_content},
+                                    "finish_reason": None
+                                }]
+                            }
+                            yield f"data: {json.dumps(chunk_data)}\n\n"
+
+                    # Final chunk
+                    final_chunk = {
+                        "id": request_id,
+                        "object": "chat.completion.chunk",
+                        "created": created,
+                        "model": model,
+                        "choices": [{
+                            "index": 0,
+                            "delta": {},
+                            "finish_reason": "stop"
+                        }]
+                    }
+                    yield f"data: {json.dumps(final_chunk)}\n\n"
+                    yield "data: [DONE]\n\n"
+
+                except Exception as e:
+                    error_chunk = {
+                        "error": {
+                            "message": str(e),
+                            "type": "server_error"
+                        }
+                    }
+                    yield f"data: {json.dumps(error_chunk)}\n\n"
+
+            return Response(
+                generate_stream(),
+                mimetype='text/event-stream',
+                headers={
+                    'Cache-Control': 'no-cache',
+                    'Connection': 'keep-alive',
+                    'X-Accel-Buffering': 'no'
+                }
+            )
+        else:
+            # Non-streaming response
+            response = get_llm_response(
+                prompt,
+                model=model,
+                provider=provider,
+                npc=npc,
+                team=team,
+                messages=conversation_messages[:-1],
+                stream=False,
+                temperature=temperature,
+                max_tokens=max_tokens,
+            )
+
+            content = ""
+            if isinstance(response, str):
+                content = response
+            elif hasattr(response, 'choices') and response.choices:
+                content = response.choices[0].message.content or ""
+            elif isinstance(response, dict):
+                content = response.get("response") or response.get("output") or str(response)
+            else:
+                content = str(response)
+
+            return jsonify({
+                "id": f"chatcmpl-{uuid.uuid4().hex[:8]}",
+                "object": "chat.completion",
+                "created": int(time.time()),
+                "model": model,
+                "choices": [{
+                    "index": 0,
+                    "message": {
+                        "role": "assistant",
+                        "content": content
+                    },
+                    "finish_reason": "stop"
+                }],
+                "usage": {
+                    "prompt_tokens": -1,
+                    "completion_tokens": -1,
+                    "total_tokens": -1
+                }
+            })
+
+    except Exception as e:
+        traceback.print_exc()
+        return jsonify({
+            "error": {
+                "message": str(e),
+                "type": "server_error",
+                "code": 500
+            }
+        }), 500
+
+
+@app.route("/v1/models", methods=["GET"])
+def openai_list_models():
+    """OpenAI-compatible models listing - returns available NPCs as models."""
+    current_path = request.headers.get("X-Current-Path", os.getcwd())
+
+    models = []
+
+    # Add NPCs as available "models"
+    project_team_path = os.path.join(current_path, "npc_team")
+    global_team_path = os.path.expanduser("~/.npcsh/npc_team")
+
+    for team_path in [project_team_path, global_team_path]:
+        if os.path.exists(team_path):
+            for npc_file in Path(team_path).glob("*.npc"):
+                models.append({
+                    "id": npc_file.stem,
+                    "object": "model",
+                    "created": int(os.path.getmtime(npc_file)),
+                    "owned_by": "npc-team"
+                })
+
+    return jsonify({
+        "object": "list",
+        "data": models
+    })
+
+
 def start_flask_server(
     port=5337,
     cors_origins=None,
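Because the endpoint mirrors the OpenAI request/response schema, any OpenAI SDK can point at it. A hedged sketch with the openai Python client, assuming a local server on the default port 5337; the NPC name "sibiji" is illustrative, and the api_key is a placeholder since the route performs no authentication:

from openai import OpenAI

# Local npcpy server; key is a placeholder (the route does no auth).
client = OpenAI(base_url="http://localhost:5337/v1", api_key="unused")

# "agent" is the extra parameter the endpoint reads; the NPC name is illustrative.
resp = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=[{"role": "user", "content": "Summarize the repo layout."}],
    extra_body={"agent": "sibiji"},
)
print(resp.choices[0].message.content)

# NPCs are surfaced as "models" by /v1/models.
print([m.id for m in client.models.list()])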
@@ -4490,8 +4799,8 @@ if __name__ == "__main__":
 
     SETTINGS_FILE = Path(os.path.expanduser("~/.npcshrc"))
 
-
+
     db_path = os.path.expanduser("~/npcsh_history.db")
     user_npc_directory = os.path.expanduser("~/.npcsh/npc_team")
-
+
    start_flask_server(db_path=db_path, user_npc_directory=user_npc_directory)