npcsh 0.3.28__py3-none-any.whl → 0.3.30__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36) hide show
  1. npcsh/llm_funcs.py +32 -23
  2. npcsh/npc_compiler.py +23 -4
  3. npcsh/npc_team/tools/bash_executer.tool +32 -0
  4. npcsh/npc_team/tools/code_executor.tool +16 -0
  5. npcsh/npc_team/tools/npcsh_executor.tool +9 -0
  6. npcsh/npc_team/tools/sql_executor.tool +2 -2
  7. npcsh/shell.py +19 -17
  8. npcsh/shell_helpers.py +576 -49
  9. npcsh-0.3.30.data/data/npcsh/npc_team/bash_executer.tool +32 -0
  10. npcsh-0.3.30.data/data/npcsh/npc_team/code_executor.tool +16 -0
  11. npcsh-0.3.30.data/data/npcsh/npc_team/npcsh_executor.tool +9 -0
  12. {npcsh-0.3.28.data → npcsh-0.3.30.data}/data/npcsh/npc_team/sql_executor.tool +2 -2
  13. {npcsh-0.3.28.dist-info → npcsh-0.3.30.dist-info}/METADATA +43 -3
  14. {npcsh-0.3.28.dist-info → npcsh-0.3.30.dist-info}/RECORD +36 -30
  15. {npcsh-0.3.28.dist-info → npcsh-0.3.30.dist-info}/WHEEL +1 -1
  16. {npcsh-0.3.28.data → npcsh-0.3.30.data}/data/npcsh/npc_team/calculator.tool +0 -0
  17. {npcsh-0.3.28.data → npcsh-0.3.30.data}/data/npcsh/npc_team/celona.npc +0 -0
  18. {npcsh-0.3.28.data → npcsh-0.3.30.data}/data/npcsh/npc_team/corca.npc +0 -0
  19. {npcsh-0.3.28.data → npcsh-0.3.30.data}/data/npcsh/npc_team/eriane.npc +0 -0
  20. {npcsh-0.3.28.data → npcsh-0.3.30.data}/data/npcsh/npc_team/foreman.npc +0 -0
  21. {npcsh-0.3.28.data → npcsh-0.3.30.data}/data/npcsh/npc_team/generic_search.tool +0 -0
  22. {npcsh-0.3.28.data → npcsh-0.3.30.data}/data/npcsh/npc_team/image_generation.tool +0 -0
  23. {npcsh-0.3.28.data → npcsh-0.3.30.data}/data/npcsh/npc_team/lineru.npc +0 -0
  24. {npcsh-0.3.28.data → npcsh-0.3.30.data}/data/npcsh/npc_team/local_search.tool +0 -0
  25. {npcsh-0.3.28.data → npcsh-0.3.30.data}/data/npcsh/npc_team/maurawa.npc +0 -0
  26. {npcsh-0.3.28.data → npcsh-0.3.30.data}/data/npcsh/npc_team/npcsh.ctx +0 -0
  27. {npcsh-0.3.28.data → npcsh-0.3.30.data}/data/npcsh/npc_team/raone.npc +0 -0
  28. {npcsh-0.3.28.data → npcsh-0.3.30.data}/data/npcsh/npc_team/screen_cap.tool +0 -0
  29. {npcsh-0.3.28.data → npcsh-0.3.30.data}/data/npcsh/npc_team/sibiji.npc +0 -0
  30. {npcsh-0.3.28.data → npcsh-0.3.30.data}/data/npcsh/npc_team/slean.npc +0 -0
  31. {npcsh-0.3.28.data → npcsh-0.3.30.data}/data/npcsh/npc_team/test_pipeline.py +0 -0
  32. {npcsh-0.3.28.data → npcsh-0.3.30.data}/data/npcsh/npc_team/turnic.npc +0 -0
  33. {npcsh-0.3.28.data → npcsh-0.3.30.data}/data/npcsh/npc_team/welxor.npc +0 -0
  34. {npcsh-0.3.28.dist-info → npcsh-0.3.30.dist-info}/entry_points.txt +0 -0
  35. {npcsh-0.3.28.dist-info → npcsh-0.3.30.dist-info}/licenses/LICENSE +0 -0
  36. {npcsh-0.3.28.dist-info → npcsh-0.3.30.dist-info}/top_level.txt +0 -0
npcsh/shell_helpers.py CHANGED
@@ -22,6 +22,7 @@ import tty
22
22
  import pty
23
23
  import select
24
24
  import signal
25
+ import platform
25
26
  import time
26
27
 
27
28
 
@@ -66,6 +67,7 @@ from .llm_funcs import (
66
67
  execute_llm_question,
67
68
  get_stream,
68
69
  get_conversation,
70
+ get_llm_response,
69
71
  check_llm_command,
70
72
  generate_image,
71
73
  get_embeddings,
@@ -1090,6 +1092,510 @@ def resize_image_tars(image_path):
1090
1092
  image.save(image_path, format="png")
1091
1093
 
1092
1094
 
1095
def execute_plan_command(
    command, npc=None, model=None, provider=None, messages=None, api_url=None
):
    """Create an OS-level scheduled job from a natural-language request.

    The text after ``/plan`` is sent to the LLM, which must answer with JSON
    describing a script, a schedule, a description, and a job name.  The
    script is written under ``~/.npcsh/jobs`` and registered with the native
    scheduler: crontab on Linux, launchd on macOS (Darwin), Task Scheduler
    on Windows.  Logs go to ``~/.npcsh/logs``.

    Args:
        command: Raw slash command, e.g. ``/plan record CPU every 10 min``.
        npc, model, provider: Forwarded to ``get_llm_response``.
        messages: Conversation history, passed through unchanged.
        api_url: Accepted for interface parity with sibling handlers; unused.

    Returns:
        dict with ``messages`` and a human-readable ``output`` summary.
    """
    parts = command.split(maxsplit=1)
    if len(parts) < 2:
        return {
            "messages": messages,
            "output": "Usage: /plan <command and schedule description>",
        }

    request = parts[1]
    platform_system = platform.system()

    # Fail fast on platforms without a scheduler integration; previously an
    # unknown platform raised KeyError on prompts[platform_system] and left
    # `output` unbound at the final return.
    if platform_system not in ("Linux", "Darwin", "Windows"):
        return {
            "messages": messages,
            "output": f"/plan is not supported on this platform: {platform_system}",
        }

    # Create standard directories
    jobs_dir = os.path.expanduser("~/.npcsh/jobs")
    logs_dir = os.path.expanduser("~/.npcsh/logs")
    os.makedirs(jobs_dir, exist_ok=True)
    os.makedirs(logs_dir, exist_ok=True)

    # First part - just the request formatting
    linux_request = f"""Convert this scheduling request into a crontab-based script:
    Request: {request}

    """

    # Second part - the static prompt with examples and requirements
    linux_prompt_static = """Example for "record CPU usage every 10 minutes":
    {
    "script": "#!/bin/bash
    set -euo pipefail
    IFS=$'\\n\\t'

    LOGFILE=\"$HOME/.npcsh/logs/cpu_usage.log\"

    log_info() {
    echo \"[$(date '+%Y-%m-%d %H:%M:%S')] [INFO] $*\" >> \"$LOGFILE\"
    }

    log_error() {
    echo \"[$(date '+%Y-%m-%d %H:%M:%S')] [ERROR] $*\" >> \"$LOGFILE\"
    }

    record_cpu() {
    local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
    local cpu_usage=$(top -bn1 | grep 'Cpu(s)' | awk '{print $2}')
    log_info \"CPU Usage: $cpu_usage%\"
    }

    record_cpu",
    "schedule": "*/10 * * * *",
    "description": "Record CPU usage every 10 minutes",
    "name": "record_cpu_usage"
    }

    Your response must be valid json with the following keys:
    - script: The shell script content with proper functions and error handling. special characters must be escaped to ensure python json.loads will work correctly.
    - schedule: Crontab expression (5 fields: minute hour day month weekday)
    - description: A human readable description
    - name: A unique name for the job

    Do not include any additional markdown formatting in your response or leading ```json tags."""

    mac_request = f"""Convert this scheduling request into a launchd-compatible script:
    Request: {request}

    """

    mac_prompt_static = """Example for "record CPU usage every 10 minutes":
    {
    "script": "#!/bin/bash
    set -euo pipefail
    IFS=$'\\n\\t'

    LOGFILE=\"$HOME/.npcsh/logs/cpu_usage.log\"

    log_info() {
    echo \"[$(date '+%Y-%m-%d %H:%M:%S')] [INFO] $*\" >> \"$LOGFILE\"
    }

    log_error() {
    echo \"[$(date '+%Y-%m-%d %H:%M:%S')] [ERROR] $*\" >> \"$LOGFILE\"
    }

    record_cpu() {
    local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
    local cpu_usage=$(top -l 1 | grep 'CPU usage' | awk '{print $3}' | tr -d '%')
    log_info \"CPU Usage: $cpu_usage%\"
    }

    record_cpu",
    "schedule": "600",
    "description": "Record CPU usage every 10 minutes",
    "name": "record_cpu_usage"
    }

    Your response must be valid json with the following keys:
    - script: The shell script content with proper functions and error handling. special characters must be escaped to ensure python json.loads will work correctly.
    - schedule: Interval in seconds (e.g. 600 for 10 minutes)
    - description: A human readable description
    - name: A unique name for the job

    Do not include any additional markdown formatting in your response or leading ```json tags."""

    windows_request = f"""Convert this scheduling request into a PowerShell script with Task Scheduler parameters:
    Request: {request}

    """

    windows_prompt_static = """Example for "record CPU usage every 10 minutes":
    {
    "script": "$ErrorActionPreference = 'Stop'

    $LogFile = \"$HOME\\.npcsh\\logs\\cpu_usage.log\"

    function Write-Log {
    param($Message, $Type = 'INFO')
    $timestamp = Get-Date -Format 'yyyy-MM-dd HH:mm:ss'
    \"[$timestamp] [$Type] $Message\" | Out-File -FilePath $LogFile -Append
    }

    function Get-CpuUsage {
    try {
    $cpu = (Get-Counter '\\Processor(_Total)\\% Processor Time').CounterSamples.CookedValue
    Write-Log \"CPU Usage: $($cpu)%\"
    } catch {
    Write-Log $_.Exception.Message 'ERROR'
    throw
    }
    }

    Get-CpuUsage",
    "schedule": "/sc minute /mo 10",
    "description": "Record CPU usage every 10 minutes",
    "name": "record_cpu_usage"
    }

    Your response must be valid json with the following keys:
    - script: The PowerShell script content with proper functions and error handling. special characters must be escaped to ensure python json.loads will work correctly.
    - schedule: Task Scheduler parameters (e.g. /sc minute /mo 10)
    - description: A human readable description
    - name: A unique name for the job

    Do not include any additional markdown formatting in your response or leading ```json tags."""

    prompts = {
        "Linux": linux_request + linux_prompt_static,
        "Darwin": mac_request + mac_prompt_static,
        "Windows": windows_request + windows_prompt_static,
    }

    prompt = prompts[platform_system]
    response = get_llm_response(
        prompt, npc=npc, model=model, provider=provider, format="json"
    )
    schedule_info = response.get("response")
    print("Received schedule info:", schedule_info)

    # The LLM may return malformed output; bail out with a readable error
    # instead of raising TypeError/KeyError on the lookups below.
    required_keys = ("script", "schedule", "description", "name")
    if not isinstance(schedule_info, dict) or any(
        key not in schedule_info for key in required_keys
    ):
        return {
            "messages": messages,
            "output": f"Could not parse a valid job definition from the LLM response: {schedule_info}",
        }

    job_name = f"job_{schedule_info['name']}"

    if platform_system == "Windows":
        script_path = os.path.join(jobs_dir, f"{job_name}.ps1")
    else:
        script_path = os.path.join(jobs_dir, f"{job_name}.sh")

    log_path = os.path.join(logs_dir, f"{job_name}.log")

    # Write the script
    with open(script_path, "w") as f:
        f.write(schedule_info["script"])
    os.chmod(script_path, 0o755)

    if platform_system == "Linux":
        try:
            current_crontab = subprocess.check_output(["crontab", "-l"], text=True)
        except subprocess.CalledProcessError:
            # `crontab -l` fails when the user has no crontab yet.
            current_crontab = ""

        crontab_line = f"{schedule_info['schedule']} {script_path} >> {log_path} 2>&1"
        new_crontab = current_crontab.strip() + "\n" + crontab_line + "\n"

        with tempfile.NamedTemporaryFile(mode="w") as tmp:
            tmp.write(new_crontab)
            tmp.flush()
            subprocess.run(["crontab", tmp.name], check=True)

        output = f"""Job created successfully:
    - Description: {schedule_info['description']}
    - Schedule: {schedule_info['schedule']}
    - Script: {script_path}
    - Log: {log_path}
    - Crontab entry: {crontab_line}"""

    elif platform_system == "Darwin":
        plist_dir = os.path.expanduser("~/Library/LaunchAgents")
        os.makedirs(plist_dir, exist_ok=True)
        plist_path = os.path.join(plist_dir, f"com.npcsh.{job_name}.plist")

        plist_content = f"""<?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
    <plist version="1.0">
    <dict>
    <key>Label</key>
    <string>com.npcsh.{job_name}</string>
    <key>ProgramArguments</key>
    <array>
    <string>{script_path}</string>
    </array>
    <key>StartInterval</key>
    <integer>{schedule_info['schedule']}</integer>
    <key>StandardOutPath</key>
    <string>{log_path}</string>
    <key>StandardErrorPath</key>
    <string>{log_path}</string>
    <key>RunAtLoad</key>
    <true/>
    </dict>
    </plist>"""

        with open(plist_path, "w") as f:
            f.write(plist_content)

        # Unload first (check=False: it fails harmlessly if not loaded yet),
        # then load the fresh definition.
        subprocess.run(["launchctl", "unload", plist_path], check=False)
        subprocess.run(["launchctl", "load", plist_path], check=True)

        output = f"""Job created successfully:
    - Description: {schedule_info['description']}
    - Schedule: Every {schedule_info['schedule']} seconds
    - Script: {script_path}
    - Log: {log_path}
    - Launchd plist: {plist_path}"""

    else:  # platform_system == "Windows", guaranteed by the guard above
        task_name = f"NPCSH_{job_name}"

        # Parse schedule_info['schedule'] into individual parameters
        schedule_params = schedule_info["schedule"].split()

        cmd = (
            [
                "schtasks",
                "/create",
                "/tn",
                task_name,
                "/tr",
                f"powershell -NoProfile -ExecutionPolicy Bypass -File {script_path}",
            ]
            + schedule_params
            + ["/f"]
        )  # /f forces creation if task exists

        subprocess.run(cmd, check=True)

        output = f"""Job created successfully:
    - Description: {schedule_info['description']}
    - Schedule: {schedule_info['schedule']}
    - Script: {script_path}
    - Log: {log_path}
    - Task name: {task_name}"""

    return {"messages": messages, "output": output}
1355
+
1356
+
1357
def execute_trigger_command(
    command, npc=None, model=None, provider=None, messages=None, api_url=None
):
    """Create an event-monitoring daemon from a natural-language request.

    The text after ``/trigger`` is sent to the LLM, which must answer with
    JSON describing a monitoring script, a name, and a description.  The
    script is written under ``~/.npcsh/triggers`` and installed as a
    long-running service: a systemd user service on Linux, a KeepAlive
    launchd agent on macOS (Darwin), an on-start scheduled task on Windows.

    Args:
        command: Raw slash command, e.g. ``/trigger move PDFs from Downloads``.
        npc, model, provider: Forwarded to ``get_llm_response``.
        messages: Conversation history, passed through unchanged.
        api_url: Accepted for interface parity with sibling handlers; unused.

    Returns:
        dict with ``messages`` and a human-readable ``output`` summary.
    """
    parts = command.split(maxsplit=1)
    if len(parts) < 2:
        return {
            "messages": messages,
            "output": "Usage: /trigger <trigger condition and action description>",
        }

    request = parts[1]
    platform_system = platform.system()

    # Fail fast on platforms without a daemon integration; previously an
    # unknown platform raised KeyError on prompts[platform_system] and left
    # `output` unbound at the final return.
    if platform_system not in ("Linux", "Darwin", "Windows"):
        return {
            "messages": messages,
            "output": f"/trigger is not supported on this platform: {platform_system}",
        }

    linux_request = f"""Convert this trigger request into a single event-monitoring daemon script:
    Request: {request}

    """

    linux_prompt_static = """Example for "Move PDFs from Downloads to Documents/PDFs":
    {
    "script": "#!/bin/bash\\nset -euo pipefail\\nIFS=$'\\n\\t'\\n\\nLOGFILE=\\\"$HOME/.npcsh/logs/pdf_mover.log\\\"\\nSOURCE=\\\"$HOME/Downloads\\\"\\nTARGET=\\\"$HOME/Documents/PDFs\\\"\\n\\nlog_info() {\\n echo \\\"[$(date '+%Y-%m-%d %H:%M:%S')] [INFO] $*\\\" >> \\\"$LOGFILE\\\"\\n}\\n\\nlog_error() {\\n echo \\\"[$(date '+%Y-%m-%d %H:%M:%S')] [ERROR] $*\\\" >> \\\"$LOGFILE\\\"\\n}\\n\\ninotifywait -m -q -e create --format '%w%f' \\\"$SOURCE\\\" | while read filepath; do\\n if [[ \\\"$filepath\\\" =~ \\\\.pdf$ ]]; then\\n mv \\\"$filepath\\\" \\\"$TARGET/\\\" && log_info \\\"Moved $filepath to $TARGET\\\" || log_error \\\"Failed to move $filepath\\\"\\n fi\\ndone",
    "name": "pdf_mover",
    "description": "Move PDF files from Downloads to Documents/PDFs folder"
    }

    The script MUST:
    - Use inotifywait -m -q -e create --format '%w%f' to get full paths
    - Double quote ALL file operations: "$SOURCE/$FILE"
    - Use $HOME for absolute paths
    - Echo both success and failure messages to log

    Your response must be valid json with the following keys:
    - script: The shell script content with proper functions and error handling
    - name: A unique name for the trigger
    - description: A human readable description

    Do not include any additional markdown formatting in your response."""

    mac_request = f"""Convert this trigger request into a single event-monitoring daemon script:
    Request: {request}

    """

    mac_prompt_static = """Example for "Move PDFs from Downloads to Documents/PDFs":
    {
    "script": "#!/bin/bash\\nset -euo pipefail\\nIFS=$'\\n\\t'\\n\\nLOGFILE=\\\"$HOME/.npcsh/logs/pdf_mover.log\\\"\\nSOURCE=\\\"$HOME/Downloads\\\"\\nTARGET=\\\"$HOME/Documents/PDFs\\\"\\n\\nlog_info() {\\n echo \\\"[$(date '+%Y-%m-%d %H:%M:%S')] [INFO] $*\\\" >> \\\"$LOGFILE\\\"\\n}\\n\\nlog_error() {\\n echo \\\"[$(date '+%Y-%m-%d %H:%M:%S')] [ERROR] $*\\\" >> \\\"$LOGFILE\\\"\\n}\\n\\nfswatch -0 -r -e '.*' --event Created --format '%p' \\\"$SOURCE\\\" | while read -d '' filepath; do\\n if [[ \\\"$filepath\\\" =~ \\\\.pdf$ ]]; then\\n mv \\\"$filepath\\\" \\\"$TARGET/\\\" && log_info \\\"Moved $filepath to $TARGET\\\" || log_error \\\"Failed to move $filepath\\\"\\n fi\\ndone",
    "name": "pdf_mover",
    "description": "Move PDF files from Downloads to Documents/PDFs folder"
    }

    The script MUST:
    - Use fswatch -0 -r -e '.*' --event Created --format '%p' to get full paths
    - Double quote ALL file operations: "$SOURCE/$FILE"
    - Use $HOME for absolute paths
    - Echo both success and failure messages to log

    Your response must be valid json with the following keys:
    - script: The shell script content with proper functions and error handling
    - name: A unique name for the trigger
    - description: A human readable description

    Do not include any additional markdown formatting in your response."""

    windows_request = f"""Convert this trigger request into a single event-monitoring daemon script:
    Request: {request}

    """

    windows_prompt_static = """Example for "Move PDFs from Downloads to Documents/PDFs":
    {
    "script": "$ErrorActionPreference = 'Stop'\\n\\n$LogFile = \\\"$HOME\\.npcsh\\logs\\pdf_mover.log\\\"\\n$Source = \\\"$HOME\\Downloads\\\"\\n$Target = \\\"$HOME\\Documents\\PDFs\\\"\\n\\nfunction Write-Log {\\n param($Message, $Type = 'INFO')\\n $timestamp = Get-Date -Format 'yyyy-MM-dd HH:mm:ss'\\n \\\"[$timestamp] [$Type] $Message\\\" | Out-File -FilePath $LogFile -Append\\n}\\n\\n$watcher = New-Object System.IO.FileSystemWatcher\\n$watcher.Path = $Source\\n$watcher.Filter = \\\"*.pdf\\\"\\n$watcher.IncludeSubdirectories = $true\\n$watcher.EnableRaisingEvents = $true\\n\\n$action = {\\n $path = $Event.SourceEventArgs.FullPath\\n try {\\n Move-Item -Path $path -Destination $Target\\n Write-Log \\\"Moved $path to $Target\\\"\\n } catch {\\n Write-Log $_.Exception.Message 'ERROR'\\n }\\n}\\n\\nRegister-ObjectEvent $watcher 'Created' -Action $action\\n\\nwhile ($true) { Start-Sleep 1 }",
    "name": "pdf_mover",
    "description": "Move PDF files from Downloads to Documents/PDFs folder"
    }

    The script MUST:
    - Use FileSystemWatcher for monitoring
    - Double quote ALL file operations: "$Source\\$File"
    - Use $HOME for absolute paths
    - Echo both success and failure messages to log

    Your response must be valid json with the following keys:
    - script: The PowerShell script content with proper functions and error handling
    - name: A unique name for the trigger
    - description: A human readable description

    Do not include any additional markdown formatting in your response."""

    prompts = {
        "Linux": linux_request + linux_prompt_static,
        "Darwin": mac_request + mac_prompt_static,
        "Windows": windows_request + windows_prompt_static,
    }

    prompt = prompts[platform_system]
    response = get_llm_response(
        prompt, npc=npc, model=model, provider=provider, format="json"
    )
    trigger_info = response.get("response")
    print("Trigger info:", trigger_info)

    # The LLM may return malformed output; bail out with a readable error
    # instead of raising TypeError/KeyError on the lookups below.
    required_keys = ("script", "name", "description")
    if not isinstance(trigger_info, dict) or any(
        key not in trigger_info for key in required_keys
    ):
        return {
            "messages": messages,
            "output": f"Could not parse a valid trigger definition from the LLM response: {trigger_info}",
        }

    triggers_dir = os.path.expanduser("~/.npcsh/triggers")
    logs_dir = os.path.expanduser("~/.npcsh/logs")
    os.makedirs(triggers_dir, exist_ok=True)
    os.makedirs(logs_dir, exist_ok=True)

    trigger_name = f"trigger_{trigger_info['name']}"
    log_path = os.path.join(logs_dir, f"{trigger_name}.log")

    if platform_system == "Linux":
        script_path = os.path.join(triggers_dir, f"{trigger_name}.sh")

        with open(script_path, "w") as f:
            f.write(trigger_info["script"])
        os.chmod(script_path, 0o755)

        # Install as a systemd *user* service so it survives logouts and
        # restarts automatically.
        service_dir = os.path.expanduser("~/.config/systemd/user")
        os.makedirs(service_dir, exist_ok=True)
        service_path = os.path.join(service_dir, f"npcsh-{trigger_name}.service")

        service_content = f"""[Unit]
    Description={trigger_info['description']}
    After=network.target

    [Service]
    Type=simple
    ExecStart={script_path}
    Restart=always
    StandardOutput=append:{log_path}
    StandardError=append:{log_path}

    [Install]
    WantedBy=default.target
    """

        with open(service_path, "w") as f:
            f.write(service_content)

        # Best-effort activation: status is reported back to the user below.
        subprocess.run(["systemctl", "--user", "daemon-reload"])
        subprocess.run(
            ["systemctl", "--user", "enable", f"npcsh-{trigger_name}.service"]
        )
        subprocess.run(
            ["systemctl", "--user", "start", f"npcsh-{trigger_name}.service"]
        )

        status = subprocess.run(
            ["systemctl", "--user", "status", f"npcsh-{trigger_name}.service"],
            capture_output=True,
            text=True,
        )

        output = f"""Trigger service created:
    - Description: {trigger_info['description']}
    - Script: {script_path}
    - Service: {service_path}
    - Log: {log_path}

    Status:
    {status.stdout}"""

    elif platform_system == "Darwin":
        script_path = os.path.join(triggers_dir, f"{trigger_name}.sh")

        with open(script_path, "w") as f:
            f.write(trigger_info["script"])
        os.chmod(script_path, 0o755)

        plist_dir = os.path.expanduser("~/Library/LaunchAgents")
        os.makedirs(plist_dir, exist_ok=True)
        plist_path = os.path.join(plist_dir, f"com.npcsh.{trigger_name}.plist")

        # KeepAlive makes launchd restart the watcher if it ever exits.
        plist_content = f"""<?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
    <plist version="1.0">
    <dict>
    <key>Label</key>
    <string>com.npcsh.{trigger_name}</string>
    <key>ProgramArguments</key>
    <array>
    <string>{script_path}</string>
    </array>
    <key>RunAtLoad</key>
    <true/>
    <key>KeepAlive</key>
    <true/>
    <key>StandardOutPath</key>
    <string>{log_path}</string>
    <key>StandardErrorPath</key>
    <string>{log_path}</string>
    </dict>
    </plist>"""

        with open(plist_path, "w") as f:
            f.write(plist_content)

        # Unload first (check=False: harmless failure if not loaded yet).
        subprocess.run(["launchctl", "unload", plist_path], check=False)
        subprocess.run(["launchctl", "load", plist_path], check=True)

        output = f"""Trigger service created:
    - Description: {trigger_info['description']}
    - Script: {script_path}
    - Launchd plist: {plist_path}
    - Log: {log_path}"""

    else:  # platform_system == "Windows", guaranteed by the guard above
        script_path = os.path.join(triggers_dir, f"{trigger_name}.ps1")

        with open(script_path, "w") as f:
            f.write(trigger_info["script"])

        task_name = f"NPCSH_{trigger_name}"

        # Create a scheduled task that runs at startup
        cmd = [
            "schtasks",
            "/create",
            "/tn",
            task_name,
            "/tr",
            f"powershell -NoProfile -ExecutionPolicy Bypass -File {script_path}",
            "/sc",
            "onstart",
            "/ru",
            "System",
            "/f",  # Force creation
        ]

        subprocess.run(cmd, check=True)

        # Start the task immediately
        subprocess.run(["schtasks", "/run", "/tn", task_name])

        output = f"""Trigger service created:
    - Description: {trigger_info['description']}
    - Script: {script_path}
    - Task name: {task_name}
    - Log: {log_path}"""

    return {"messages": messages, "output": output}
1597
+
1598
+
1093
1599
  def enter_wander_mode(args, messages, npc_compiler, npc, model, provider):
1094
1600
  """
1095
1601
  Wander mode is an exploratory mode where an LLM is given a task and they begin to wander through space.
@@ -1334,6 +1840,25 @@ def execute_slash_command(
1334
1840
  print(output)
1335
1841
  elif command_name == "tools":
1336
1842
  return {"messages": messages, "output": print_tools(tools)}
1843
+ elif command_name == "plan":
1844
+ return execute_plan_command(
1845
+ command,
1846
+ npc=npc,
1847
+ model=model,
1848
+ provider=provider,
1849
+ api_url=api_url,
1850
+ messages=messages,
1851
+ )
1852
+ elif command_name == "trigger":
1853
+ return execute_trigger_command(
1854
+ command,
1855
+ npc=npc,
1856
+ model=model,
1857
+ provider=provider,
1858
+ api_url=api_url,
1859
+ messages=messages,
1860
+ )
1861
+
1337
1862
  elif command_name == "plonk":
1338
1863
  request = " ".join(args)
1339
1864
  plonk_call = plonk(
@@ -2021,60 +2546,62 @@ def execute_command(
2021
2546
  except AttributeError:
2022
2547
  print(output)
2023
2548
 
2024
- piped_outputs.append(f'"{output}"')
2549
+ piped_outputs.append(f'"{output}"')
2025
2550
 
2026
- try:
2027
- # Prepare text to embed (both command and response)
2028
- texts_to_embed = [command, str(output) if output else ""]
2551
+ try:
2552
+ # Prepare text to embed (both command and response)
2553
+ texts_to_embed = [command, str(output) if output else ""]
2029
2554
 
2030
- # Generate embeddings
2031
- embeddings = get_embeddings(
2032
- texts_to_embed,
2033
- )
2555
+ # Generate embeddings
2556
+ embeddings = get_embeddings(
2557
+ texts_to_embed,
2558
+ )
2034
2559
 
2035
- # Prepare metadata
2036
- metadata = [
2037
- {
2038
- "type": "command",
2039
- "timestamp": datetime.datetime.now().isoformat(),
2040
- "path": os.getcwd(),
2041
- "npc": npc.name if npc else None,
2042
- "conversation_id": conversation_id,
2043
- },
2044
- {
2045
- "type": "response",
2046
- "timestamp": datetime.datetime.now().isoformat(),
2047
- "path": os.getcwd(),
2048
- "npc": npc.name if npc else None,
2049
- "conversation_id": conversation_id,
2050
- },
2051
- ]
2052
- embedding_model = os.environ.get("NPCSH_EMBEDDING_MODEL")
2053
- embedding_provider = os.environ.get("NPCSH_EMBEDDING_PROVIDER")
2054
- collection_name = f"{embedding_provider}_{embedding_model}_embeddings"
2560
+ # Prepare metadata
2561
+ metadata = [
2562
+ {
2563
+ "type": "command",
2564
+ "timestamp": datetime.datetime.now().isoformat(),
2565
+ "path": os.getcwd(),
2566
+ "npc": npc.name if npc else None,
2567
+ "conversation_id": conversation_id,
2568
+ },
2569
+ {
2570
+ "type": "response",
2571
+ "timestamp": datetime.datetime.now().isoformat(),
2572
+ "path": os.getcwd(),
2573
+ "npc": npc.name if npc else None,
2574
+ "conversation_id": conversation_id,
2575
+ },
2576
+ ]
2577
+ embedding_model = os.environ.get("NPCSH_EMBEDDING_MODEL")
2578
+ embedding_provider = os.environ.get("NPCSH_EMBEDDING_PROVIDER")
2579
+ collection_name = (
2580
+ f"{embedding_provider}_{embedding_model}_embeddings"
2581
+ )
2055
2582
 
2056
- try:
2057
- collection = chroma_client.get_collection(collection_name)
2058
- except Exception as e:
2059
- print(f"Warning: Failed to get collection: {str(e)}")
2060
- print("Creating new collection...")
2061
- collection = chroma_client.create_collection(collection_name)
2062
- date_str = datetime.datetime.now().isoformat()
2063
- # print(date_str)
2064
-
2065
- # Add to collection
2066
- current_ids = [f"cmd_{date_str}", f"resp_{date_str}"]
2067
- collection.add(
2068
- embeddings=embeddings,
2069
- documents=texts_to_embed, # Adjust as needed
2070
- metadatas=metadata, # Adjust as needed
2071
- ids=current_ids,
2072
- )
2583
+ try:
2584
+ collection = chroma_client.get_collection(collection_name)
2585
+ except Exception as e:
2586
+ print(f"Warning: Failed to get collection: {str(e)}")
2587
+ print("Creating new collection...")
2588
+ collection = chroma_client.create_collection(collection_name)
2589
+ date_str = datetime.datetime.now().isoformat()
2590
+ # print(date_str)
2591
+
2592
+ # Add to collection
2593
+ current_ids = [f"cmd_{date_str}", f"resp_{date_str}"]
2594
+ collection.add(
2595
+ embeddings=embeddings,
2596
+ documents=texts_to_embed, # Adjust as needed
2597
+ metadatas=metadata, # Adjust as needed
2598
+ ids=current_ids,
2599
+ )
2073
2600
 
2074
- # print("Stored embeddings.")
2075
- # print("collection", collection)
2076
- except Exception as e:
2077
- print(f"Warning: Failed to store embeddings: {str(e)}")
2601
+ # print("Stored embeddings.")
2602
+ # print("collection", collection)
2603
+ except Exception as e:
2604
+ print(f"Warning: Failed to store embeddings: {str(e)}")
2078
2605
 
2079
2606
  # return following
2080
2607
  # print(current_npc)