npcsh 1.0.13__py3-none-any.whl → 1.0.16__py3-none-any.whl
This diff compares the contents of two publicly released package versions as they appear in their public registry and is provided for informational purposes only.
- npcsh/_state.py +23 -41
- npcsh/npc.py +124 -98
- npcsh/npcsh.py +124 -77
- npcsh/routes.py +16 -28
- npcsh/yap.py +115 -106
- {npcsh-1.0.13.dist-info → npcsh-1.0.16.dist-info}/METADATA +108 -58
- {npcsh-1.0.13.dist-info → npcsh-1.0.16.dist-info}/RECORD +11 -11
- {npcsh-1.0.13.dist-info → npcsh-1.0.16.dist-info}/WHEEL +0 -0
- {npcsh-1.0.13.dist-info → npcsh-1.0.16.dist-info}/entry_points.txt +0 -0
- {npcsh-1.0.13.dist-info → npcsh-1.0.16.dist-info}/licenses/LICENSE +0 -0
- {npcsh-1.0.13.dist-info → npcsh-1.0.16.dist-info}/top_level.txt +0 -0
npcsh/_state.py
CHANGED
@@ -1,45 +1,35 @@
 
 from colorama import Fore, Back, Style
-
-
-from datetime import datetime
-from dotenv import load_dotenv
-
-import re
+from dataclasses import dataclass, field
+import filecmp
 import os
-
-
-
-
-from typing import Dict, List, Any
-import subprocess
-import termios
-import tty
+import platform
 import pty
+import re
 import select
+import shutil
 import signal
-import time
-import os
-import re
 import sqlite3
-
+import subprocess
+import sys
+from termcolor import colored
+import termios
+import time
+from typing import Dict, List, Any, Tuple, Union, Optional
+import tty
 import logging
 import textwrap
 from termcolor import colored
-import
-
-
+from npcpy.memory.command_history import (
+    start_new_conversation,
+)
+from npcpy.npc_compiler import NPC, Team
 
 def get_npc_path(npc_name: str, db_path: str) -> str:
-    # First, check in project npc_team directory
     project_npc_team_dir = os.path.abspath("./npc_team")
     project_npc_path = os.path.join(project_npc_team_dir, f"{npc_name}.npc")
-
-    # Then, check in global npc_team directory
     user_npc_team_dir = os.path.expanduser("~/.npcsh/npc_team")
     global_npc_path = os.path.join(user_npc_team_dir, f"{npc_name}.npc")
-
-    # Check database for compiled NPCs
     try:
         with sqlite3.connect(db_path) as conn:
             cursor = conn.cursor()
@@ -105,9 +95,6 @@ def initialize_base_npcs_if_needed(db_path: str) -> None:
     package_dir = os.path.dirname(__file__)
     package_npc_team_dir = os.path.join(package_dir, "npc_team")
 
-
-
-    # User's global npc_team directory
     user_npc_team_dir = os.path.expanduser("~/.npcsh/npc_team")
 
     user_jinxs_dir = os.path.join(user_npc_team_dir, "jinxs")
@@ -115,7 +102,7 @@ def initialize_base_npcs_if_needed(db_path: str) -> None:
     os.makedirs(user_npc_team_dir, exist_ok=True)
     os.makedirs(user_jinxs_dir, exist_ok=True)
     os.makedirs(user_templates_dir, exist_ok=True)
-
+
     for filename in os.listdir(package_npc_team_dir):
         if filename.endswith(".npc"):
             source_path = os.path.join(package_npc_team_dir, filename)
@@ -243,11 +230,11 @@ def ensure_npcshrc_exists() -> str:
         npcshrc.write("# NPCSH Configuration File\n")
         npcshrc.write("export NPCSH_INITIALIZED=0\n")
         npcshrc.write("export NPCSH_DEFAULT_MODE='agent'\n")
+        npcshrc.write("export NPCSH_BUILD_KG=1")
         npcshrc.write("export NPCSH_CHAT_PROVIDER='ollama'\n")
-        npcshrc.write("export NPCSH_CHAT_MODEL='
+        npcshrc.write("export NPCSH_CHAT_MODEL='gemma3:4b'\n")
         npcshrc.write("export NPCSH_REASONING_PROVIDER='ollama'\n")
         npcshrc.write("export NPCSH_REASONING_MODEL='deepseek-r1'\n")
-
         npcshrc.write("export NPCSH_EMBEDDING_PROVIDER='ollama'\n")
         npcshrc.write("export NPCSH_EMBEDDING_MODEL='nomic-embed-text'\n")
         npcshrc.write("export NPCSH_VISION_PROVIDER='ollama'\n")
@@ -1059,12 +1046,13 @@ NPCSH_REASONING_PROVIDER = os.environ.get("NPCSH_REASONING_PROVIDER", "ollama")
 NPCSH_STREAM_OUTPUT = eval(os.environ.get("NPCSH_STREAM_OUTPUT", "0")) == 1
 NPCSH_API_URL = os.environ.get("NPCSH_API_URL", None)
 NPCSH_SEARCH_PROVIDER = os.environ.get("NPCSH_SEARCH_PROVIDER", "duckduckgo")
-
+NPCSH_BUILD_KG = os.environ.get("NPCSH_BUILD_KG") == "1"
 READLINE_HISTORY_FILE = os.path.expanduser("~/.npcsh_history")
 
 
 
 def setup_readline() -> str:
+    import readline
     if readline is None:
         return None
     try:
@@ -1097,14 +1085,6 @@ def save_readline_history():
 
 
 
-
-from npcpy.memory.command_history import (
-    start_new_conversation,
-)
-from dataclasses import dataclass, field
-from typing import Optional, List, Dict, Any, Tuple, Union
-from npcpy.npc_compiler import NPC, Team
-import os
 @dataclass
 class ShellState:
     npc: Optional[Union[NPC, str]] = None
@@ -1126,6 +1106,7 @@ class ShellState:
     video_gen_model: str = NPCSH_VIDEO_GEN_MODEL
     video_gen_provider: str = NPCSH_VIDEO_GEN_PROVIDER
     current_mode: str = NPCSH_DEFAULT_MODE
+    build_kg: bool = NPCSH_BUILD_KG,
     api_key: Optional[str] = None
     api_url: Optional[str] = NPCSH_API_URL
     current_path: str = field(default_factory=os.getcwd)
@@ -1163,5 +1144,6 @@ initial_state = ShellState(
     image_gen_provider=NPCSH_IMAGE_GEN_PROVIDER,
     video_gen_model=NPCSH_VIDEO_GEN_MODEL,
     video_gen_provider=NPCSH_VIDEO_GEN_PROVIDER,
+    build_kg=NPCSH_BUILD_KG,
     api_url=NPCSH_API_URL,
 )
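The main functional change in `_state.py` is the new `NPCSH_BUILD_KG` setting: 1.0.16 writes `export NPCSH_BUILD_KG=1` into `~/.npcshrc` (note the write has no trailing `\n`, so it runs into the next export line), reads it back as a boolean module constant, and threads it into `ShellState` and `initial_state`. The snippet below is a minimal, self-contained sketch of that pattern, not the actual npcsh module; note also that the released field declaration `build_kg: bool = NPCSH_BUILD_KG,` carries a trailing comma, which would make the default a one-element tuple rather than a bool.

```python
import os
from dataclasses import dataclass, field

# Sketch of the pattern introduced in npcsh/_state.py 1.0.16; names match the diff,
# the rest of the real module is omitted.
NPCSH_BUILD_KG = os.environ.get("NPCSH_BUILD_KG") == "1"


@dataclass
class ShellState:
    # The released diff writes `build_kg: bool = NPCSH_BUILD_KG,` (trailing comma);
    # written without the comma, the default stays a plain bool.
    build_kg: bool = NPCSH_BUILD_KG
    current_path: str = field(default_factory=os.getcwd)


initial_state = ShellState(build_kg=NPCSH_BUILD_KG)

if __name__ == "__main__":
    # With `export NPCSH_BUILD_KG=1` in ~/.npcshrc the constant evaluates to True;
    # unset or any other value yields False.
    print(initial_state.build_kg)
```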
npcsh/npc.py
CHANGED
@@ -1,7 +1,6 @@
 import argparse
 import sys
 import os
-import sqlite3
 import traceback
 from typing import Optional
 
@@ -11,7 +10,8 @@ from npcsh._state import (
     NPCSH_API_URL,
     NPCSH_DB_PATH,
     NPCSH_STREAM_OUTPUT,
-
+    initial_state,
+)
 from npcpy.npc_sysenv import (
     print_and_process_stream_with_markdown,
     render_markdown,
@@ -19,8 +19,18 @@ from npcpy.npc_sysenv import (
 from npcpy.npc_compiler import NPC, Team
 from npcsh.routes import router
 from npcpy.llm_funcs import check_llm_command
+from sqlalchemy import create_engine
+
+# Import the key functions from npcsh
+from npcsh.npcsh import (
+    setup_shell,
+    execute_slash_command,
+    execute_command,
+    process_pipeline_command,
+)
 
 def load_npc_by_name(npc_name: str = "sibiji", db_path: str = NPCSH_DB_PATH) -> Optional[NPC]:
+    """Load NPC by name, with fallback logic matching npcsh"""
     if not npc_name:
         npc_name = "sibiji"
 
@@ -37,7 +47,7 @@ def load_npc_by_name(npc_name: str = "sibiji", db_path: str = NPCSH_DB_PATH) ->
 
     if chosen_path:
         try:
-            db_conn =
+            db_conn = create_engine(f'sqlite:///{NPCSH_DB_PATH}')
             npc = NPC(file=chosen_path, db_conn=db_conn)
             return npc
         except Exception as e:
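In 1.0.13 the `db_conn =` assignment inside `load_npc_by_name` was left incomplete; 1.0.16 completes it with a SQLAlchemy engine instead of a raw `sqlite3` connection. A short sketch of the same call pattern follows, assuming `npcpy` is installed, that a `.npc` file exists at the chosen path, and that `NPCSH_DB_PATH` points at the npcsh SQLite database; both paths below are hypothetical stand-ins, the real default comes from `npcsh._state`.

```python
import os
from sqlalchemy import create_engine
from npcpy.npc_compiler import NPC

# Hypothetical stand-ins for illustration only.
NPCSH_DB_PATH = os.path.expanduser("~/npcsh_history.db")
chosen_path = os.path.expanduser("~/.npcsh/npc_team/sibiji.npc")

# 1.0.16 hands NPC a SQLAlchemy engine rather than a sqlite3.Connection.
db_conn = create_engine(f"sqlite:///{NPCSH_DB_PATH}")
npc = NPC(file=chosen_path, db_conn=db_conn)
print(npc)
```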
@@ -64,34 +74,38 @@ def main():
         "-n", "--npc", help="Name of the NPC to use (default: sibiji)", type=str, default="sibiji"
     )
 
-    #
-
-    # First, get any arguments without parsing commands
+    # Parse arguments
     args, all_args = parser.parse_known_args()
     global_model = args.model
     global_provider = args.provider
 
-    # Check if the first argument is a known command
     is_valid_command = False
     command_name = None
-
-
-
-
+
+    if all_args:
+        first_arg = all_args[0]
+        if first_arg.startswith('/'):
+            is_valid_command = True
+            command_name = first_arg
+            all_args = all_args[1:]
+        elif first_arg in router.get_commands():
+            is_valid_command = True
+            command_name = '/' + first_arg
+            all_args = all_args[1:]
+
+
 
-    # Only set up subparsers if we have a valid command
     if is_valid_command:
         subparsers = parser.add_subparsers(dest="command", title="Available Commands",
                                            help="Run 'npc <command> --help' for command-specific help")
 
         for cmd_name, help_text in router.help_info.items():
-
             cmd_parser = subparsers.add_parser(cmd_name, help=help_text, add_help=False)
             cmd_parser.add_argument('command_args', nargs=argparse.REMAINDER,
                                     help='Arguments passed directly to the command handler')
 
         # Re-parse with command subparsers
-        args = parser.parse_args([command_name] + all_args)
+        args = parser.parse_args([command_name.lstrip('/')] + all_args)
         command_args = args.command_args if hasattr(args, 'command_args') else []
         unknown_args = []
     else:
@@ -104,67 +118,86 @@ def main():
         args.model = global_model
     if args.provider is None:
         args.provider = global_provider
-    # --- END OF FIX ---
-    npc_instance = load_npc_by_name(args.npc, NPCSH_DB_PATH)
-
-    effective_model = args.model or NPCSH_CHAT_MODEL
-    effective_provider = args.provider or NPCSH_CHAT_PROVIDER
-
 
+    # Use npcsh's setup_shell to get proper team and NPC setup
+    try:
+        command_history, team, forenpc_obj = setup_shell()
+    except Exception as e:
+        print(f"Warning: Could not set up full npcsh environment: {e}", file=sys.stderr)
+        print("Falling back to basic NPC loading...", file=sys.stderr)
+        team = None
+        forenpc_obj = load_npc_by_name(args.npc, NPCSH_DB_PATH)
+
+    # Determine which NPC to use
+    npc_instance = None
+    if team and args.npc in team.npcs:
+        npc_instance = team.npcs[args.npc]
+    elif team and args.npc == team.forenpc.name if team.forenpc else False:
+        npc_instance = team.forenpc
+    else:
+        npc_instance = load_npc_by_name(args.npc, NPCSH_DB_PATH)
 
-
+    if not npc_instance:
+        print(f"Error: Could not load NPC '{args.npc}'", file=sys.stderr)
+        sys.exit(1)
 
-    #
-    if is_valid_command:
-
-        if command_args:
-            i = 0
-            while i < len(command_args):
-                arg = command_args[i]
-                if arg.startswith("--"):
-                    param = arg[2:]  # Remove --
-                    if "=" in param:
-                        param_name, param_value = param.split("=", 1)
-                        extras[param_name] = param_value
-                        i += 1
-                    elif i + 1 < len(command_args) and not command_args[i+1].startswith("--"):
-                        extras[param] = command_args[i+1]
-                        i += 2
-                    else:
-                        extras[param] = True
-                        i += 1
-                else:
-                    i += 1
-
-        handler = router.get_route(command_name)
-        if not handler:
-            print(f"Error: Command '{command_name}' recognized but no handler found.", file=sys.stderr)
-            sys.exit(1)
-
-        full_command_str = command_name
-        if command_args:
-            full_command_str += " " + " ".join(command_args)
+    # Now check for jinxs if we haven't identified a command yet
+    if not is_valid_command and all_args:
+        first_arg = all_args[0]
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        # Check if first argument is a jinx name
+        jinx_found = False
+        if team and first_arg in team.jinxs_dict:
+            jinx_found = True
+        elif isinstance(npc_instance, NPC) and hasattr(npc_instance, 'jinxs_dict') and first_arg in npc_instance.jinxs_dict:
+            jinx_found = True
+
+        if jinx_found:
+            is_valid_command = True
+            command_name = '/' + first_arg
+            all_args = all_args[1:]
+
+    # Create a shell state object similar to npcsh
+    shell_state = initial_state
+    shell_state.npc = npc_instance
+    shell_state.team = team
+    shell_state.current_path = os.getcwd()
+    shell_state.stream_output = NPCSH_STREAM_OUTPUT
+
+    # Override model/provider if specified
+    effective_model = args.model or (npc_instance.model if npc_instance.model else NPCSH_CHAT_MODEL)
+    effective_provider = args.provider or (npc_instance.provider if npc_instance.provider else NPCSH_CHAT_PROVIDER)
+
+    # Update the NPC's model/provider for this session if overridden
+    if args.model:
+        npc_instance.model = effective_model
+    if args.provider:
+        npc_instance.provider = effective_provider
+
+    try:
+        if is_valid_command:
+            # Handle slash command using npcsh's execute_slash_command
+            full_command_str = command_name
+            if command_args:
+                full_command_str += " " + " ".join(command_args)
+
+            print(f"Executing command: {full_command_str}")
+
+            updated_state, result = execute_slash_command(
+                full_command_str,
+                stdin_input=None,
+                state=shell_state,
+                stream=NPCSH_STREAM_OUTPUT
+            )
 
+            # Process and display the result
             if isinstance(result, dict):
                 output = result.get("output") or result.get("response")
+                model_for_stream = result.get('model', effective_model)
+                provider_for_stream = result.get('provider', effective_provider)
 
                 if NPCSH_STREAM_OUTPUT and not isinstance(output, str):
-                    print_and_process_stream_with_markdown(output,
+                    print_and_process_stream_with_markdown(output, model_for_stream, provider_for_stream)
                 elif output is not None:
                     render_markdown(str(output))
             elif result is not None:
@@ -172,45 +205,38 @@ def main():
             else:
                 print(f"Command '{command_name}' executed.")
 
-
-
-
-            sys.exit(1)
-    else:
-        # Process as a prompt
-        prompt = " ".join(unknown_args)
+        else:
+            # Process as a regular prompt using npcsh's execution logic
+            prompt = " ".join(unknown_args)
 
-
-
-
-
+            if not prompt:
+                # If no prompt and no command, show help
+                parser.print_help()
+                sys.exit(1)
 
-
-
-
-
-
-            provider=effective_provider,
-            npc=npc_instance,
-            stream=NPCSH_STREAM_OUTPUT,
-            messages=[],
-            team=None,
-            api_url=NPCSH_API_URL,
-        )
+            print(f"Processing prompt: '{prompt}' with NPC: '{args.npc}'...")
+
+            # Use npcsh's execute_command but force it to chat mode for simple prompts
+            shell_state.current_mode = 'chat'
+            updated_state, result = execute_command(prompt, shell_state)
 
-
-
+            # Process and display the result
+            if isinstance(result, dict):
+                output = result.get("output")
+                model_for_stream = result.get('model', effective_model)
+                provider_for_stream = result.get('provider', effective_provider)
+
                 if NPCSH_STREAM_OUTPUT and hasattr(output, '__iter__') and not isinstance(output, (str, bytes, dict, list)):
-                    print_and_process_stream_with_markdown(output,
+                    print_and_process_stream_with_markdown(output, model_for_stream, provider_for_stream)
                 elif output is not None:
                     render_markdown(str(output))
-            elif
-                render_markdown(str(
+            elif result is not None:
+                render_markdown(str(result))
 
-
-
-
-
+    except Exception as e:
+        print(f"Error executing command: {e}", file=sys.stderr)
+        traceback.print_exc()
+        sys.exit(1)
 
 if __name__ == "__main__":
     main()