autosh 0.0.7.tar.gz → 0.0.9.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {autosh-0.0.7 → autosh-0.0.9}/PKG-INFO +4 -2
- {autosh-0.0.7 → autosh-0.0.9}/README.md +2 -0
- {autosh-0.0.7 → autosh-0.0.9}/autosh/config.py +2 -1
- {autosh-0.0.7 → autosh-0.0.9}/autosh/main.py +15 -0
- {autosh-0.0.7 → autosh-0.0.9}/autosh/plugins/__init__.py +0 -51
- {autosh-0.0.7 → autosh-0.0.9}/autosh/session.py +62 -61
- {autosh-0.0.7 → autosh-0.0.9}/pyproject.toml +2 -2
- {autosh-0.0.7 → autosh-0.0.9}/.gitignore +0 -0
- {autosh-0.0.7 → autosh-0.0.9}/LICENSE +0 -0
- {autosh-0.0.7 → autosh-0.0.9}/autosh/__init__.py +0 -0
- {autosh-0.0.7 → autosh-0.0.9}/autosh/config-template.toml +0 -0
- {autosh-0.0.7 → autosh-0.0.9}/autosh/plugins/calc.py +0 -0
- {autosh-0.0.7 → autosh-0.0.9}/autosh/plugins/cli.py +0 -0
- {autosh-0.0.7 → autosh-0.0.9}/autosh/plugins/clock.py +0 -0
- {autosh-0.0.7 → autosh-0.0.9}/autosh/plugins/code.py +0 -0
- {autosh-0.0.7 → autosh-0.0.9}/autosh/plugins/search.py +0 -0
- {autosh-0.0.7 → autosh-0.0.9}/autosh/plugins/web.py +0 -0
--- autosh-0.0.7/PKG-INFO
+++ autosh-0.0.9/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: autosh
-Version: 0.0.7
+Version: 0.0.9
 Summary: The AI-powered, noob-friendly interactive shell
 Author-email: Wenyu Zhao <wenyuzhaox@gmail.com>
 License-Expression: MIT
@@ -10,7 +10,7 @@ Requires-Python: >=3.12
 Requires-Dist: agentia>=0.0.8
 Requires-Dist: asyncio>=3.4.3
 Requires-Dist: markdownify>=1.1.0
-Requires-Dist: neongrid>=0.0.
+Requires-Dist: neongrid>=0.0.3
 Requires-Dist: prompt-toolkit>=3.0.51
 Requires-Dist: pydantic>=2.11.3
 Requires-Dist: python-dotenv>=1.1.0
@@ -22,6 +22,8 @@ Description-Content-Type: text/markdown
 
 # `autosh` - The AI-powered, noob-friendly interactive shell
 
+<!-- https://ezgif.com/video-to-gif -->
+
 # Getting Started
 
 ## Install
--- autosh-0.0.7/autosh/config.py
+++ autosh-0.0.9/autosh/config.py
@@ -40,7 +40,7 @@ class Config(BaseModel):
         description="The banner for the REPL.",
     )
     repl_prompt: str = Field(
-        default="
+        default="{short_cwd}> ",
         description="The prompt for the REPL user input.",
     )
 
@@ -77,6 +77,7 @@ class CLIOptions(BaseModel):
     yes: bool = False
     quiet: bool = False
     think: bool = False
+    start_repl_after_prompt: bool = False
 
     prompt: str | None = None
     """The prompt to execute"""
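The new `repl_prompt` default is a format template: `{short_cwd}` is presumably substituted with an abbreviated working directory before the prompt is shown (the session code computes the cwd in `__get_input_prompt`). A minimal sketch of how such a template could be expanded, assuming a hypothetical helper and the usual `~` abbreviation for the home directory (illustrative only, not autosh's actual implementation):

```python
# Hypothetical illustration of expanding a "{short_cwd}> " prompt template.
# render_repl_prompt() and the "~" abbreviation are assumptions for this sketch.
from pathlib import Path


def render_repl_prompt(template: str) -> str:
    cwd = str(Path.cwd())
    home = str(Path.home())
    # Abbreviate the home directory to "~", as interactive shells usually do.
    short_cwd = "~" + cwd[len(home):] if cwd.startswith(home) else cwd
    return template.format(short_cwd=short_cwd)


print(render_repl_prompt("{short_cwd}> "))  # e.g. "~/projects/autosh> "
```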
--- autosh-0.0.7/autosh/main.py
+++ autosh-0.0.9/autosh/main.py
@@ -39,6 +39,14 @@ async def start_session(prompt: str | None, args: list[str]):
             file=sys.stderr,
         )
         sys.exit(1)
+    if CLI_OPTIONS.start_repl_after_prompt:
+        if piped_stdin or piped_stdout:
+            rich.print(
+                "[bold red]Error:[/bold red] [red]--repl is only available when not using piped stdin or stdout.[/red]",
+                file=sys.stderr,
+            )
+            sys.exit(1)
+
     if prompt:
         # No piped stdin, just run the prompt
         if Path(prompt).is_file():
@@ -73,6 +81,11 @@ def print_help():
             f"The LLM model to use. [dim]Default: {CONFIG.model} ({CONFIG.think_model} for reasoning).[/dim]",
         ],
         ["--think", "", "Use the reasoning models to think more before operating."],
+        [
+            "--repl",
+            "",
+            "Start a REPL session after executing the prompt or the script.",
+        ],
         ["--help", "-h", "Show this message and exit."],
     ]
 
@@ -117,6 +130,7 @@ def parse_args() -> tuple[str | None, list[str]]:
     p.add_argument("--quiet", "-q", action="store_true")
     p.add_argument("--think", action="store_true")
    p.add_argument("--model", "-m", type=str, default=None)
+    p.add_argument("--repl", action="store_true")
     p.add_argument("PROMPT_OR_FILE", nargs="?", default=None)
     p.add_argument("ARGS", nargs=argparse.REMAINDER)
 
@@ -133,6 +147,7 @@ def parse_args() -> tuple[str | None, list[str]]:
 
     CLI_OPTIONS.yes = args.yes
     CLI_OPTIONS.quiet = args.quiet
+    CLI_OPTIONS.start_repl_after_prompt = args.repl
 
     if args.model:
         if args.think:
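Taken together, the main.py changes wire the new `--repl` flag through argparse into `CLI_OPTIONS.start_repl_after_prompt` and reject it when stdin or stdout is piped. A self-contained sketch of the same pattern, with an illustrative `Options` class standing in for autosh's `CLIOptions` (names are assumptions, not the actual module):

```python
# Simplified sketch of the flow added above: parse a --repl flag, store it on a
# global options object, and refuse the flag when stdin/stdout are not TTYs.
import argparse
import sys
from dataclasses import dataclass


@dataclass
class Options:
    start_repl_after_prompt: bool = False


OPTIONS = Options()


def parse(argv: list[str]) -> None:
    p = argparse.ArgumentParser(add_help=False)
    p.add_argument("--repl", action="store_true")
    args, _ = p.parse_known_args(argv)
    OPTIONS.start_repl_after_prompt = args.repl


parse(sys.argv[1:])
if OPTIONS.start_repl_after_prompt and not (sys.stdin.isatty() and sys.stdout.isatty()):
    print(
        "Error: --repl is only available when not using piped stdin or stdout.",
        file=sys.stderr,
    )
    sys.exit(1)
```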
--- autosh-0.0.7/autosh/plugins/__init__.py
+++ autosh-0.0.9/autosh/plugins/__init__.py
@@ -2,7 +2,6 @@ from dataclasses import dataclass
 import sys
 from typing import Any, Callable
 import rich
-from rich.prompt import Confirm
 from rich.panel import Panel
 from rich.console import RenderableType
 from autosh.config import CLI_OPTIONS, CONFIG
@@ -62,56 +61,6 @@ class Banner:
         return True
 
 
-# def __print_simple_banner(tag: str, text: str | None = None):
-#     if CLI_OPTIONS.quiet:
-#         return
-#     if not sys.stdout.isatty():
-#         s = f"\n[TOOL] {tag}"
-#         if text:
-#             s += f" {text}"
-#         print(s)
-#         return
-#     s = f"\n[bold on magenta] {tag} [/bold on magenta]"
-#     if text:
-#         s += f" [italic dim]{text}[/italic dim]"
-#     rich.print(s)
-
-
-# def simple_banner(
-#     tag: str | Callable[[Any], str],
-#     text: Callable[[Any], str] | None = None,
-#     text_key: str | None = None,
-# ):
-#     return lambda x: __print_simple_banner(
-#         tag if isinstance(tag, str) else tag(x),
-#         text(x) if text else (x.get(text_key) if text_key else None),
-#     )
-
-
-# def __print_code_preview_banner(
-#     title: str, content: RenderableType, short: str | None = None
-# ):
-#     if CLI_OPTIONS.quiet:
-#         if short and not CLI_OPTIONS.yes:
-#             rich.print(f"\n[magenta]{short}[/magenta]\n")
-#         return
-#     panel = Panel.fit(content, title=f"[magenta]{title}[/magenta]", title_align="left")
-#     rich.print()
-#     rich.print(panel)
-
-
-# def code_preview_banner(
-#     title: str | Callable[[Any], str],
-#     short: str | Callable[[Any], str],
-#     content: Callable[[Any], RenderableType],
-# ):
-#     return lambda x: __print_code_preview_banner(
-#         title=title if isinstance(title, str) else title(x),
-#         content=content(x),
-#         short=short if isinstance(short, str) else short(x),
-#     )
-
-
 def code_result_panel(
     title: str,
     out: str | None = None,
--- autosh-0.0.7/autosh/session.py
+++ autosh-0.0.9/autosh/session.py
@@ -1,4 +1,3 @@
-from io import StringIO
 from pathlib import Path
 import socket
 import sys
@@ -19,7 +18,6 @@ import neongrid as ng
 from .plugins import Banner, create_plugins
 import rich
 import platform
-from rich.prompt import Confirm
 import os
 
 
@@ -105,6 +103,46 @@ class Session:
             role="user",
         )
 
+    async def __process_event(self, e: Event, first: bool, repl: bool):
+        prefix_newline = repl or not first
+        if isinstance(e, UserConsentEvent):
+            if CLI_OPTIONS.yes:
+                e.response = True
+                return False
+            if prefix_newline:
+                print()
+            e.response = ng.confirm(e.message)
+            return True
+        if isinstance(e, ToolCallEvent) and e.result is None:
+            if (banner := (e.metadata or {}).get("banner")) and isinstance(
+                banner, Banner
+            ):
+                return banner.render(e.arguments, prefix_newline=prefix_newline)
+        return False
+
+    async def __process_run(
+        self, run: Run[Event | MessageStream], loading: Loading | None, repl: bool
+    ):
+        first = True
+        async for e in run:
+            if loading:
+                await loading.finish()
+
+            if isinstance(e, Event):
+                if await self.__process_event(e, first=first, repl=repl):
+                    first = False
+            else:
+                if repl or not first:
+                    print()
+                await self.__render_streamed_markdown(e)
+                first = False
+
+            if loading:
+                loading = self.__create_loading_indicator()
+
+        if loading:
+            await loading.finish()
+
     async def exec_prompt(self, prompt: str):
         # Clean up the prompt
         if prompt is not None:
@@ -145,6 +183,8 @@ class Session:
         )
         run = self.agent.run(prompt, stream=True, events=True)
         await self.__process_run(run, loading, False)
+        if CLI_OPTIONS.start_repl_after_prompt:
+            await self.run_repl(handover=True)
 
     async def exec_from_stdin(self):
         if sys.stdin.isatty():
@@ -161,45 +201,29 @@
             prompt = f.read()
         await self.exec_prompt(prompt)
 
-    async def __process_event(self, e: Event, first: bool, repl: bool):
-        prefix_newline = repl or not first
-        if isinstance(e, UserConsentEvent):
-            if CLI_OPTIONS.yes:
-                e.response = True
-                return False
-            if prefix_newline:
-                print()
-            e.response = ng.confirm(e.message)
-            return True
-        if isinstance(e, ToolCallEvent) and e.result is None:
-            if (banner := (e.metadata or {}).get("banner")) and isinstance(
-                banner, Banner
-            ):
-                return banner.render(e.arguments, prefix_newline=prefix_newline)
-        return False
-
-    async def __process_run(
-        self, run: Run[Event | MessageStream], loading: Loading | None, repl: bool
-    ):
-        first = True
-        async for e in run:
-            if loading:
-                await loading.finish()
-
-            if isinstance(e, Event):
-                if await self.__process_event(e, first=first, repl=repl):
-                    first = False
-            else:
-                if repl or not first:
+    async def run_repl(self, handover: bool = False):
+        if not handover and CONFIG.repl_banner:
+            rich.print(CONFIG.repl_banner)
+        first = not handover
+        while True:
+            try:
+                if not first:
                     print()
-                await self.__render_streamed_markdown(e)
                 first = False
-
-            if loading:
+                input_prompt = self.__get_input_prompt()
+                prompt = await ng.input(
+                    input_prompt, sync=False, persist="/tmp/autosh-history"
+                )
+                prompt = prompt.strip()
+                if prompt in ["exit", "quit"]:
+                    break
+                if len(prompt) == 0:
+                    continue
                 loading = self.__create_loading_indicator()
-
-        if loading:
-            await loading.finish()
+                run = self.agent.run(prompt, stream=True, events=True)
+                await self.__process_run(run, loading, True)
+            except KeyboardInterrupt:
+                break
 
     def __get_input_prompt(self):
         cwd = Path.cwd()
@@ -229,29 +253,6 @@
         )
         return prompt
 
-    async def run_repl(self):
-        if CONFIG.repl_banner:
-            rich.print(CONFIG.repl_banner)
-        first = True
-        while True:
-            try:
-                if not first:
-                    print()
-                first = False
-                input_prompt = self.__get_input_prompt()
-                rich.print(input_prompt, end="", flush=True)
-                prompt = await ng.input("", sync=False, persist="/tmp/autosh-history")
-                prompt = prompt.strip()
-                if prompt in ["exit", "quit"]:
-                    break
-                if len(prompt) == 0:
-                    continue
-                loading = self.__create_loading_indicator()
-                run = self.agent.run(prompt, stream=True, events=True)
-                await self.__process_run(run, loading, True)
-            except KeyboardInterrupt:
-                break
-
     def __create_loading_indicator(self):
         return ng.loading.kana()
 
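The net effect in session.py is that `run_repl` gains a `handover` parameter: when the REPL starts after a `--repl` prompt run, the banner is skipped and the loop behaves as if one round had already completed, so a blank line separates the first interactive prompt from the previous output. A condensed, synchronous sketch of that behaviour, with `input()` standing in for `ng.input` and a plain echo standing in for the agent run (illustrative only, not autosh's code):

```python
# Condensed sketch of the handover behaviour introduced above.
def run_repl(handover: bool = False, banner: str | None = "autosh repl") -> None:
    if not handover and banner:
        print(banner)
    first = not handover  # handover=True: act as if one round already ran
    while True:
        try:
            if not first:
                # Blank line between rounds, and before the first prompt when
                # handing over from a --repl run.
                print()
            first = False
            prompt = input("> ").strip()
            if prompt in ("exit", "quit"):
                break
            if not prompt:
                continue
            print(f"(would run the agent on {prompt!r})")
        except (KeyboardInterrupt, EOFError):
            break
```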
--- autosh-0.0.7/pyproject.toml
+++ autosh-0.0.9/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "autosh"
-version = "0.0.7"
+version = "0.0.9"
 description = "The AI-powered, noob-friendly interactive shell"
 authors = [{ name = "Wenyu Zhao", email = "wenyuzhaox@gmail.com" }]
 requires-python = ">=3.12"
@@ -29,7 +29,7 @@ dependencies = [
     "tavily-python>=0.5.4",
     "typer>=0.12.5",
     "tzlocal>=5.3.1",
-    "neongrid>=0.0.
+    "neongrid>=0.0.3",
     "agentia>=0.0.8",
 ]
 