vex-cli 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vex_cli-0.1.0/.github/workflows/publish.yml +54 -0
- vex_cli-0.1.0/PKG-INFO +198 -0
- vex_cli-0.1.0/README.md +176 -0
- vex_cli-0.1.0/ex.rb +44 -0
- vex_cli-0.1.0/pyproject.toml +40 -0
- vex_cli-0.1.0/src/ex_cli/__init__.py +1 -0
- vex_cli-0.1.0/src/ex_cli/engine.py +297 -0
- vex_cli-0.1.0/src/ex_cli/main.py +183 -0
- vex_cli-0.1.0/src/ex_cli/setup.py +188 -0
- vex_cli-0.1.0/src/ex_cli/shell/vex.bash +39 -0
- vex_cli-0.1.0/src/ex_cli/shell/vex.zsh +89 -0
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
name: Publish to PyPI
|
|
2
|
+
|
|
3
|
+
on:
|
|
4
|
+
push:
|
|
5
|
+
tags:
|
|
6
|
+
- "v*" # 推送 v0.1.0 这样的 tag 时触发
|
|
7
|
+
workflow_dispatch: # 支持手动触发
|
|
8
|
+
|
|
9
|
+
jobs:
|
|
10
|
+
build-and-publish:
|
|
11
|
+
name: Build and publish
|
|
12
|
+
runs-on: ubuntu-latest
|
|
13
|
+
environment: pypi # 在 GitHub 仓库 Settings 中配置
|
|
14
|
+
|
|
15
|
+
permissions:
|
|
16
|
+
id-token: write # PyPI Trusted Publisher 无需 token
|
|
17
|
+
|
|
18
|
+
steps:
|
|
19
|
+
- uses: actions/checkout@v4
|
|
20
|
+
|
|
21
|
+
- name: Set up Python
|
|
22
|
+
uses: actions/setup-python@v5
|
|
23
|
+
with:
|
|
24
|
+
python-version: "3.11"
|
|
25
|
+
|
|
26
|
+
- name: Install build tools
|
|
27
|
+
run: pip install hatchling build
|
|
28
|
+
|
|
29
|
+
- name: Build package
|
|
30
|
+
run: python -m build
|
|
31
|
+
|
|
32
|
+
- name: Publish to PyPI
|
|
33
|
+
uses: pypa/gh-action-pypi-publish@release/v1
|
|
34
|
+
# 使用 PyPI Trusted Publisher,无需配置 API token
|
|
35
|
+
# 在 pypi.org 项目设置中添加此仓库即可
|
|
36
|
+
|
|
37
|
+
# 同时发布到 TestPyPI(用于验证)
|
|
38
|
+
test-publish:
|
|
39
|
+
name: Publish to TestPyPI
|
|
40
|
+
runs-on: ubuntu-latest
|
|
41
|
+
if: github.event_name == 'workflow_dispatch'
|
|
42
|
+
|
|
43
|
+
permissions:
|
|
44
|
+
id-token: write
|
|
45
|
+
|
|
46
|
+
steps:
|
|
47
|
+
- uses: actions/checkout@v4
|
|
48
|
+
- uses: actions/setup-python@v5
|
|
49
|
+
with:
|
|
50
|
+
python-version: "3.11"
|
|
51
|
+
- run: pip install hatchling build && python -m build
|
|
52
|
+
- uses: pypa/gh-action-pypi-publish@release/v1
|
|
53
|
+
with:
|
|
54
|
+
repository-url: https://test.pypi.org/legacy/
|
vex_cli-0.1.0/PKG-INFO
ADDED
|
@@ -0,0 +1,198 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: vex-cli
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Natural language CLI — type : and speak your intent
|
|
5
|
+
Project-URL: Homepage, https://github.com/yourusername/vex
|
|
6
|
+
Project-URL: Repository, https://github.com/yourusername/vex
|
|
7
|
+
Project-URL: Issues, https://github.com/yourusername/vex/issues
|
|
8
|
+
License: MIT
|
|
9
|
+
Keywords: ai,cli,natural-language,shell,vex,vim
|
|
10
|
+
Classifier: Development Status :: 4 - Beta
|
|
11
|
+
Classifier: Environment :: Console
|
|
12
|
+
Classifier: Intended Audience :: Developers
|
|
13
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
14
|
+
Classifier: Programming Language :: Python :: 3
|
|
15
|
+
Classifier: Topic :: Utilities
|
|
16
|
+
Requires-Python: >=3.8
|
|
17
|
+
Provides-Extra: anthropic
|
|
18
|
+
Requires-Dist: anthropic>=0.20; extra == 'anthropic'
|
|
19
|
+
Provides-Extra: openai
|
|
20
|
+
Requires-Dist: openai>=1.0; extra == 'openai'
|
|
21
|
+
Description-Content-Type: text/markdown
|
|
22
|
+
|
|
23
|
+
# ex
|
|
24
|
+
|
|
25
|
+
> 在终端输入 `:` 进入 AI 命令模式。说出你想做什么,ex 规划并执行。
|
|
26
|
+
|
|
27
|
+
**ex 本身完全免费。** 你需要自己的 LLM API Key(或本地 Ollama,也免费)。
|
|
28
|
+
|
|
29
|
+
```
|
|
30
|
+
❯
|
|
31
|
+
:找出所有超过200行的Python文件,按行数倒序
|
|
32
|
+
```
|
|
33
|
+
|
|
34
|
+
---
|
|
35
|
+
|
|
36
|
+
## 安装
|
|
37
|
+
|
|
38
|
+
### pipx(推荐)
|
|
39
|
+
|
|
40
|
+
```bash
|
|
41
|
+
pipx install ex-cli
|
|
42
|
+
vex --setup
|
|
43
|
+
vex --setup # 配置你的 API Key + 初始化 shell 集成
|
|
44
|
+
# 重开终端
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
### pip
|
|
48
|
+
|
|
49
|
+
```bash
|
|
50
|
+
pip install ex-cli
|
|
51
|
+
vex --setup
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
### Homebrew(macOS)
|
|
55
|
+
|
|
56
|
+
```bash
|
|
57
|
+
brew tap yourusername/tap
|
|
58
|
+
brew install ex
|
|
59
|
+
vex --setup
|
|
60
|
+
```
|
|
61
|
+
|
|
62
|
+
---
|
|
63
|
+
|
|
64
|
+
## 第一步:获取 API Key
|
|
65
|
+
|
|
66
|
+
`vex --setup` 会引导你选择并填入,支持以下提供商:
|
|
67
|
+
|
|
68
|
+
| 提供商 | 注册地址 | 价格 |
|
|
69
|
+
|--------|---------|------|
|
|
70
|
+
| **DeepSeek** | platform.deepseek.com/api_keys | ¥0.001/千 tokens,**最便宜** |
|
|
71
|
+
| **Kimi** | platform.moonshot.cn/console/api-keys | 注册有免费额度 |
|
|
72
|
+
| **OpenAI** | platform.openai.com/api-keys | GPT-4o-mini 按量付费 |
|
|
73
|
+
| **Ollama** | ollama.com | **完全免费**,本地运行 |
|
|
74
|
+
|
|
75
|
+
填入一个 API Key 就能用,Key 保存在你本地的 `~/.zshrc`,不经过任何第三方服务。
|
|
76
|
+
|
|
77
|
+
---
|
|
78
|
+
|
|
79
|
+
## 使用
|
|
80
|
+
|
|
81
|
+
重开终端后,在**空白行**输入 `:` 即可进入 AI 命令模式:
|
|
82
|
+
|
|
83
|
+
```
|
|
84
|
+
❯
|
|
85
|
+
:找出所有超过200行的Python文件,按行数倒序
|
|
86
|
+
|
|
87
|
+
▌ 找出所有 Python 文件并按行数排序
|
|
88
|
+
|
|
89
|
+
1 fd -e py .
|
|
90
|
+
" 递归查找所有 .py 文件
|
|
91
|
+
|
|
92
|
+
2 xargs wc -l
|
|
93
|
+
" 统计每个文件行数
|
|
94
|
+
|
|
95
|
+
3 sort -rn | grep -v total | head -20
|
|
96
|
+
" 倒序取前20
|
|
97
|
+
|
|
98
|
+
管道:fd -e py . | xargs wc -l | sort -rn | grep -v total | head -20
|
|
99
|
+
|
|
100
|
+
[p]管道执行 [s]逐步确认 [q]退出 > p
|
|
101
|
+
|
|
102
|
+
│ 1842 ./src/engine.py
|
|
103
|
+
│ 967 ./src/routes.py
|
|
104
|
+
│ 543 ./tests/test_engine.py
|
|
105
|
+
```
|
|
106
|
+
|
|
107
|
+
### Vim 语义完整保留
|
|
108
|
+
|
|
109
|
+
| 输入 | 行为 |
|
|
110
|
+
|------|------|
|
|
111
|
+
| `:找大文件` | AI 规划 + 逐步确认 |
|
|
112
|
+
| `:!ls -la` | 直通 shell(Vim `:!` 语义) |
|
|
113
|
+
| `:h 找大文件` | 只看计划,不执行 |
|
|
114
|
+
| `:q` | 什么都不发生 |
|
|
115
|
+
| `:q!` | 退出当前 shell |
|
|
116
|
+
| `Ctrl+:` | 自动执行,不逐步确认 |
|
|
117
|
+
| `ESC` | 取消,返回普通提示符 |
|
|
118
|
+
|
|
119
|
+
### 行不为空时,`:` 正常插入
|
|
120
|
+
|
|
121
|
+
```
|
|
122
|
+
❯ echo "hello:world" ← 正常,不触发 AI
|
|
123
|
+
❯ git commit -m ":" ← 正常,不触发 AI
|
|
124
|
+
```
|
|
125
|
+
|
|
126
|
+
---
|
|
127
|
+
|
|
128
|
+
## 配置 LLM
|
|
129
|
+
|
|
130
|
+
运行 `vex --setup` 时会引导配置,也可以手动设置环境变量:
|
|
131
|
+
|
|
132
|
+
```bash
|
|
133
|
+
# 推荐:DeepSeek(成本最低)
|
|
134
|
+
export DEEPSEEK_API_KEY=sk-xxx
|
|
135
|
+
|
|
136
|
+
# 或 Kimi
|
|
137
|
+
export MOONSHOT_API_KEY=sk-xxx
|
|
138
|
+
|
|
139
|
+
# 或 OpenAI
|
|
140
|
+
export OPENAI_API_KEY=sk-xxx
|
|
141
|
+
|
|
142
|
+
# 或完全本地(需先安装 ollama)
|
|
143
|
+
ollama run qwen2.5-coder:7b
|
|
144
|
+
```
|
|
145
|
+
|
|
146
|
+
---
|
|
147
|
+
|
|
148
|
+
## 命令参考
|
|
149
|
+
|
|
150
|
+
```bash
|
|
151
|
+
vex --setup # 初始化 shell 集成(首次安装后运行)
|
|
152
|
+
vex --config # 修改 LLM / 执行模式配置
|
|
153
|
+
ex --update # 更新到最新版本
|
|
154
|
+
vex --version # 显示版本
|
|
155
|
+
|
|
156
|
+
# 执行模式(也可在 --config 中设置)
|
|
157
|
+
EX_MODE=confirm # 默认:逐步确认
|
|
158
|
+
EX_MODE=auto # 自动执行
|
|
159
|
+
EX_MODE=dry # 演习:只显示计划
|
|
160
|
+
|
|
161
|
+
# 静默启动(不显示提示信息)
|
|
162
|
+
EX_QUIET=1
|
|
163
|
+
```
|
|
164
|
+
|
|
165
|
+
---
|
|
166
|
+
|
|
167
|
+
## 卸载
|
|
168
|
+
|
|
169
|
+
```bash
|
|
170
|
+
vex --config # 选择"卸载",自动从 rc 文件移除
|
|
171
|
+
pipx uninstall ex-cli
|
|
172
|
+
```
|
|
173
|
+
|
|
174
|
+
---
|
|
175
|
+
|
|
176
|
+
## 示例
|
|
177
|
+
|
|
178
|
+
```bash
|
|
179
|
+
:在所有 TypeScript 文件里找使用了 console.log 的地方
|
|
180
|
+
|
|
181
|
+
:把 downloads 目录里超过 100MB 的文件找出来,按大小排序
|
|
182
|
+
|
|
183
|
+
:从 meeting.mp4 提取音频,转成 mp3
|
|
184
|
+
|
|
185
|
+
:扫描这个 Python 项目的安全漏洞,输出 JSON 报告
|
|
186
|
+
|
|
187
|
+
:调用 GitHub API 列出最近 star 的10个仓库
|
|
188
|
+
|
|
189
|
+
:读取 data.json,找出 status 为 error 的条目并统计
|
|
190
|
+
|
|
191
|
+
:查看当前 Kubernetes 集群里所有 Pending 状态的 Pod
|
|
192
|
+
```
|
|
193
|
+
|
|
194
|
+
---
|
|
195
|
+
|
|
196
|
+
## 致谢
|
|
197
|
+
|
|
198
|
+
命名灵感来自 Vim 的 Ex 模式——`:` 从来都是用来执行命令的。
|
vex_cli-0.1.0/README.md
ADDED
|
@@ -0,0 +1,176 @@
|
|
|
1
|
+
# ex
|
|
2
|
+
|
|
3
|
+
> 在终端输入 `:` 进入 AI 命令模式。说出你想做什么,ex 规划并执行。
|
|
4
|
+
|
|
5
|
+
**ex 本身完全免费。** 你需要自己的 LLM API Key(或本地 Ollama,也免费)。
|
|
6
|
+
|
|
7
|
+
```
|
|
8
|
+
❯
|
|
9
|
+
:找出所有超过200行的Python文件,按行数倒序
|
|
10
|
+
```
|
|
11
|
+
|
|
12
|
+
---
|
|
13
|
+
|
|
14
|
+
## 安装
|
|
15
|
+
|
|
16
|
+
### pipx(推荐)
|
|
17
|
+
|
|
18
|
+
```bash
|
|
19
|
+
pipx install ex-cli
|
|
20
|
+
vex --setup
|
|
21
|
+
vex --setup # 配置你的 API Key + 初始化 shell 集成
|
|
22
|
+
# 重开终端
|
|
23
|
+
```
|
|
24
|
+
|
|
25
|
+
### pip
|
|
26
|
+
|
|
27
|
+
```bash
|
|
28
|
+
pip install ex-cli
|
|
29
|
+
vex --setup
|
|
30
|
+
```
|
|
31
|
+
|
|
32
|
+
### Homebrew(macOS)
|
|
33
|
+
|
|
34
|
+
```bash
|
|
35
|
+
brew tap yourusername/tap
|
|
36
|
+
brew install ex
|
|
37
|
+
vex --setup
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
---
|
|
41
|
+
|
|
42
|
+
## 第一步:获取 API Key
|
|
43
|
+
|
|
44
|
+
`vex --setup` 会引导你选择并填入,支持以下提供商:
|
|
45
|
+
|
|
46
|
+
| 提供商 | 注册地址 | 价格 |
|
|
47
|
+
|--------|---------|------|
|
|
48
|
+
| **DeepSeek** | platform.deepseek.com/api_keys | ¥0.001/千 tokens,**最便宜** |
|
|
49
|
+
| **Kimi** | platform.moonshot.cn/console/api-keys | 注册有免费额度 |
|
|
50
|
+
| **OpenAI** | platform.openai.com/api-keys | GPT-4o-mini 按量付费 |
|
|
51
|
+
| **Ollama** | ollama.com | **完全免费**,本地运行 |
|
|
52
|
+
|
|
53
|
+
填入一个 API Key 就能用,Key 保存在你本地的 `~/.zshrc`,不经过任何第三方服务。
|
|
54
|
+
|
|
55
|
+
---
|
|
56
|
+
|
|
57
|
+
## 使用
|
|
58
|
+
|
|
59
|
+
重开终端后,在**空白行**输入 `:` 即可进入 AI 命令模式:
|
|
60
|
+
|
|
61
|
+
```
|
|
62
|
+
❯
|
|
63
|
+
:找出所有超过200行的Python文件,按行数倒序
|
|
64
|
+
|
|
65
|
+
▌ 找出所有 Python 文件并按行数排序
|
|
66
|
+
|
|
67
|
+
1 fd -e py .
|
|
68
|
+
" 递归查找所有 .py 文件
|
|
69
|
+
|
|
70
|
+
2 xargs wc -l
|
|
71
|
+
" 统计每个文件行数
|
|
72
|
+
|
|
73
|
+
3 sort -rn | grep -v total | head -20
|
|
74
|
+
" 倒序取前20
|
|
75
|
+
|
|
76
|
+
管道:fd -e py . | xargs wc -l | sort -rn | grep -v total | head -20
|
|
77
|
+
|
|
78
|
+
[p]管道执行 [s]逐步确认 [q]退出 > p
|
|
79
|
+
|
|
80
|
+
│ 1842 ./src/engine.py
|
|
81
|
+
│ 967 ./src/routes.py
|
|
82
|
+
│ 543 ./tests/test_engine.py
|
|
83
|
+
```
|
|
84
|
+
|
|
85
|
+
### Vim 语义完整保留
|
|
86
|
+
|
|
87
|
+
| 输入 | 行为 |
|
|
88
|
+
|------|------|
|
|
89
|
+
| `:找大文件` | AI 规划 + 逐步确认 |
|
|
90
|
+
| `:!ls -la` | 直通 shell(Vim `:!` 语义) |
|
|
91
|
+
| `:h 找大文件` | 只看计划,不执行 |
|
|
92
|
+
| `:q` | 什么都不发生 |
|
|
93
|
+
| `:q!` | 退出当前 shell |
|
|
94
|
+
| `Ctrl+:` | 自动执行,不逐步确认 |
|
|
95
|
+
| `ESC` | 取消,返回普通提示符 |
|
|
96
|
+
|
|
97
|
+
### 行不为空时,`:` 正常插入
|
|
98
|
+
|
|
99
|
+
```
|
|
100
|
+
❯ echo "hello:world" ← 正常,不触发 AI
|
|
101
|
+
❯ git commit -m ":" ← 正常,不触发 AI
|
|
102
|
+
```
|
|
103
|
+
|
|
104
|
+
---
|
|
105
|
+
|
|
106
|
+
## 配置 LLM
|
|
107
|
+
|
|
108
|
+
运行 `vex --setup` 时会引导配置,也可以手动设置环境变量:
|
|
109
|
+
|
|
110
|
+
```bash
|
|
111
|
+
# 推荐:DeepSeek(成本最低)
|
|
112
|
+
export DEEPSEEK_API_KEY=sk-xxx
|
|
113
|
+
|
|
114
|
+
# 或 Kimi
|
|
115
|
+
export MOONSHOT_API_KEY=sk-xxx
|
|
116
|
+
|
|
117
|
+
# 或 OpenAI
|
|
118
|
+
export OPENAI_API_KEY=sk-xxx
|
|
119
|
+
|
|
120
|
+
# 或完全本地(需先安装 ollama)
|
|
121
|
+
ollama run qwen2.5-coder:7b
|
|
122
|
+
```
|
|
123
|
+
|
|
124
|
+
---
|
|
125
|
+
|
|
126
|
+
## 命令参考
|
|
127
|
+
|
|
128
|
+
```bash
|
|
129
|
+
vex --setup # 初始化 shell 集成(首次安装后运行)
|
|
130
|
+
vex --config # 修改 LLM / 执行模式配置
|
|
131
|
+
ex --update # 更新到最新版本
|
|
132
|
+
vex --version # 显示版本
|
|
133
|
+
|
|
134
|
+
# 执行模式(也可在 --config 中设置)
|
|
135
|
+
EX_MODE=confirm # 默认:逐步确认
|
|
136
|
+
EX_MODE=auto # 自动执行
|
|
137
|
+
EX_MODE=dry # 演习:只显示计划
|
|
138
|
+
|
|
139
|
+
# 静默启动(不显示提示信息)
|
|
140
|
+
EX_QUIET=1
|
|
141
|
+
```
|
|
142
|
+
|
|
143
|
+
---
|
|
144
|
+
|
|
145
|
+
## 卸载
|
|
146
|
+
|
|
147
|
+
```bash
|
|
148
|
+
vex --config # 选择"卸载",自动从 rc 文件移除
|
|
149
|
+
pipx uninstall ex-cli
|
|
150
|
+
```
|
|
151
|
+
|
|
152
|
+
---
|
|
153
|
+
|
|
154
|
+
## 示例
|
|
155
|
+
|
|
156
|
+
```bash
|
|
157
|
+
:在所有 TypeScript 文件里找使用了 console.log 的地方
|
|
158
|
+
|
|
159
|
+
:把 downloads 目录里超过 100MB 的文件找出来,按大小排序
|
|
160
|
+
|
|
161
|
+
:从 meeting.mp4 提取音频,转成 mp3
|
|
162
|
+
|
|
163
|
+
:扫描这个 Python 项目的安全漏洞,输出 JSON 报告
|
|
164
|
+
|
|
165
|
+
:调用 GitHub API 列出最近 star 的10个仓库
|
|
166
|
+
|
|
167
|
+
:读取 data.json,找出 status 为 error 的条目并统计
|
|
168
|
+
|
|
169
|
+
:查看当前 Kubernetes 集群里所有 Pending 状态的 Pod
|
|
170
|
+
```
|
|
171
|
+
|
|
172
|
+
---
|
|
173
|
+
|
|
174
|
+
## 致谢
|
|
175
|
+
|
|
176
|
+
命名灵感来自 Vim 的 Ex 模式——`:` 从来都是用来执行命令的。
|
vex_cli-0.1.0/ex.rb
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
class ExCli < Formula
|
|
2
|
+
include Language::Python::Virtualenv
|
|
3
|
+
|
|
4
|
+
desc "Natural language CLI — type : and speak your intent"
|
|
5
|
+
homepage "https://github.com/yourusername/ex"
|
|
6
|
+
url "https://files.pythonhosted.org/packages/.../ex-cli-0.1.0.tar.gz"
|
|
7
|
+
sha256 "FILL_IN_AFTER_PYPI_PUBLISH"
|
|
8
|
+
license "MIT"
|
|
9
|
+
|
|
10
|
+
depends_on "python@3.11"
|
|
11
|
+
|
|
12
|
+
def install
|
|
13
|
+
virtualenv_install_with_resources
|
|
14
|
+
end
|
|
15
|
+
|
|
16
|
+
def post_install
|
|
17
|
+
ohai "ex 已安装!运行以下命令完成初始化:"
|
|
18
|
+
ohai " ex --setup"
|
|
19
|
+
end
|
|
20
|
+
|
|
21
|
+
test do
|
|
22
|
+
system "#{bin}/ex", "--version"
|
|
23
|
+
end
|
|
24
|
+
end
|
|
25
|
+
|
|
26
|
+
# ══════════════════════════════════════════════════════
|
|
27
|
+
# 发布到 Homebrew 的步骤:
|
|
28
|
+
#
|
|
29
|
+
# 1. 先发布到 PyPI,获得 tar.gz 的 sha256
|
|
30
|
+
# pip download ex-cli --no-deps
|
|
31
|
+
# shasum -a 256 ex-cli-*.tar.gz
|
|
32
|
+
#
|
|
33
|
+
# 2. 更新上方的 url 和 sha256
|
|
34
|
+
#
|
|
35
|
+
# 3. 创建自己的 tap(一次性操作):
|
|
36
|
+
# brew tap-new yourusername/homebrew-tap
|
|
37
|
+
# cp ex.rb ~/.homebrew/Library/Taps/yourusername/homebrew-tap/Formula/
|
|
38
|
+
# cd ~/.homebrew/Library/Taps/yourusername/homebrew-tap
|
|
39
|
+
# git add . && git commit -m "Add ex formula" && git push
|
|
40
|
+
#
|
|
41
|
+
# 4. 用户安装:
|
|
42
|
+
# brew tap yourusername/tap
|
|
43
|
+
# brew install ex
|
|
44
|
+
# ══════════════════════════════════════════════════════
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["hatchling"]
|
|
3
|
+
build-backend = "hatchling.build"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "vex-cli"
|
|
7
|
+
version = "0.1.0"
|
|
8
|
+
description = "Natural language CLI — type : and speak your intent"
|
|
9
|
+
readme = "README.md"
|
|
10
|
+
license = { text = "MIT" }
|
|
11
|
+
requires-python = ">=3.8"
|
|
12
|
+
keywords = ["cli", "ai", "natural-language", "vim", "shell", "vex"]
|
|
13
|
+
classifiers = [
|
|
14
|
+
"Development Status :: 4 - Beta",
|
|
15
|
+
"Environment :: Console",
|
|
16
|
+
"Intended Audience :: Developers",
|
|
17
|
+
"License :: OSI Approved :: MIT License",
|
|
18
|
+
"Programming Language :: Python :: 3",
|
|
19
|
+
"Topic :: Utilities",
|
|
20
|
+
]
|
|
21
|
+
|
|
22
|
+
# 零强制依赖——LLM 调用用标准库 urllib,可选安装
|
|
23
|
+
dependencies = []
|
|
24
|
+
|
|
25
|
+
[project.optional-dependencies]
|
|
26
|
+
openai = ["openai>=1.0"]
|
|
27
|
+
anthropic = ["anthropic>=0.20"]
|
|
28
|
+
|
|
29
|
+
[project.scripts]
|
|
30
|
+
# vex = Vim Ex — 避免与系统内置 ex (Vim) 冲突
|
|
31
|
+
vex = "ex_cli.main:main"
|
|
32
|
+
|
|
33
|
+
[project.urls]
|
|
34
|
+
Homepage = "https://github.com/yourusername/vex"
|
|
35
|
+
Repository = "https://github.com/yourusername/vex"
|
|
36
|
+
Issues = "https://github.com/yourusername/vex/issues"
|
|
37
|
+
|
|
38
|
+
[tool.hatch.build.targets.wheel]
|
|
39
|
+
packages = ["src/ex_cli"]
|
|
40
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
__version__ = "0.1.0"
|
|
@@ -0,0 +1,297 @@
|
|
|
1
|
+
"""
|
|
2
|
+
ex — AI 规划与执行引擎
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import os, sys, json, shutil, subprocess
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
# ─── 颜色 ─────────────────────────────────────────────
|
|
9
|
+
R="\033[0m"; B="\033[1m"; D="\033[2m"
|
|
10
|
+
CY="\033[36m"; YL="\033[33m"; GR="\033[32m"; RD="\033[31m"; WH="\033[97m"
|
|
11
|
+
BG_DARK="\033[48;5;235m"; BG_ST="\033[48;5;238m"
|
|
12
|
+
BG_GR="\033[42m"; BG_YL="\033[43m"
|
|
13
|
+
FG_BK="\033[30m"; FG_GY="\033[90m"
|
|
14
|
+
|
|
15
|
+
def tw():
|
|
16
|
+
try: return os.get_terminal_size().columns
|
|
17
|
+
except: return 80
|
|
18
|
+
|
|
19
|
+
# ─── Vim 风格 UI ──────────────────────────────────────
|
|
20
|
+
|
|
21
|
+
def statusline(mode="DONE", left="", right="smart-cli/ex"):
|
|
22
|
+
w = tw()
|
|
23
|
+
mc = {
|
|
24
|
+
"AI CMD": f"{BG_GR}{FG_BK}",
|
|
25
|
+
"RUNNING": f"{BG_YL}{FG_BK}",
|
|
26
|
+
"DONE": f"{BG_GR}{FG_BK}",
|
|
27
|
+
"ERROR": f"\033[41m{WH}",
|
|
28
|
+
}.get(mode, f"{BG_ST}{WH}")
|
|
29
|
+
|
|
30
|
+
ml = len(f" {mode} ")
|
|
31
|
+
fill = "─" * max(0, w - ml - len(left) - len(right) - 4)
|
|
32
|
+
print(f"{mc}{B} {mode} {R}{BG_ST}{FG_GY} {left}{fill}{right} {R}")
|
|
33
|
+
|
|
34
|
+
def print_plan(plan: dict, user_input: str):
|
|
35
|
+
print(f"\n{D}:{user_input}{R}\n")
|
|
36
|
+
print(f" {CY}{B}▌{R} {B}{plan.get('summary','')}{R}\n")
|
|
37
|
+
|
|
38
|
+
for s in plan.get("steps", []):
|
|
39
|
+
print(f" {BG_DARK}{YL} {s['id']} {R} {WH}{s['cmd']}{R}")
|
|
40
|
+
print(f" {D}\" {s['desc']}{R}\n")
|
|
41
|
+
|
|
42
|
+
if plan.get("pipeline"):
|
|
43
|
+
w = tw()
|
|
44
|
+
print(f" {D}{'─'*(w-4)}{R}")
|
|
45
|
+
print(f" {FG_GY}管道:{R} {YL}{B}{plan['pipeline']}{R}\n")
|
|
46
|
+
|
|
47
|
+
for msg in plan.get("warnings", []):
|
|
48
|
+
print(f" {RD}{B}W:{R}{RD} {msg}{R}")
|
|
49
|
+
if plan.get("warnings"): print()
|
|
50
|
+
|
|
51
|
+
for alt in plan.get("alternatives", []):
|
|
52
|
+
print(f" {CY}\" {alt}{R}")
|
|
53
|
+
if plan.get("alternatives"): print()
|
|
54
|
+
|
|
55
|
+
def print_output(text: str, max_lines: int = 25):
|
|
56
|
+
lines = text.strip().split("\n")
|
|
57
|
+
for l in lines[:max_lines]:
|
|
58
|
+
print(f" {D}│{R} {l}")
|
|
59
|
+
if len(lines) > max_lines:
|
|
60
|
+
print(f" {D}│ ... {len(lines)-max_lines} 行已折叠{R}")
|
|
61
|
+
|
|
62
|
+
# ─── 工具知识库 ───────────────────────────────────────
|
|
63
|
+
TOOLS = """
|
|
64
|
+
ripgrep(rg): rg "p" path | rg --json "p" | rg -t py "p" | rg -l "p"
|
|
65
|
+
fd: fd -e py . | fd -t d name | fd -e log . -x rm {}
|
|
66
|
+
jq: jq '.field' | jq '.[]|.name' | jq 'select(.x>1)' | jq '{a,b}'
|
|
67
|
+
yq: yq '.k' f.yaml | yq -o json f.yaml
|
|
68
|
+
markitdown: markitdown in.pdf | markitdown in.docx > out.md
|
|
69
|
+
pandoc: pandoc in.pdf -o out.md | pandoc in.docx -o out.md
|
|
70
|
+
ffmpeg: ffmpeg -i in.mp4 -vf fps=1 frame_%04d.jpg | ffmpeg -i in.mp4 -vn out.aac
|
|
71
|
+
imagemagick(magick): magick in.png out.jpg | magick mogrify -resize 50% *.jpg
|
|
72
|
+
gh: gh pr list --json number,title | gh issue create -t "t" -b "b" | gh run list
|
|
73
|
+
stripe: stripe listen --forward-to localhost:3000/webhook | stripe events list
|
|
74
|
+
supabase: supabase start | supabase db push | supabase gen types typescript
|
|
75
|
+
semgrep: semgrep --json --config auto . | jq '.results[]'
|
|
76
|
+
ruff: ruff check . | ruff check --fix . | ruff check --output-format json .
|
|
77
|
+
trivy: trivy fs . | trivy image nginx | trivy fs --format json . | jq '.Results[]'
|
|
78
|
+
shellcheck: shellcheck -f json script.sh | jq '.'
|
|
79
|
+
hyperfine: hyperfine 'cmd1' 'cmd2' | hyperfine --export-json res.json 'cmd'
|
|
80
|
+
docker: docker build -t name . | docker ps --format json | jq '.' | docker logs -f id
|
|
81
|
+
kubectl: kubectl get pods -o json | jq '.items[].metadata.name' | kubectl apply -f f.yaml
|
|
82
|
+
ollama: ollama run llama3.2 | ollama list
|
|
83
|
+
llm: llm "q" | llm -m gpt-4o "q" | cat f | llm "总结"
|
|
84
|
+
uv: uv pip install pkg | uv venv | uv run script.py
|
|
85
|
+
terraform: terraform init | terraform plan | terraform apply -auto-approve
|
|
86
|
+
curl: curl -s url | jq '.' | curl -sX POST -H "Content-Type: application/json" -d '{}' url
|
|
87
|
+
sort/awk/sed: sort -rn | awk '{print $1}' | sed 's/old/new/g'
|
|
88
|
+
find: find . -size +10M | find . -mtime -7 | find . -name "*.log" -exec rm {} +
|
|
89
|
+
"""
|
|
90
|
+
|
|
91
|
+
SYSTEM = f"""你是 CLI 专家。根据需求输出 JSON 执行计划。
|
|
92
|
+
|
|
93
|
+
工具列表:
|
|
94
|
+
{TOOLS}
|
|
95
|
+
|
|
96
|
+
输出格式:
|
|
97
|
+
{{"summary":"一句话目标","steps":[{{"id":1,"cmd":"完整命令","desc":"说明","output_to_next":false}}],"pipeline":"管道命令或null","warnings":[],"alternatives":[]}}
|
|
98
|
+
|
|
99
|
+
规则:只用上述工具,命令完整可执行,删除操作必须在warnings说明。
|
|
100
|
+
"""
|
|
101
|
+
|
|
102
|
+
# ─── LLM ──────────────────────────────────────────────
|
|
103
|
+
def call_llm(query: str, cwd: str) -> dict:
|
|
104
|
+
prompt = f"当前目录:{cwd}\n需求:{query}"
|
|
105
|
+
|
|
106
|
+
deep = os.environ.get("DEEPSEEK_API_KEY")
|
|
107
|
+
kimi = os.environ.get("MOONSHOT_API_KEY")
|
|
108
|
+
oai = os.environ.get("OPENAI_API_KEY")
|
|
109
|
+
|
|
110
|
+
if deep:
|
|
111
|
+
return _compat("https://api.deepseek.com", deep, "deepseek-chat", prompt)
|
|
112
|
+
if kimi:
|
|
113
|
+
return _compat("https://api.moonshot.cn/v1", kimi, "moonshot-v1-8k", prompt)
|
|
114
|
+
if oai:
|
|
115
|
+
return _compat("https://api.openai.com/v1", oai, "gpt-4o-mini", prompt)
|
|
116
|
+
return _ollama(prompt)
|
|
117
|
+
|
|
118
|
+
def _compat(base, key, model, prompt):
|
|
119
|
+
import urllib.request
|
|
120
|
+
data = json.dumps({
|
|
121
|
+
"model": model,
|
|
122
|
+
"messages": [{"role":"system","content":SYSTEM},
|
|
123
|
+
{"role":"user","content":prompt}],
|
|
124
|
+
"temperature": 0.1,
|
|
125
|
+
"response_format": {"type":"json_object"}
|
|
126
|
+
}).encode()
|
|
127
|
+
req = urllib.request.Request(
|
|
128
|
+
f"{base}/chat/completions", data=data,
|
|
129
|
+
headers={"Content-Type":"application/json",
|
|
130
|
+
"Authorization":f"Bearer {key}"}
|
|
131
|
+
)
|
|
132
|
+
with urllib.request.urlopen(req, timeout=30) as r:
|
|
133
|
+
return json.loads(json.loads(r.read())["choices"][0]["message"]["content"])
|
|
134
|
+
|
|
135
|
+
def _ollama(prompt):
|
|
136
|
+
import urllib.request
|
|
137
|
+
data = json.dumps({
|
|
138
|
+
"model":"qwen2.5-coder:7b",
|
|
139
|
+
"prompt":f"{SYSTEM}\n\n{prompt}",
|
|
140
|
+
"stream":False,"format":"json"
|
|
141
|
+
}).encode()
|
|
142
|
+
try:
|
|
143
|
+
req = urllib.request.Request(
|
|
144
|
+
"http://localhost:11434/api/generate", data=data,
|
|
145
|
+
headers={"Content-Type":"application/json"}
|
|
146
|
+
)
|
|
147
|
+
with urllib.request.urlopen(req, timeout=60) as r:
|
|
148
|
+
return json.loads(json.loads(r.read())["response"])
|
|
149
|
+
except Exception:
|
|
150
|
+
raise RuntimeError(
|
|
151
|
+
"未找到可用的 LLM。请运行 vex --config 配置 API Key。\n\n"
|
|
152
|
+
" 免费选项:\n"
|
|
153
|
+
" · DeepSeek platform.deepseek.com 最便宜\n"
|
|
154
|
+
" · Kimi platform.moonshot.cn\n"
|
|
155
|
+
" · Ollama ollama.com 本地,完全免费\n"
|
|
156
|
+
)
|
|
157
|
+
|
|
158
|
+
# ─── Vim 命令处理 ──────────────────────────────────────
|
|
159
|
+
VIM = {
|
|
160
|
+
"q": "什么都不发生", "q!": None, "qa": None,
|
|
161
|
+
"wq": None, "x": None, "w": "已保存",
|
|
162
|
+
"noh":"(noh)", "set":"这不是 Vim", "vs":"没有窗口", "sp":"没有窗口",
|
|
163
|
+
}
|
|
164
|
+
|
|
165
|
+
def handle_vim(s: str) -> bool:
|
|
166
|
+
"""返回 True=已处理,不需要 AI"""
|
|
167
|
+
if s.startswith("!"):
|
|
168
|
+
print(f"\n {D}:{s}{R}")
|
|
169
|
+
subprocess.run(s[1:].strip(), shell=True)
|
|
170
|
+
print()
|
|
171
|
+
return True
|
|
172
|
+
base = s.split()[0] if s else ""
|
|
173
|
+
if base in VIM:
|
|
174
|
+
msg = VIM[base]
|
|
175
|
+
if msg:
|
|
176
|
+
print(f"\n {D}\" {msg}{R}\n")
|
|
177
|
+
else:
|
|
178
|
+
sys.exit(0)
|
|
179
|
+
return True
|
|
180
|
+
if s.isdigit():
|
|
181
|
+
print(f"\n {D}\" {s}G — 这不是 Vim{R}\n")
|
|
182
|
+
return True
|
|
183
|
+
return False
|
|
184
|
+
|
|
185
|
+
# ─── 执行引擎 ─────────────────────────────────────────
|
|
186
|
+
def missing_tools(plan):
|
|
187
|
+
builtins = {"for","while","if","echo","cat","grep","sort","awk","sed",
|
|
188
|
+
"find","wc","xargs","cut","tr","head","tail","mkdir","rm",
|
|
189
|
+
"cp","mv","chmod","curl","wget","npm","npx","pip","python3",
|
|
190
|
+
"node","go","sh","bash","printf","read","export"}
|
|
191
|
+
seen = set()
|
|
192
|
+
for s in plan.get("steps", []):
|
|
193
|
+
t = s["cmd"].split()[0].lstrip("(")
|
|
194
|
+
if t not in builtins and t not in seen:
|
|
195
|
+
if not shutil.which(t):
|
|
196
|
+
seen.add(t)
|
|
197
|
+
return list(seen)
|
|
198
|
+
|
|
199
|
+
def run_cmd(cmd, cwd, stdin=None):
|
|
200
|
+
r = subprocess.run(cmd, shell=True, cwd=cwd,
|
|
201
|
+
input=stdin, capture_output=True)
|
|
202
|
+
return r.returncode == 0, r.stdout, r.stderr
|
|
203
|
+
|
|
204
|
+
def exec_pipeline(cmd, cwd):
|
|
205
|
+
ok, out, err = run_cmd(cmd, cwd)
|
|
206
|
+
if out: print_output(out.decode(errors="replace"))
|
|
207
|
+
if not ok and err:
|
|
208
|
+
print(f"\n {RD}E:{R} {err.decode(errors='replace')[:300]}")
|
|
209
|
+
return ok
|
|
210
|
+
|
|
211
|
+
def exec_steps(steps, cwd, auto=False):
|
|
212
|
+
n = len(steps)
|
|
213
|
+
buf = None
|
|
214
|
+
for s in steps:
|
|
215
|
+
print(f"\n {BG_ST}{YL} {s['id']}/{n} {R} {D}{s['desc']}{R}")
|
|
216
|
+
print(f" {FG_GY}:{R}{WH}{s['cmd']}{R}")
|
|
217
|
+
if not auto:
|
|
218
|
+
print(f"\n {D}[Y]执行 [n]跳过 [e]编辑 [q]退出{R} ", end="")
|
|
219
|
+
try: ch = input().strip().lower()
|
|
220
|
+
except: print(); return
|
|
221
|
+
if ch == "q": print(f" {D}:q{R}\n"); return
|
|
222
|
+
if ch == "n": print(f" {D}\" 跳过{R}"); continue
|
|
223
|
+
if ch == "e":
|
|
224
|
+
print(f" {FG_GY}:{R}", end="")
|
|
225
|
+
nc = input().strip()
|
|
226
|
+
if nc: s["cmd"] = nc
|
|
227
|
+
ok, out, err = run_cmd(s["cmd"], cwd, buf if s.get("output_to_next") else None)
|
|
228
|
+
if out:
|
|
229
|
+
print(); print_output(out.decode(errors="replace")); buf = out
|
|
230
|
+
if not ok:
|
|
231
|
+
print(f"\n {RD}E:{R} {err.decode(errors='replace')[:200]}")
|
|
232
|
+
if auto: return
|
|
233
|
+
|
|
234
|
+
# ─── 主函数 ───────────────────────────────────────────
|
|
235
|
+
def run(query: str, mode: str = "confirm"):
|
|
236
|
+
cwd = os.getcwd()
|
|
237
|
+
|
|
238
|
+
# h前缀 → dry run
|
|
239
|
+
dry = False
|
|
240
|
+
import re
|
|
241
|
+
if re.match(r'^h(elp)?\s+', query):
|
|
242
|
+
query = re.sub(r'^h(elp)?\s+', '', query)
|
|
243
|
+
dry = True
|
|
244
|
+
|
|
245
|
+
if handle_vim(query):
|
|
246
|
+
return
|
|
247
|
+
|
|
248
|
+
# 规划
|
|
249
|
+
print(f"\n {D}:{query}{R}")
|
|
250
|
+
print(f" {FG_GY}规划中...{R}", end="\r", flush=True)
|
|
251
|
+
try:
|
|
252
|
+
plan = call_llm(query, cwd)
|
|
253
|
+
print(f" {' '}\r", end="")
|
|
254
|
+
except RuntimeError as e:
|
|
255
|
+
print(f"\n {RD}E:{R} {e}\n"); return
|
|
256
|
+
except Exception as e:
|
|
257
|
+
print(f"\n {RD}E:{R} {e}\n"); return
|
|
258
|
+
|
|
259
|
+
print_plan(plan, query)
|
|
260
|
+
|
|
261
|
+
# 缺失工具
|
|
262
|
+
miss = missing_tools(plan)
|
|
263
|
+
if miss:
|
|
264
|
+
HINTS = {
|
|
265
|
+
"rg":"brew install ripgrep","fd":"brew install fd",
|
|
266
|
+
"bat":"brew install bat","eza":"brew install eza",
|
|
267
|
+
"jq":"brew install jq","yq":"brew install yq",
|
|
268
|
+
"semgrep":"pip install semgrep","ruff":"pip install ruff",
|
|
269
|
+
"trivy":"brew install trivy","hyperfine":"brew install hyperfine",
|
|
270
|
+
}
|
|
271
|
+
print(f" {YL}W:{R} 未安装:{', '.join(miss)}")
|
|
272
|
+
for m in miss:
|
|
273
|
+
print(f" {D}:!{HINTS.get(m, f'# 请安装 {m}')}{R}")
|
|
274
|
+
print()
|
|
275
|
+
|
|
276
|
+
if dry or mode == "dry":
|
|
277
|
+
print(f" {D}\" 演习模式{R}\n"); return
|
|
278
|
+
|
|
279
|
+
has_pipe = bool(plan.get("pipeline"))
|
|
280
|
+
steps = plan.get("steps", [])
|
|
281
|
+
|
|
282
|
+
if mode == "auto":
|
|
283
|
+
exec_pipeline(plan["pipeline"], cwd) if has_pipe else exec_steps(steps, cwd, auto=True)
|
|
284
|
+
print(); statusline("DONE", left=f" {query[:40]}"); return
|
|
285
|
+
|
|
286
|
+
if has_pipe and len(steps) > 1:
|
|
287
|
+
print(f" {D}[p]管道执行 [s]逐步确认 [h]只看计划 [q]退出{R} ", end="")
|
|
288
|
+
try: ch = input().strip().lower()
|
|
289
|
+
except: print(); return
|
|
290
|
+
if ch == "q": return
|
|
291
|
+
if ch == "h": return
|
|
292
|
+
if ch == "p":
|
|
293
|
+
print(); exec_pipeline(plan["pipeline"], cwd)
|
|
294
|
+
print(); statusline("DONE", left=f" {query[:40]}"); return
|
|
295
|
+
|
|
296
|
+
exec_steps(steps, cwd)
|
|
297
|
+
print(); statusline("DONE", left=f" {query[:40]}")
|
|
@@ -0,0 +1,183 @@
|
|
|
1
|
+
"""
|
|
2
|
+
ex — 主入口
|
|
3
|
+
处理 CLI 参数,分发到各子功能
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import sys
|
|
7
|
+
import os
|
|
8
|
+
|
|
9
|
+
def main():
|
|
10
|
+
args = sys.argv[1:]
|
|
11
|
+
|
|
12
|
+
# ── 管理命令 ──────────────────────────────────────
|
|
13
|
+
if not args or args[0] in ("-h", "--help"):
|
|
14
|
+
_print_help()
|
|
15
|
+
return
|
|
16
|
+
|
|
17
|
+
if args[0] in ("-v", "--version"):
|
|
18
|
+
from ex_cli import __version__
|
|
19
|
+
print(f"vex {__version__}")
|
|
20
|
+
return
|
|
21
|
+
|
|
22
|
+
if args[0] == "--setup":
|
|
23
|
+
from ex_cli.setup import run_setup
|
|
24
|
+
run_setup()
|
|
25
|
+
return
|
|
26
|
+
|
|
27
|
+
if args[0] == "--config":
|
|
28
|
+
from ex_cli.setup import run_config
|
|
29
|
+
run_config()
|
|
30
|
+
return
|
|
31
|
+
|
|
32
|
+
if args[0] == "--shell-plugin":
|
|
33
|
+
from ex_cli.setup import get_plugin_path
|
|
34
|
+
print(get_plugin_path())
|
|
35
|
+
return
|
|
36
|
+
|
|
37
|
+
if args[0] == "--update":
|
|
38
|
+
_run_update()
|
|
39
|
+
return
|
|
40
|
+
|
|
41
|
+
# ── AI 执行模式 ────────────────────────────────────
|
|
42
|
+
if args:
|
|
43
|
+
query = args[0]
|
|
44
|
+
mode = args[1] if len(args) > 1 else "confirm"
|
|
45
|
+
|
|
46
|
+
# 首次运行检测:没有配置任何 API Key
|
|
47
|
+
if not _has_any_key():
|
|
48
|
+
_prompt_key_setup()
|
|
49
|
+
return
|
|
50
|
+
|
|
51
|
+
from ex_cli.engine import run
|
|
52
|
+
run(query, mode)
|
|
53
|
+
return
|
|
54
|
+
|
|
55
|
+
_print_help()
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def _has_any_key() -> bool:
|
|
59
|
+
"""检查是否配置了任意 LLM API Key(包括本地 ollama)"""
|
|
60
|
+
import shutil
|
|
61
|
+
return bool(
|
|
62
|
+
os.environ.get("DEEPSEEK_API_KEY") or
|
|
63
|
+
os.environ.get("MOONSHOT_API_KEY") or
|
|
64
|
+
os.environ.get("OPENAI_API_KEY") or
|
|
65
|
+
os.environ.get("ANTHROPIC_API_KEY") or
|
|
66
|
+
shutil.which("ollama") # 本地 ollama 也算
|
|
67
|
+
)
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
def _prompt_key_setup():
|
|
71
|
+
"""首次运行时,引导用户填入 API Key"""
|
|
72
|
+
B="\033[1m"; D="\033[2m"; R="\033[0m"
|
|
73
|
+
Y="\033[33m"; C="\033[36m"; G="\033[32m"; GY="\033[90m"
|
|
74
|
+
|
|
75
|
+
print(f"""
|
|
76
|
+
{B}vex{R} 需要一个 LLM API Key 才能运行。
|
|
77
|
+
|
|
78
|
+
vex 本身{B}完全免费{R},你只需要自己的 API Key。
|
|
79
|
+
以下提供商都有免费额度,按需选择:
|
|
80
|
+
|
|
81
|
+
{C}1{R} {B}DeepSeek{R} {GY}api.deepseek.com{R}
|
|
82
|
+
注册即送余额,¥0.001/千 tokens,最便宜
|
|
83
|
+
适合:追求低成本,国内访问稳定
|
|
84
|
+
|
|
85
|
+
{C}2{R} {B}Kimi{R} {GY}platform.moonshot.cn{R}
|
|
86
|
+
月之暗面,注册有免费额度
|
|
87
|
+
适合:国内用户,中文理解好
|
|
88
|
+
|
|
89
|
+
{C}3{R} {B}OpenAI{R} {GY}platform.openai.com{R}
|
|
90
|
+
GPT-4o-mini,按量付费
|
|
91
|
+
适合:需要最强模型效果
|
|
92
|
+
|
|
93
|
+
{C}4{R} {B}Ollama{R} {GY}ollama.com{R}
|
|
94
|
+
本地运行,{B}完全免费{R},无需 API Key
|
|
95
|
+
适合:注重隐私,不想花钱
|
|
96
|
+
|
|
97
|
+
""")
|
|
98
|
+
|
|
99
|
+
choice = input(f" 选择 [{C}1{R}-{C}4{R},回车取消]: ").strip()
|
|
100
|
+
if not choice:
|
|
101
|
+
return
|
|
102
|
+
|
|
103
|
+
key_map = {
|
|
104
|
+
"1": ("DEEPSEEK_API_KEY", "DeepSeek", "https://platform.deepseek.com/api_keys"),
|
|
105
|
+
"2": ("MOONSHOT_API_KEY", "Kimi", "https://platform.moonshot.cn/console/api-keys"),
|
|
106
|
+
"3": ("OPENAI_API_KEY", "OpenAI", "https://platform.openai.com/api-keys"),
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
if choice == "4":
|
|
110
|
+
print(f"""
|
|
111
|
+
{B}安装 Ollama:{R}
|
|
112
|
+
|
|
113
|
+
curl -fsSL https://ollama.com/install.sh | sh
|
|
114
|
+
ollama run qwen2.5-coder:7b {GY}# 拉取推荐模型(约 5GB){R}
|
|
115
|
+
|
|
116
|
+
安装完成后重新运行 vex 即可,无需 API Key。
|
|
117
|
+
""")
|
|
118
|
+
return
|
|
119
|
+
|
|
120
|
+
if choice not in key_map:
|
|
121
|
+
return
|
|
122
|
+
|
|
123
|
+
env_var, name, url = key_map[choice]
|
|
124
|
+
print(f"\n 打开以下链接获取 API Key:\n {G}{url}{R}\n")
|
|
125
|
+
|
|
126
|
+
key = input(f" 粘贴你的 {name} API Key: ").strip()
|
|
127
|
+
if not key:
|
|
128
|
+
print(f"\n {Y}已取消。之后运行 vex --config 重新配置。{R}\n")
|
|
129
|
+
return
|
|
130
|
+
|
|
131
|
+
# 写入 shell rc 文件
|
|
132
|
+
from ex_cli.setup import detect_shell, _set_env_in_rc
|
|
133
|
+
shell, rc_file = detect_shell()
|
|
134
|
+
_set_env_in_rc(env_var, key, rc_file)
|
|
135
|
+
os.environ[env_var] = key # 当前进程立即生效
|
|
136
|
+
|
|
137
|
+
print(f"\n {G}✓{R} 已保存到 {D}{rc_file}{R}")
|
|
138
|
+
print(f"\n {D}(首次配置需要 source {rc_file} 或重开终端后永久生效){R}")
|
|
139
|
+
print(f"\n 现在重新执行你的命令即可。\n")
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
def _print_help():
    """Print the full help/usage screen: invocation syntax, subcommands,
    supported API-key providers, and a quick-start recipe.

    Output is purely informational; nothing is read or written.
    """
    from ex_cli import __version__
    # ANSI styles: Bold, Dim, Cyan, Green, GraY, Reset.
    B="\033[1m"; D="\033[2m"; C="\033[36m"; G="\033[32m"; GY="\033[90m"; R="\033[0m"
    print(f"""
{B}vex{R} {D}v{__version__}{R} — 自然语言驱动 CLI {GY}(完全免费,使用你自己的 API Key){R}

{B}使用方式{R}
在空白行输入 {C}:{R} 进入 AI 命令模式:
{C}:{R}找出所有超过200行的Python文件
{C}:{R}把 demo.mp4 每秒截一帧
{C}:!{R}ls -la {D}# 直通 shell(Vim :! 语义){R}
{C}:q{R} {D}# 什么都不发生{R}

{B}命令{R}
vex --setup 初始化 shell 集成(首次安装后运行一次)
vex --config 管理 API Key / 执行模式
vex --version 显示版本

{B}API Key{R} {GY}vex 不收费,使用你自己的 Key{R}
{G}1{R} DeepSeek platform.deepseek.com {D}最便宜{R}
{G}2{R} Kimi platform.moonshot.cn
{G}3{R} OpenAI platform.openai.com
{G}4{R} Ollama ollama.com {D}本地,完全免费{R}

{B}快速开始{R}
pipx install ex-cli
vex --setup
{D}重开终端,输入 : 即可{R}
""")
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
def _run_update():
    """Upgrade ex-cli in place via pip and report the outcome.

    Runs ``python -m pip install --upgrade ex-cli`` with the current
    interpreter and prints a one-line success/failure message.
    """
    import subprocess
    print("正在更新 ex-cli ...")
    r = subprocess.run(
        [sys.executable, "-m", "pip", "install", "--upgrade", "ex-cli"],
        capture_output=True, text=True
    )
    if r.returncode == 0:
        print("✓ 已更新到最新版本")
    else:
        # pip reports many failures on stdout rather than stderr, so fall
        # back to stdout to avoid printing an empty detail string.
        detail = (r.stderr or r.stdout or "").strip()
        print("✗ 更新失败:", detail[:200])
|
|
@@ -0,0 +1,188 @@
|
|
|
1
|
+
"""
|
|
2
|
+
vex --setup / --config
|
|
3
|
+
负责 shell 集成和 LLM 配置
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import os
|
|
7
|
+
import sys
|
|
8
|
+
import shutil
|
|
9
|
+
import subprocess
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
|
|
12
|
+
# ─── 颜色 ─────────────────────────────────────────────
|
|
13
|
+
B = "\033[1m"; D = "\033[2m"; R = "\033[0m"
|
|
14
|
+
G = "\033[32m"; Y = "\033[33m"; C = "\033[36m"; RED = "\033[31m"
|
|
15
|
+
|
|
16
|
+
def ok(t):
    """Print a green check-marked status line."""
    print(f"{G}✓{R} {t}")


def info(t):
    """Print a cyan informational line."""
    print(f"{C}▷{R} {t}")


def warn(t):
    """Print a yellow warning line."""
    print(f"{Y}⚠{R} {t}")


def err(t):
    """Print a red error line."""
    print(f"{RED}✗{R} {t}")


def ask(prompt):
    """Show a dimmed prompt and return the user's answer, stripped."""
    return input(f" {D}{prompt}{R} ").strip()
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def get_plugin_path() -> str:
    """Return the absolute path of the bundled shell plugin for $SHELL.

    zsh gets the zle widget; every other (or unset) shell falls back to
    the bash/readline plugin.
    """
    shell_name = Path(os.environ.get("SHELL", "")).name
    plugin = "vex.zsh" if shell_name == "zsh" else "vex.bash"
    return str(Path(__file__).parent / "shell" / plugin)
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def detect_shell() -> tuple[str, Path]:
    """Return ``(shell name, rc file path)`` for the user's login shell.

    The shell name is taken from $SHELL (default ``/bin/bash``); unknown
    shells fall back to ``~/.bashrc``.
    """
    shell_name = Path(os.environ.get("SHELL", "/bin/bash")).name
    home = Path.home()
    rc_files = {
        "zsh": home / ".zshrc",
        "bash": home / ".bashrc",
        "fish": home / ".config/fish/config.fish",
    }
    return shell_name, rc_files.get(shell_name, home / ".bashrc")
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def run_setup():
    """First-run setup wizard.

    Steps: detect the login shell, walk the user through choosing an LLM
    provider (delegated to ``_configure_llm``), append the shell plugin to
    the rc file (``_install_shell_plugin``), then print activation hints.
    Fish is not supported and aborts early.
    """
    print(f"\n{B}vex 初始化向导{R}")
    print(f"{D}vex 本身完全免费,使用你自己的 API Key{R}\n")

    # 1. Detect the user's shell and its rc file.
    shell, rc_file = detect_shell()
    info(f"Shell:{B}{shell}{R} 配置文件:{D}{rc_file}{R}")

    if shell == "fish":
        # No fish plugin is shipped; user must configure manually.
        warn("Fish shell 需要手动配置,请参考 README")
        return

    # 2. Configure the LLM provider (menu of hosted providers + local Ollama).
    print(f"\n{B}选择 LLM 提供商{R} {D}(选一个,填入你自己的 API Key){R}\n")
    print(f" {C}1{R} {B}DeepSeek{R} platform.deepseek.com")
    print(f" 注册即送余额,¥0.001/千 tokens,{B}最便宜{R}")
    print()
    print(f" {C}2{R} {B}Kimi{R} platform.moonshot.cn")
    print(f" 月之暗面,注册有免费额度,国内访问稳定")
    print()
    print(f" {C}3{R} {B}OpenAI{R} platform.openai.com")
    print(f" GPT-4o-mini,按量付费,效果最强")
    print()
    print(f" {C}4{R} {B}Ollama{R} ollama.com")
    print(f" 本地运行,{B}完全免费{R},无需 API Key,注重隐私")
    print()
    print(f" {C}5{R} 稍后手动配置")
    print()

    choice = ask("选择 [1-5]:")
    _configure_llm(choice, rc_file)

    # 3. Append the shell plugin source line to the rc file.
    _install_shell_plugin(shell, rc_file)

    # 4. Done — tell the user how to activate without restarting the terminal.
    print(f"\n{G}{B}✓ 初始化完成{R}\n")
    print(f" {D}执行以下命令立即生效:{R}")
    print(f" source {rc_file}\n")
    print(f" 然后在空白行输入 {B}:{R} 即可使用\n")
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def run_config():
    """Interactive configuration menu for an existing installation.

    Options: switch LLM provider, change the execution mode (EX_MODE),
    reinstall the shell plugin, or uninstall. Any other input exits the
    menu silently.
    """
    # Branding fix: the title and option 4 previously said "ex" although the
    # product is called "vex" everywhere else (help text, setup wizard).
    print(f"\n{B}vex 配置{R}\n")
    shell, rc_file = detect_shell()

    print(f" {C}1{R} 更换 LLM 提供商")
    print(f" {C}2{R} 修改执行模式(当前:{os.environ.get('EX_MODE','confirm')})")
    print(f" {C}3{R} 重新安装 Shell 插件")
    print(f" {C}4{R} 卸载 vex")

    choice = ask("选择 [1-4]:")

    if choice == "1":
        _configure_llm(ask("LLM [1=DeepSeek 2=Kimi 3=OpenAI 4=Ollama]:"), rc_file)
        ok("LLM 配置已更新")
    elif choice == "2":
        mode = ask("执行模式 [confirm/auto/dry]:")
        if mode in ("confirm", "auto", "dry"):
            _set_env_in_rc("EX_MODE", mode, rc_file)
            ok(f"执行模式已设为 {mode}")
        else:
            # Previously an invalid mode was ignored with no feedback at all.
            warn(f"无效的执行模式:{mode}")
    elif choice == "3":
        _install_shell_plugin(shell, rc_file)
    elif choice == "4":
        _uninstall(rc_file)
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def _configure_llm(choice: str, rc_file: Path):
    """Persist the API key for the chosen hosted provider (choices "1"-"3")
    or walk the user through local Ollama setup (choice "4").

    Any other choice — including "5" ("configure later" in the setup menu) —
    is a deliberate no-op.

    Args:
        choice: menu selection as entered by the user.
        rc_file: shell rc file to write the ``export`` line into.
    """
    # choice -> (env var to export, display name, key-management URL)
    llm_map = {
        "1": ("DEEPSEEK_API_KEY", "DeepSeek", "https://platform.deepseek.com/api_keys"),
        "2": ("MOONSHOT_API_KEY", "Kimi", "https://platform.moonshot.cn/console/api-keys"),
        "3": ("OPENAI_API_KEY", "OpenAI", "https://platform.openai.com/api-keys"),
    }
    if choice in llm_map:
        env_var, name, url = llm_map[choice]
        print(f"\n {G}→{R} 在浏览器中打开:{B}{url}{R}")
        print(f" 注册/登录后创建 API Key,然后粘贴到这里\n")
        key = ask(f"粘贴你的 {name} API Key:")
        if key:
            # Writes to rc_file AND sets os.environ for the current process.
            _set_env_in_rc(env_var, key, rc_file)
            ok(f"{name} API Key 已保存")
        else:
            warn("未填写,跳过。之后运行 vex --config 配置。")
    elif choice == "4":
        print(f"\n {B}安装 Ollama(本地免费运行):{R}")
        print(f" curl -fsSL https://ollama.com/install.sh | sh")
        print(f" ollama run qwen2.5-coder:7b {D}# 拉取推荐模型{R}\n")
        if shutil.which("ollama"):
            ok("ollama 已安装")
            # Pull the recommended model only if it is not already present.
            r = subprocess.run(["ollama", "list"], capture_output=True, text=True)
            if "qwen2.5-coder" not in r.stdout:
                info("正在拉取推荐模型(约 5GB)...")
                # Blocking, streams pull progress to the terminal (~5 GB download).
                subprocess.run(["ollama", "pull", "qwen2.5-coder:7b"])
        else:
            warn("请按上方步骤安装 ollama 后重新运行 vex --setup")
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
def _install_shell_plugin(shell: str, rc_file: Path):
    """Append the plugin ``source`` line to the rc file (idempotent).

    The "ex-cli" substring check also matches the "# vex-cli" marker
    comment written below, so a second run is a no-op.
    """
    plugin_path = get_plugin_path()

    already_installed = rc_file.exists() and "ex-cli" in rc_file.read_text()
    if already_installed:
        ok(f"Shell 插件已在 {rc_file} 中")
        return

    with rc_file.open("a") as f:
        f.write(f"\n# vex-cli: 自然语言 CLI(输入 : 触发)\n")
        f.write(f'source "{plugin_path}"\n')

    ok(f"Shell 插件已写入 {rc_file}")
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
def _set_env_in_rc(var: str, val: str, rc_file: Path):
|
|
163
|
+
"""在 rc 文件中设置环境变量"""
|
|
164
|
+
content = rc_file.read_text() if rc_file.exists() else ""
|
|
165
|
+
line = f'export {var}="{val}"'
|
|
166
|
+
|
|
167
|
+
if f"export {var}=" in content:
|
|
168
|
+
# 替换已有行
|
|
169
|
+
lines = content.splitlines()
|
|
170
|
+
lines = [line if l.startswith(f"export {var}=") else l for l in lines]
|
|
171
|
+
rc_file.write_text("\n".join(lines) + "\n")
|
|
172
|
+
else:
|
|
173
|
+
with rc_file.open("a") as f:
|
|
174
|
+
f.write(f"\n{line}\n")
|
|
175
|
+
|
|
176
|
+
# 立即生效(当前进程)
|
|
177
|
+
os.environ[var] = val
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
def _uninstall(rc_file: Path):
    """Remove every vex-related line from the rc file, then point the user
    at pipx for removing the package itself.

    Matching is by substring: "ex-cli" also hits the "# vex-cli" marker
    comment, and "ex.zsh"/"ex.bash" also hit "vex.zsh"/"vex.bash".
    API-key export lines are intentionally left in place.
    """
    if not rc_file.exists():
        return
    kept = [
        ln
        for ln in rc_file.read_text().splitlines()
        if "ex-cli" not in ln and "ex.zsh" not in ln and "ex.bash" not in ln
    ]
    rc_file.write_text("\n".join(kept) + "\n")
    ok(f"已从 {rc_file} 移除 vex 配置")
    info("运行 pipx uninstall ex-cli 完成卸载")
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
# vex.bash — 由 vex --setup 自动写入 ~/.bashrc
|
|
2
|
+
|
|
3
|
+
# Default execution mode (confirm|auto|dry) unless the user already set it.
: "${EX_MODE:=confirm}"
# Set EX_QUIET=1 to suppress the startup hint line at the bottom of this file.
: "${EX_QUIET:=0}"

# Resolved once when the rc file is sourced.
# NOTE(review): empty if vex is not on PATH yet — confirm callers tolerate that.
_EX_BIN="$(command -v vex 2>/dev/null)"
|
|
7
|
+
|
|
8
|
+
# Readline handler bound to ':' (see `bind -x` below). On a non-empty line it
# inserts a literal ':'; on an empty line it draws an "AI CMD" status bar,
# reads a natural-language query, and hands it to the vex binary.
_ex_readline() {
  if [[ -n "${READLINE_LINE// /}" ]]; then
    # Line already has content: behave like a normal ':' keystroke.
    READLINE_LINE="${READLINE_LINE}:"; READLINE_POINT=${#READLINE_LINE}; return
  fi
  # Clear the current line, then draw a green "AI CMD" badge plus a dim
  # filler bar across the terminal width, and a bold ':' prompt.
  printf "\r\033[2K"
  local w="${COLUMNS:-80}"
  printf "\033[42m\033[30m\033[1m AI CMD \033[0m"
  printf "\033[48;5;238m\033[90m %-$((w-20))s \033[0m\r" ""
  printf "\033[1m\033[97m:\033[0m\033[97m"

  # read -e: readline editing; IFS= preserves leading/trailing spaces.
  local input; IFS= read -re input 2>/dev/null; printf "\033[0m"
  # Blank input cancels.
  [[ -z "${input// /}" ]] && { READLINE_LINE=""; return; }

  # Vim-style commands: :q/:wq/:w are no-ops, :q!/:qa/:x exit the shell
  # (bind -x handlers run in the interactive shell process itself).
  case "${input}" in
    "q"|"wq"|"w") READLINE_LINE=""; return ;;
    "q!"|"qa"|"x") exit 0 ;;
  esac

  # Record ":query" in shell history and the raw query in ~/.vex_history.
  history -s ":${input}"
  echo "${input}" >> "${HOME}/.vex_history"
  echo
  # NOTE(review): _EX_BIN may be empty if vex was not on PATH at source time.
  "${_EX_BIN}" "${input}" "${EX_MODE}"
  echo
  READLINE_LINE=""; READLINE_POINT=0
}
|
|
33
|
+
|
|
34
|
+
# Run _ex_readline whenever ':' is pressed at the readline prompt.
bind -x '":":_ex_readline'

# ':' as a command, for direct invocation (`: find big files`).
# NOTE(review): this overrides the POSIX ':' no-op builtin for the whole
# interactive shell — any later `: args...` (e.g. an expansion that yields
# words) is sent to vex instead of being ignored. Confirm this is intended.
function : () { [[ -z "$*" ]] && return; "${_EX_BIN}" "$*" "${EX_MODE}"; }

# One-line usage hint on interactive startup, unless EX_QUIET=1.
[[ "${EX_QUIET}" != "1" && "$-" == *i* ]] && \
printf "\\033[90m vex :自然语言 :!cmd :q\\033[0m\\n"
|
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
# vex.zsh — 由 vex --setup 自动写入 ~/.zshrc
|
|
2
|
+
# 不要手动编辑此文件
|
|
3
|
+
|
|
4
|
+
# Default execution mode (confirm|auto|dry) unless the user already set it.
: "${EX_MODE:=confirm}"
# Set EX_QUIET=1 to suppress the startup hint line at the bottom of this file.
: "${EX_QUIET:=0}"

# Resolved once when the rc file is sourced.
# NOTE(review): empty if vex is not on PATH yet — confirm callers tolerate that.
_EX_BIN="$(command -v vex 2>/dev/null)"
|
|
8
|
+
|
|
9
|
+
# zle widget bound to ':'. On a non-empty buffer it inserts a literal ':';
# on an empty buffer it draws an "AI CMD" status bar, reads a query and
# hands it to the vex binary in the configured EX_MODE.
_ex_widget() {
  # Buffer not empty → insert a plain colon.
  if [[ -n "${BUFFER// /}" ]]; then
    BUFFER="${BUFFER}:"
    CURSOR=$#BUFFER
    return
  fi

  zle -I
  printf "\r\033[2K"

  # Status bar: green "AI CMD" badge + dim filler across the terminal width.
  local w="${COLUMNS:-80}"
  printf "\033[42m\033[30m\033[1m AI CMD \033[0m"
  printf "\033[48;5;238m\033[90m %-$((w-20))s \033[0m\r" ""

  # Bold ':' prompt.
  printf "\033[1m\033[97m:\033[0m\033[97m"

  local input
  # FIX: was `read -re`. In zsh, `-e` echoes the words read to stdout but
  # does NOT assign them to the parameter (zshbuiltins read -e/-E), so
  # `input` always stayed empty and every invocation hit the cancel path.
  read -r input 2>/dev/null
  printf "\033[0m"

  # Empty input or ESC → cancel.
  [[ -z "${input// /}" || "${input}" == $'\e' ]] && {
    printf "\r\033[2K"; zle reset-prompt; return
  }

  # Record ":query" in shell history and the raw query in ~/.vex_history.
  print -s ":${input}"
  echo "${input}" >> "${HOME}/.vex_history"

  # Vim-style commands: :q/:wq/:w are no-ops, :q!/:qa/:x exit the shell.
  case "${input}" in
    "q"|"wq"|"w")
      printf "\r\033[2K\033[90m \" (${input})\033[0m\n"
      zle reset-prompt; return ;;
    "q!"|"qa"|"x")
      exit 0 ;;
  esac

  echo
  "${_EX_BIN}" "${input}" "${EX_MODE}"
  echo
  zle reset-prompt
}

zle -N _ex_widget
bindkey ':' _ex_widget
|
|
58
|
+
|
|
59
|
+
# Auto-execute variant of _ex_widget: runs the generated command without
# confirmation (mode "auto").
# NOTE(review): the widget is registered below but no `bindkey` for it
# exists in this file — as shipped it is unreachable; the "Ctrl+:" comment
# in the original was aspirational. Confirm the intended key binding.
_ex_auto_widget() {
  [[ -n "${BUFFER// /}" ]] && { BUFFER="${BUFFER}:"; CURSOR=$#BUFFER; return; }

  zle -I; printf "\r\033[2K"
  # Status bar: yellow "AUTO" badge + dim filler, then a yellow ':' prompt.
  local w="${COLUMNS:-80}"
  printf "\033[43m\033[30m\033[1m AUTO \033[0m"
  printf "\033[48;5;238m\033[90m %-$((w-18))s \033[0m\r" ""
  printf "\033[1m\033[33m:\033[0m\033[97m"

  local input
  # FIX: was `read -re`. In zsh, `-e` echoes the words read but does NOT
  # assign them to the parameter (zshbuiltins read -e/-E), leaving `input`
  # empty on every invocation.
  read -r input 2>/dev/null
  printf "\033[0m"
  [[ -z "${input// /}" ]] && { zle reset-prompt; return; }

  print -s ":${input}!"
  echo
  "${_EX_BIN}" "${input}" "auto"
  echo
  zle reset-prompt
}
zle -N _ex_auto_widget
|
|
81
|
+
|
|
82
|
+
# ':' as a command, for direct invocation (`: find big files`).
# NOTE(review): this overrides the POSIX ':' no-op builtin for the whole
# interactive shell — any later `: args...` (e.g. an expansion that yields
# words) is sent to vex instead of being ignored. Confirm this is intended.
function : () {
  [[ -z "$*" ]] && return
  "${_EX_BIN}" "$*" "${EX_MODE}"
}

# One-line usage hint on interactive startup, unless EX_QUIET=1.
[[ "${EX_QUIET}" != "1" && -o interactive ]] && \
printf "\\033[90m vex :自然语言 :!cmd :q\\033[0m\\n"
|