ripperdoc 0.3.0__py3-none-any.whl → 0.3.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ripperdoc/__init__.py +1 -1
- ripperdoc/cli/cli.py +9 -1
- ripperdoc/cli/commands/agents_cmd.py +93 -53
- ripperdoc/cli/commands/mcp_cmd.py +3 -0
- ripperdoc/cli/commands/models_cmd.py +768 -283
- ripperdoc/cli/commands/permissions_cmd.py +107 -52
- ripperdoc/cli/commands/resume_cmd.py +61 -51
- ripperdoc/cli/commands/themes_cmd.py +31 -1
- ripperdoc/cli/ui/agents_tui/__init__.py +3 -0
- ripperdoc/cli/ui/agents_tui/textual_app.py +1138 -0
- ripperdoc/cli/ui/choice.py +376 -0
- ripperdoc/cli/ui/interrupt_listener.py +233 -0
- ripperdoc/cli/ui/message_display.py +7 -0
- ripperdoc/cli/ui/models_tui/__init__.py +5 -0
- ripperdoc/cli/ui/models_tui/textual_app.py +698 -0
- ripperdoc/cli/ui/panels.py +19 -4
- ripperdoc/cli/ui/permissions_tui/__init__.py +3 -0
- ripperdoc/cli/ui/permissions_tui/textual_app.py +526 -0
- ripperdoc/cli/ui/provider_options.py +220 -80
- ripperdoc/cli/ui/rich_ui.py +91 -83
- ripperdoc/cli/ui/tips.py +89 -0
- ripperdoc/cli/ui/wizard.py +98 -45
- ripperdoc/core/config.py +3 -0
- ripperdoc/core/permissions.py +66 -104
- ripperdoc/core/providers/anthropic.py +11 -0
- ripperdoc/protocol/stdio.py +3 -1
- ripperdoc/tools/bash_tool.py +2 -0
- ripperdoc/tools/file_edit_tool.py +100 -181
- ripperdoc/tools/file_read_tool.py +101 -25
- ripperdoc/tools/multi_edit_tool.py +239 -91
- ripperdoc/tools/notebook_edit_tool.py +11 -29
- ripperdoc/utils/file_editing.py +164 -0
- ripperdoc/utils/permissions/tool_permission_utils.py +11 -0
- {ripperdoc-0.3.0.dist-info → ripperdoc-0.3.2.dist-info}/METADATA +3 -2
- {ripperdoc-0.3.0.dist-info → ripperdoc-0.3.2.dist-info}/RECORD +39 -30
- ripperdoc/cli/ui/interrupt_handler.py +0 -208
- {ripperdoc-0.3.0.dist-info → ripperdoc-0.3.2.dist-info}/WHEEL +0 -0
- {ripperdoc-0.3.0.dist-info → ripperdoc-0.3.2.dist-info}/entry_points.txt +0 -0
- {ripperdoc-0.3.0.dist-info → ripperdoc-0.3.2.dist-info}/licenses/LICENSE +0 -0
- {ripperdoc-0.3.0.dist-info → ripperdoc-0.3.2.dist-info}/top_level.txt +0 -0
```diff
@@ -81,39 +81,20 @@ def default_model_for_protocol(protocol: ProviderType) -> str:
 
 KNOWN_PROVIDERS = ProviderRegistry(
     providers=[
-        ProviderOption(
-            key="deepseek",
-            protocol=ProviderType.OPENAI_COMPATIBLE,
-            default_model="deepseek-chat",
-            model_suggestions=("deepseek-chat", "deepseek-reasoner"),
-            default_api_base="https://api.deepseek.com/v1",
-        ),
+        # === Major Cloud Providers ===
         ProviderOption(
             key="openai",
             protocol=ProviderType.OPENAI_COMPATIBLE,
             default_model="gpt-4o-mini",
             model_suggestions=(
-                "gpt-5.1",
-                "gpt-5.1-chat",
-                "gpt-5.1-codex",
                 "gpt-4o",
+                "gpt-4o-mini",
                 "gpt-4-turbo",
                 "o1-preview",
                 "o1-mini",
             ),
             default_api_base="https://api.openai.com/v1",
         ),
-        ProviderOption(
-            key="openrouter",
-            protocol=ProviderType.OPENAI_COMPATIBLE,
-            default_model="openai/gpt-4o-mini",
-            model_suggestions=(
-                "openai/gpt-4o-mini",
-                "meta-llama/llama-3.1-8b-instruct",
-                "google/gemini-flash-1.5",
-            ),
-            default_api_base="https://openrouter.ai/api/v1",
-        ),
         ProviderOption(
             key="anthropic",
             protocol=ProviderType.ANTHROPIC,
```
```diff
@@ -122,118 +103,277 @@ KNOWN_PROVIDERS = ProviderRegistry(
                 "claude-3-5-sonnet-20241022",
                 "claude-3-5-haiku-20241022",
                 "claude-3-opus-20240229",
-                "claude-3-sonnet-20240229",
-                "claude-3-haiku-20240307",
             ),
             default_api_base=None,
         ),
         ProviderOption(
-            key="
+            key="google",
+            protocol=ProviderType.GEMINI,
+            default_model="gemini-1.5-pro",
+            model_suggestions=(
+                "gemini-2.0-flash-exp",
+                "gemini-1.5-pro",
+                "gemini-1.5-flash",
+            ),
+            default_api_base="https://generativelanguage.googleapis.com/v1beta",
+        ),
+        # === Aggregators & Open Router ===
+        ProviderOption(
+            key="openrouter",
             protocol=ProviderType.OPENAI_COMPATIBLE,
-            default_model="gpt-4o-mini",
+            default_model="openai/gpt-4o-mini",
             model_suggestions=(
-                "gpt-4o-mini",
-                "
-                "
+                "openai/gpt-4o-mini",
+                "anthropic/claude-3.5-sonnet",
+                "google/gemini-flash-1.5",
+                "meta-llama/llama-3.1-70b-instruct",
             ),
-            default_api_base=
+            default_api_base="https://openrouter.ai/api/v1",
         ),
         ProviderOption(
-            key="
+            key="poe",
             protocol=ProviderType.OPENAI_COMPATIBLE,
-            default_model="
+            default_model="gpt-4o",
             model_suggestions=(
-                "
-                "
-                "
-                "
-                "ministral-14b-2512",
-                "ministral-8b-2512",
-                "codestral-latest",
-                "pixtral-large-latest",
+                "gpt-4o",
+                "claude-3.5-sonnet",
+                "gemini-1.5-pro",
+                "mistral-large",
             ),
-            default_api_base="https://api.
+            default_api_base="https://api.poe.com/v1",
         ),
+        # === Chinese Providers ===
         ProviderOption(
-            key="
-            protocol=ProviderType.
-            default_model="
+            key="deepseek",
+            protocol=ProviderType.ANTHROPIC,
+            default_model="deepseek-chat",
             model_suggestions=(
-                "
-                "
-                "gemini-2.5-flash",
-                "gemini-3-pro-preview",
-                "gemini-3-flash-preview",
+                "deepseek-chat",
+                "deepseek-reasoner",
             ),
-            default_api_base="https://
+            default_api_base="https://api.deepseek.com/v1",
+        ),
+        ProviderOption(
+            key="zhipu",
+            protocol=ProviderType.ANTHROPIC,
+            default_model="glm-4-flash",
+            model_suggestions=(
+                "glm-4-plus",
+                "glm-4-flash",
+                "glm-4.7",
+                "glm-4.6",
+                "glm-4.5",
+                "glm-4-air",
+            ),
+            default_api_base="https://open.bigmodel.cn/api/anthropic",
         ),
         ProviderOption(
             key="moonshot",
             protocol=ProviderType.OPENAI_COMPATIBLE,
-            default_model="
+            default_model="moonshot-v1-auto",
             model_suggestions=(
-                "
+                "moonshot-v1-auto",
                 "kimi-k2-0711-preview",
                 "kimi-k2-turbo-preview",
                 "kimi-k2-thinking",
-                "kimi-k2-
+                "kimi-k2-0905-preview",
             ),
             default_api_base="https://api.moonshot.cn/v1",
         ),
         ProviderOption(
-            key="
+            key="volcengine",
             protocol=ProviderType.OPENAI_COMPATIBLE,
-            default_model="
+            default_model="doubao-pro-32k",
+            model_suggestions=(
+                # Doubao Pro series
+                "doubao-pro-32k",
+                "doubao-pro-256k",
+                "doubao-pro-32k-functioncall-241028",
+                "doubao-pro-32k-character-241215",
+                # Doubao 1.5 series
+                "Doubao-1.5-pro",
+                "doubao-1.5-pro-32k",
+                "doubao-1.5-pro-32k-character",
+                "Doubao-1.5-pro-256k",
+                "Doubao-1.5-vision-pro",
+                "doubao-1.5-vision-pro",
+                "Doubao-1.5-lite-32k",
+                # Doubao Lite series
+                "Doubao-lite-32k",
+                "Doubao-lite-128k",
+                "Doubao-lite-4k-character-240828",
+                "Doubao-lite-32k-character-241015",
+                # DeepSeek series
+                "DeepSeek-V3",
+                "DeepSeek-R1",
+                "DeepSeek-R1-Distill-Qwen-32B",
+                "DeepSeek-R1-Distill-Qwen-7B",
+                # Vision series
+                "Doubao-vision-lite-32k",
+            ),
+            default_api_base="https://ark.cn-beijing.volces.com/api/v3",
+        ),
+        ProviderOption(
+            key="aliyun",
+            protocol=ProviderType.OPENAI_COMPATIBLE,
+            default_model="qwen-plus",
             model_suggestions=(
-                "qwen-turbo",
                 "qwen-plus",
+                "qwen-turbo",
                 "qwen-max",
-                "
-                "qwen2.5-coder-32b",
+                "qwen-coder-plus",
             ),
             default_api_base="https://dashscope.aliyuncs.com/compatible-mode/v1",
         ),
         ProviderOption(
-            key="
+            key="minimax",
+            protocol=ProviderType.OPENAI_COMPATIBLE,
+            default_model="abab6.5s",
+            model_suggestions=(
+                # abab series
+                "abab6.5s",
+                "abab6.5g",
+                "abab6.5t",
+                "abab6",
+                "abab5.5s",
+                "abab5",
+                # 01 series
+                "minimax-01",
+                # M2 series
+                "MiniMax-M2",
+                "MiniMax-M2-Stable",
+            ),
+            default_api_base="https://api.minimax.chat/v1",
+        ),
+        ProviderOption(
+            key="z.ai",
             protocol=ProviderType.OPENAI_COMPATIBLE,
             default_model="glm-4-flash",
             model_suggestions=(
+                "glm-4-flash",
                 "glm-4-plus",
-                "glm-4-air-250414",
-                "glm-4-airx",
-                "glm-4-long",
-                "glm-4-flashx",
-                "glm-4-flash-250414",
                 "glm-4.6",
-                "glm-4.5",
-                "glm-4.5-air",
-                "glm-4.5-airx",
-                "glm-4.5-x",
-                "glm-4.5-flash",
             ),
-            default_api_base="https://
+            default_api_base="https://api.z.ai/api/paas/v4",
         ),
+        # === Western AI Companies ===
         ProviderOption(
-            key="
+            key="mistralai",
             protocol=ProviderType.OPENAI_COMPATIBLE,
-            default_model="
-            model_suggestions=(
-
+            default_model="mistral-large-latest",
+            model_suggestions=(
+                # Mistral Chat series
+                "mistral-large-latest",
+                "mistral-small-latest",
+                "mistral-nemo",
+                "mistral-mini",
+                # Free models
+                "mistral-7b",
+                "mistral-8b",
+                # Mistral Code series
+                "codestral-latest",
+                # Multimodal
+                "pixtral-large-latest",
+            ),
+            default_api_base="https://api.mistral.ai/v1",
+        ),
+        ProviderOption(
+            key="groq",
+            protocol=ProviderType.OPENAI_COMPATIBLE,
+            default_model="llama-3.3-70b-versatile",
+            model_suggestions=(
+                # Llama series
+                "llama-3.3-70b-versatile",
+                "llama-3.1-8b-instant",
+                "llama3-70b-8192",
+                "llama3-8b-8192",
+                # Gemma series
+                "gemma2-9b-it",
+                "gemma-7b-it",
+                # Mistral series
+                "mistral-saba-24b",
+                "mixtral-8x7b-32768",
+            ),
+            default_api_base="https://api.groq.com/openai/v1",
+        ),
+        ProviderOption(
+            key="grok",
+            protocol=ProviderType.OPENAI_COMPATIBLE,
+            default_model="grok-3",
+            model_suggestions=(
+                "grok-4",
+                "grok-3",
+                "grok-3-fast",
+                "grok-3-mini",
+                "grok-3-mini-fast",
+            ),
+            default_api_base="https://api.x.ai/v1",
+        ),
+        ProviderOption(
+            key="cohere",
+            protocol=ProviderType.OPENAI_COMPATIBLE,
+            default_model="command-r-plus-08-2024",
+            model_suggestions=(
+                "command-r-plus-08-2024",
+                "command-r-08-2024",
+                "command-r7b-12-2024",
+            ),
+            default_api_base="https://api.cohere.ai/v1",
+        ),
+        ProviderOption(
+            key="together",
+            protocol=ProviderType.OPENAI_COMPATIBLE,
+            default_model="meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
+            model_suggestions=(
+                "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
+                "Qwen/Qwen2.5-72B-Instruct-Turbo",
+                "mistralai/Mixtral-8x7B-Instruct-v0.1",
+            ),
+            default_api_base="https://api.together.xyz/v1",
+        ),
+        ProviderOption(
+            key="perplexity",
+            protocol=ProviderType.OPENAI_COMPATIBLE,
+            default_model="llama-3.1-sonar-small-128k-online",
+            model_suggestions=(
+                "llama-3.1-sonar-small-128k-online",
+                "llama-3.1-sonar-large-128k-online",
+            ),
+            default_api_base="https://api.perplexity.ai",
         ),
         ProviderOption(
             key="siliconflow",
             protocol=ProviderType.OPENAI_COMPATIBLE,
-            default_model="
+            default_model="Qwen/Qwen2.5-72B-Instruct",
             model_suggestions=(
-                "
-                "
-                "
-                "zai-org/GLM-4.6",
-                "moonshotai/Kimi-K2-Thinking",
-                "MiniMaxAI/MiniMax-M2",
+                "Qwen/Qwen2.5-72B-Instruct",
+                "deepseek-ai/DeepSeek-V3",
+                "01-ai/Yi-1.5-34B-Chat",
             ),
             default_api_base="https://api.siliconflow.cn/v1",
         ),
+        # === Generic / Custom ===
+        ProviderOption(
+            key="openai_compatible",
+            protocol=ProviderType.OPENAI_COMPATIBLE,
+            default_model="gpt-4o-mini",
+            model_suggestions=(
+                "gpt-4o-mini",
+                "gpt-4o",
+                "llama-3.1-70b",
+            ),
+            default_api_base=None,
+        ),
+        ProviderOption(
+            key="anthropic_compatible",
+            protocol=ProviderType.ANTHROPIC,
+            default_model="claude-3-5-sonnet-20241022",
+            model_suggestions=(
+                "claude-3-5-sonnet-20241022",
+                "claude-3-5-haiku-20241022",
+            ),
+            default_api_base=None,
+        ),
     ],
     default_key="deepseek",
 )
```
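Taken together, the two hunks match the +220/−80 change to ripperdoc/cli/ui/provider_options.py listed above. For readers who only have this diff, the sketch below shows roughly what the structures being populated look like. The field names (key, protocol, default_model, model_suggestions, default_api_base, providers, default_key) and the ProviderType members (OPENAI_COMPATIBLE, ANTHROPIC, GEMINI) come straight from the hunks; the class bodies, enum values, and the get() helper are illustrative assumptions, not the package's actual definitions.

```python
# Illustrative sketch only -- field names mirror the diff above; class bodies,
# enum values, and get() are assumptions, not ripperdoc's real implementation.
from dataclasses import dataclass, field
from enum import Enum
from typing import List, Optional, Tuple


class ProviderType(Enum):
    OPENAI_COMPATIBLE = "openai_compatible"  # value strings are placeholders
    ANTHROPIC = "anthropic"
    GEMINI = "gemini"


@dataclass(frozen=True)
class ProviderOption:
    key: str                                 # registry key, e.g. "openrouter"
    protocol: ProviderType                   # wire protocol the provider speaks
    default_model: str                       # model used when none is configured
    model_suggestions: Tuple[str, ...] = ()  # choices surfaced in the models UI
    default_api_base: Optional[str] = None   # None -> use the SDK's default endpoint


@dataclass
class ProviderRegistry:
    providers: List[ProviderOption] = field(default_factory=list)
    default_key: str = "deepseek"

    def get(self, key: str) -> Optional[ProviderOption]:
        # Hypothetical lookup helper, shown only to make the shape concrete.
        return next((p for p in self.providers if p.key == key), None)


registry = ProviderRegistry(
    providers=[
        ProviderOption(
            key="openrouter",
            protocol=ProviderType.OPENAI_COMPATIBLE,
            default_model="openai/gpt-4o-mini",
            model_suggestions=("openai/gpt-4o-mini", "anthropic/claude-3.5-sonnet"),
            default_api_base="https://openrouter.ai/api/v1",
        ),
    ],
    default_key="openrouter",
)
assert registry.get("openrouter").default_api_base == "https://openrouter.ai/api/v1"
```

Read against this sketch, the 0.3.2 change is data only: new ProviderOption entries (google, zhipu, volcengine, aliyun, minimax, z.ai, mistralai, groq, grok, cohere, together, perplexity, plus the generic openai_compatible and anthropic_compatible fallbacks), section comments grouping the list, and the deepseek entry moved from the head of the list into the Chinese-provider group and switched to the ANTHROPIC protocol, while default_key stays "deepseek".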