gpu-server-setup 0.4.8__tar.gz → 0.4.9__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/PKG-INFO +23 -13
- {gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/README.md +22 -12
- {gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/gpu_server_setup.egg-info/PKG-INFO +23 -13
- {gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/gpu_setup/cli.py +33 -1
- {gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/gpu_setup/core.py +104 -1
- {gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/pyproject.toml +1 -1
- {gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/LICENSE +0 -0
- {gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/gpu_server_setup.egg-info/SOURCES.txt +0 -0
- {gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/gpu_server_setup.egg-info/dependency_links.txt +0 -0
- {gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/gpu_server_setup.egg-info/entry_points.txt +0 -0
- {gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/gpu_server_setup.egg-info/requires.txt +0 -0
- {gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/gpu_server_setup.egg-info/top_level.txt +0 -0
- {gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/gpu_setup/__init__.py +0 -0
- {gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/gpu_setup/__version__.py +0 -0
- {gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/gpu_setup/config.py +0 -0
- {gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/gpu_setup/rich_console.py +0 -0
- {gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/gpu_setup/utils.py +0 -0
- {gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/setup.cfg +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: gpu-server-setup
|
|
3
|
-
Version: 0.4.8
|
|
3
|
+
Version: 0.4.9
|
|
4
4
|
Summary: Один клик — удалённый GPU-сервер с Ollama + VS Code Remote
|
|
5
5
|
Author: The Fool
|
|
6
6
|
License: MIT
|
|
@@ -29,6 +29,7 @@ Dynamic: license-file
|
|
|
29
29
|
- Установка Ollama и запуск сервиса
|
|
30
30
|
- Загрузка моделей с живым выводом прогресса
|
|
31
31
|
- Автоматическое открытие VS Code Remote
|
|
32
|
+
- Полноценный Colab-режим: Jupyter Notebook + SSH-туннель на локальный порт + venv с torch
|
|
32
33
|
- Режим `--dry-run` для просмотра плана
|
|
33
34
|
- Красивый вывод с помощью `rich`
|
|
34
35
|
|
|
@@ -66,14 +67,14 @@ Dynamic: license-file
|
|
|
66
67
|
pip install gpu-server-setup
|
|
67
68
|
```
|
|
68
69
|
|
|
69
|
-
После установки команда `gpu-setup` будет доступна в терминале.
|
|
70
|
+
После установки команда `gpu-server-setup` будет доступна в терминале.
|
|
70
71
|
|
|
71
72
|
---
|
|
72
73
|
|
|
73
74
|
## Использование
|
|
74
75
|
|
|
75
76
|
```bash
|
|
76
|
-
gpu-setup [команда] [опции]
|
|
77
|
+
gpu-server-setup [команда] [опции]
|
|
77
78
|
```
|
|
78
79
|
|
|
79
80
|
### Доступные команды
|
|
@@ -84,7 +85,10 @@ gpu-setup [команда] [опции]
|
|
|
84
85
|
| `ssh` | Только настройка SSH-доступа |
|
|
85
86
|
| `ollama-install` | Установка Ollama |
|
|
86
87
|
| `model-pull` | Загрузка модели |
|
|
87
|
-
| `vscode` | Открытие VS Code
|
|
88
|
+
| `vscode` | Открытие VS Code
|
|
89
|
+
colab |
|
|
90
|
+
| `colab` | Jupyter Notebook + локальный туннель
|
|
91
|
+
Remote |
|
|
88
92
|
| `plan` | Показать план действий |
|
|
89
93
|
|
|
90
94
|
---
|
|
@@ -132,7 +136,7 @@ gpu-setup [команда] [опции]
|
|
|
132
136
|
|
|
133
137
|
**Пример:**
|
|
134
138
|
```bash
|
|
135
|
-
gpu-setup model-pull llama3.2 --ip 123.45.67.89 --user ubuntu --pem-key ~/key.pem
|
|
139
|
+
gpu-server-setup model-pull llama3.2 --ip 123.45.67.89 --user ubuntu --pem-key ~/key.pem
|
|
136
140
|
```
|
|
137
141
|
|
|
138
142
|
**Опции:**
|
|
@@ -152,37 +156,43 @@ gpu-setup model-pull llama3.2 --ip 123.45.67.89 --user ubuntu --pem-key ~/key.pe
|
|
|
152
156
|
### 1. Полная настройка со своим ключом (классический способ)
|
|
153
157
|
|
|
154
158
|
```bash
|
|
155
|
-
gpu-setup setup --ip 123.45.67.89 --user ubuntu --password "pass" --model llama3.2
|
|
159
|
+
gpu-server-setup setup --ip 123.45.67.89 --user ubuntu --password "pass" --model llama3.2
|
|
156
160
|
```
|
|
157
161
|
|
|
158
162
|
### 2. Полная настройка с `.pem`-ключом (самый удобный для облака)
|
|
159
163
|
|
|
160
164
|
```bash
|
|
161
|
-
gpu-setup setup --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem --model llama3.2
|
|
165
|
+
gpu-server-setup setup --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem --model llama3.2
|
|
162
166
|
```
|
|
163
167
|
|
|
164
168
|
### 3. Только SSH с `.pem`-ключом
|
|
165
169
|
|
|
166
170
|
```bash
|
|
167
|
-
gpu-setup ssh --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem
|
|
171
|
+
gpu-server-setup ssh --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem
|
|
168
172
|
```
|
|
169
173
|
|
|
170
174
|
### 4. Установка Ollama с `.pem`-ключом
|
|
171
175
|
|
|
172
176
|
```bash
|
|
173
|
-
gpu-setup ollama-install --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem
|
|
177
|
+
gpu-server-setup ollama-install --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem
|
|
174
178
|
```
|
|
175
179
|
|
|
176
180
|
### 5. Скачивание модели (после настройки SSH)
|
|
177
181
|
|
|
178
182
|
```bash
|
|
179
|
-
gpu-setup model-pull gemma2:27b --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem
|
|
183
|
+
gpu-server-setup model-pull gemma2:27b --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem
|
|
180
184
|
```
|
|
181
185
|
|
|
182
|
-
### 6.
|
|
186
|
+
### 6. Настройка colab
|
|
183
187
|
|
|
184
188
|
```bash
|
|
185
|
-
gpu-setup
|
|
189
|
+
gpu-server-setup colab --ip 123.45.67.89 --user ubuntu --pem-key ~/key.pem (или --password) --port 9999
|
|
190
|
+
```
|
|
191
|
+
|
|
192
|
+
### 7. Просмотр плана
|
|
193
|
+
|
|
194
|
+
```bash
|
|
195
|
+
gpu-server-setup plan --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem --model llama3.2
|
|
186
196
|
```
|
|
187
197
|
|
|
188
198
|
---
|
|
@@ -192,7 +202,7 @@ gpu-setup plan --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-u
|
|
|
192
202
|
Все команды поддерживают `--dry-run`:
|
|
193
203
|
|
|
194
204
|
```bash
|
|
195
|
-
gpu-setup setup --ip IP --user user --pem-key key.pem --dry-run
|
|
205
|
+
gpu-server-setup setup --ip IP --user user --pem-key key.pem --dry-run
|
|
196
206
|
```
|
|
197
207
|
|
|
198
208
|
---
|
|
@@ -13,6 +13,7 @@
|
|
|
13
13
|
- Установка Ollama и запуск сервиса
|
|
14
14
|
- Загрузка моделей с живым выводом прогресса
|
|
15
15
|
- Автоматическое открытие VS Code Remote
|
|
16
|
+
- Полноценный Colab-режим: Jupyter Notebook + SSH-туннель на локальный порт + venv с torch
|
|
16
17
|
- Режим `--dry-run` для просмотра плана
|
|
17
18
|
- Красивый вывод с помощью `rich`
|
|
18
19
|
|
|
@@ -50,14 +51,14 @@
|
|
|
50
51
|
pip install gpu-server-setup
|
|
51
52
|
```
|
|
52
53
|
|
|
53
|
-
После установки команда `gpu-setup` будет доступна в терминале.
|
|
54
|
+
После установки команда `gpu-server-setup` будет доступна в терминале.
|
|
54
55
|
|
|
55
56
|
---
|
|
56
57
|
|
|
57
58
|
## Использование
|
|
58
59
|
|
|
59
60
|
```bash
|
|
60
|
-
gpu-setup [команда] [опции]
|
|
61
|
+
gpu-server-setup [команда] [опции]
|
|
61
62
|
```
|
|
62
63
|
|
|
63
64
|
### Доступные команды
|
|
@@ -68,7 +69,10 @@ gpu-setup [команда] [опции]
|
|
|
68
69
|
| `ssh` | Только настройка SSH-доступа |
|
|
69
70
|
| `ollama-install` | Установка Ollama |
|
|
70
71
|
| `model-pull` | Загрузка модели |
|
|
71
|
-
| `vscode` | Открытие VS Code
|
|
72
|
+
| `vscode` | Открытие VS Code
|
|
73
|
+
colab |
|
|
74
|
+
| `colab` | Jupyter Notebook + локальный туннель
|
|
75
|
+
Remote |
|
|
72
76
|
| `plan` | Показать план действий |
|
|
73
77
|
|
|
74
78
|
---
|
|
@@ -116,7 +120,7 @@ gpu-setup [команда] [опции]
|
|
|
116
120
|
|
|
117
121
|
**Пример:**
|
|
118
122
|
```bash
|
|
119
|
-
gpu-setup model-pull llama3.2 --ip 123.45.67.89 --user ubuntu --pem-key ~/key.pem
|
|
123
|
+
gpu-server-setup model-pull llama3.2 --ip 123.45.67.89 --user ubuntu --pem-key ~/key.pem
|
|
120
124
|
```
|
|
121
125
|
|
|
122
126
|
**Опции:**
|
|
@@ -136,37 +140,43 @@ gpu-setup model-pull llama3.2 --ip 123.45.67.89 --user ubuntu --pem-key ~/key.pe
|
|
|
136
140
|
### 1. Полная настройка со своим ключом (классический способ)
|
|
137
141
|
|
|
138
142
|
```bash
|
|
139
|
-
gpu-setup setup --ip 123.45.67.89 --user ubuntu --password "pass" --model llama3.2
|
|
143
|
+
gpu-server-setup setup --ip 123.45.67.89 --user ubuntu --password "pass" --model llama3.2
|
|
140
144
|
```
|
|
141
145
|
|
|
142
146
|
### 2. Полная настройка с `.pem`-ключом (самый удобный для облака)
|
|
143
147
|
|
|
144
148
|
```bash
|
|
145
|
-
gpu-setup setup --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem --model llama3.2
|
|
149
|
+
gpu-server-setup setup --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem --model llama3.2
|
|
146
150
|
```
|
|
147
151
|
|
|
148
152
|
### 3. Только SSH с `.pem`-ключом
|
|
149
153
|
|
|
150
154
|
```bash
|
|
151
|
-
gpu-setup ssh --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem
|
|
155
|
+
gpu-server-setup ssh --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem
|
|
152
156
|
```
|
|
153
157
|
|
|
154
158
|
### 4. Установка Ollama с `.pem`-ключом
|
|
155
159
|
|
|
156
160
|
```bash
|
|
157
|
-
gpu-setup ollama-install --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem
|
|
161
|
+
gpu-server-setup ollama-install --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem
|
|
158
162
|
```
|
|
159
163
|
|
|
160
164
|
### 5. Скачивание модели (после настройки SSH)
|
|
161
165
|
|
|
162
166
|
```bash
|
|
163
|
-
gpu-setup model-pull gemma2:27b --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem
|
|
167
|
+
gpu-server-setup model-pull gemma2:27b --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem
|
|
164
168
|
```
|
|
165
169
|
|
|
166
|
-
### 6.
|
|
170
|
+
### 6. Настройка colab
|
|
167
171
|
|
|
168
172
|
```bash
|
|
169
|
-
gpu-setup
|
|
173
|
+
gpu-server-setup colab --ip 123.45.67.89 --user ubuntu --pem-key ~/key.pem (или --password) --port 9999
|
|
174
|
+
```
|
|
175
|
+
|
|
176
|
+
### 7. Просмотр плана
|
|
177
|
+
|
|
178
|
+
```bash
|
|
179
|
+
gpu-server-setup plan --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem --model llama3.2
|
|
170
180
|
```
|
|
171
181
|
|
|
172
182
|
---
|
|
@@ -176,7 +186,7 @@ gpu-setup plan --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-u
|
|
|
176
186
|
Все команды поддерживают `--dry-run`:
|
|
177
187
|
|
|
178
188
|
```bash
|
|
179
|
-
gpu-setup setup --ip IP --user user --pem-key key.pem --dry-run
|
|
189
|
+
gpu-server-setup setup --ip IP --user user --pem-key key.pem --dry-run
|
|
180
190
|
```
|
|
181
191
|
|
|
182
192
|
---
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: gpu-server-setup
|
|
3
|
-
Version: 0.4.8
|
|
3
|
+
Version: 0.4.9
|
|
4
4
|
Summary: Один клик — удалённый GPU-сервер с Ollama + VS Code Remote
|
|
5
5
|
Author: The Fool
|
|
6
6
|
License: MIT
|
|
@@ -29,6 +29,7 @@ Dynamic: license-file
|
|
|
29
29
|
- Установка Ollama и запуск сервиса
|
|
30
30
|
- Загрузка моделей с живым выводом прогресса
|
|
31
31
|
- Автоматическое открытие VS Code Remote
|
|
32
|
+
- Полноценный Colab-режим: Jupyter Notebook + SSH-туннель на локальный порт + venv с torch
|
|
32
33
|
- Режим `--dry-run` для просмотра плана
|
|
33
34
|
- Красивый вывод с помощью `rich`
|
|
34
35
|
|
|
@@ -66,14 +67,14 @@ Dynamic: license-file
|
|
|
66
67
|
pip install gpu-server-setup
|
|
67
68
|
```
|
|
68
69
|
|
|
69
|
-
После установки команда `gpu-setup` будет доступна в терминале.
|
|
70
|
+
После установки команда `gpu-server-setup` будет доступна в терминале.
|
|
70
71
|
|
|
71
72
|
---
|
|
72
73
|
|
|
73
74
|
## Использование
|
|
74
75
|
|
|
75
76
|
```bash
|
|
76
|
-
gpu-setup [команда] [опции]
|
|
77
|
+
gpu-server-setup [команда] [опции]
|
|
77
78
|
```
|
|
78
79
|
|
|
79
80
|
### Доступные команды
|
|
@@ -84,7 +85,10 @@ gpu-setup [команда] [опции]
|
|
|
84
85
|
| `ssh` | Только настройка SSH-доступа |
|
|
85
86
|
| `ollama-install` | Установка Ollama |
|
|
86
87
|
| `model-pull` | Загрузка модели |
|
|
87
|
-
| `vscode` | Открытие VS Code
|
|
88
|
+
| `vscode` | Открытие VS Code
|
|
89
|
+
colab |
|
|
90
|
+
| `colab` | Jupyter Notebook + локальный туннель
|
|
91
|
+
Remote |
|
|
88
92
|
| `plan` | Показать план действий |
|
|
89
93
|
|
|
90
94
|
---
|
|
@@ -132,7 +136,7 @@ gpu-setup [команда] [опции]
|
|
|
132
136
|
|
|
133
137
|
**Пример:**
|
|
134
138
|
```bash
|
|
135
|
-
gpu-setup model-pull llama3.2 --ip 123.45.67.89 --user ubuntu --pem-key ~/key.pem
|
|
139
|
+
gpu-server-setup model-pull llama3.2 --ip 123.45.67.89 --user ubuntu --pem-key ~/key.pem
|
|
136
140
|
```
|
|
137
141
|
|
|
138
142
|
**Опции:**
|
|
@@ -152,37 +156,43 @@ gpu-setup model-pull llama3.2 --ip 123.45.67.89 --user ubuntu --pem-key ~/key.pe
|
|
|
152
156
|
### 1. Полная настройка со своим ключом (классический способ)
|
|
153
157
|
|
|
154
158
|
```bash
|
|
155
|
-
gpu-setup setup --ip 123.45.67.89 --user ubuntu --password "pass" --model llama3.2
|
|
159
|
+
gpu-server-setup setup --ip 123.45.67.89 --user ubuntu --password "pass" --model llama3.2
|
|
156
160
|
```
|
|
157
161
|
|
|
158
162
|
### 2. Полная настройка с `.pem`-ключом (самый удобный для облака)
|
|
159
163
|
|
|
160
164
|
```bash
|
|
161
|
-
gpu-setup setup --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem --model llama3.2
|
|
165
|
+
gpu-server-setup setup --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem --model llama3.2
|
|
162
166
|
```
|
|
163
167
|
|
|
164
168
|
### 3. Только SSH с `.pem`-ключом
|
|
165
169
|
|
|
166
170
|
```bash
|
|
167
|
-
gpu-setup ssh --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem
|
|
171
|
+
gpu-server-setup ssh --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem
|
|
168
172
|
```
|
|
169
173
|
|
|
170
174
|
### 4. Установка Ollama с `.pem`-ключом
|
|
171
175
|
|
|
172
176
|
```bash
|
|
173
|
-
gpu-setup ollama-install --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem
|
|
177
|
+
gpu-server-setup ollama-install --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem
|
|
174
178
|
```
|
|
175
179
|
|
|
176
180
|
### 5. Скачивание модели (после настройки SSH)
|
|
177
181
|
|
|
178
182
|
```bash
|
|
179
|
-
gpu-setup model-pull gemma2:27b --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem
|
|
183
|
+
gpu-server-setup model-pull gemma2:27b --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem
|
|
180
184
|
```
|
|
181
185
|
|
|
182
|
-
### 6.
|
|
186
|
+
### 6. Настройка colab
|
|
183
187
|
|
|
184
188
|
```bash
|
|
185
|
-
gpu-setup
|
|
189
|
+
gpu-server-setup colab --ip 123.45.67.89 --user ubuntu --pem-key ~/key.pem (или --password) --port 9999
|
|
190
|
+
```
|
|
191
|
+
|
|
192
|
+
### 7. Просмотр плана
|
|
193
|
+
|
|
194
|
+
```bash
|
|
195
|
+
gpu-server-setup plan --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-ubuntu.pem --model llama3.2
|
|
186
196
|
```
|
|
187
197
|
|
|
188
198
|
---
|
|
@@ -192,7 +202,7 @@ gpu-setup plan --ip 111.11.111.111 --user ubuntu --pem-key ~/Downloads/keyname-u
|
|
|
192
202
|
Все команды поддерживают `--dry-run`:
|
|
193
203
|
|
|
194
204
|
```bash
|
|
195
|
-
gpu-setup setup --ip IP --user user --pem-key key.pem --dry-run
|
|
205
|
+
gpu-server-setup setup --ip IP --user user --pem-key key.pem --dry-run
|
|
196
206
|
```
|
|
197
207
|
|
|
198
208
|
---
|
|
@@ -3,9 +3,11 @@ from typing import Optional
|
|
|
3
3
|
|
|
4
4
|
from .core import GPUServer, get_plan
|
|
5
5
|
from .rich_console import header, plan_table, success, error
|
|
6
|
+
import secrets
|
|
7
|
+
|
|
6
8
|
|
|
7
9
|
app = typer.Typer(
|
|
8
|
-
name="gpu-setup",
|
|
10
|
+
name="gpu-server-setup",
|
|
9
11
|
help="Один клик — удалённый GPU-сервер с Ollama + VS Code Remote",
|
|
10
12
|
rich_markup_mode="rich",
|
|
11
13
|
add_completion=True,
|
|
@@ -206,6 +208,36 @@ def vscode(
|
|
|
206
208
|
)
|
|
207
209
|
server.open_vscode()
|
|
208
210
|
|
|
211
|
+
@app.command()
|
|
212
|
+
def colab(
|
|
213
|
+
ip: str = typer.Option(..., "--ip"),
|
|
214
|
+
user: str = typer.Option(..., "--user"),
|
|
215
|
+
password: Optional[str] = typer.Option(None, "--password", hide_input=True),
|
|
216
|
+
port: int = typer.Option(8889, "--port"),
|
|
217
|
+
token: Optional[str] = typer.Option(None, "--token"),
|
|
218
|
+
dry_run: bool = typer.Option(False, "--dry-run"),
|
|
219
|
+
pem_key: Optional[str] = typer.Option(None, "--pem-key", "--key-file"),
|
|
220
|
+
):
|
|
221
|
+
token = token or secrets.token_hex(16)
|
|
222
|
+
|
|
223
|
+
server = GPUServer(
|
|
224
|
+
ip=ip,
|
|
225
|
+
user=user,
|
|
226
|
+
password=password,
|
|
227
|
+
external_key=pem_key,
|
|
228
|
+
dry_run=dry_run,
|
|
229
|
+
)
|
|
230
|
+
server.generate_key()
|
|
231
|
+
server.copy_pubkey()
|
|
232
|
+
server.setup_ssh_config()
|
|
233
|
+
server.test_connection()
|
|
234
|
+
|
|
235
|
+
port = server.setup_colab(port=port, token=token)
|
|
236
|
+
|
|
237
|
+
success("Colab runtime готов!")
|
|
238
|
+
typer.echo(f"\nToken: [bold]{token}[/bold]")
|
|
239
|
+
typer.echo(f"Local URL: http://localhost:{port}/?token={token}")
|
|
240
|
+
|
|
209
241
|
|
|
210
242
|
if __name__ == "__main__":
|
|
211
243
|
app()
|
|
@@ -310,7 +310,6 @@ class GPUServer:
|
|
|
310
310
|
else:
|
|
311
311
|
success("Сервис Ollama успешно запущен (через systemctl)")
|
|
312
312
|
|
|
313
|
-
# Проверяем наличие модели
|
|
314
313
|
_, stdout, _ = client.exec_command(f"ollama list | grep -q '{model}'")
|
|
315
314
|
if stdout.channel.recv_exit_status() == 0:
|
|
316
315
|
success(f"Модель {model} уже есть")
|
|
@@ -345,7 +344,111 @@ class GPUServer:
|
|
|
345
344
|
success("VS Code Remote открыт")
|
|
346
345
|
else:
|
|
347
346
|
warning("Команда 'code' не найдена. Установи VS Code CLI")
|
|
347
|
+
|
|
348
|
+
def setup_colab(self, port: int = 8889, token: str = "token123") -> int:
|
|
349
|
+
header("Настройка Colab (жёсткий режим)")
|
|
348
350
|
|
|
351
|
+
if self.dry_run:
|
|
352
|
+
console.print("[dim]DRY-RUN: colab setup[/dim]")
|
|
353
|
+
return port
|
|
354
|
+
|
|
355
|
+
def run_ssh(cmd: str, sudo: bool = False, print_output: bool = False) -> tuple[str, str]:
|
|
356
|
+
if sudo:
|
|
357
|
+
if self.password:
|
|
358
|
+
remote_cmd = f"echo '{self.password}' | sudo -S {cmd}"
|
|
359
|
+
else:
|
|
360
|
+
remote_cmd = f"sudo {cmd}"
|
|
361
|
+
else:
|
|
362
|
+
remote_cmd = cmd
|
|
363
|
+
|
|
364
|
+
if self.dry_run:
|
|
365
|
+
console.print(f"[dim]DRY-RUN: ssh {self.alias} '{remote_cmd}'[/dim]")
|
|
366
|
+
return "", ""
|
|
367
|
+
|
|
368
|
+
ssh_cmd = ["ssh", self.alias, remote_cmd]
|
|
369
|
+
|
|
370
|
+
try:
|
|
371
|
+
result = subprocess.run(
|
|
372
|
+
ssh_cmd,
|
|
373
|
+
capture_output=True,
|
|
374
|
+
text=True,
|
|
375
|
+
)
|
|
376
|
+
out = result.stdout
|
|
377
|
+
err = result.stderr
|
|
378
|
+
|
|
379
|
+
if print_output and (out or err):
|
|
380
|
+
if out:
|
|
381
|
+
console.print(out, end="")
|
|
382
|
+
if err:
|
|
383
|
+
console.print(err, end="", style="red")
|
|
384
|
+
|
|
385
|
+
return out, err
|
|
386
|
+
except Exception as e:
|
|
387
|
+
error(f"SSH-ошибка: {e}")
|
|
388
|
+
return "", str(e)
|
|
389
|
+
|
|
390
|
+
header("Чистка окружения")
|
|
391
|
+
run_ssh("pkill -f jupyter || true", print_output=False)
|
|
392
|
+
run_ssh(f"fuser -k {port}/tcp || true", print_output=False)
|
|
393
|
+
subprocess.run(
|
|
394
|
+
["pkill", "-f", f"{port}:localhost:{port}"],
|
|
395
|
+
stdout=subprocess.DEVNULL,
|
|
396
|
+
stderr=subprocess.DEVNULL,
|
|
397
|
+
)
|
|
398
|
+
|
|
399
|
+
header("Подготовка окружения")
|
|
400
|
+
run_ssh("apt update", sudo=True)
|
|
401
|
+
run_ssh("apt install -y python3-venv", sudo=True)
|
|
402
|
+
|
|
403
|
+
run_ssh("test -d ~/venv || python3 -m venv ~/venv")
|
|
404
|
+
|
|
405
|
+
pip_cmd = (
|
|
406
|
+
"bash -lc 'source ~/venv/bin/activate && "
|
|
407
|
+
"pip install --upgrade pip -q && "
|
|
408
|
+
"pip install -q jupyterlab notebook jupyter_server torch torchvision torchaudio'"
|
|
409
|
+
)
|
|
410
|
+
run_ssh(pip_cmd, print_output=False)
|
|
411
|
+
|
|
412
|
+
header("Запуск Jupyter")
|
|
413
|
+
jupyter_cmd = (
|
|
414
|
+
f"setsid nohup ~/venv/bin/jupyter notebook "
|
|
415
|
+
f"--no-browser "
|
|
416
|
+
f"--ip=0.0.0.0 "
|
|
417
|
+
f"--port={port} "
|
|
418
|
+
f"--IdentityProvider.token={token} "
|
|
419
|
+
f"> /tmp/jupyter.log 2>&1 < /dev/null &"
|
|
420
|
+
)
|
|
421
|
+
run_ssh(jupyter_cmd)
|
|
422
|
+
|
|
423
|
+
header("Ожидание запуска Jupyter")
|
|
424
|
+
|
|
425
|
+
def wait_for_port() -> bool:
|
|
426
|
+
for _ in range(30):
|
|
427
|
+
out, _ = run_ssh(
|
|
428
|
+
f"ss -tuln | grep -q '{port}' && echo yes || echo no"
|
|
429
|
+
)
|
|
430
|
+
if out.strip() == "yes":
|
|
431
|
+
return True
|
|
432
|
+
time.sleep(1)
|
|
433
|
+
return False
|
|
434
|
+
|
|
435
|
+
if not wait_for_port():
|
|
436
|
+
error("Jupyter не запустился. Лог ниже:")
|
|
437
|
+
run_ssh("cat /tmp/jupyter.log || echo NO_LOG", print_output=True)
|
|
438
|
+
run_ssh("ps aux | grep -E 'jupyter|python' | grep -v grep", print_output=True)
|
|
439
|
+
sys.exit(1)
|
|
440
|
+
|
|
441
|
+
success("Jupyter запущен")
|
|
442
|
+
|
|
443
|
+
header("Создание SSH-туннеля")
|
|
444
|
+
tunnel_cmd = [
|
|
445
|
+
"ssh", "-N", "-L", f"{port}:localhost:{port}", self.alias
|
|
446
|
+
]
|
|
447
|
+
subprocess.Popen(tunnel_cmd)
|
|
448
|
+
|
|
449
|
+
success("Туннель поднят")
|
|
450
|
+
return port
|
|
451
|
+
|
|
349
452
|
|
|
350
453
|
def get_plan(server: GPUServer, install_ollama: bool, model: Optional[str], no_vscode: bool) -> list:
|
|
351
454
|
plan = [
|
|
File without changes
|
|
File without changes
|
{gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/gpu_server_setup.egg-info/dependency_links.txt
RENAMED
|
File without changes
|
{gpu_server_setup-0.4.8 → gpu_server_setup-0.4.9}/gpu_server_setup.egg-info/entry_points.txt
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|