chipfoundry-cli 1.0.3__tar.gz → 1.0.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {chipfoundry_cli-1.0.3 → chipfoundry_cli-1.0.4}/PKG-INFO +108 -6
- {chipfoundry_cli-1.0.3 → chipfoundry_cli-1.0.4}/README.md +107 -5
- {chipfoundry_cli-1.0.3 → chipfoundry_cli-1.0.4}/chipfoundry_cli/main.py +293 -37
- {chipfoundry_cli-1.0.3 → chipfoundry_cli-1.0.4}/chipfoundry_cli/utils.py +171 -8
- {chipfoundry_cli-1.0.3 → chipfoundry_cli-1.0.4}/pyproject.toml +1 -1
- {chipfoundry_cli-1.0.3 → chipfoundry_cli-1.0.4}/LICENSE +0 -0
- {chipfoundry_cli-1.0.3 → chipfoundry_cli-1.0.4}/chipfoundry_cli/__init__.py +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: chipfoundry-cli
|
|
3
|
-
Version: 1.0.
|
|
3
|
+
Version: 1.0.4
|
|
4
4
|
Summary: CLI tool to automate ChipFoundry project submission to SFTP server
|
|
5
5
|
Home-page: https://chipfoundry.io
|
|
6
6
|
License: Apache-2.0
|
|
@@ -27,13 +27,13 @@ Description-Content-Type: text/markdown
|
|
|
27
27
|
[](https://badge.fury.io/py/chipfoundry-cli)
|
|
28
28
|
[](https://pypi.org/project/chipfoundry-cli/)
|
|
29
29
|
|
|
30
|
-
A command-line tool to automate the submission of ChipFoundry projects to the SFTP server.
|
|
30
|
+
A command-line tool to automate the submission of ChipFoundry projects to the SFTP server and manage project results.
|
|
31
31
|
|
|
32
32
|
---
|
|
33
33
|
|
|
34
34
|
## Overview
|
|
35
35
|
|
|
36
|
-
`cf-cli` is a user-friendly command-line tool for securely submitting your ChipFoundry project files to the official SFTP server. It automatically collects the required files, generates or updates your project configuration,
|
|
36
|
+
`cf-cli` is a user-friendly command-line tool for securely submitting your ChipFoundry project files to the official SFTP server and downloading project results. It automatically collects the required files, generates or updates your project configuration, uploads everything to the correct location on the server, and provides tools to view project results and reports.
|
|
37
37
|
|
|
38
38
|
---
|
|
39
39
|
|
|
@@ -72,6 +72,16 @@ cf --help
|
|
|
72
72
|
cf push
|
|
73
73
|
```
|
|
74
74
|
|
|
75
|
+
6. **Download results** (when available):
|
|
76
|
+
```bash
|
|
77
|
+
cf pull
|
|
78
|
+
```
|
|
79
|
+
|
|
80
|
+
7. **View tapeout report**:
|
|
81
|
+
```bash
|
|
82
|
+
cf view-tapeout-report
|
|
83
|
+
```
|
|
84
|
+
|
|
75
85
|
---
|
|
76
86
|
|
|
77
87
|
## Project Structure Requirements
|
|
@@ -82,6 +92,7 @@ Your project directory **must** contain:
|
|
|
82
92
|
- `user_project_wrapper.gds` (for digital projects)
|
|
83
93
|
- `user_analog_project_wrapper.gds` (for analog projects)
|
|
84
94
|
- `openframe_project_wrapper.gds` (for openframe projects)
|
|
95
|
+
- **Note**: Both compressed (`.gz`) and uncompressed (`.gds`) files are supported
|
|
85
96
|
- `verilog/rtl/user_defines.v` (required for digital/analog)
|
|
86
97
|
- `.cf/project.json` (optional; will be created/updated automatically)
|
|
87
98
|
|
|
@@ -180,15 +191,44 @@ cf push [OPTIONS]
|
|
|
180
191
|
4. Uploads files to SFTP with progress bars
|
|
181
192
|
5. Shows clean, informative output
|
|
182
193
|
|
|
183
|
-
|
|
194
|
+
**GDS File Handling:**
|
|
195
|
+
- **Both compressed (`.gz`) and uncompressed (`.gds`) files are supported**
|
|
196
|
+
- **No automatic compression** - files are uploaded as-is
|
|
197
|
+
- **Only one version allowed** - you cannot have both compressed and uncompressed versions of the same file
|
|
198
|
+
- **Prefers uncompressed files** when available
|
|
199
|
+
- **Falls back to compressed files** if no uncompressed version is available
|
|
200
|
+
|
|
201
|
+
### Pull Results (Download)
|
|
184
202
|
|
|
185
203
|
```bash
|
|
186
204
|
cf pull [--project-name NAME]
|
|
187
205
|
```
|
|
188
206
|
|
|
189
|
-
- Downloads project results from SFTP
|
|
207
|
+
- Downloads project results from SFTP server
|
|
190
208
|
- Saves to `sftp-output/<project_name>/`
|
|
191
|
-
-
|
|
209
|
+
- **Automatically updates** your local `.cf/project.json` with the pulled version
|
|
210
|
+
- Creates the expected directory structure:
|
|
211
|
+
```
|
|
212
|
+
sftp-output/
|
|
213
|
+
└── <project_name>/
|
|
214
|
+
├── config/
|
|
215
|
+
│ └── project.json
|
|
216
|
+
└── consolidated_reports/
|
|
217
|
+
└── consolidated_report.html
|
|
218
|
+
```
|
|
219
|
+
|
|
220
|
+
### View Tapeout Report
|
|
221
|
+
|
|
222
|
+
```bash
|
|
223
|
+
cf view-tapeout-report [--project-name NAME] [--report-path PATH]
|
|
224
|
+
```
|
|
225
|
+
|
|
226
|
+
- Opens the consolidated tapeout report in your default browser
|
|
227
|
+
- **Auto-detects project name** from `.cf/project.json` if available
|
|
228
|
+
- Looks for report at `sftp-output/<project_name>/consolidated_reports/consolidated_report.html`
|
|
229
|
+
- **Options:**
|
|
230
|
+
- `--project-name`: Specify project name manually
|
|
231
|
+
- `--report-path`: Provide direct path to HTML report file
|
|
192
232
|
|
|
193
233
|
### Check Status
|
|
194
234
|
|
|
@@ -217,6 +257,7 @@ cf status
|
|
|
217
257
|
1. **File Collection:**
|
|
218
258
|
- Checks for required GDS and Verilog files
|
|
219
259
|
- Auto-detects project type (digital, analog, openframe) based on GDS file name
|
|
260
|
+
- **Supports both compressed and uncompressed GDS files**
|
|
220
261
|
|
|
221
262
|
2. **Configuration:**
|
|
222
263
|
- Creates or updates `.cf/project.json`
|
|
@@ -233,6 +274,26 @@ cf status
|
|
|
233
274
|
|
|
234
275
|
---
|
|
235
276
|
|
|
277
|
+
## What Happens When You Run `cf pull`?
|
|
278
|
+
|
|
279
|
+
1. **Connection:**
|
|
280
|
+
- Connects to SFTP server securely
|
|
281
|
+
- Shows clean connection status
|
|
282
|
+
|
|
283
|
+
2. **Download:**
|
|
284
|
+
- Downloads all project results recursively
|
|
285
|
+
- Shows professional download progress
|
|
286
|
+
- Saves to `sftp-output/<project_name>/`
|
|
287
|
+
|
|
288
|
+
3. **Config Update:**
|
|
289
|
+
- **Automatically updates** your local `.cf/project.json` with the pulled version
|
|
290
|
+
- No manual steps required
|
|
291
|
+
|
|
292
|
+
4. **Success:**
|
|
293
|
+
- Shows confirmation of downloaded files and updated config
|
|
294
|
+
|
|
295
|
+
---
|
|
296
|
+
|
|
236
297
|
## Examples
|
|
237
298
|
|
|
238
299
|
### Basic Workflow
|
|
@@ -258,6 +319,17 @@ cf push
|
|
|
258
319
|
# Uploading project.json ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100%
|
|
259
320
|
# Uploading user_project_wrapper.gds ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100%
|
|
260
321
|
# ✓ Uploaded to incoming/projects/my_awesome_project
|
|
322
|
+
|
|
323
|
+
# Later, download results
|
|
324
|
+
cf pull
|
|
325
|
+
# ✓ Connected to sftp.chipfoundry.io
|
|
326
|
+
# Downloading project results from outgoing/results/my_awesome_project...
|
|
327
|
+
# ✓ All files downloaded to sftp-output/my_awesome_project
|
|
328
|
+
# ✓ Project config automatically updated
|
|
329
|
+
|
|
330
|
+
# View the tapeout report
|
|
331
|
+
cf view-tapeout-report
|
|
332
|
+
# ✓ Opened tapeout report in browser: sftp-output/my_awesome_project/consolidated_reports/consolidated_report.html
|
|
261
333
|
```
|
|
262
334
|
|
|
263
335
|
### Advanced Usage
|
|
@@ -272,10 +344,33 @@ cf push --force-overwrite
|
|
|
272
344
|
# Use different project root
|
|
273
345
|
cf push --project-root /path/to/project
|
|
274
346
|
|
|
347
|
+
# Pull results for specific project
|
|
348
|
+
cf pull --project-name other_project
|
|
349
|
+
|
|
350
|
+
# View report for specific project
|
|
351
|
+
cf view-tapeout-report --project-name other_project
|
|
352
|
+
|
|
353
|
+
# View custom report file
|
|
354
|
+
cf view-tapeout-report --report-path /path/to/custom_report.html
|
|
355
|
+
|
|
275
356
|
# Check project status
|
|
276
357
|
cf status
|
|
277
358
|
```
|
|
278
359
|
|
|
360
|
+
### GDS File Examples
|
|
361
|
+
|
|
362
|
+
```bash
|
|
363
|
+
# Uncompressed GDS file (preferred)
|
|
364
|
+
gds/user_project_wrapper.gds
|
|
365
|
+
|
|
366
|
+
# Compressed GDS file (also supported)
|
|
367
|
+
gds/user_project_wrapper.gds.gz
|
|
368
|
+
|
|
369
|
+
# ❌ INVALID: Both files exist - this will cause an error
|
|
370
|
+
gds/user_project_wrapper.gds # ← Choose ONE version only
|
|
371
|
+
gds/user_project_wrapper.gds.gz # ← Remove this one
|
|
372
|
+
```
|
|
373
|
+
|
|
279
374
|
---
|
|
280
375
|
|
|
281
376
|
## Troubleshooting
|
|
@@ -294,6 +389,13 @@ cf status
|
|
|
294
389
|
|
|
295
390
|
- **Project type detection:**
|
|
296
391
|
- Only one of the recognized GDS files should be present in your `gds/` directory
|
|
392
|
+
- Both compressed and uncompressed versions of the same type are supported
|
|
393
|
+
- **Important**: You cannot have both compressed (`.gz`) and uncompressed (`.gds`) versions of the same file - the tool will error out and ask you to remove one
|
|
394
|
+
|
|
395
|
+
- **Report viewing errors:**
|
|
396
|
+
- Ensure you've run `cf pull` first to download the report
|
|
397
|
+
- Check that the report exists at the expected location
|
|
398
|
+
- Use `--report-path` to specify a custom report location
|
|
297
399
|
|
|
298
400
|
- **ModuleNotFoundError:**
|
|
299
401
|
- Upgrade the CLI: `pip install --upgrade chipfoundry-cli`
|
|
@@ -3,13 +3,13 @@
|
|
|
3
3
|
[](https://badge.fury.io/py/chipfoundry-cli)
|
|
4
4
|
[](https://pypi.org/project/chipfoundry-cli/)
|
|
5
5
|
|
|
6
|
-
A command-line tool to automate the submission of ChipFoundry projects to the SFTP server.
|
|
6
|
+
A command-line tool to automate the submission of ChipFoundry projects to the SFTP server and manage project results.
|
|
7
7
|
|
|
8
8
|
---
|
|
9
9
|
|
|
10
10
|
## Overview
|
|
11
11
|
|
|
12
|
-
`cf-cli` is a user-friendly command-line tool for securely submitting your ChipFoundry project files to the official SFTP server. It automatically collects the required files, generates or updates your project configuration,
|
|
12
|
+
`cf-cli` is a user-friendly command-line tool for securely submitting your ChipFoundry project files to the official SFTP server and downloading project results. It automatically collects the required files, generates or updates your project configuration, uploads everything to the correct location on the server, and provides tools to view project results and reports.
|
|
13
13
|
|
|
14
14
|
---
|
|
15
15
|
|
|
@@ -48,6 +48,16 @@ cf --help
|
|
|
48
48
|
cf push
|
|
49
49
|
```
|
|
50
50
|
|
|
51
|
+
6. **Download results** (when available):
|
|
52
|
+
```bash
|
|
53
|
+
cf pull
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
7. **View tapeout report**:
|
|
57
|
+
```bash
|
|
58
|
+
cf view-tapeout-report
|
|
59
|
+
```
|
|
60
|
+
|
|
51
61
|
---
|
|
52
62
|
|
|
53
63
|
## Project Structure Requirements
|
|
@@ -58,6 +68,7 @@ Your project directory **must** contain:
|
|
|
58
68
|
- `user_project_wrapper.gds` (for digital projects)
|
|
59
69
|
- `user_analog_project_wrapper.gds` (for analog projects)
|
|
60
70
|
- `openframe_project_wrapper.gds` (for openframe projects)
|
|
71
|
+
- **Note**: Both compressed (`.gz`) and uncompressed (`.gds`) files are supported
|
|
61
72
|
- `verilog/rtl/user_defines.v` (required for digital/analog)
|
|
62
73
|
- `.cf/project.json` (optional; will be created/updated automatically)
|
|
63
74
|
|
|
@@ -156,15 +167,44 @@ cf push [OPTIONS]
|
|
|
156
167
|
4. Uploads files to SFTP with progress bars
|
|
157
168
|
5. Shows clean, informative output
|
|
158
169
|
|
|
159
|
-
|
|
170
|
+
**GDS File Handling:**
|
|
171
|
+
- **Both compressed (`.gz`) and uncompressed (`.gds`) files are supported**
|
|
172
|
+
- **No automatic compression** - files are uploaded as-is
|
|
173
|
+
- **Only one version allowed** - you cannot have both compressed and uncompressed versions of the same file
|
|
174
|
+
- **Prefers uncompressed files** when available
|
|
175
|
+
- **Falls back to compressed files** if no uncompressed version is available
|
|
176
|
+
|
|
177
|
+
### Pull Results (Download)
|
|
160
178
|
|
|
161
179
|
```bash
|
|
162
180
|
cf pull [--project-name NAME]
|
|
163
181
|
```
|
|
164
182
|
|
|
165
|
-
- Downloads project results from SFTP
|
|
183
|
+
- Downloads project results from SFTP server
|
|
166
184
|
- Saves to `sftp-output/<project_name>/`
|
|
167
|
-
-
|
|
185
|
+
- **Automatically updates** your local `.cf/project.json` with the pulled version
|
|
186
|
+
- Creates the expected directory structure:
|
|
187
|
+
```
|
|
188
|
+
sftp-output/
|
|
189
|
+
└── <project_name>/
|
|
190
|
+
├── config/
|
|
191
|
+
│ └── project.json
|
|
192
|
+
└── consolidated_reports/
|
|
193
|
+
└── consolidated_report.html
|
|
194
|
+
```
|
|
195
|
+
|
|
196
|
+
### View Tapeout Report
|
|
197
|
+
|
|
198
|
+
```bash
|
|
199
|
+
cf view-tapeout-report [--project-name NAME] [--report-path PATH]
|
|
200
|
+
```
|
|
201
|
+
|
|
202
|
+
- Opens the consolidated tapeout report in your default browser
|
|
203
|
+
- **Auto-detects project name** from `.cf/project.json` if available
|
|
204
|
+
- Looks for report at `sftp-output/<project_name>/consolidated_reports/consolidated_report.html`
|
|
205
|
+
- **Options:**
|
|
206
|
+
- `--project-name`: Specify project name manually
|
|
207
|
+
- `--report-path`: Provide direct path to HTML report file
|
|
168
208
|
|
|
169
209
|
### Check Status
|
|
170
210
|
|
|
@@ -193,6 +233,7 @@ cf status
|
|
|
193
233
|
1. **File Collection:**
|
|
194
234
|
- Checks for required GDS and Verilog files
|
|
195
235
|
- Auto-detects project type (digital, analog, openframe) based on GDS file name
|
|
236
|
+
- **Supports both compressed and uncompressed GDS files**
|
|
196
237
|
|
|
197
238
|
2. **Configuration:**
|
|
198
239
|
- Creates or updates `.cf/project.json`
|
|
@@ -209,6 +250,26 @@ cf status
|
|
|
209
250
|
|
|
210
251
|
---
|
|
211
252
|
|
|
253
|
+
## What Happens When You Run `cf pull`?
|
|
254
|
+
|
|
255
|
+
1. **Connection:**
|
|
256
|
+
- Connects to SFTP server securely
|
|
257
|
+
- Shows clean connection status
|
|
258
|
+
|
|
259
|
+
2. **Download:**
|
|
260
|
+
- Downloads all project results recursively
|
|
261
|
+
- Shows professional download progress
|
|
262
|
+
- Saves to `sftp-output/<project_name>/`
|
|
263
|
+
|
|
264
|
+
3. **Config Update:**
|
|
265
|
+
- **Automatically updates** your local `.cf/project.json` with the pulled version
|
|
266
|
+
- No manual steps required
|
|
267
|
+
|
|
268
|
+
4. **Success:**
|
|
269
|
+
- Shows confirmation of downloaded files and updated config
|
|
270
|
+
|
|
271
|
+
---
|
|
272
|
+
|
|
212
273
|
## Examples
|
|
213
274
|
|
|
214
275
|
### Basic Workflow
|
|
@@ -234,6 +295,17 @@ cf push
|
|
|
234
295
|
# Uploading project.json ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100%
|
|
235
296
|
# Uploading user_project_wrapper.gds ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100%
|
|
236
297
|
# ✓ Uploaded to incoming/projects/my_awesome_project
|
|
298
|
+
|
|
299
|
+
# Later, download results
|
|
300
|
+
cf pull
|
|
301
|
+
# ✓ Connected to sftp.chipfoundry.io
|
|
302
|
+
# Downloading project results from outgoing/results/my_awesome_project...
|
|
303
|
+
# ✓ All files downloaded to sftp-output/my_awesome_project
|
|
304
|
+
# ✓ Project config automatically updated
|
|
305
|
+
|
|
306
|
+
# View the tapeout report
|
|
307
|
+
cf view-tapeout-report
|
|
308
|
+
# ✓ Opened tapeout report in browser: sftp-output/my_awesome_project/consolidated_reports/consolidated_report.html
|
|
237
309
|
```
|
|
238
310
|
|
|
239
311
|
### Advanced Usage
|
|
@@ -248,10 +320,33 @@ cf push --force-overwrite
|
|
|
248
320
|
# Use different project root
|
|
249
321
|
cf push --project-root /path/to/project
|
|
250
322
|
|
|
323
|
+
# Pull results for specific project
|
|
324
|
+
cf pull --project-name other_project
|
|
325
|
+
|
|
326
|
+
# View report for specific project
|
|
327
|
+
cf view-tapeout-report --project-name other_project
|
|
328
|
+
|
|
329
|
+
# View custom report file
|
|
330
|
+
cf view-tapeout-report --report-path /path/to/custom_report.html
|
|
331
|
+
|
|
251
332
|
# Check project status
|
|
252
333
|
cf status
|
|
253
334
|
```
|
|
254
335
|
|
|
336
|
+
### GDS File Examples
|
|
337
|
+
|
|
338
|
+
```bash
|
|
339
|
+
# Uncompressed GDS file (preferred)
|
|
340
|
+
gds/user_project_wrapper.gds
|
|
341
|
+
|
|
342
|
+
# Compressed GDS file (also supported)
|
|
343
|
+
gds/user_project_wrapper.gds.gz
|
|
344
|
+
|
|
345
|
+
# ❌ INVALID: Both files exist - this will cause an error
|
|
346
|
+
gds/user_project_wrapper.gds # ← Choose ONE version only
|
|
347
|
+
gds/user_project_wrapper.gds.gz # ← Remove this one
|
|
348
|
+
```
|
|
349
|
+
|
|
255
350
|
---
|
|
256
351
|
|
|
257
352
|
## Troubleshooting
|
|
@@ -270,6 +365,13 @@ cf status
|
|
|
270
365
|
|
|
271
366
|
- **Project type detection:**
|
|
272
367
|
- Only one of the recognized GDS files should be present in your `gds/` directory
|
|
368
|
+
- Both compressed and uncompressed versions of the same type are supported
|
|
369
|
+
- **Important**: You cannot have both compressed (`.gz`) and uncompressed (`.gds`) versions of the same file - the tool will error out and ask you to remove one
|
|
370
|
+
|
|
371
|
+
- **Report viewing errors:**
|
|
372
|
+
- Ensure you've run `cf pull` first to download the report
|
|
373
|
+
- Check that the report exists at the expected location
|
|
374
|
+
- Use `--report-path` to specify a custom report location
|
|
273
375
|
|
|
274
376
|
- **ModuleNotFoundError:**
|
|
275
377
|
- Upgrade the CLI: `pip install --upgrade chipfoundry-cli`
|
|
@@ -2,8 +2,9 @@ import click
|
|
|
2
2
|
import getpass
|
|
3
3
|
from chipfoundry_cli.utils import (
|
|
4
4
|
collect_project_files, ensure_cf_directory, update_or_create_project_json,
|
|
5
|
-
sftp_connect, upload_with_progress, sftp_ensure_dirs,
|
|
6
|
-
get_config_path, load_user_config, save_user_config, GDS_TYPE_MAP
|
|
5
|
+
sftp_connect, upload_with_progress, sftp_ensure_dirs, sftp_download_recursive,
|
|
6
|
+
get_config_path, load_user_config, save_user_config, GDS_TYPE_MAP,
|
|
7
|
+
open_html_in_browser, download_with_progress
|
|
7
8
|
)
|
|
8
9
|
import os
|
|
9
10
|
from pathlib import Path
|
|
@@ -268,6 +269,10 @@ def push(project_root, sftp_host, sftp_username, sftp_key, project_id, project_n
|
|
|
268
269
|
if candidate.exists():
|
|
269
270
|
found_types.append(gds_type)
|
|
270
271
|
gds_file_path = str(candidate)
|
|
272
|
+
|
|
273
|
+
# Remove duplicates (compressed and uncompressed files of same type)
|
|
274
|
+
found_types = list(set(found_types))
|
|
275
|
+
|
|
271
276
|
if project_type:
|
|
272
277
|
detected_type = project_type
|
|
273
278
|
else:
|
|
@@ -351,6 +356,7 @@ def push(project_root, sftp_host, sftp_username, sftp_key, project_id, project_n
|
|
|
351
356
|
force_overwrite=force_overwrite
|
|
352
357
|
)
|
|
353
358
|
console.print(f"[green]✓ Uploaded to {sftp_base}[/green]")
|
|
359
|
+
|
|
354
360
|
except Exception as e:
|
|
355
361
|
console.print(f"[red]Upload failed: {e}[/red]")
|
|
356
362
|
raise click.Abort()
|
|
@@ -374,11 +380,13 @@ def pull(project_name, output_dir, sftp_host, sftp_username, sftp_key):
|
|
|
374
380
|
if not project_name:
|
|
375
381
|
console.print("[bold red]No project name specified and no .cf/project.json found in current directory. Please provide --project-name.[/bold red]")
|
|
376
382
|
raise click.Abort()
|
|
383
|
+
|
|
384
|
+
# Load user config for defaults
|
|
377
385
|
config = load_user_config()
|
|
378
386
|
if not sftp_username:
|
|
379
387
|
sftp_username = config.get("sftp_username")
|
|
380
388
|
if not sftp_username:
|
|
381
|
-
console.print("[bold red]No SFTP username provided and not found in config. Please run '
|
|
389
|
+
console.print("[bold red]No SFTP username provided and not found in config. Please run 'cf config' or provide --sftp-username.[/bold red]")
|
|
382
390
|
raise click.Abort()
|
|
383
391
|
if not sftp_key:
|
|
384
392
|
sftp_key = config.get("sftp_key")
|
|
@@ -394,7 +402,8 @@ def pull(project_name, output_dir, sftp_host, sftp_username, sftp_key):
|
|
|
394
402
|
console.print("[yellow]Please run 'cf keygen' to generate a key or 'cf config' to set a custom key path.[/yellow]")
|
|
395
403
|
raise click.Abort()
|
|
396
404
|
|
|
397
|
-
|
|
405
|
+
# Connect to SFTP
|
|
406
|
+
console.print(f"[cyan]Connecting to {sftp_host}...[/cyan]")
|
|
398
407
|
transport = None
|
|
399
408
|
try:
|
|
400
409
|
sftp, transport = sftp_connect(
|
|
@@ -402,48 +411,57 @@ def pull(project_name, output_dir, sftp_host, sftp_username, sftp_key):
|
|
|
402
411
|
username=sftp_username,
|
|
403
412
|
key_path=key_path
|
|
404
413
|
)
|
|
414
|
+
console.print(f"[green]✓ Connected to {sftp_host}[/green]")
|
|
405
415
|
except Exception as e:
|
|
406
416
|
console.print(f"[red]Failed to connect to SFTP: {e}[/red]")
|
|
407
417
|
raise click.Abort()
|
|
418
|
+
|
|
408
419
|
try:
|
|
409
420
|
remote_dir = f"outgoing/results/{project_name}"
|
|
410
421
|
output_dir = os.path.join(os.getcwd(), "sftp-output", project_name)
|
|
411
|
-
|
|
422
|
+
|
|
423
|
+
# Check if remote directory exists
|
|
412
424
|
try:
|
|
413
|
-
|
|
425
|
+
sftp.stat(remote_dir)
|
|
414
426
|
except Exception:
|
|
415
427
|
console.print(f"[yellow]No results found for project '{project_name}' on SFTP server.[/yellow]")
|
|
416
428
|
return
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
|
|
423
|
-
|
|
424
|
-
|
|
425
|
-
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
|
|
430
|
-
|
|
431
|
-
|
|
429
|
+
|
|
430
|
+
# Create output directory
|
|
431
|
+
os.makedirs(output_dir, exist_ok=True)
|
|
432
|
+
|
|
433
|
+
# Download with progress tracking
|
|
434
|
+
console.print(f"[bold cyan]Downloading project results from {remote_dir}...[/bold cyan]")
|
|
435
|
+
|
|
436
|
+
try:
|
|
437
|
+
# Use recursive download function with console for clean logging
|
|
438
|
+
sftp_download_recursive(sftp, remote_dir, output_dir, console=console)
|
|
439
|
+
console.print(f"[green]✓ All files downloaded to {output_dir}[/green]")
|
|
440
|
+
|
|
441
|
+
# Automatically update local project config if available
|
|
442
|
+
pulled_config_path = os.path.join(output_dir, "config", "project.json")
|
|
443
|
+
if os.path.exists(pulled_config_path):
|
|
444
|
+
local_config_path = os.path.join(".cf", "project.json")
|
|
445
|
+
os.makedirs(".cf", exist_ok=True)
|
|
446
|
+
|
|
432
447
|
try:
|
|
433
|
-
|
|
434
|
-
|
|
435
|
-
|
|
436
|
-
def callback(bytes_transferred, total=file_size):
|
|
437
|
-
progress.update(task, completed=bytes_transferred)
|
|
438
|
-
sftp.getfo(remote_path, f, callback=callback)
|
|
439
|
-
progress.update(task, completed=file_size)
|
|
448
|
+
import shutil
|
|
449
|
+
shutil.copy2(pulled_config_path, local_config_path)
|
|
450
|
+
console.print(f"[green]✓ Project config automatically updated[/green]")
|
|
440
451
|
except Exception as e:
|
|
441
|
-
console.print(f"[
|
|
442
|
-
|
|
452
|
+
console.print(f"[yellow]Warning: Failed to update project config: {e}[/yellow]")
|
|
453
|
+
else:
|
|
454
|
+
console.print(f"[dim]Note: No project config found in pulled results[/dim]")
|
|
455
|
+
|
|
456
|
+
except Exception as e:
|
|
457
|
+
console.print(f"[red]Failed to download project results: {e}[/red]")
|
|
458
|
+
raise click.Abort()
|
|
459
|
+
|
|
443
460
|
finally:
|
|
444
461
|
if transport:
|
|
445
462
|
sftp.close()
|
|
446
463
|
transport.close()
|
|
464
|
+
console.print(f"[dim]Disconnected from {sftp_host}[/dim]")
|
|
447
465
|
|
|
448
466
|
@main.command('status')
|
|
449
467
|
@click.option('--sftp-host', default=DEFAULT_SFTP_HOST, show_default=True, help='SFTP server hostname.')
|
|
@@ -483,11 +501,15 @@ def status(sftp_host, sftp_username, sftp_key):
|
|
|
483
501
|
console.print(f"[red]Failed to connect to SFTP: {e}[/red]")
|
|
484
502
|
raise click.Abort()
|
|
485
503
|
try:
|
|
486
|
-
# List projects in incoming/projects/ and
|
|
504
|
+
# List projects in incoming/projects/, outgoing/results/, and archive/
|
|
487
505
|
incoming_projects_dir = f"incoming/projects"
|
|
488
506
|
outgoing_results_dir = f"outgoing/results"
|
|
507
|
+
archive_dir = f"archive"
|
|
508
|
+
|
|
489
509
|
projects = []
|
|
490
510
|
results = []
|
|
511
|
+
archived_projects = []
|
|
512
|
+
|
|
491
513
|
try:
|
|
492
514
|
projects = sftp.listdir(incoming_projects_dir)
|
|
493
515
|
except Exception:
|
|
@@ -496,23 +518,257 @@ def status(sftp_host, sftp_username, sftp_key):
|
|
|
496
518
|
results = sftp.listdir(outgoing_results_dir)
|
|
497
519
|
except Exception:
|
|
498
520
|
pass
|
|
521
|
+
try:
|
|
522
|
+
archived_items = sftp.listdir(archive_dir)
|
|
523
|
+
# Filter for project directories and parse timestamps
|
|
524
|
+
for item in archived_items:
|
|
525
|
+
if '_' in item and len(item.split('_')) >= 3:
|
|
526
|
+
# Try to parse timestamp from format like "serial_example_20250813_150354"
|
|
527
|
+
parts = item.split('_')
|
|
528
|
+
if len(parts) >= 3:
|
|
529
|
+
# Check if the last two parts look like date and time
|
|
530
|
+
date_part = parts[-2]
|
|
531
|
+
time_part = parts[-1]
|
|
532
|
+
if len(date_part) == 8 and len(time_part) == 6 and date_part.isdigit() and time_part.isdigit():
|
|
533
|
+
# This looks like a timestamped archive
|
|
534
|
+
project_name = '_'.join(parts[:-2]) # Everything except date and time
|
|
535
|
+
timestamp_str = f"{date_part}_{time_part}"
|
|
536
|
+
archived_projects.append((project_name, timestamp_str, item))
|
|
537
|
+
except Exception:
|
|
538
|
+
pass
|
|
539
|
+
|
|
540
|
+
# Create main status table
|
|
499
541
|
table = Table(title=f"SFTP Status for {sftp_username}")
|
|
500
542
|
table.add_column("Project Name", style="cyan", no_wrap=True)
|
|
501
543
|
table.add_column("Has Input", style="yellow")
|
|
502
544
|
table.add_column("Has Output", style="green")
|
|
503
|
-
|
|
504
|
-
|
|
505
|
-
|
|
506
|
-
|
|
507
|
-
|
|
508
|
-
|
|
545
|
+
table.add_column("Last Tapeout Run", style="blue")
|
|
546
|
+
|
|
547
|
+
# Find the most recent archived project (latest tapeout)
|
|
548
|
+
latest_tapeout = None
|
|
549
|
+
if archived_projects:
|
|
550
|
+
# Sort by timestamp to find the most recent
|
|
551
|
+
archived_projects.sort(key=lambda x: x[1], reverse=True) # Sort by timestamp descending
|
|
552
|
+
latest_tapeout = archived_projects[0]
|
|
553
|
+
|
|
554
|
+
# Parse timestamp to human-readable format
|
|
555
|
+
try:
|
|
556
|
+
# timestamp format is "20250813_150354"
|
|
557
|
+
date_part, time_part = latest_tapeout[1].split('_')
|
|
558
|
+
year = date_part[:4]
|
|
559
|
+
month = date_part[4:6]
|
|
560
|
+
day = date_part[6:8]
|
|
561
|
+
hour = time_part[:2]
|
|
562
|
+
minute = time_part[2:4]
|
|
563
|
+
second = time_part[4:6]
|
|
564
|
+
|
|
565
|
+
formatted_time = f"{year}-{month}-{day} {hour}:{minute}:{second}"
|
|
566
|
+
except:
|
|
567
|
+
formatted_time = latest_tapeout[1]
|
|
568
|
+
|
|
569
|
+
# Show only the latest tapeout run
|
|
570
|
+
# Check if this project has input and output files
|
|
571
|
+
has_input = "Yes" if latest_tapeout[0] in projects else "No"
|
|
572
|
+
has_output = "Yes" if latest_tapeout[0] in results else "No"
|
|
573
|
+
table.add_row(latest_tapeout[0], has_input, has_output, formatted_time)
|
|
574
|
+
else:
|
|
575
|
+
# No tapeout runs yet, show active projects with their status
|
|
576
|
+
all_projects = set(projects) | set(results)
|
|
577
|
+
for proj in sorted(all_projects):
|
|
578
|
+
has_input = "Yes" if proj in projects else "No"
|
|
579
|
+
has_output = "Yes" if proj in results else "No"
|
|
580
|
+
last_tapeout = "No tapeout yet"
|
|
581
|
+
table.add_row(proj, has_input, has_output, last_tapeout)
|
|
582
|
+
|
|
583
|
+
if table.row_count > 0:
|
|
509
584
|
console.print(table)
|
|
510
585
|
else:
|
|
511
586
|
console.print("[yellow]No projects or results found on SFTP server.[/yellow]")
|
|
587
|
+
|
|
588
|
+
# Add informative message about tapeout status
|
|
589
|
+
if not archived_projects and all_projects:
|
|
590
|
+
console.print("\n[cyan]Note: No tapeout runs have started yet. Your projects are waiting in the queue.[/cyan]")
|
|
591
|
+
elif not archived_projects and not all_projects:
|
|
592
|
+
console.print("\n[cyan]Note: No projects found and no tapeout runs have started yet.[/cyan]")
|
|
512
593
|
finally:
|
|
513
594
|
if transport:
|
|
514
595
|
sftp.close()
|
|
515
596
|
transport.close()
|
|
516
597
|
|
|
598
|
+
@main.command('tapeout-history')
|
|
599
|
+
@click.option('--sftp-host', default=DEFAULT_SFTP_HOST, show_default=True, help='SFTP server hostname.')
|
|
600
|
+
@click.option('--sftp-username', required=False, help='SFTP username (defaults to config).')
|
|
601
|
+
@click.option('--sftp-key', type=click.Path(exists=True, dir_okay=False), help='Path to SFTP private key file (defaults to config).', default=None, show_default=False)
|
|
602
|
+
@click.option('--limit', default=50, help='Maximum number of tapeouts to show (default: 50)')
|
|
603
|
+
@click.option('--days', default=None, help='Show tapeouts from last N days only')
|
|
604
|
+
def tapeouts(sftp_host, sftp_username, sftp_key, limit, days):
|
|
605
|
+
"""Show all tapeout runs (archived projects) with their timestamps."""
|
|
606
|
+
config = load_user_config()
|
|
607
|
+
if not sftp_username:
|
|
608
|
+
sftp_username = config.get("sftp_username")
|
|
609
|
+
if not sftp_username:
|
|
610
|
+
console.print("[red]No SFTP username provided and not found in config. Please run 'cf config' or provide --sftp-username.[/red]")
|
|
611
|
+
raise click.Abort()
|
|
612
|
+
if not sftp_key:
|
|
613
|
+
sftp_key = config.get("sftp_key")
|
|
614
|
+
|
|
615
|
+
# Always resolve key_path to absolute path if set
|
|
616
|
+
if sftp_key:
|
|
617
|
+
key_path = os.path.abspath(os.path.expanduser(sftp_key))
|
|
618
|
+
else:
|
|
619
|
+
key_path = DEFAULT_SSH_KEY
|
|
620
|
+
|
|
621
|
+
if not os.path.exists(key_path):
|
|
622
|
+
console.print(f"[red]SFTP key file not found: {key_path}[/red]")
|
|
623
|
+
console.print("[yellow]Please run 'cf keygen' to generate a key or 'cf config' to set a custom key path.[/yellow]")
|
|
624
|
+
raise click.Abort()
|
|
625
|
+
|
|
626
|
+
console.print(f"Connecting to {sftp_host}...")
|
|
627
|
+
transport = None
|
|
628
|
+
try:
|
|
629
|
+
sftp, transport = sftp_connect(
|
|
630
|
+
host=sftp_host,
|
|
631
|
+
username=sftp_username,
|
|
632
|
+
key_path=key_path
|
|
633
|
+
)
|
|
634
|
+
except Exception as e:
|
|
635
|
+
console.print(f"[red]Failed to connect to SFTP: {e}[/red]")
|
|
636
|
+
raise click.Abort()
|
|
637
|
+
|
|
638
|
+
try:
|
|
639
|
+
# List archived projects
|
|
640
|
+
archive_dir = f"archive"
|
|
641
|
+
archived_projects = []
|
|
642
|
+
|
|
643
|
+
try:
|
|
644
|
+
archived_items = sftp.listdir(archive_dir)
|
|
645
|
+
# Filter for project directories and parse timestamps
|
|
646
|
+
for item in archived_items:
|
|
647
|
+
if '_' in item and len(item.split('_')) >= 3:
|
|
648
|
+
# Try to parse timestamp from format like "serial_example_20250813_150354"
|
|
649
|
+
parts = item.split('_')
|
|
650
|
+
if len(parts) >= 3:
|
|
651
|
+
# Check if the last two parts look like date and time
|
|
652
|
+
date_part = parts[-2]
|
|
653
|
+
time_part = parts[-1]
|
|
654
|
+
if len(date_part) == 8 and len(time_part) == 6 and date_part.isdigit() and time_part.isdigit():
|
|
655
|
+
# This looks like a timestamped archive
|
|
656
|
+
project_name = '_'.join(parts[:-2]) # Everything except date and time
|
|
657
|
+
timestamp_str = f"{date_part}_{time_part}"
|
|
658
|
+
archived_projects.append((project_name, timestamp_str, item))
|
|
659
|
+
except Exception as e:
|
|
660
|
+
console.print(f"[yellow]Could not access archive directory: {e}[/yellow]")
|
|
661
|
+
return
|
|
662
|
+
|
|
663
|
+
if not archived_projects:
|
|
664
|
+
console.print("[yellow]No tapeout runs found in archive.[/yellow]")
|
|
665
|
+
return
|
|
666
|
+
|
|
667
|
+
# Sort by timestamp (most recent first)
|
|
668
|
+
archived_projects.sort(key=lambda x: x[1], reverse=True)
|
|
669
|
+
|
|
670
|
+
# Apply day filter if specified
|
|
671
|
+
if days:
|
|
672
|
+
from datetime import datetime, timedelta
|
|
673
|
+
cutoff_date = datetime.now() - timedelta(days=days)
|
|
674
|
+
filtered_projects = []
|
|
675
|
+
for proj_name, timestamp, archive_path in archived_projects:
|
|
676
|
+
try:
|
|
677
|
+
date_part, time_part = timestamp.split('_')
|
|
678
|
+
year = int(date_part[:4])
|
|
679
|
+
month = int(date_part[4:6])
|
|
680
|
+
day = int(date_part[6:8])
|
|
681
|
+
hour = int(time_part[:2])
|
|
682
|
+
minute = int(time_part[2:4])
|
|
683
|
+
second = int(time_part[4:6])
|
|
684
|
+
|
|
685
|
+
archive_datetime = datetime(year, month, day, hour, minute, second)
|
|
686
|
+
if archive_datetime >= cutoff_date:
|
|
687
|
+
filtered_projects.append((proj_name, timestamp, archive_path))
|
|
688
|
+
except:
|
|
689
|
+
# If parsing fails, include it anyway
|
|
690
|
+
filtered_projects.append((proj_name, timestamp, archive_path))
|
|
691
|
+
|
|
692
|
+
archived_projects = filtered_projects
|
|
693
|
+
if archived_projects:
|
|
694
|
+
console.print(f"[cyan]Showing tapeouts from last {days} days[/cyan]")
|
|
695
|
+
|
|
696
|
+
# Apply limit
|
|
697
|
+
if len(archived_projects) > limit:
|
|
698
|
+
console.print(f"[cyan]Showing {limit} most recent tapeouts (use --limit to see more)[/cyan]")
|
|
699
|
+
archived_projects = archived_projects[:limit]
|
|
700
|
+
|
|
701
|
+
# Create tapeout history table
|
|
702
|
+
table = Table(title=f"Tapeout History for {sftp_username}")
|
|
703
|
+
table.add_column("Project Name", style="cyan", no_wrap=True)
|
|
704
|
+
table.add_column("Tapeout Started", style="green")
|
|
705
|
+
|
|
706
|
+
for proj_name, timestamp, archive_path in archived_projects:
|
|
707
|
+
# Parse timestamp to human-readable format
|
|
708
|
+
try:
|
|
709
|
+
# timestamp format is "20250813_150354"
|
|
710
|
+
date_part, time_part = timestamp.split('_')
|
|
711
|
+
year = date_part[:4]
|
|
712
|
+
month = date_part[4:6]
|
|
713
|
+
day = date_part[6:8]
|
|
714
|
+
hour = time_part[:2]
|
|
715
|
+
minute = time_part[2:4]
|
|
716
|
+
second = time_part[4:6]
|
|
717
|
+
|
|
718
|
+
formatted_time = f"{year}-{month}-{day} {hour}:{minute}:{second}"
|
|
719
|
+
except:
|
|
720
|
+
formatted_time = timestamp
|
|
721
|
+
|
|
722
|
+
table.add_row(proj_name, formatted_time)
|
|
723
|
+
|
|
724
|
+
console.print(table)
|
|
725
|
+
|
|
726
|
+
# Show summary
|
|
727
|
+
total_archived = len(archived_projects)
|
|
728
|
+
if total_archived > 0:
|
|
729
|
+
console.print(f"\n[cyan]Total tapeouts shown: {total_archived}[/cyan]")
|
|
730
|
+
|
|
731
|
+
finally:
|
|
732
|
+
if transport:
|
|
733
|
+
sftp.close()
|
|
734
|
+
transport.close()
|
|
735
|
+
|
|
736
|
+
@main.command("view-tapeout-report")
|
|
737
|
+
@click.option("--project-name", required=False, help="Project name to view tapeout report for (defaults to value in .cf/project.json if present).")
|
|
738
|
+
@click.option("--report-path", type=click.Path(exists=True, file_okay=True, dir_okay=False), help="Direct path to the HTML report file.")
|
|
739
|
+
def view_tapeout_report(project_name, report_path):
|
|
740
|
+
"""View the consolidated tapeout report from the pulled sftp-output directory."""
|
|
741
|
+
if report_path:
|
|
742
|
+
# Use the directly specified report path
|
|
743
|
+
html_path = report_path
|
|
744
|
+
else:
|
|
745
|
+
# Try to find the report based on project name
|
|
746
|
+
if not project_name:
|
|
747
|
+
# Try to get project name from .cf/project.json
|
|
748
|
+
_, cwd_project_name = get_project_json_from_cwd()
|
|
749
|
+
if cwd_project_name:
|
|
750
|
+
project_name = cwd_project_name
|
|
751
|
+
else:
|
|
752
|
+
console.print("[bold red]No project name specified and no .cf/project.json found in current directory. Please provide --project-name or --report-path.[/bold red]")
|
|
753
|
+
raise click.Abort()
|
|
754
|
+
|
|
755
|
+
# Look for the consolidated report in the expected location
|
|
756
|
+
expected_report_path = os.path.join("sftp-output", project_name, "consolidated_reports", "consolidated_report.html")
|
|
757
|
+
|
|
758
|
+
if not os.path.exists(expected_report_path):
|
|
759
|
+
console.print(f"[yellow]Tapeout report not found at expected location: {expected_report_path}[/yellow]")
|
|
760
|
+
console.print(f"[cyan]Try running 'cf pull --project-name {project_name}' first to download the report.[/cyan]")
|
|
761
|
+
raise click.Abort()
|
|
762
|
+
|
|
763
|
+
html_path = expected_report_path
|
|
764
|
+
|
|
765
|
+
# Open the HTML report in the default browser
|
|
766
|
+
try:
|
|
767
|
+
open_html_in_browser(html_path)
|
|
768
|
+
console.print(f"[green]Opened tapeout report in browser: {html_path}[/green]")
|
|
769
|
+
except Exception as e:
|
|
770
|
+
console.print(f"[red]Failed to open tapeout report in browser: {e}[/red]")
|
|
771
|
+
raise click.Abort()
|
|
772
|
+
|
|
517
773
|
if __name__ == "__main__":
|
|
518
774
|
main()
|
|
@@ -13,11 +13,14 @@ REQUIRED_FILES = {
|
|
|
13
13
|
"verilog/rtl/user_defines.v": True,
|
|
14
14
|
}
|
|
15
15
|
|
|
16
|
-
# GDS files for different project types (both compressed and uncompressed)
# Maps every accepted top-level GDS filename to its project type; each base
# wrapper name is accepted plain and with a ".gz" suffix.
GDS_TYPE_MAP = {
    wrapper + ext: project_type
    for wrapper, project_type in (
        ('user_project_wrapper.gds', 'digital'),
        ('user_analog_project_wrapper.gds', 'analog'),
        ('openframe_project_wrapper.gds', 'openframe'),
    )
    for ext in ('', '.gz')
}
|
|
22
25
|
|
|
23
26
|
def collect_project_files(project_root: str) -> Dict[str, Optional[str]]:
|
|
@@ -50,12 +53,58 @@ def collect_project_files(project_root: str) -> Dict[str, Optional[str]]:
|
|
|
50
53
|
|
|
51
54
|
if len(found_gds_files) == 0:
|
|
52
55
|
raise FileNotFoundError(f"No GDS file found in {gds_dir}. Expected one of: {list(GDS_TYPE_MAP.keys())}")
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
56
|
+
|
|
57
|
+
# Group by project type
|
|
58
|
+
project_type_files = {}
|
|
59
|
+
for gds_name, gds_path in found_gds_files:
|
|
60
|
+
project_type = GDS_TYPE_MAP[gds_name]
|
|
61
|
+
if project_type not in project_type_files:
|
|
62
|
+
project_type_files[project_type] = []
|
|
63
|
+
project_type_files[project_type].append((gds_name, gds_path))
|
|
64
|
+
|
|
65
|
+
if len(project_type_files) > 1:
|
|
66
|
+
found_types = list(project_type_files.keys())
|
|
67
|
+
raise FileNotFoundError(f"Multiple project types found: {found_types}. Only one project type is allowed per project.")
|
|
68
|
+
|
|
69
|
+
# For the single project type, check if both compressed and uncompressed versions exist
|
|
70
|
+
project_type = list(project_type_files.keys())[0]
|
|
71
|
+
type_files = project_type_files[project_type]
|
|
72
|
+
|
|
73
|
+
# Check for both compressed and uncompressed versions of the same file
|
|
74
|
+
compressed_files = [f for f in type_files if f[0].endswith('.gz')]
|
|
75
|
+
uncompressed_files = [f for f in type_files if not f[0].endswith('.gz')]
|
|
76
|
+
|
|
77
|
+
if len(compressed_files) > 0 and len(uncompressed_files) > 0:
|
|
78
|
+
# Find the base name without extension to show which file has both versions
|
|
79
|
+
base_names = set()
|
|
80
|
+
for gds_name, _ in type_files:
|
|
81
|
+
base_name = gds_name.replace('.gz', '')
|
|
82
|
+
base_names.add(base_name)
|
|
83
|
+
|
|
84
|
+
if len(base_names) == 1:
|
|
85
|
+
# Same base file has both compressed and uncompressed versions
|
|
86
|
+
base_name = list(base_names)[0]
|
|
87
|
+
compressed_name = f"{base_name}.gz"
|
|
88
|
+
uncompressed_name = base_name
|
|
89
|
+
raise FileNotFoundError(
|
|
90
|
+
f"Both compressed and uncompressed versions of the same GDS file found: "
|
|
91
|
+
f"'{compressed_name}' and '{uncompressed_name}'. "
|
|
92
|
+
f"Please remove one of them and keep only one version."
|
|
93
|
+
)
|
|
94
|
+
|
|
95
|
+
# Find uncompressed file first, then fall back to compressed
|
|
96
|
+
gds_file_to_use = None
|
|
97
|
+
for gds_name, gds_path in type_files:
|
|
98
|
+
if not gds_name.endswith('.gz'):
|
|
99
|
+
gds_file_to_use = (gds_name, gds_path)
|
|
100
|
+
break
|
|
101
|
+
|
|
102
|
+
# If no uncompressed file found, use the first available (compressed)
|
|
103
|
+
if not gds_file_to_use:
|
|
104
|
+
gds_file_to_use = type_files[0]
|
|
105
|
+
|
|
106
|
+
gds_name, gds_path = gds_file_to_use
|
|
107
|
+
collected[f"gds/{gds_name}"] = gds_path
|
|
59
108
|
|
|
60
109
|
return collected
|
|
61
110
|
|
|
@@ -220,6 +269,103 @@ def upload_with_progress(sftp, local_path, remote_path, force_overwrite=False):
|
|
|
220
269
|
progress.update(task, completed=file_size)
|
|
221
270
|
return result
|
|
222
271
|
|
|
272
|
+
|
|
273
|
+
|
|
274
|
+
def download_with_progress(sftp, remote_path, local_path, console=None):
    """
    Download a single remote file with a rich progress bar.

    Args:
        sftp: Connected paramiko SFTP client.
        remote_path: Path of the file on the SFTP server.
        local_path: Local destination path (parent directory is created).
        console: Optional rich console used for error reporting.

    Returns:
        True on success.

    Raises:
        Exception: Re-raises any SFTP/IO error after logging it to *console*.
    """
    try:
        file_size = sftp.stat(remote_path).st_size

        with Progress(
            TextColumn("[progress.description]{task.description}"),
            BarColumn(),
            TaskProgressColumn(),
            TextColumn("{task.percentage:>3.0f}%"),
            TextColumn("•"),
            TextColumn("{task.completed}/{task.total} bytes"),
            TimeElapsedColumn(),
        ) as progress:
            task = progress.add_task(f"Downloading {os.path.basename(remote_path)}", total=file_size)

            # Ensure the local parent directory exists. Guard against an empty
            # dirname (bare filename destination) — os.makedirs("") raises.
            parent_dir = os.path.dirname(local_path)
            if parent_dir:
                os.makedirs(parent_dir, exist_ok=True)

            # paramiko invokes the callback with (bytes_transferred, total_bytes).
            with open(local_path, "wb") as f:
                sftp.getfo(
                    remote_path,
                    f,
                    callback=lambda done, _total=file_size: progress.update(task, completed=done),
                )

            # Snap the bar to 100% in case the last callback undershot.
            progress.update(task, completed=file_size)
            return True

    except Exception as e:
        if console:
            console.print(f"[red]Failed to download {os.path.basename(remote_path)}: {e}[/red]")
        raise
|
|
312
|
+
|
|
313
|
+
def sftp_download_recursive(sftp, remote_path: str, local_path: str, progress_cb=None, console=None):
    """
    Recursively download files and directories from SFTP server.

    Args:
        sftp: Connected paramiko SFTP client.
        remote_path: Remote path on SFTP server.
        local_path: Local path to save to.
        progress_cb: Optional progress callback function(bytes_transferred, total_bytes).
        console: Optional rich console for logging.

    Raises:
        Exception: Re-raised after logging for stat/download failures;
            directory-listing failures are logged as warnings and skipped.
    """
    import stat as stat_module  # local import; keeps the module's top-level imports untouched

    try:
        remote_stat = sftp.stat(remote_path)

        # Use S_ISDIR rather than the raw `st_mode & 0o40000` bitmask: that
        # bit is also set in other file-type codes (block devices 0o060000,
        # sockets 0o140000), which the bitmask would misclassify as dirs.
        if stat_module.S_ISDIR(remote_stat.st_mode):
            os.makedirs(local_path, exist_ok=True)
            if console:
                console.print(f"[dim]Creating directory: {os.path.basename(local_path)}[/dim]")

            # Best-effort: an unreadable directory is logged and skipped.
            try:
                remote_contents = sftp.listdir(remote_path)
                if console:
                    console.print(f"[dim]Found {len(remote_contents)} items in {os.path.basename(remote_path)}[/dim]")
                for item in remote_contents:
                    sftp_download_recursive(
                        sftp,
                        f"{remote_path}/{item}",
                        os.path.join(local_path, item),
                        progress_cb,
                        console,
                    )
            except Exception as e:
                if console:
                    console.print(f"[yellow]Warning: Could not list directory {os.path.basename(remote_path)}: {e}[/yellow]")

        else:  # Regular file
            # Ensure the local parent directory exists. Guard against an empty
            # dirname (bare filename destination) — os.makedirs("") raises.
            parent_dir = os.path.dirname(local_path)
            if parent_dir:
                os.makedirs(parent_dir, exist_ok=True)

            if console:
                console.print(f"[dim]Downloading: {os.path.basename(remote_path)} ({remote_stat.st_size:,} bytes)[/dim]")

            # Stream through getfo when a progress callback was supplied;
            # otherwise let paramiko handle the plain transfer.
            if progress_cb:
                file_size = remote_stat.st_size
                with open(local_path, "wb") as f:
                    sftp.getfo(
                        remote_path,
                        f,
                        callback=lambda done, _total=file_size: progress_cb(done, _total),
                    )
            else:
                sftp.get(remote_path, local_path)

    except Exception as e:
        if console:
            console.print(f"[red]Error downloading {os.path.basename(remote_path)}: {e}[/red]")
        raise
|
|
223
369
|
def get_config_path() -> Path:
    """Return the location of the per-user chipfoundry-cli config file."""
    return Path.home().joinpath(".chipfoundry-cli", "config.toml")
|
|
225
371
|
|
|
@@ -233,4 +379,21 @@ def save_user_config(config: dict):
|
|
|
233
379
|
config_path = get_config_path()
|
|
234
380
|
config_path.parent.mkdir(parents=True, exist_ok=True)
|
|
235
381
|
with open(config_path, 'w') as f:
|
|
236
|
-
toml.dump(config, f)
|
|
382
|
+
toml.dump(config, f)
|
|
383
|
+
|
|
384
|
+
def open_html_in_browser(html_path: str):
    """
    Open an HTML file in the default browser.

    Args:
        html_path: Path to the HTML file to open
    """
    import webbrowser
    from pathlib import Path

    # Path.as_uri() builds a correct file:// URL on every platform, including
    # Windows drive letters and backslashes, which a hand-assembled
    # "file://" + quote(abspath) string percent-encodes into a broken URL.
    file_url = Path(html_path).resolve().as_uri()

    # Open in default browser
    webbrowser.open(file_url)
|
|
File without changes
|
|
File without changes
|