estampo 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- estampo/__init__.py +24 -0
- estampo/adapters.py +248 -0
- estampo/arrange.py +100 -0
- estampo/auth.py +139 -0
- estampo/cli.py +903 -0
- estampo/cloud/__init__.py +41 -0
- estampo/cloud/ams.py +344 -0
- estampo/cloud/bridge.py +574 -0
- estampo/config.py +343 -0
- estampo/constants.py +3 -0
- estampo/credentials.py +420 -0
- estampo/docker_versions.json +1 -0
- estampo/gcode.py +192 -0
- estampo/init.py +1161 -0
- estampo/loader.py +189 -0
- estampo/orient.py +77 -0
- estampo/pipeline.py +457 -0
- estampo/plate.py +212 -0
- estampo/printer.py +448 -0
- estampo/profiles.py +595 -0
- estampo/py.typed +0 -0
- estampo/slicer.py +643 -0
- estampo/thumbnails.py +229 -0
- estampo/ui.py +149 -0
- estampo/viewer.py +84 -0
- estampo-0.2.0.dist-info/METADATA +494 -0
- estampo-0.2.0.dist-info/RECORD +30 -0
- estampo-0.2.0.dist-info/WHEEL +4 -0
- estampo-0.2.0.dist-info/entry_points.txt +3 -0
- estampo-0.2.0.dist-info/licenses/LICENSE +201 -0
estampo/__init__.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
"""estampo — Reproducible 3D print builds."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
__version__ = "0.2.0"
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class EstampoError(Exception):
    """Error intended for end users; the CLI prints it without a traceback."""


# Backward-compatible alias kept from the fabprint → estampo rename.
FabprintError = EstampoError
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def require_file(path: Path, label: str = "File") -> None:
    """Ensure *path* exists on disk; raise FileNotFoundError otherwise."""
    if path.exists():
        return
    raise FileNotFoundError(f"{label} not found: {path}")
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
__all__ = ["EstampoError", "FabprintError", "__version__", "require_file"]
|
estampo/adapters.py
ADDED
|
@@ -0,0 +1,248 @@
|
|
|
1
|
+
"""Hamilton lifecycle adapters for pipeline observability.
|
|
2
|
+
|
|
3
|
+
These adapters plug into the Hamilton driver to provide per-node timing,
|
|
4
|
+
logging, and extensible hooks without modifying any pipeline node code.
|
|
5
|
+
|
|
6
|
+
Usage::
|
|
7
|
+
|
|
8
|
+
from estampo.adapters import TimingAdapter, ProgressAdapter
|
|
9
|
+
|
|
10
|
+
dr = (
|
|
11
|
+
driver.Builder()
|
|
12
|
+
.with_modules(pipeline)
|
|
13
|
+
.with_adapters(ProgressAdapter())
|
|
14
|
+
.build()
|
|
15
|
+
)
|
|
16
|
+
"""
|
|
17
|
+
|
|
18
|
+
from __future__ import annotations
|
|
19
|
+
|
|
20
|
+
import logging
|
|
21
|
+
import time
|
|
22
|
+
from pathlib import Path
|
|
23
|
+
from typing import TYPE_CHECKING
|
|
24
|
+
|
|
25
|
+
from hamilton.lifecycle import NodeExecutionHook
|
|
26
|
+
|
|
27
|
+
if TYPE_CHECKING:
|
|
28
|
+
from rich.status import Status
|
|
29
|
+
|
|
30
|
+
log = logging.getLogger(__name__)
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class TimingAdapter(NodeExecutionHook):
    """Log how long each Hamilton node takes to execute."""

    def __init__(self) -> None:
        # node_name -> monotonic timestamp captured just before execution
        self._starts: dict[str, float] = {}

    def run_before_node_execution(
        self,
        *,
        node_name: str,
        node_tags: dict,
        node_kwargs: dict,
        node_return_type: type,
        task_id: str | None,
        run_id: str,
        **future_kwargs,
    ) -> None:
        log.debug("Starting: %s", node_name)
        self._starts[node_name] = time.monotonic()

    def run_after_node_execution(
        self,
        *,
        node_name: str,
        node_tags: dict,
        node_kwargs: dict,
        node_return_type: type,
        result,
        error: Exception | None,
        success: bool,
        task_id: str | None,
        run_id: str,
        **future_kwargs,
    ) -> None:
        # A missing start entry (shouldn't happen) yields elapsed == 0.0.
        started = self._starts.pop(node_name, None)
        elapsed = 0.0 if started is None else time.monotonic() - started
        if not success:
            log.warning("Failed: %s (%.2fs) — %s", node_name, elapsed, error)
        else:
            log.info("Completed: %s (%.2fs)", node_name, elapsed)
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
class ProgressAdapter(NodeExecutionHook):
    """Rich spinner + checkmark progress for user-visible pipeline stages.

    Both hooks ignore any node not listed in ``_STAGE_NODES``; each listed
    stage gets a spinner while running (except the near-instant
    ``gcode_stats``) and a green checkmark line — or a red cross on
    failure — once it finishes.
    """

    # Only these Hamilton nodes get a visible spinner / checkmark line.
    _STAGE_NODES: frozenset[str] = frozenset(
        {
            "loaded_parts",
            "placements",
            "plate_3mf_path",
            # Bug fix: "preview_path" was missing here, so its spinner label
            # and its result branch in run_after_node_execution were
            # unreachable (both hooks early-return on unlisted nodes).
            "preview_path",
            "sliced_output_dir",
            "gcode_stats",
            "print_result",
        }
    )

    # Human-readable spinner label for each stage node.
    _SPINNER_LABELS: dict[str, str] = {
        "loaded_parts": "Loading parts",
        "placements": "Arranging onto plate",
        "plate_3mf_path": "Exporting plate",
        "preview_path": "Exporting preview",
        "sliced_output_dir": "Slicing",
        "gcode_stats": "Reading gcode",
        "print_result": "Sending to printer",
    }

    def __init__(self) -> None:
        # Lazy import keeps importing this module cheap when rich is unused.
        from rich.console import Console

        self._console = Console(highlight=False)
        self._starts: dict[str, float] = {}  # node_name -> monotonic start time
        self._status: Status | None = None  # currently running spinner, if any
        self._slice_version: str | None = None  # remembered for the "Sliced" line

    # ------------------------------------------------------------------
    # Internal helpers
    # ------------------------------------------------------------------

    def _start_spinner(self, label: str) -> None:
        """Start a console spinner showing *label*."""
        from rich.status import Status

        self._status = Status(label, console=self._console, spinner="dots")
        self._status.start()

    def _stop_spinner(self) -> None:
        """Stop the active spinner, if one is running."""
        if self._status is not None:
            self._status.stop()
            self._status = None

    def _ok(self, msg: str, elapsed: float = 0, *, show_elapsed: bool = True) -> None:
        """Print a green-checkmark line, appending elapsed time when >= 2s."""
        elapsed_str = ""
        if show_elapsed and elapsed >= 2:
            elapsed_str = f"[dim]{elapsed:.0f}s[/dim]"
        self._console.print(f"[green]✔[/green] {msg} {elapsed_str}".rstrip())

    def _err(self, msg: str) -> None:
        """Print a red-cross failure line."""
        self._console.print(f"[red]✗[/red] {msg}")

    # ------------------------------------------------------------------
    # NodeExecutionHook
    # ------------------------------------------------------------------

    def run_before_node_execution(
        self,
        *,
        node_name: str,
        node_tags: dict,
        node_kwargs: dict,
        node_return_type: type,
        task_id: str | None,
        run_id: str,
        **future_kwargs,
    ) -> None:
        if node_name not in self._STAGE_NODES:
            return

        self._starts[node_name] = time.monotonic()
        label = self._SPINNER_LABELS.get(node_name, node_name)

        if node_name == "sliced_output_dir":
            # Prefer the explicit docker_version input; fall back to config.
            cfg = node_kwargs.get("config")
            ver = node_kwargs.get("docker_version")
            if not ver and cfg and cfg.slicer.version:
                ver = cfg.slicer.version
            if ver:
                label = f"Slicing with OrcaSlicer {ver}"
                self._slice_version = ver

        elif node_name == "print_result":
            cfg = node_kwargs.get("config")
            if cfg and cfg.printer:
                label = f'Sending to printer "{cfg.printer.name}"'

        # gcode_stats is fast — skip spinner, just print the result line
        if node_name != "gcode_stats":
            self._start_spinner(label)

    def run_after_node_execution(
        self,
        *,
        node_name: str,
        node_tags: dict,
        node_kwargs: dict,
        node_return_type: type,
        result,
        error: Exception | None,
        success: bool,
        task_id: str | None,
        run_id: str,
        **future_kwargs,
    ) -> None:
        if node_name not in self._STAGE_NODES:
            return

        # Missing start entry degrades to elapsed ≈ 0 rather than raising.
        elapsed = time.monotonic() - self._starts.pop(node_name, time.monotonic())
        self._stop_spinner()

        if not success:
            label = self._SPINNER_LABELS.get(node_name, node_name)
            self._err(f"{label} failed — {error}")
            return

        if node_name == "loaded_parts":
            n = len(result.meshes)
            self._ok(f"Loaded {n} part{'s' if n != 1 else ''}", elapsed)

        elif node_name == "placements":
            n = len(result)
            cfg = node_kwargs.get("config")
            plate_str = ""
            if cfg:
                w, d = cfg.plate.size
                plate_str = f"[dim]({w:.0f}×{d:.0f}mm)[/dim]"
            self._ok(
                f"Arranged {n} part{'s' if n != 1 else ''} onto plate {plate_str}",
                elapsed,
            )

        elif node_name == "plate_3mf_path":
            name = result.name if isinstance(result, Path) else "plate.3mf"
            self._ok(f"Plate exported → [dim]{name}[/dim]", elapsed)

        elif node_name == "preview_path":
            name = result.name if isinstance(result, Path) else "plate_preview.3mf"
            self._ok(f"Preview exported → [dim]{name}[/dim]", elapsed)

        elif node_name == "sliced_output_dir":
            ver = self._slice_version
            ver_str = f"with OrcaSlicer {ver}" if ver else ""
            time_str = f" in {elapsed:.0f}s" if elapsed >= 2 else ""
            self._ok(f"Sliced {ver_str}{time_str}".rstrip(), show_elapsed=False)
            # Show gcode filename if available
            if isinstance(result, Path):
                gcode_files = list(result.glob("*.gcode"))
                if gcode_files:
                    self._console.print(f"  [dim]→ {gcode_files[0].name}[/dim]")

        elif node_name == "gcode_stats":
            parts: list[str] = []
            if "print_time" in result:
                parts.append(f"Print time: {result['print_time']}")
            if "filament_g" in result:
                parts.append(f"{result['filament_g']:.1f}g filament")
            elif "filament_cm3" in result:
                parts.append(f"{result['filament_cm3']:.1f}cm³ filament")
            if parts:
                self._ok(", ".join(parts), elapsed)

        elif node_name == "print_result":
            cfg = node_kwargs.get("config")
            dry_run = node_kwargs.get("dry_run", False)
            printer_name = cfg.printer.name if cfg and cfg.printer else "printer"
            if dry_run:
                self._ok(f'Dry run — would send to "{printer_name}"', elapsed)
            else:
                self._ok(f'Sent to printer "{printer_name}"', elapsed)
|
estampo/arrange.py
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
"""2D bin packing of parts onto a build plate via rectpack."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
from dataclasses import dataclass
|
|
7
|
+
|
|
8
|
+
import rectpack
|
|
9
|
+
import trimesh
|
|
10
|
+
|
|
11
|
+
log = logging.getLogger(__name__)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
@dataclass
class Placement:
    """One part's final position on the build plate, produced by arrange()."""

    mesh: trimesh.Trimesh  # translated copy of the caller's mesh
    name: str  # display name from the matching entry in arrange()'s *names*
    x: float  # packed-rectangle origin in mm (later shifted by centering)
    y: float  # packed-rectangle origin in mm (later shifted by centering)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def arrange(
    meshes: list[trimesh.Trimesh],
    names: list[str],
    plate_size: tuple[float, float],
    padding: float = 5.0,
) -> list[Placement]:
    """Pack oriented meshes onto a build plate.

    Args:
        meshes: Oriented part meshes, one per entry in *names*.
        names: Display name for each mesh.
        plate_size: Plate (width, depth) in mm.
        padding: Clearance added around each part's XY bounding box, in mm.

    Returns a list of Placement objects with meshes translated to their packed positions.
    Raises ValueError if not all parts fit on the plate.
    """
    if len(meshes) != len(names):
        raise ValueError("meshes and names must have the same length")

    # Compute padded XY bounding boxes (use integer mm for rectpack)
    rects = []
    for i, mesh in enumerate(meshes):
        w = mesh.extents[0] + padding
        h = mesh.extents[1] + padding
        # int(v + 1) is >= ceil(v); for whole-number sizes it over-allocates by
        # a full 1 mm, which is conservative (a rect is never too small).
        rects.append((i, int(w + 1), int(h + 1)))

    packer = rectpack.newPacker(
        mode=rectpack.PackingMode.Offline,
        pack_algo=rectpack.MaxRectsBssf,
        rotation=False,  # keep each part's XY orientation as given
    )
    packer.add_bin(int(plate_size[0]), int(plate_size[1]))
    for idx, w, h in rects:
        packer.add_rect(w, h, rid=idx)

    packer.pack()

    # rect_list() only contains rectangles that actually fit in the bin.
    packed = packer.rect_list()
    if len(packed) != len(meshes):
        raise ValueError(
            f"Only {len(packed)}/{len(meshes)} parts fit on the "
            f"{plate_size[0]}x{plate_size[1]}mm plate"
        )

    placements = []
    # rect_list() yields (bin_id, x, y, width, height, rid) tuples.
    for _bin_id, x, y, _w, _h, rid in packed:
        mesh = meshes[rid].copy()  # leave the caller's mesh untouched
        # Translate mesh: move min XY to the packed position (+ half padding offset)
        min_x, min_y = mesh.bounds[0][0], mesh.bounds[0][1]
        offset_x = x + padding / 2 - min_x
        offset_y = y + padding / 2 - min_y
        mesh.apply_translation([offset_x, offset_y, 0])
        placements.append(Placement(mesh=mesh, name=names[rid], x=x, y=y))

    # Center the packed group on the plate
    _center_on_plate(placements, plate_size)

    log.info("Packed %d parts onto plate", len(placements))
    return placements
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def _center_on_plate(placements: list[Placement], plate_size: tuple[float, float]) -> None:
|
|
79
|
+
"""Translate all placements so the group is centered on the plate."""
|
|
80
|
+
if not placements:
|
|
81
|
+
return
|
|
82
|
+
|
|
83
|
+
# Find bounding box of all placed meshes
|
|
84
|
+
all_min_x = min(p.mesh.bounds[0][0] for p in placements)
|
|
85
|
+
all_max_x = max(p.mesh.bounds[1][0] for p in placements)
|
|
86
|
+
all_min_y = min(p.mesh.bounds[0][1] for p in placements)
|
|
87
|
+
all_max_y = max(p.mesh.bounds[1][1] for p in placements)
|
|
88
|
+
|
|
89
|
+
group_cx = (all_min_x + all_max_x) / 2
|
|
90
|
+
group_cy = (all_min_y + all_max_y) / 2
|
|
91
|
+
plate_cx = plate_size[0] / 2
|
|
92
|
+
plate_cy = plate_size[1] / 2
|
|
93
|
+
|
|
94
|
+
dx = plate_cx - group_cx
|
|
95
|
+
dy = plate_cy - group_cy
|
|
96
|
+
|
|
97
|
+
for p in placements:
|
|
98
|
+
p.mesh.apply_translation([dx, dy, 0])
|
|
99
|
+
p.x += dx
|
|
100
|
+
p.y += dy
|
estampo/auth.py
ADDED
|
@@ -0,0 +1,139 @@
|
|
|
1
|
+
"""Bambu Cloud authentication — login, token caching, and device discovery."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import logging
|
|
7
|
+
|
|
8
|
+
import requests
|
|
9
|
+
|
|
10
|
+
from estampo import EstampoError
|
|
11
|
+
|
|
12
|
+
log = logging.getLogger(__name__)
|
|
13
|
+
|
|
14
|
+
API_BASE = "https://api.bambulab.com"
|
|
15
|
+
|
|
16
|
+
SLICER_HEADERS = {
|
|
17
|
+
"X-BBL-Client-Name": "OrcaSlicer",
|
|
18
|
+
"X-BBL-Client-Type": "slicer",
|
|
19
|
+
"X-BBL-Client-Version": "02.03.01.00",
|
|
20
|
+
"User-Agent": "bambu_network_agent/02.03.01.00",
|
|
21
|
+
"Content-Type": "application/json",
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def _request_verification_code(email: str) -> None:
    """Request a verification code be sent to the user's email.

    Args:
        email: Account email address to send the code to.

    Raises:
        requests.HTTPError: if the API rejects the request.
        requests.Timeout: if the API does not respond within 30s.
    """
    from estampo import ui

    resp = requests.post(
        f"{API_BASE}/v1/user-service/user/sendemail/code",
        headers=SLICER_HEADERS,
        json={"email": email, "type": "codeLogin"},
        timeout=30,  # fail fast instead of hanging the CLI on a stalled API
    )
    resp.raise_for_status()
    ui.success(f"Verification code sent to {email}")
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _login(email: str, password: str) -> tuple[str, str]:
    """Login and return (access_token, refresh_token). Handles all auth flows.

    Flows handled, in order:
      1. plain password login,
      2. email verification-code login (``loginType == "verifyCode"``),
      3. two-factor login (``tfaKey`` present in the response).

    refresh_token may be an empty string when the API omits it.

    Raises:
        EstampoError: if no access token was obtained after all flows.
        requests.HTTPError: if any API call returns an error status.
    """

    from estampo import ui

    # Step 1: Try password login
    ui.info("Attempting password login...")
    resp = requests.post(
        f"{API_BASE}/v1/user-service/user/login",
        headers=SLICER_HEADERS,
        json={"account": email, "password": password, "apiError": ""},
    )
    resp.raise_for_status()
    data = resp.json()

    token = data.get("accessToken")
    refresh_token = data.get("refreshToken", "")
    login_type = data.get("loginType", "")

    # Step 2: Handle verification code flow
    # Always request a code explicitly — the API sometimes auto-sends one when
    # loginType == "verifyCode", but not always (IP-based trust).
    # Calling explicitly ensures the user always receives a code,
    # though they may occasionally get two emails.
    if not token and login_type == "verifyCode":
        _request_verification_code(email)
        code = ui.prompt_password("Enter verification code")
        resp = requests.post(
            f"{API_BASE}/v1/user-service/user/login",
            headers=SLICER_HEADERS,
            json={"account": email, "code": code},
        )
        resp.raise_for_status()
        data = resp.json()
        token = data.get("accessToken")
        refresh_token = data.get("refreshToken", "")

    # Step 3: Handle TFA flow
    if not token and data.get("tfaKey"):
        tfa_key = data["tfaKey"]
        ui.info("Account requires two-factor authentication.")
        tfa_code = ui.prompt_password("Enter 2FA code")
        resp = requests.post(
            f"{API_BASE}/v1/user-service/user/tfa",
            headers=SLICER_HEADERS,
            json={"tfaKey": tfa_key, "tfaCode": tfa_code},
        )
        resp.raise_for_status()
        data = resp.json()
        token = data.get("accessToken")
        refresh_token = data.get("refreshToken", "")

    if not token:
        # Dump the raw response at debug level to aid troubleshooting.
        log.debug("Login response: %s", json.dumps(data, indent=2))
        raise EstampoError("Login failed — no access token in response")

    return token, refresh_token
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def _get_user_profile(token: str) -> dict:
|
|
98
|
+
"""Fetch user profile (uid, name, avatar)."""
|
|
99
|
+
resp = requests.get(
|
|
100
|
+
f"{API_BASE}/v1/design-user-service/my/preference",
|
|
101
|
+
headers={**SLICER_HEADERS, "Authorization": f"Bearer {token}"},
|
|
102
|
+
)
|
|
103
|
+
resp.raise_for_status()
|
|
104
|
+
data = resp.json()
|
|
105
|
+
return {
|
|
106
|
+
"uid": str(data.get("uid", "")),
|
|
107
|
+
"name": data.get("name", ""),
|
|
108
|
+
"avatar": data.get("avatar", ""),
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
def _get_devices(token: str) -> list[dict]:
|
|
113
|
+
"""List printers bound to the account."""
|
|
114
|
+
resp = requests.get(
|
|
115
|
+
f"{API_BASE}/v1/iot-service/api/user/bind",
|
|
116
|
+
headers={**SLICER_HEADERS, "Authorization": f"Bearer {token}"},
|
|
117
|
+
)
|
|
118
|
+
resp.raise_for_status()
|
|
119
|
+
return resp.json().get("devices", [])
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
def _show_devices(token: str) -> None:
    """Render the account's bound printers as a table, or a notice if none."""
    from estampo import ui
    from estampo.credentials import mask_serial

    devices = _get_devices(token)
    if not devices:
        ui.info("No printers found")
        return

    ui.console.print()
    rows = [
        (
            d.get("name", "unnamed"),
            d.get("dev_product_name", d.get("dev_model_name", "?")),
            mask_serial(d.get("dev_id", "?")),
            "[green]online[/green]" if d.get("online") else "[dim]offline[/dim]",
        )
        for d in devices
    ]
    ui.choice_table(rows, ["Name", "Model", "Serial", "Status"], markup=True)
|