tescmd-0.1.2-py3-none-any.whl → tescmd-0.2.0-py3-none-any.whl
- tescmd/__init__.py +1 -1
- tescmd/api/client.py +8 -1
- tescmd/api/errors.py +8 -0
- tescmd/api/vehicle.py +19 -1
- tescmd/cache/response_cache.py +3 -2
- tescmd/cli/auth.py +30 -2
- tescmd/cli/key.py +149 -14
- tescmd/cli/main.py +44 -0
- tescmd/cli/setup.py +230 -22
- tescmd/cli/vehicle.py +464 -1
- tescmd/crypto/__init__.py +3 -1
- tescmd/crypto/ecdh.py +9 -0
- tescmd/crypto/schnorr.py +191 -0
- tescmd/deploy/tailscale_serve.py +154 -0
- tescmd/models/__init__.py +0 -2
- tescmd/models/auth.py +19 -0
- tescmd/models/config.py +1 -0
- tescmd/models/energy.py +0 -9
- tescmd/protocol/session.py +10 -3
- tescmd/telemetry/__init__.py +19 -0
- tescmd/telemetry/dashboard.py +227 -0
- tescmd/telemetry/decoder.py +284 -0
- tescmd/telemetry/fields.py +248 -0
- tescmd/telemetry/flatbuf.py +162 -0
- tescmd/telemetry/protos/__init__.py +4 -0
- tescmd/telemetry/protos/vehicle_alert.proto +31 -0
- tescmd/telemetry/protos/vehicle_alert_pb2.py +42 -0
- tescmd/telemetry/protos/vehicle_alert_pb2.pyi +44 -0
- tescmd/telemetry/protos/vehicle_connectivity.proto +23 -0
- tescmd/telemetry/protos/vehicle_connectivity_pb2.py +40 -0
- tescmd/telemetry/protos/vehicle_connectivity_pb2.pyi +33 -0
- tescmd/telemetry/protos/vehicle_data.proto +768 -0
- tescmd/telemetry/protos/vehicle_data_pb2.py +136 -0
- tescmd/telemetry/protos/vehicle_data_pb2.pyi +1336 -0
- tescmd/telemetry/protos/vehicle_error.proto +23 -0
- tescmd/telemetry/protos/vehicle_error_pb2.py +44 -0
- tescmd/telemetry/protos/vehicle_error_pb2.pyi +39 -0
- tescmd/telemetry/protos/vehicle_metric.proto +22 -0
- tescmd/telemetry/protos/vehicle_metric_pb2.py +44 -0
- tescmd/telemetry/protos/vehicle_metric_pb2.pyi +37 -0
- tescmd/telemetry/server.py +293 -0
- tescmd/telemetry/tailscale.py +300 -0
- {tescmd-0.1.2.dist-info → tescmd-0.2.0.dist-info}/METADATA +72 -35
- {tescmd-0.1.2.dist-info → tescmd-0.2.0.dist-info}/RECORD +47 -22
- {tescmd-0.1.2.dist-info → tescmd-0.2.0.dist-info}/WHEEL +0 -0
- {tescmd-0.1.2.dist-info → tescmd-0.2.0.dist-info}/entry_points.txt +0 -0
- {tescmd-0.1.2.dist-info → tescmd-0.2.0.dist-info}/licenses/LICENSE +0 -0
tescmd/cli/vehicle.py
CHANGED
@@ -4,12 +4,14 @@ from __future__ import annotations
 
 import asyncio
 import contextlib
+import logging
+import random
 from typing import TYPE_CHECKING
 
 import click
 
 from tescmd._internal.async_utils import run_async
-from tescmd.api.errors import VehicleAsleepError
+from tescmd.api.errors import TunnelError, VehicleAsleepError
 from tescmd.cli._client import (
     TTL_DEFAULT,
     TTL_FAST,
@@ -23,8 +25,13 @@ from tescmd.cli._client import (
 )
 from tescmd.cli._options import global_options
 
+logger = logging.getLogger(__name__)
+
 if TYPE_CHECKING:
+    from collections.abc import Awaitable, Callable
+
     from tescmd.cli.main import AppContext
+    from tescmd.output.formatter import OutputFormatter
 
 
 # ---------------------------------------------------------------------------
@@ -794,6 +801,462 @@ async def _cmd_telemetry_errors(app_ctx: AppContext, vin_positional: str | None)
     formatter.rich.telemetry_errors(data)
 
 
+@telemetry_group.command("stream")
+@click.argument("vin_positional", required=False, default=None, metavar="VIN")
+@click.option("--port", type=int, default=None, help="Local server port (random if omitted)")
+@click.option("--fields", default="default", help="Field preset or comma-separated names")
+@click.option("--interval", type=int, default=None, help="Override interval for all fields")
+@global_options
+def telemetry_stream_cmd(
+    app_ctx: AppContext,
+    vin_positional: str | None,
+    port: int | None,
+    fields: str,
+    interval: int | None,
+) -> None:
+    """Stream real-time telemetry via Tailscale Funnel.
+
+    Starts a local WebSocket server, exposes it via Tailscale Funnel,
+    configures the vehicle to push telemetry, and displays it in an
+    interactive dashboard (TTY) or JSONL stream (piped).
+
+    Requires: pip install tescmd[telemetry] and Tailscale (with Funnel enabled).
+    """
+    run_async(_cmd_telemetry_stream(app_ctx, vin_positional, port, fields, interval))
+
+
+async def _noop_stop() -> None:
+    """No-op tunnel cleanup (used when tunnel wasn't started yet)."""
+
+
+async def _setup_tunnel(
+    *,
+    port: int,
+    formatter: OutputFormatter,
+) -> tuple[str, str, str, Callable[[], Awaitable[None]]]:
+    """Start Tailscale Funnel and return ``(url, hostname, ca_pem, stop_fn)``."""
+    from tescmd.telemetry.tailscale import TailscaleManager
+
+    ts = TailscaleManager()
+    await ts.check_available()
+    await ts.check_running()
+
+    url = await ts.start_funnel(port)
+    if formatter.format != "json":
+        formatter.rich.info(f"Tailscale Funnel active: {url}")
+
+    hostname = await ts.get_hostname()
+    ca_pem = await ts.get_cert_pem()
+    return url, hostname, ca_pem, ts.stop_funnel
+
+
+async def _cmd_telemetry_stream(
+    app_ctx: AppContext,
+    vin_positional: str | None,
+    port: int | None,
+    fields_spec: str,
+    interval_override: int | None,
+) -> None:
+    from tescmd.telemetry.decoder import TelemetryDecoder
+    from tescmd.telemetry.fields import resolve_fields
+    from tescmd.telemetry.server import TelemetryServer
+
+    formatter = app_ctx.formatter
+    vin = require_vin(vin_positional, app_ctx.vin)
+
+    # Pick a random high port if none specified
+    if port is None:
+        port = random.randint(49152, 65535)
+
+    # Resolve fields
+    field_config = resolve_fields(fields_spec, interval_override)
+
+    # Build API client
+    client, api = get_vehicle_api(app_ctx)
+    decoder = TelemetryDecoder()
+
+    # Output callback
+    if formatter.format == "json":
+        import json as json_mod
+
+        async def on_frame(frame: TelemetryDecoder | object) -> None:
+            from tescmd.telemetry.decoder import TelemetryFrame
+
+            assert isinstance(frame, TelemetryFrame)
+            line = json_mod.dumps(
+                {
+                    "vin": frame.vin,
+                    "timestamp": frame.created_at.isoformat(),
+                    "data": {d.field_name: d.value for d in frame.data},
+                },
+                default=str,
+            )
+            print(line, flush=True)
+
+        dashboard = None
+    else:
+        from tescmd.telemetry.dashboard import TelemetryDashboard
+
+        dashboard = TelemetryDashboard(formatter.console, formatter.rich._units)
+
+        async def on_frame(frame: TelemetryDecoder | object) -> None:
+            from tescmd.telemetry.decoder import TelemetryFrame
+
+            assert isinstance(frame, TelemetryFrame)
+            assert dashboard is not None
+            dashboard.update(frame)
+
+    # Load settings and public key before server creation — the key must be
+    # served at /.well-known/ because Tesla fetches it during partner registration.
+    from pathlib import Path
+
+    from tescmd.crypto.keys import load_public_key_pem
+    from tescmd.models.config import AppSettings
+
+    _settings = AppSettings()
+    key_dir = Path(_settings.config_dir).expanduser() / "keys"
+    public_key_pem = load_public_key_pem(key_dir)
+
+    # Server + Tunnel + Config with guaranteed cleanup
+    server = TelemetryServer(
+        port=port, decoder=decoder, on_frame=on_frame, public_key_pem=public_key_pem
+    )
+    tunnel_url: str | None = None
+    config_created = False
+
+    # Cleanup callbacks — set by tunnel provider
+    stop_tunnel: Callable[[], Awaitable[None]] = _noop_stop
+    original_partner_domain: str | None = None
+
+    try:
+        await server.start()
+
+        if formatter.format != "json":
+            formatter.rich.info(f"WebSocket server listening on port {port}")
+
+        tunnel_url, hostname, ca_pem, stop_tunnel = await _setup_tunnel(
+            port=port,
+            formatter=formatter,
+        )
+
+        # --- Re-register partner domain if tunnel hostname differs ---
+        registered_domain = (_settings.domain or "").lower().rstrip(".")
+        tunnel_host = hostname.lower().rstrip(".")
+
+        if tunnel_host != registered_domain:
+            from tescmd.api.errors import AuthError
+            from tescmd.auth.oauth import register_partner_account
+
+            if not _settings.client_id or not _settings.client_secret:
+                raise TunnelError(
+                    "Client credentials required for partner domain "
+                    "re-registration. Ensure TESLA_CLIENT_ID and "
+                    "TESLA_CLIENT_SECRET are set."
+                )
+
+            reg_client_id = _settings.client_id
+            reg_client_secret = _settings.client_secret
+            region = app_ctx.region or _settings.region
+            if formatter.format != "json":
+                formatter.rich.info(f"Re-registering partner domain: {hostname}")
+
+            async def _try_register() -> None:
+                await register_partner_account(
+                    client_id=reg_client_id,
+                    client_secret=reg_client_secret,
+                    domain=hostname,
+                    region=region,
+                )
+
+            # Try registration with auto-retry for transient tunnel errors.
+            # 412 = Allowed Origin URL missing in Developer Portal.
+            # 424 = Tesla failed to reach the tunnel (key fetch failed) —
+            # typically a propagation delay after tunnel start.
+            # Tunnel propagation delays can cause transient failures,
+            # so we retry patiently (12 x 5s = 60s).
+            max_retries = 12
+            for attempt in range(max_retries):
+                try:
+                    await _try_register()
+                    if attempt > 0 and formatter.format != "json":
+                        formatter.rich.info(
+                            "[green]Tunnel is reachable — registration succeeded.[/green]"
+                        )
+                    break
+                except AuthError as exc:
+                    status = getattr(exc, "status_code", None)
+
+                    # 424 = key download failed — likely tunnel propagation delay
+                    if status == 424 and attempt < max_retries - 1:
+                        if formatter.format != "json":
+                            formatter.rich.info(
+                                f"[yellow]Waiting for tunnel to become reachable "
+                                f"(HTTP 424)... "
+                                f"({attempt + 1}/{max_retries})[/yellow]"
+                            )
+                        await asyncio.sleep(5)
+                        continue
+
+                    if status not in (412, 424):
+                        raise TunnelError(
+                            f"Partner re-registration failed for {hostname}: {exc}"
+                        ) from exc
+
+                    # 412 or exhausted 424 retries — need user intervention
+                    if formatter.format == "json":
+                        if status == 412:
+                            raise TunnelError(
+                                f"Add https://{hostname} as an Allowed Origin "
+                                f"URL in your Tesla Developer Portal app, "
+                                f"then try again."
+                            ) from exc
+                        raise TunnelError(
+                            f"Tesla could not fetch the public key from "
+                            f"https://{hostname}. Verify the tunnel is "
+                            f"accessible and try again."
+                        ) from exc
+
+                    formatter.rich.info("")
+                    if status == 412:
+                        formatter.rich.info(
+                            "[yellow]Tesla requires the tunnel domain as "
+                            "an Allowed Origin URL.[/yellow]"
+                        )
+                    else:
+                        formatter.rich.info(
+                            "[yellow]Tesla could not reach the tunnel to "
+                            "verify the public key (HTTP 424).[/yellow]"
+                        )
+                    formatter.rich.info("")
+                    formatter.rich.info(" 1. Open your Tesla Developer app:")
+                    formatter.rich.info(" [cyan]https://developer.tesla.com[/cyan]")
+                    formatter.rich.info(" 2. Add this as an Allowed Origin URL:")
+                    formatter.rich.info(f" [cyan]https://{hostname}[/cyan]")
+                    formatter.rich.info(" 3. Save the changes")
+                    formatter.rich.info("")
+
+                    # Wait for user to fix, then retry
+                    while True:
+                        formatter.rich.info(
+                            "Press [bold]Enter[/bold] when done (or Ctrl+C to cancel)..."
+                        )
+                        await asyncio.get_event_loop().run_in_executor(None, input)
+                        try:
+                            await _try_register()
+                            formatter.rich.info("[green]Registration succeeded![/green]")
+                            break
+                        except AuthError as retry_exc:
+                            retry_status = getattr(retry_exc, "status_code", None)
+                            if retry_status in (412, 424):
+                                formatter.rich.info(
+                                    f"[yellow]Tesla returned HTTP "
+                                    f"{retry_status}. There is a propagation "
+                                    f"delay on Tesla's end after adding an "
+                                    f"Allowed Origin URL — this can take up "
+                                    f"to 5 minutes.[/yellow]"
+                                )
+                                formatter.rich.info(
+                                    "Press [bold]Enter[/bold] to retry, or "
+                                    "wait and try again (Ctrl+C to cancel)..."
+                                )
+                                continue
+                            raise TunnelError(
+                                f"Partner re-registration failed: {retry_exc}"
+                            ) from retry_exc
+                    break  # registration succeeded in the inner loop
+
+            original_partner_domain = _settings.domain
+
+        # --- Common path: configure fleet telemetry ---
+        inner_config: dict[str, object] = {
+            "hostname": hostname,
+            "port": 443,  # Tailscale Funnel terminates TLS on 443
+            "ca": ca_pem,
+            "fields": field_config,
+            "alert_types": ["service"],
+        }
+
+        # Sign the config with the fleet key and use the JWS endpoint
+        # (Tesla requires the Vehicle Command HTTP Proxy or JWS signing).
+        from tescmd.api.errors import MissingScopesError
+        from tescmd.crypto.keys import load_private_key
+        from tescmd.crypto.schnorr import sign_fleet_telemetry_config
+
+        private_key = load_private_key(key_dir)
+        jws_token = sign_fleet_telemetry_config(private_key, inner_config)
+
+        try:
+            await api.fleet_telemetry_config_create_jws(vins=[vin], token=jws_token)
+        except MissingScopesError:
+            # Token lacks required scopes (e.g. vehicle_location was added
+            # after the token was issued, or the partner domain changed).
+            # A full re-login is needed — refresh alone doesn't update scopes.
+            if formatter.format == "json":
+                raise TunnelError(
+                    "Your OAuth token is missing required scopes for "
+                    "telemetry streaming. Run:\n"
+                    " 1. tescmd auth register (restore partner domain)\n"
+                    " 2. tescmd auth login (obtain token with updated scopes)\n"
+                    "Then retry the stream command."
+                ) from None
+
+            from tescmd.auth.oauth import login_flow
+            from tescmd.auth.token_store import TokenStore
+            from tescmd.models.auth import DEFAULT_SCOPES
+
+            formatter.rich.info("")
+            formatter.rich.info(
+                "[yellow]Token is missing required scopes — re-authenticating...[/yellow]"
+            )
+            formatter.rich.info("Opening your browser to sign in to Tesla...")
+            formatter.rich.info(
+                "When prompted, click [cyan]Select All[/cyan] and then"
+                " [cyan]Allow[/cyan] to grant tescmd access."
+            )
+
+            login_port = 8085
+            login_redirect = f"http://localhost:{login_port}/callback"
+            login_store = TokenStore(
+                profile=app_ctx.profile,
+                token_file=_settings.token_file,
+                config_dir=_settings.config_dir,
+            )
+            token_data = await login_flow(
+                client_id=_settings.client_id or "",
+                client_secret=_settings.client_secret,
+                redirect_uri=login_redirect,
+                scopes=DEFAULT_SCOPES,
+                port=login_port,
+                token_store=login_store,
+                region=app_ctx.region or _settings.region,
+            )
+            client.update_token(token_data.access_token)
+            formatter.rich.info("[green]Login successful — retrying config...[/green]")
+            await api.fleet_telemetry_config_create_jws(vins=[vin], token=jws_token)
+
+        config_created = True
+
+        if formatter.format != "json":
+            formatter.rich.info(f"Fleet telemetry configured for VIN {vin}")
+            formatter.rich.info("")
+
+        # Run dashboard or wait for interrupt
+        if dashboard is not None:
+            from rich.live import Live
+
+            dashboard.set_tunnel_url(tunnel_url)
+            with Live(
+                dashboard,
+                console=formatter.console,
+                refresh_per_second=4,
+            ) as live:
+                dashboard.set_live(live)
+                await _wait_for_interrupt()
+        else:
+            await _wait_for_interrupt()
+
+    finally:
+        # Cleanup in reverse order — each tolerates failure.
+        # Show progress so the user knows what's happening on 'q'/Ctrl+C.
+        is_rich = formatter.format != "json"
+
+        if config_created:
+            if is_rich:
+                formatter.rich.info("[dim]Removing fleet telemetry config...[/dim]")
+            try:
+                await api.fleet_telemetry_config_delete(vin)
+            except Exception:
+                if is_rich:
+                    formatter.rich.info(
+                        "[yellow]Warning: failed to remove telemetry config."
+                        " It may expire or can be removed manually.[/yellow]"
+                    )
+
+        if original_partner_domain is not None:
+            if is_rich:
+                formatter.rich.info(
+                    f"[dim]Restoring partner domain to {original_partner_domain}...[/dim]"
+                )
+            try:
+                from tescmd.auth.oauth import register_partner_account
+
+                assert _settings.client_id is not None
+                assert _settings.client_secret is not None
+                await register_partner_account(
+                    client_id=_settings.client_id,
+                    client_secret=_settings.client_secret,
+                    domain=original_partner_domain,
+                    region=app_ctx.region or _settings.region,
+                )
+            except Exception:
+                msg = (
+                    f"Failed to restore partner domain to {original_partner_domain}. "
+                    "Run 'tescmd auth register' to fix this manually."
+                )
+                logger.warning(msg)
+                if is_rich:
+                    formatter.rich.info(f"[yellow]Warning: {msg}[/yellow]")
+
+        if is_rich:
+            formatter.rich.info("[dim]Stopping tunnel...[/dim]")
+        with contextlib.suppress(Exception):
+            await stop_tunnel()
+
+        if is_rich:
+            formatter.rich.info("[dim]Stopping server...[/dim]")
+        with contextlib.suppress(Exception):
+            await server.stop()
+
+        await client.close()
+        if is_rich:
+            formatter.rich.info("[green]Stream stopped.[/green]")
+
+
+async def _wait_for_interrupt() -> None:
+    """Block until Ctrl+C or 'q' is pressed."""
+    import sys
+
+    if not sys.stdin.isatty():
+        # Non-TTY (piped / JSON mode): just wait for cancellation.
+        try:
+            while True:
+                await asyncio.sleep(1)
+        except asyncio.CancelledError:
+            pass
+        return
+
+    try:
+        import selectors
+        import termios
+        import tty
+    except ImportError:
+        # Non-Unix: fall back to Ctrl+C only.
+        try:
+            while True:
+                await asyncio.sleep(1)
+        except asyncio.CancelledError:
+            pass
+        return
+
+    fd = sys.stdin.fileno()
+    old_settings = termios.tcgetattr(fd)
+    sel = selectors.DefaultSelector()
+    try:
+        tty.setcbreak(fd)  # Chars available immediately; Ctrl+C still sends SIGINT
+        sel.register(sys.stdin, selectors.EVENT_READ)
+        while True:
+            await asyncio.sleep(0.1)
+            for _key, _ in sel.select(timeout=0):
+                ch = sys.stdin.read(1)
+                if ch in ("q", "Q"):
+                    return
+    except asyncio.CancelledError:
+        pass
+    finally:
+        termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
+        sel.close()
+
+
 # ---------------------------------------------------------------------------
 # Power management commands
 # ---------------------------------------------------------------------------
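For orientation, the happy path of the new `telemetry stream` command condenses to the sketch below. It reuses only names visible in the diff above (`resolve_fields`, `load_private_key`, `sign_fleet_telemetry_config`, `fleet_telemetry_config_create_jws` and the config dict shape); the `configure_stream` wrapper and its signature are hypothetical, and the retry, re-login, re-registration, and cleanup branches are omitted.

# Hypothetical condensed sketch of the configuration step; not part of tescmd.
from pathlib import Path

from tescmd.crypto.keys import load_private_key
from tescmd.crypto.schnorr import sign_fleet_telemetry_config
from tescmd.models.config import AppSettings
from tescmd.telemetry.fields import resolve_fields


async def configure_stream(api, vin: str, hostname: str, ca_pem: str) -> None:
    settings = AppSettings()
    key_dir = Path(settings.config_dir).expanduser() / "keys"

    # Telemetry config the vehicle receives: push to the Funnel hostname on 443.
    inner_config: dict[str, object] = {
        "hostname": hostname,
        "port": 443,
        "ca": ca_pem,
        "fields": resolve_fields("default", None),
        "alert_types": ["service"],
    }

    # Sign with the fleet key and submit via the JWS variant of the
    # fleet_telemetry_config endpoint, as the command above does.
    jws = sign_fleet_telemetry_config(load_private_key(key_dir), inner_config)
    await api.fleet_telemetry_config_create_jws(vins=[vin], token=jws)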
tescmd/crypto/__init__.py
CHANGED
@@ -1,4 +1,4 @@
-"""EC key management for Tesla Fleet API."""
+"""EC key management and signing for Tesla Fleet API."""
 
 from tescmd.crypto.keys import (
     generate_ec_key_pair,
@@ -8,6 +8,7 @@ from tescmd.crypto.keys import (
     load_private_key,
     load_public_key_pem,
 )
+from tescmd.crypto.schnorr import sign_fleet_telemetry_config
 
 __all__ = [
     "generate_ec_key_pair",
@@ -16,4 +17,5 @@ __all__ = [
     "has_key_pair",
     "load_private_key",
     "load_public_key_pem",
+    "sign_fleet_telemetry_config",
 ]
tescmd/crypto/ecdh.py
CHANGED
@@ -13,6 +13,15 @@ def derive_session_key(
 ) -> bytes:
     """Derive a 16-byte session key via ECDH + SHA-1 truncation.
 
+    .. note::
+
+        SHA-1 is required by Tesla's Vehicle Command Protocol — the
+        vehicle firmware expects ``SHA1(shared_secret)[:16]``. SHA-1's
+        known collision weaknesses do not affect this usage (only
+        preimage resistance matters for key derivation, and SHA-1
+        remains preimage-resistant). See ``vehicle-command`` Go SDK
+        reference implementation.
+
     Parameters
     ----------
     private_key: