lange-python 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lange/__init__.py ADDED
@@ -0,0 +1,5 @@
1
+ """Public package exports for lange-python."""
2
+
3
+ from .tunnel import Tunnel
4
+
5
+ __all__ = ["Tunnel"]
lange/__main__.py ADDED
@@ -0,0 +1,7 @@
1
+ """Module entrypoint for ``python -m lange``."""
2
+
3
+ from .cli import cli
4
+
5
+
6
# Delegate to the Click CLI group when executed as ``python -m lange``.
if __name__ == "__main__":
    cli()
lange/cli.py ADDED
@@ -0,0 +1,264 @@
1
+ """CLI commands for the ``lange`` Python package."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from collections.abc import Iterable
6
+ import json
7
+ import os
8
+ from pathlib import Path
9
+ import subprocess
10
+ from typing import Literal, cast
11
+
12
+ import click
13
+
14
+ from lange.services_schema import Project, Service
15
+
16
+ SUPPORTED_EXTENSIONS: tuple[str, ...] = (".py", ".tsx", ".js", ".jsx", ".ts", ".html", ".css")
17
+ IGNORED_DIRECTORIES: tuple[str, ...] = (".venv", "node_modules", ".git", ".next")
18
+ SERVICES_FILE_NAME = "services.json"
19
+
20
+
21
def count_lines_by_extension(root: Path, extensions: Iterable[str]) -> dict[str, int]:
    """
    Count file lines recursively grouped by file ending.

    Directories listed in ``IGNORED_DIRECTORIES`` are pruned from the walk and
    files are read with ``errors="ignore"`` so undecodable bytes never abort
    the scan.

    :param root: Directory that should be scanned recursively.
    :param extensions: Allowed file endings, including the leading dot.
    :returns: Mapping from file ending to counted LOC.
    """
    totals: dict[str, int] = {extension.lower(): 0 for extension in extensions}

    for directory, subdirectories, file_names in os.walk(root, topdown=True):
        # Mutate the directory list in place so os.walk never descends
        # into ignored folders.
        subdirectories[:] = [entry for entry in subdirectories if entry not in IGNORED_DIRECTORIES]

        for file_name in file_names:
            ending = Path(file_name).suffix.lower()
            if ending not in totals:
                continue

            full_path = Path(directory) / file_name
            with full_path.open("r", encoding="utf-8", errors="ignore") as handle:
                totals[ending] += sum(1 for _ in handle)

    return totals
45
+
46
+
47
def render_stats_table(stats: dict[str, int]) -> str:
    """
    Render LOC statistics as an ASCII box table.

    :param stats: Mapping from file ending to line count.
    :returns: Table string with file type, LOC and percentage values.
    """
    grand_total = sum(stats.values())

    def _percentage_cell(loc: int) -> str:
        # Guard against division by zero when no lines were counted at all.
        share = (loc / grand_total * 100.0) if grand_total else 0.0
        return f"{share:.2f}%"

    # Sort by descending LOC, ties broken alphabetically by extension.
    ordered = sorted(stats.items(), key=lambda item: (-item[1], item[0]))
    body_rows = [(extension, str(loc), _percentage_cell(loc)) for extension, loc in ordered]
    body_rows.append(("TOTAL", str(grand_total), f"{100.0 if grand_total else 0.0:.2f}%"))

    headers = ("File-Type", "LOC", "Percentage")
    widths = [
        max(len(headers[column]), max((len(row[column]) for row in body_rows), default=0))
        for column in range(3)
    ]

    separator = "+" + "+".join("-" * (width + 2) for width in widths) + "+"

    def _format_row(values: tuple[str, str, str]) -> str:
        cells = " | ".join(value.ljust(widths[column]) for column, value in enumerate(values))
        return f"| {cells} |"

    output = [separator, _format_row(headers), separator]
    output.extend(_format_row(row) for row in body_rows)
    output.append(separator)
    return "\n".join(output)
80
+
81
+
82
def services_file_path(root: Path) -> Path:
    """
    Build the absolute path to ``services.json`` in a workspace root.

    :param root: Workspace root directory.
    :returns: Absolute path to ``services.json``.
    """
    return root.joinpath(SERVICES_FILE_NAME)
90
+
91
+
92
def load_project_config(root: Path) -> Project:
    """
    Load a project config from ``services.json`` when available.

    :param root: Workspace root directory.
    :returns: Parsed project config. Defaults to an empty project.
    """
    config_file = services_file_path(root)
    if config_file.exists():
        raw_config = json.loads(config_file.read_text(encoding="utf-8"))
        return Project.model_validate(raw_config)

    # A missing file is not an error: the workspace simply has no services yet.
    return Project(services=[])
105
+
106
+
107
def save_project_config(root: Path, project: Project) -> Path:
    """
    Persist project config to ``services.json``.

    :param root: Workspace root directory.
    :param project: Project data to store.
    :returns: Written file path.
    """
    destination = services_file_path(root)
    serialized = project.model_dump_json(indent=2)
    # Keep a trailing newline so the file is POSIX-friendly and diff-clean.
    destination.write_text(f"{serialized}\n", encoding="utf-8")
    return destination
118
+
119
+
120
@click.group()
def cli() -> None:
    """
    Lange CLI entrypoint.

    :returns: ``None``.
    """
    # Intentionally empty: Click groups only dispatch to registered
    # subcommands; the docstring doubles as the --help text.
127
+
128
+
129
@cli.group()
def code() -> None:
    """
    Group for source-code related commands.

    :returns: ``None``.
    """
    # Intentionally empty: subcommands such as ``stats`` attach via decorators.
136
+
137
+
138
@cli.command("init")
def init_workspace() -> None:
    """
    Initialize local ``.lange`` workspace artifacts.

    Creates ``.lange/.gitignore`` and ``.lange/secrets.json`` in the current
    directory.

    :returns: ``None``.
    """
    workspace = Path.cwd() / ".lange"
    workspace.mkdir(parents=True, exist_ok=True)

    ignore_file = workspace / ".gitignore"
    secrets_store = workspace / "secrets.json"

    # Ignore everything inside .lange so local artifacts never get committed.
    ignore_file.write_text("*\n", encoding="utf-8")
    # Seed an empty JSON object so consumers can always parse the file.
    secrets_store.write_text("{}\n", encoding="utf-8")

    click.echo(f"Initialized {ignore_file}")
155
+
156
+
157
@cli.command("create")
def create_service() -> None:
    """
    Interactively add a service entry to ``services.json``.

    :returns: ``None``.
    """
    workspace_root = Path.cwd()
    project = load_project_config(workspace_root)

    service_name = click.prompt("Service name", type=str).strip()
    service_path = click.prompt("Service path", type=str, default=".", show_default=True).strip()
    chosen_build_type = click.prompt(
        "Build type",
        type=click.Choice(["docker", "pyinstall"], case_sensitive=False),
        default="docker",
        show_default=True,
    ).lower()
    publish_target = click.prompt("Publish path (optional)", default="", show_default=False).strip()

    new_service = Service(
        name=service_name,
        path=service_path,
        # Choice() already constrained the value; the cast satisfies typing.
        build_type=cast(Literal["docker", "pyinstall"], chosen_build_type),
        # An empty prompt answer is stored as "no publish path".
        publish_path=publish_target if publish_target else None,
    )
    project.services.append(new_service)

    written_to = save_project_config(workspace_root, project)
    click.echo(f"Added service '{new_service.name}' to {written_to}")
187
+
188
+
189
+ def _iter_services_for_build(project: Project, target_name: str | None) -> list[Service]:
190
+ """
191
+ Select services for a build operation.
192
+
193
+ :param project: Parsed project configuration.
194
+ :param target_name: Optional service name filter.
195
+ :returns: Services to build.
196
+ :raises click.ClickException: Raised when no services can be built.
197
+ """
198
+ if target_name is None:
199
+ services = project.services
200
+ else:
201
+ services = [service for service in project.services if service.name == target_name]
202
+
203
+ if not services:
204
+ if target_name:
205
+ raise click.ClickException(f"Service '{target_name}' not found in {SERVICES_FILE_NAME}.")
206
+ raise click.ClickException(f"No services configured in {SERVICES_FILE_NAME}.")
207
+
208
+ return services
209
+
210
+
211
@cli.command("build")
@click.argument("service_name", required=False)
def build_services(service_name: str | None) -> None:
    """
    Build configured services from ``services.json``.

    Only docker build logic is implemented at the moment.

    :param service_name: Optional single service name to build.
    :returns: ``None``.
    """
    workspace_root = Path.cwd()
    project = load_project_config(workspace_root)

    for service in _iter_services_for_build(project, service_name):
        if service.build_type != "docker":
            click.echo(f"Skipping '{service.name}' (build_type={service.build_type} not implemented).")
            continue

        build_context = service.resolve_path(workspace_root)
        dockerfile = build_context / "Dockerfile"
        if not dockerfile.exists():
            raise click.ClickException(f"Missing Dockerfile for service '{service.name}' at {dockerfile}")

        image = service.publish_path or service.name
        click.echo(f"Building '{service.name}' as image '{image}' from {build_context}")
        # List-form argv (shell=False): nothing is interpreted by a shell.
        command = [
            "docker",
            "build",
            "-t",
            image,
            "-f",
            str(dockerfile),
            str(build_context),
        ]
        subprocess.run(command, check=True)
250
+
251
+
252
@code.command("stats")
def code_stats() -> None:
    """
    Print LOC statistics for the current working directory.

    :returns: ``None``.
    """
    loc_stats = count_lines_by_extension(Path.cwd(), SUPPORTED_EXTENSIONS)

    # Two leading blank lines visually separate the report from shell output.
    for _ in range(2):
        click.echo()
    click.echo(f"Recognized file endings: {' '.join(SUPPORTED_EXTENSIONS)}")
    click.echo(f"Ignored folders: {' '.join(IGNORED_DIRECTORIES)}")
    click.echo(render_stats_table(loc_stats))
@@ -0,0 +1,50 @@
1
+ """Pydantic schemas for ``services.json`` project configuration."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from pathlib import Path
6
+ from typing import Literal
7
+
8
+ from pydantic import BaseModel, ConfigDict
9
+
10
+
11
class Service(BaseModel):
    """
    Service definition stored in ``services.json``.

    :param name: Human-readable service name.
    :param path: Relative or absolute directory path for this service.
    :param build_type: Build strategy, either docker or pyinstall.
    :param publish_path: Optional publish target or image name.
    """

    # Reject unknown keys so typos in services.json fail validation loudly.
    model_config = ConfigDict(extra="forbid")

    name: str
    path: str
    build_type: Literal["docker", "pyinstall"]
    publish_path: str | None = None

    def resolve_path(self, root: Path) -> Path:
        """
        Resolve the service path relative to a workspace root.

        :param root: Workspace root directory.
        :returns: Absolute service path.
        """
        path = Path(self.path)
        if path.is_absolute():
            return path
        # Relative paths are anchored at the workspace root and normalized.
        return (root / path).resolve()
39
+
40
+
41
class Project(BaseModel):
    """
    Project configuration for service-based operations.

    :param services: Configured services in this project.
    """

    # Reject unknown keys so typos in services.json fail validation loudly.
    model_config = ConfigDict(extra="forbid")

    services: list[Service]
lange/tunnel.py ADDED
@@ -0,0 +1,446 @@
1
+ """Tunnel worker client implementation for the Lange tunnel service."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import asyncio
6
+ import base64
7
+ import json
8
+ import logging
9
+ import ssl
10
+ import threading
11
+ from typing import Any, Optional
12
+ from urllib.parse import urljoin
13
+
14
+ import httpx
15
+ import websockets
16
+ from httpx import Timeout
17
+
18
+ logger = logging.getLogger("lange.tunnel")
19
+
20
+
21
class Tunnel(threading.Thread):
    """
    Thread-based tunnel worker client.

    Connects to the tunnel runtime WebSocket endpoint and forwards incoming
    proxy messages to a local HTTP target.

    :param host: Base service URL, e.g. ``wss://example.com``.
    :param secret: Bearer token used for worker authentication.
    :param target: Local HTTP target URL to forward tunnel traffic to.
    :param verify_ssl: Whether to verify TLS certificates for ``wss://`` hosts.
    :param max_retries: Maximum reconnect attempts, ``0`` for infinite retries.
    :param retry_delay: Initial reconnect delay in seconds.
    :param daemon: Whether the worker thread is daemonized.
    :raises ValueError: If ``secret`` is empty.
    """

    def __init__(
        self,
        host: str,
        secret: str,
        target: str = "http://localhost:80",
        verify_ssl: bool = True,
        max_retries: int = 5,
        retry_delay: float = 5.0,
        daemon: bool = True,
    ) -> None:
        if not secret.strip():
            raise ValueError("A non-empty secret is required for tunnel worker authentication.")

        super().__init__(daemon=daemon)
        self.host = host.rstrip("/")
        self.secret = secret.strip()
        self.target = target.rstrip("/")
        self.verify_ssl = verify_ssl
        self.max_retries = max_retries
        self.retry_delay = retry_delay

        # Reconnect backoff doubles per attempt but never exceeds this ceiling.
        self._max_retry_delay = 60.0
        self._stop_event = threading.Event()
        self._reconnect_event = threading.Event()
        # The fields below are shared between the worker thread and external
        # callers; read and write them only while holding self._lock.
        self._connected = False
        self._remote_address: Optional[str] = None
        self._remote_address_roundrobin: Optional[str] = None
        self._worker_index = -1
        self._pool_size = 0
        self._reconnect_count = 0
        self._lock = threading.Lock()
        self._loop: Optional[asyncio.AbstractEventLoop] = None
        self._active_ws: Any = None

    @property
    def connected(self) -> bool:
        """
        Get current connection state.

        :returns: ``True`` when a worker socket is currently active.
        """
        with self._lock:
            return self._connected

    @property
    def remote_address(self) -> Optional[str]:
        """
        Get worker-specific public address when provided by the server.

        :returns: Worker URL or ``None`` when unavailable.
        """
        with self._lock:
            return self._remote_address

    @property
    def remote_address_roundrobin(self) -> Optional[str]:
        """
        Get round-robin public address when provided by the server.

        :returns: Pool URL or ``None`` when unavailable.
        """
        with self._lock:
            return self._remote_address_roundrobin

    @property
    def worker_index(self) -> int:
        """
        Get current worker index from the latest welcome payload.

        :returns: Worker index, ``-1`` when disconnected.
        """
        with self._lock:
            return self._worker_index

    @property
    def pool_size(self) -> int:
        """
        Get current worker pool size from the latest welcome payload.

        :returns: Worker pool size, ``0`` when disconnected.
        """
        with self._lock:
            return self._pool_size

    @property
    def reconnect_count(self) -> int:
        """
        Get number of reconnect attempts after the last successful connection.

        :returns: Reconnect counter.
        """
        with self._lock:
            return self._reconnect_count

    def run(self) -> None:
        """
        Start the worker loop inside the thread.

        :returns: ``None``.
        """
        # asyncio.run creates a private event loop owned by this thread.
        asyncio.run(self._run_async())

    def stop(self) -> None:
        """
        Request a graceful shutdown.

        :returns: ``None``.
        """
        self._stop_event.set()

    def reconnect(self) -> None:
        """
        Force a full reconnect cycle.

        :returns: ``None``.
        """
        self._set_connected(False)
        with self._lock:
            self._reconnect_count = 0
            ws = self._active_ws
            loop = self._loop
        # Set the event before closing so the worker loop interprets the
        # resulting close as a manual reconnect, not a failure.
        self._reconnect_event.set()

        if ws is not None and loop is not None and loop.is_running():
            try:
                # Close on the worker's own loop; 1012 = "service restart".
                future = asyncio.run_coroutine_threadsafe(
                    ws.close(code=1012, reason="Client reconnect"),
                    loop,
                )
                future.result(timeout=2)
            except Exception as exc:  # pragma: no cover - best-effort close path
                logger.debug("Failed to close websocket during reconnect: %s", exc)

    def set_secret(self, secret: str, reconnect: bool = False) -> None:
        """
        Replace the bearer token used for future connections.

        :param secret: New non-empty bearer token.
        :param reconnect: Whether to reconnect immediately to apply the token.
        :returns: ``None``.
        :raises ValueError: If ``secret`` is empty.
        """
        if not secret.strip():
            raise ValueError("A non-empty secret is required for tunnel worker authentication.")

        with self._lock:
            self.secret = secret.strip()

        if reconnect:
            self.reconnect()

    async def _run_async(self) -> None:
        """
        Run the worker connection loop with reconnect backoff.

        :returns: ``None``.
        """
        tunnel_url = self._build_tunnel_url()
        ssl_context = self._build_ssl_context(tunnel_url)

        with self._lock:
            self._loop = asyncio.get_running_loop()

        current_delay = self.retry_delay

        while not self._stop_event.is_set():
            # Headers are rebuilt each attempt so a secret swapped via
            # set_secret() takes effect on the next connection.
            headers = self._build_connection_headers()

            try:
                logger.info("Connecting to %s", tunnel_url)
                async with websockets.connect(
                    tunnel_url,
                    additional_headers=headers,
                    ssl=ssl_context,
                ) as ws:
                    with self._lock:
                        self._active_ws = ws

                    # Successful connect resets the backoff delay.
                    current_delay = self.retry_delay
                    await self._consume_welcome(ws)

                    with self._lock:
                        self._reconnect_count = 0

                    try:
                        await self._handle_messages(ws)
                    finally:
                        with self._lock:
                            self._active_ws = None
                        self._set_connected(False)
            except websockets.exceptions.ConnectionClosed as exc:
                self._set_connected(False)
                logger.warning("Connection closed: %s", exc)
            except Exception as exc:  # pragma: no cover - network error path
                self._set_connected(False)
                logger.error("Connection error: %s", exc)
            finally:
                with self._lock:
                    self._active_ws = None

            if self._stop_event.is_set():
                break

            if self._reconnect_event.is_set():
                # Manual reconnect: skip backoff and retry immediately.
                self._set_connected(False)
                with self._lock:
                    self._reconnect_count = 0
                self._reconnect_event.clear()
                current_delay = self.retry_delay
                logger.info("Manual reconnect requested. Reconnecting now.")
                continue

            with self._lock:
                self._reconnect_count += 1
                attempt = self._reconnect_count

            # max_retries == 0 means "retry forever".
            if self.max_retries > 0 and attempt > self.max_retries:
                logger.error("Max reconnection attempts (%s) reached. Giving up.", self.max_retries)
                break

            logger.info("Reconnecting in %.1fs (attempt %s)", current_delay, attempt)

            # Sleep in 0.5s slices so stop()/reconnect() interrupt the wait
            # promptly instead of blocking for the whole backoff period.
            wait_time = 0.0
            while (
                wait_time < current_delay
                and not self._stop_event.is_set()
                and not self._reconnect_event.is_set()
            ):
                await asyncio.sleep(0.5)
                wait_time += 0.5

            if self._reconnect_event.is_set():
                continue

            current_delay = min(current_delay * 2, self._max_retry_delay)

        with self._lock:
            self._active_ws = None
            self._loop = None

        logger.info("Disconnected from server.")

    async def _consume_welcome(self, ws: Any) -> None:
        """
        Read and process the initial welcome message when present.

        :param ws: Active websocket client.
        :returns: ``None``.
        """
        try:
            welcome_message = await ws.recv()
            welcome = json.loads(welcome_message)
        except Exception:
            # Servers that send no (or a non-JSON) first frame are tolerated:
            # mark connected without pool metadata.
            self._set_connected(True)
            return

        if isinstance(welcome, dict) and welcome.get("type") == "welcome":
            self._set_connected(
                True,
                # `str(x or "") or None` collapses missing/empty values to None.
                remote_address=str(welcome.get("public_address") or "") or None,
                remote_address_roundrobin=str(welcome.get("public_address_generic") or "") or None,
                worker_index=int(welcome.get("worker_index", -1)),
                pool_size=int(welcome.get("pool_size", 0)),
            )
            return

        self._set_connected(True)

    async def _handle_messages(self, ws: Any) -> None:
        """
        Handle request/response proxy messages over the worker websocket.

        :param ws: Active websocket client.
        :returns: ``None``.
        """
        async with httpx.AsyncClient(timeout=Timeout(timeout=15.0)) as http_client:
            while not self._stop_event.is_set() and not self._reconnect_event.is_set():
                try:
                    # 1s recv timeout keeps the loop responsive to stop/reconnect.
                    message = await asyncio.wait_for(ws.recv(), timeout=1.0)
                    request = json.loads(message)
                    response = await self._forward_request(http_client, request)
                    await ws.send(json.dumps(response))
                except asyncio.TimeoutError:
                    continue
                except websockets.exceptions.ConnectionClosed:
                    break
                except Exception as exc:
                    # Log-and-continue: one bad message must not kill the worker.
                    logger.error("Error handling message: %s", exc)

    async def _forward_request(self, client: Any, request: dict[str, Any]) -> dict[str, Any]:
        """
        Forward a single proxied request to the configured local target.

        :param client: HTTP client supporting ``request``.
        :param request: Incoming tunnel request payload.
        :returns: Tunnel response payload.
        """
        request_id = str(request.get("id", ""))
        method = str(request.get("method", "GET"))
        path = str(request.get("path", "/"))
        headers = request.get("headers", {})
        body_b64 = str(request.get("body", ""))

        # Bodies travel base64-encoded over the websocket JSON protocol.
        body = base64.b64decode(body_b64) if body_b64 else None

        url = urljoin(f"{self.target}/", path.lstrip("/"))
        filtered_headers = self._filter_hop_by_hop_headers(headers)

        try:
            response = await client.request(
                method=method,
                url=url,
                headers=filtered_headers,
                content=body,
                follow_redirects=True,
            )
            return {
                "id": request_id,
                "status": response.status_code,
                "headers": dict(response.headers.items()),
                "body": base64.b64encode(response.content).decode("utf-8"),
            }
        except Exception as exc:
            logger.error("Error forwarding request: %s", exc)
            # Report local-target failures upstream as a 502 so the original
            # HTTP caller still receives a well-formed response.
            error_body = json.dumps({"error": str(exc)}).encode("utf-8")
            return {
                "id": request_id,
                "status": 502,
                "headers": {"Content-Type": "application/json"},
                "body": base64.b64encode(error_body).decode("utf-8"),
            }

    def _build_tunnel_url(self) -> str:
        """
        Build the worker websocket URL.

        :returns: WebSocket endpoint URL.
        """
        # NOTE(review): assumes self.host already carries a ws:// or wss://
        # scheme — confirm against callers/README.
        return f"{self.host}/api/tunnels/connection"

    def _build_connection_headers(self) -> dict[str, str]:
        """
        Build outbound handshake headers.

        :returns: Header dictionary with bearer authorization only.
        """
        with self._lock:
            return {"Authorization": f"Bearer {self.secret}"}

    def _set_connected(
        self,
        value: bool,
        remote_address: Optional[str] = None,
        remote_address_roundrobin: Optional[str] = None,
        worker_index: int = -1,
        pool_size: int = 0,
    ) -> None:
        """
        Update connection state atomically.

        :param value: New connected state.
        :param remote_address: Worker URL from welcome message.
        :param remote_address_roundrobin: Pool URL from welcome message.
        :param worker_index: Worker index from welcome message.
        :param pool_size: Pool size from welcome message.
        :returns: ``None``.
        """
        with self._lock:
            self._connected = value
            # Disconnecting always clears the welcome-derived metadata.
            self._remote_address = remote_address if value else None
            self._remote_address_roundrobin = remote_address_roundrobin if value else None
            self._worker_index = worker_index if value else -1
            self._pool_size = pool_size if value else 0

    def _build_ssl_context(self, tunnel_url: str) -> Optional[ssl.SSLContext]:
        """
        Create SSL context for secure websocket URLs.

        :param tunnel_url: Resolved websocket URL.
        :returns: SSL context for ``wss`` or ``None`` for plain ``ws``.
        """
        if not tunnel_url.startswith("wss://"):
            return None

        ssl_context = ssl.create_default_context()
        if self.verify_ssl:
            return ssl_context

        # verify_ssl=False: explicitly disable both hostname and cert checks
        # (intended for self-signed/dev endpoints only).
        ssl_context.check_hostname = False
        ssl_context.verify_mode = ssl.CERT_NONE
        return ssl_context

    @staticmethod
    def _filter_hop_by_hop_headers(headers: Any) -> dict[str, str]:
        """
        Remove hop-by-hop headers before forwarding to the local target.

        :param headers: Request header mapping.
        :returns: Filtered header mapping.
        """
        if not isinstance(headers, dict):
            return {}

        hop_by_hop = {"connection", "keep-alive", "transfer-encoding", "upgrade"}
        return {
            str(key): str(value)
            for key, value in headers.items()
            if str(key).lower() not in hop_by_hop
        }
@@ -0,0 +1,35 @@
1
+ Metadata-Version: 2.4
2
+ Name: lange-python
3
+ Version: 0.1.0
4
+ Summary: A bundled set of tools and clients for the lange-suite of tools and more.
5
+ Author: contact@robertlange.me
6
+ Requires-Python: >=3.13
7
+ Classifier: Programming Language :: Python :: 3
8
+ Classifier: Programming Language :: Python :: 3.13
9
+ Classifier: Programming Language :: Python :: 3.14
10
+ Requires-Dist: click (>=8.3.1,<9.0.0)
11
+ Requires-Dist: httpx (>=0.28.1,<0.29.0)
12
+ Requires-Dist: pydantic (>=2.12.5,<3.0.0)
13
+ Requires-Dist: websockets (>=12.0,<20.0)
14
+ Description-Content-Type: text/markdown
15
+
16
+ # lange-python
17
+
18
+ Python helpers and clients for Lange services.
19
+
20
+ ## Tunnel worker
21
+
22
+ ```python
23
+ from lange.tunnel import Tunnel
24
+
25
+ tunnel = Tunnel(
26
+ host="example.com",
27
+ secret="your-bearer-token",
28
+ target="http://localhost:3000",
29
+ )
30
+
31
+ tunnel.start()
32
+ # ...
33
+ tunnel.stop()
34
+ ```
35
+
@@ -0,0 +1,9 @@
1
+ lange/__init__.py,sha256=rfB-SZY_9PXuWSyE329ceK7H49LNwmZ-83RTjEYNP_Y,97
2
+ lange/__main__.py,sha256=es3-sJuP_NnS-se9FQ6n3taoNCcbu0yIh0zfcsZ1304,110
3
+ lange/cli.py,sha256=PDw-50YeTP2DK2xW-r34q3fGWx45Exz8P4RZh4Qj8Ok,8317
4
+ lange/services_schema.py,sha256=uRYME3BvqzNWjVOA04zEPBjCCoZb4smQ6AhRBHa5-tc,1290
5
+ lange/tunnel.py,sha256=UGyne3tkQ8HWD6cJVRzDOBTEpzzRz1L_ryT9tP1EErc,14901
6
+ lange_python-0.1.0.dist-info/METADATA,sha256=uYkVfnzCbf77CJtPsgJ_dn_9f8v0uWXfseAyo113jM0,820
7
+ lange_python-0.1.0.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
8
+ lange_python-0.1.0.dist-info/entry_points.txt,sha256=lPudEWQJNXU-Gj3MVgIvdy-VLdjUNCLY7jwY1m8HY40,39
9
+ lange_python-0.1.0.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: poetry-core 2.2.1
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
@@ -0,0 +1,3 @@
1
+ [console_scripts]
2
+ lange=lange.cli:cli
3
+