unitlab 2.3.32__py3-none-any.whl → 2.3.34__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
unitlab/client.py CHANGED
@@ -12,12 +12,8 @@ import subprocess
  import signal
  import re
  import time
- import threading
  import psutil
  from datetime import datetime, timezone
- from .tunnel_config import CloudflareTunnel
- from .cloudflare_api_tunnel import CloudflareAPITunnel
- from .simple_tunnel import SimpleTunnel
  from .utils import get_api_url, handle_exceptions
  from pathlib import Path

@@ -29,15 +25,6 @@ except ImportError:
  HAS_GPU = False


- try:
- from dotenv import load_dotenv
- env_path = Path(__file__).parent.parent.parent / '.env'
- if env_path.exists():
- load_dotenv(env_path)
- except ImportError:
- pass # dotenv not installed, use system env vars only
-
-
  logger = logging.getLogger(__name__)

  class UnitlabClient:
@@ -88,6 +75,7 @@ class UnitlabClient:
  self.hostname = socket.gethostname()
  self.tunnel_manager = None
  self.jupyter_url = None
+ self.api_expose_url = None
  self.ssh_url = None
  self.jupyter_proc = None
  self.tunnel_proc = None
@@ -289,6 +277,8 @@ class UnitlabClient:
  # Don't call run() here - it has infinite loop. Call start() in setup_tunnels()
  self.jupyter_url = None
  self.ssh_url = None
+ self.api_url = None
+

  except ImportError as e:
  logger.warning(f"Could not import PersistentTunnel: {e}")
@@ -399,26 +389,34 @@ class UnitlabClient:

  logger.info("Setting up Cloudflare tunnel...")

- # Both SimpleTunnel and AutoTunnel handle Jupyter internally
+
  if self.tunnel_manager.start():
- # Store the processes for monitoring
+
  self.jupyter_proc = self.tunnel_manager.jupyter_process
- # Update URLs after tunnel starts successfully
+
  self.jupyter_url = self.tunnel_manager.jupyter_url
- self.ssh_url = self.tunnel_manager.jupyter_url
+ self.api_expose_url = self.tunnel_manager.api_expose_url
  logger.info(f"Tunnel started successfully at {self.jupyter_url}")
  self.tunnel_proc = self.tunnel_manager.tunnel_process
  self.jupyter_port = "8888" # Both use fixed port

- # Get the URL (AutoTunnel generates it dynamically)
+
+ if hasattr(self.tunnel_manager, 'ssh_url'):
+ self.ssh_url = self.tunnel_manager.ssh_url
+ else:
+
+ self.ssh_url = self.jupyter_url
+
+
  if hasattr(self.tunnel_manager, 'tunnel_url') and self.tunnel_manager.tunnel_url:
  self.jupyter_url = self.tunnel_manager.tunnel_url
- self.ssh_url = self.tunnel_manager.tunnel_url
+ if not hasattr(self.tunnel_manager, 'ssh_url'):
+ self.ssh_url = self.tunnel_manager.tunnel_url
  elif hasattr(self.tunnel_manager, 'jupyter_url'):
  self.jupyter_url = self.tunnel_manager.jupyter_url
- self.ssh_url = self.tunnel_manager.jupyter_url
+ if not hasattr(self.tunnel_manager, 'ssh_url'):
+ self.ssh_url = self.tunnel_manager.jupyter_url

- # The tunnel is now running
  logger.info("✅ Tunnel and Jupyter established")
  logger.info("URL: {}".format(self.jupyter_url))
  self.report_services()
@@ -470,12 +468,15 @@ class UnitlabClient:

  logger.info(f"Reporting Jupyter service with URL: {self.jupyter_url}")
  logger.debug(f"API key present: {bool(self.api_key)}")
+
  if self.api_key:
  logger.debug(f"API key value: {self.api_key[:8]}...")
+
  jupyter_response = self._post_device(
  f"/api/tunnel/agent/jupyter/{self.device_id}/",
  jupyter_data
  )
+
  logger.info(f"Reported Jupyter service: {jupyter_response.status_code if hasattr(jupyter_response, 'status_code') else jupyter_response}")

  # Report SSH service (always report, even if SSH is not running locally)
@@ -506,7 +507,22 @@ class UnitlabClient:
  ssh_data
  )
  logger.info(f"Reported SSH service: {ssh_response.status_code if hasattr(ssh_response, 'status_code') else ssh_response}")
+ logger.info("Reporting API endpoint:")

+ api_expose_data = {
+ "service_type": "api",
+ "service_name": f"api-{self.device_id}",
+ 'local_port': None,
+ 'tunnel_url': self.api_expose_url,
+ 'status': 'online'
+ }
+
+ api_expose_response = self._post_device(
+ f"/api/tunnel/agent/api-url/{self.device_id}/",
+ api_expose_data
+ )
+ logger.info(f"Reported Api service: {api_expose_response.status_code if hasattr(api_expose_response, 'status_code') else api_expose_response}")
+
  except Exception as e:
  logger.error(f"Failed to report services: {e}", exc_info=True)

@@ -550,55 +566,55 @@ class UnitlabClient:

  return metrics

- def send_metrics(self):
- """Send metrics to server"""
- try:
- metrics = self.collect_metrics()
+ # def send_metrics(self):
+ # """Send metrics to server"""
+ # try:
+ # metrics = self.collect_metrics()

- # Send CPU metrics
- if 'cpu' in metrics:
- self._post_device(f"/api/tunnel/agent/cpu/{self.device_id}/", metrics['cpu'])
+ # # Send CPU metrics
+ # if 'cpu' in metrics:
+ # self._post_device(f"/api/tunnel/agent/cpu/{self.device_id}/", metrics['cpu'])

- # Send RAM metrics
- if 'ram' in metrics:
- self._post_device(f"/api/tunnel/agent/ram/{self.device_id}/", metrics['ram'])
+ # # Send RAM metrics
+ # if 'ram' in metrics:
+ # self._post_device(f"/api/tunnel/agent/ram/{self.device_id}/", metrics['ram'])

- # Send GPU metrics if available
- if 'gpu' in metrics and metrics['gpu']:
- self._post_device(f"/api/tunnel/agent/gpu/{self.device_id}/", metrics['gpu'])
+ # # Send GPU metrics if available
+ # if 'gpu' in metrics and metrics['gpu']:
+ # self._post_device(f"/api/tunnel/agent/gpu/{self.device_id}/", metrics['gpu'])

- logger.debug(f"Metrics sent - CPU: {metrics['cpu']['percent']:.1f}%, RAM: {metrics['ram']['percent']:.1f}%")
+ # logger.debug(f"Metrics sent - CPU: {metrics['cpu']['percent']:.1f}%, RAM: {metrics['ram']['percent']:.1f}%")

- except Exception as e:
- logger.error(f"Failed to send metrics: {e}")
+ # except Exception as e:
+ # logger.error(f"Failed to send metrics: {e}")

- def metrics_loop(self):
- """Background thread for sending metrics"""
- logger.info("Starting metrics thread")
+ # def metrics_loop(self):
+ # """Background thread for sending metrics"""
+ # logger.info("Starting metrics thread")

- while self.running:
- try:
- self.send_metrics()
+ # while self.running:
+ # try:
+ # self.send_metrics()

- # Check if processes are still running
- if self.jupyter_proc and self.jupyter_proc.poll() is not None:
- logger.warning("Jupyter process died")
- self.jupyter_proc = None
+ # # Check if processes are still running
+ # if self.jupyter_proc and self.jupyter_proc.poll() is not None:
+ # logger.warning("Jupyter process died")
+ # self.jupyter_proc = None

- if self.tunnel_proc and self.tunnel_proc.poll() is not None:
- logger.warning("Tunnel process died")
- self.tunnel_proc = None
+ # if self.tunnel_proc and self.tunnel_proc.poll() is not None:
+ # logger.warning("Tunnel process died")
+ # self.tunnel_proc = None

- except Exception as e:
- logger.error(f"Metrics loop error: {e}")
+ # except Exception as e:
+ # logger.error(f"Metrics loop error: {e}")

- # Wait for next interval (default 5 seconds)
- for _ in range(3):
- if not self.running:
- break
- time.sleep(1)
+ # # Wait for next interval (default 5 seconds)
+ # for _ in range(3):
+ # if not self.running:
+ # break
+ # time.sleep(1)

- logger.info("Metrics thread stopped")
+ # logger.info("Metrics thread stopped")

  def run_device_agent(self):
  """Main run method for device agent"""
@@ -612,17 +628,16 @@ class UnitlabClient:
  # Check SSH
  self.check_ssh()

- # SimpleTunnel handles Jupyter internally, so we don't start it separately
- # Just setup the tunnels which will also start Jupyter
+
  logger.info("Starting integrated Jupyter and tunnel...")

- # Setup tunnels
+
  if not self.setup_tunnels():
  logger.error("Failed to setup tunnels")
  self.cleanup_device_agent()
  return

- # Print access information
+
  logger.info("=" * 50)
  logger.info("🎉 All services started successfully!")
  logger.info(f"📔 Jupyter: {self.jupyter_url}")
@@ -635,8 +650,8 @@ class UnitlabClient:
  logger.info("=" * 50)

  # Start metrics thread
- self.metrics_thread = threading.Thread(target=self.metrics_loop, daemon=True)
- self.metrics_thread.start()
+ # self.metrics_thread = threading.Thread(target=self.metrics_loop, daemon=True)
+ # self.metrics_thread.start()

  # Main loop
  try:
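
Net effect of the client.py changes: the dotenv loading and the legacy tunnel imports are dropped, the in-process metrics thread is disabled, and alongside the existing Jupyter and SSH reports the agent now reports an API tunnel URL to a new /api/tunnel/agent/api-url/{device_id}/ endpoint. The sketch below mirrors that extra reporting step only; it assumes _post_device is a thin wrapper around requests.post carrying the agent's API key — the wrapper and its auth header scheme are not shown in this diff, so those details are assumptions rather than the package's actual code.

import requests

def report_api_endpoint(server_url: str, api_key: str, device_id: str, api_expose_url: str) -> requests.Response:
    """Sketch only: mirrors the payload added to report_services() in 2.3.34."""
    api_expose_data = {
        "service_type": "api",
        "service_name": f"api-{device_id}",
        "local_port": None,            # the tunnel fronts the service, so no local port is reported
        "tunnel_url": api_expose_url,  # value taken from tunnel_manager.api_expose_url
        "status": "online",
    }
    # Assumed transport; the real client routes this through self._post_device(...)
    return requests.post(
        f"{server_url.rstrip('/')}/api/tunnel/agent/api-url/{device_id}/",
        json=api_expose_data,
        headers={"Authorization": f"Api-Key {api_key}"},  # header scheme is an assumption
        timeout=10,
    )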
unitlab/main.py CHANGED
@@ -2,16 +2,21 @@ from enum import Enum
  from pathlib import Path
  from uuid import UUID
  import logging
- import os
+ import threading

  import typer
  import validators
  from typing_extensions import Annotated
+ import psutil
+ import uvicorn
+ from fastapi import FastAPI,Response
+

  from . import utils
  from .client import UnitlabClient


+
  app = typer.Typer()
  project_app = typer.Typer()
  dataset_app = typer.Typer()
@@ -45,6 +50,8 @@ class AnnotationType(str, Enum):
  IMG_POINT = "img_point"


+
+
  @app.command(help="Configure the credentials")
  def configure(
  api_key: Annotated[str, typer.Option(help="The api-key obtained from unitlab.ai")],
@@ -111,18 +118,6 @@ def dataset_download(
  get_client(api_key).dataset_download_files(pk)


- def send_metrics_to_server(server_url: str, device_id: str, metrics: dict):
- """Standalone function to send metrics to server using client"""
- client = UnitlabClient(api_key="dummy") # API key not needed for metrics
- return client.send_metrics_to_server(server_url, device_id, metrics)
-
-
- def send_metrics_into_server():
- """Standalone function to collect system metrics using client"""
- client = UnitlabClient(api_key="dummy") # API key not needed for metrics
- return client.collect_system_metrics()
-
-
  @agent_app.command(name="run", help="Run the device agent with Jupyter, SSH tunnels and metrics")
  def run_agent(
  api_key: API_KEY,
@@ -130,53 +125,32 @@ def run_agent(
  base_domain: Annotated[str, typer.Option(help="Base domain for tunnels")] = "1scan.uz",

  ):
- """Run the full device agent with Jupyter, SSH tunnels and metrics reporting"""
-
- # Setup logging
+
  logging.basicConfig(
  level=logging.INFO,
  format='%(asctime)s - %(levelname)s - %(message)s',
  handlers=[logging.StreamHandler()]
  )

- # Get server URL from environment or use default
- server_url = 'https://api-dev.unitlab.ai/'
+ server_url = 'http://localhost:8000/'

- # Generate unique device ID if not provided
  if not device_id:
  import uuid
  import platform
- from pathlib import Path
-
- # Try environment variable first
- device_id = os.getenv('DEVICE_ID')
- if not device_id:
- # Try to load saved device ID
- device_id_file = Path.home() / '.unitlab' / 'device_id'
- device_id_file.parent.mkdir(exist_ok=True, parents=True)
-
- if device_id_file.exists():
- device_id = device_id_file.read_text().strip()
- print(f"📌 Using saved device ID: {device_id}")
- else:
- # Generate a unique ID based on hostname and random UUID
- hostname = platform.node().replace('.', '-').replace(' ', '-')[:20]
- random_suffix = str(uuid.uuid4())[:8]
- device_id = f"{hostname}-{random_suffix}"
-
- # Save for future runs
- device_id_file.write_text(device_id)
- print(f"📝 Generated and saved device ID: {device_id}")
+
+ hostname = platform.node().replace('.', '-').replace(' ', '-')[:20]
+ random_suffix = str(uuid.uuid4())[:8]
+ device_id = f"{hostname}-{random_suffix}"
+ print(f"📝 Generated unique device ID: {device_id}")

-
- # Create client and initialize device agent
+
  client = UnitlabClient(api_key=api_key)
  client.initialize_device_agent(
  server_url=server_url,
  device_id=device_id,
  base_domain=base_domain
  )
-
+
  try:
  client.run_device_agent()
  except Exception as e:
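
main.py now imports threading, psutil, uvicorn, and FastAPI, switches server_url to http://localhost:8000/, and always generates a fresh device ID instead of reusing a saved one. The hunks shown here do not include any server code that uses the new imports, so the following is only a rough illustration of the kind of local metrics endpoint those imports typically support; the route names, port, and start_metrics_server helper are hypothetical and not part of the package.

import threading

import psutil
import uvicorn
from fastapi import FastAPI, Response

app = FastAPI()

@app.get("/metrics")
def metrics() -> dict:
    # Basic host CPU and RAM usage via psutil; GPU stats would need an extra library.
    return {
        "cpu_percent": psutil.cpu_percent(interval=None),
        "ram_percent": psutil.virtual_memory().percent,
    }

@app.get("/healthz")
def healthz() -> Response:
    # Empty 204 response for liveness checks.
    return Response(status_code=204)

def start_metrics_server(port: int = 9901) -> threading.Thread:
    # Run uvicorn in a daemon thread so it exits with the agent process.
    thread = threading.Thread(
        target=uvicorn.run,
        kwargs={"app": app, "host": "127.0.0.1", "port": port, "log_level": "warning"},
        daemon=True,
    )
    thread.start()
    return thread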