ragaai-catalyst 2.1.5b32__py3-none-any.whl → 2.1.5b34__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,684 @@
+ """
+ trace_uploader.py - A dedicated process for handling trace uploads
+ """
+
+ import os
+ import sys
+ import json
+ import time
+ import signal
+ import logging
+ import argparse
+ import tempfile
+ from datetime import datetime
+ import atexit
+ from logging.handlers import RotatingFileHandler
+
+ # Set up logging
+ log_dir = os.path.join(tempfile.gettempdir(), "ragaai_logs")
+ os.makedirs(log_dir, exist_ok=True)
+
+ # Define maximum log file size (5 MB) and backup count
+ max_file_size = 5 * 1024 * 1024  # 5 MB
+ backup_count = 1  # Number of backup files to keep
+
+ logging.basicConfig(
+     level=logging.DEBUG,
+     format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+     handlers=[
+         logging.StreamHandler(),
+         RotatingFileHandler(
+             os.path.join(log_dir, "trace_uploader.log"),
+             maxBytes=max_file_size,
+             backupCount=backup_count
+         )
+     ]
+ )
+ logger = logging.getLogger("trace_uploader")
+
+ try:
+     from ragaai_catalyst.tracers.agentic_tracing.upload.upload_agentic_traces import UploadAgenticTraces
+     from ragaai_catalyst.tracers.agentic_tracing.upload.upload_code import upload_code
+     from ragaai_catalyst.tracers.agentic_tracing.upload.upload_trace_metric import upload_trace_metric
+     from ragaai_catalyst.tracers.agentic_tracing.utils.create_dataset_schema import create_dataset_schema_with_trace
+     from ragaai_catalyst import RagaAICatalyst
+     IMPORTS_AVAILABLE = True
+ except ImportError:
+     logger.warning("RagaAI Catalyst imports not available - running in test mode")
+     IMPORTS_AVAILABLE = False
+
+ # Define task queue directory
+ QUEUE_DIR = os.path.join(tempfile.gettempdir(), "ragaai_tasks")
+ os.makedirs(QUEUE_DIR, exist_ok=True)
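+ # Queue layout: one self-contained JSON file per task, named <task_id>.json
+ # (e.g. <tempdir>/ragaai_tasks/task_1700000000_1234_5678.json -- an
+ # illustrative name; real ids come from UploadTask below).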
+
+ # Clean up any stale processes
+ def cleanup_stale_processes():
+     """Check for stale processes but allow active uploads to complete"""
+     pid_file = os.path.join(tempfile.gettempdir(), "trace_uploader.pid")
+     if os.path.exists(pid_file):
+         try:
+             with open(pid_file, "r") as f:
+                 old_pid = int(f.read().strip())
+             try:
+                 import psutil
+                 if psutil.pid_exists(old_pid):
+                     p = psutil.Process(old_pid)
+                     if "trace_uploader.py" in " ".join(p.cmdline()):
+                         # Instead of terminating, just remove the PID file
+                         # This allows the process to finish its current uploads
+                         logger.info(f"Removing PID file for process {old_pid}")
+                         os.remove(pid_file)
+                         return
+             except Exception as e:
+                 logger.warning(f"Error checking stale process: {e}")
+             os.remove(pid_file)
+         except Exception as e:
+             logger.warning(f"Error reading PID file: {e}")
+
+ cleanup_stale_processes()
+
+ # Status codes
+ STATUS_PENDING = "pending"
+ STATUS_PROCESSING = "processing"
+ STATUS_COMPLETED = "completed"
+ STATUS_FAILED = "failed"
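+
+ # Task lifecycle (as implemented below): pending -> processing -> completed
+ # (task file deleted), or back to pending on error and retried until
+ # attempts reach max_attempts, after which the task is marked failed.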
+
+ class UploadTask:
+     """Class representing a single upload task"""
+
+     def __init__(self, task_id=None, **kwargs):
+         self.task_id = task_id or f"task_{int(time.time())}_{os.getpid()}_{hash(str(time.time()))}"
+         self.status = STATUS_PENDING
+         self.attempts = 0
+         self.max_attempts = 3
+         self.created_at = datetime.now().isoformat()
+         self.updated_at = self.created_at
+         self.error = None
+
+         # Task details
+         self.filepath = kwargs.get("filepath")
+         self.hash_id = kwargs.get("hash_id")
+         self.zip_path = kwargs.get("zip_path")
+         self.project_name = kwargs.get("project_name")
+         self.project_id = kwargs.get("project_id")
+         self.dataset_name = kwargs.get("dataset_name")
+         self.user_details = kwargs.get("user_details", {})
+         self.base_url = kwargs.get("base_url")
+
+     def to_dict(self):
+         """Convert task to dictionary for serialization"""
+         return {
+             "task_id": self.task_id,
+             "status": self.status,
+             "attempts": self.attempts,
+             "max_attempts": self.max_attempts,
+             "created_at": self.created_at,
+             "updated_at": self.updated_at,
+             "error": self.error,
+             "filepath": self.filepath,
+             "hash_id": self.hash_id,
+             "zip_path": self.zip_path,
+             "project_name": self.project_name,
+             "project_id": self.project_id,
+             "dataset_name": self.dataset_name,
+             "user_details": self.user_details,
+             "base_url": self.base_url
+         }
+
+     @classmethod
+     def from_dict(cls, data):
+         """Create task from dictionary"""
+         task = cls(task_id=data.get("task_id"))
+         task.status = data.get("status", STATUS_PENDING)
+         task.attempts = data.get("attempts", 0)
+         task.max_attempts = data.get("max_attempts", 3)
+         task.created_at = data.get("created_at")
+         task.updated_at = data.get("updated_at")
+         task.error = data.get("error")
+         task.filepath = data.get("filepath")
+         task.hash_id = data.get("hash_id")
+         task.zip_path = data.get("zip_path")
+         task.project_name = data.get("project_name")
+         task.project_id = data.get("project_id")
+         task.dataset_name = data.get("dataset_name")
+         task.user_details = data.get("user_details", {})
+         task.base_url = data.get("base_url")
+         return task
+
+     def update_status(self, status, error=None):
+         """Update task status"""
+         self.status = status
+         self.updated_at = datetime.now().isoformat()
+         if error:
+             self.error = str(error)
+         self.save()
+
+     def increment_attempts(self):
+         """Increment the attempt counter"""
+         self.attempts += 1
+         self.updated_at = datetime.now().isoformat()
+         self.save()
+
+     def save(self):
+         """Save task to disk"""
+         task_path = os.path.join(QUEUE_DIR, f"{self.task_id}.json")
+         with open(task_path, "w") as f:
+             json.dump(self.to_dict(), f, indent=2)
+
+     def delete(self):
+         """Delete task file from disk"""
+         task_path = os.path.join(QUEUE_DIR, f"{self.task_id}.json")
+         if os.path.exists(task_path):
+             os.remove(task_path)
+
+     @staticmethod
+     def list_pending_tasks():
+         """List all pending (and retryable failed) tasks"""
+         tasks = []
+         for filename in os.listdir(QUEUE_DIR):
+             if filename.endswith(".json"):
+                 try:
+                     with open(os.path.join(QUEUE_DIR, filename), "r") as f:
+                         task_data = json.load(f)
+                     task = UploadTask.from_dict(task_data)
+                     if task.status in [STATUS_PENDING, STATUS_FAILED] and task.attempts < task.max_attempts:
+                         # Verify files still exist
+                         if (not task.filepath or os.path.exists(task.filepath)) and \
+                                 (not task.zip_path or os.path.exists(task.zip_path)):
+                             tasks.append(task)
+                         else:
+                             # Files missing, mark as failed
+                             task.update_status(STATUS_FAILED, "Required files missing")
+                 except Exception as e:
+                     logger.error(f"Error loading task {filename}: {e}")
+         return tasks
+
+
+ class TraceUploader:
+     """
+     Trace uploader process
+     Handles the actual upload work in a separate process
+     """
+
+     def __init__(self):
+         self.running = True
+         self.processing = False
+
+     def start(self):
+         """Start the uploader loop"""
+         logger.info("Trace uploader starting")
+
+         # Register signal handlers
+         signal.signal(signal.SIGTERM, self.handle_signal)
+         signal.signal(signal.SIGINT, self.handle_signal)
+
+         # Register cleanup
+         atexit.register(self.cleanup)
+
+         # Main processing loop
+         while self.running:
+             try:
+                 # Get pending tasks
+                 tasks = UploadTask.list_pending_tasks()
+                 if tasks:
+                     logger.info(f"Found {len(tasks)} pending tasks")
+                     for task in tasks:
+                         if not self.running:
+                             break
+                         self.process_task(task)
+                 else:
+                     # No tasks, sleep before checking again
+                     time.sleep(5)
+             except Exception as e:
+                 logger.error(f"Error in uploader loop: {e}")
+                 time.sleep(5)
+
+         logger.info("Trace uploader stopped")
+
+     def process_task(self, task):
+         """Process a single upload task"""
+         logger.info(f"Starting to process task {task.task_id}")
+         logger.debug(f"Task details: {task.to_dict()}")
+
+         # Check if the trace file exists (filepath may be None)
+         if not task.filepath or not os.path.exists(task.filepath):
+             error_msg = f"Task filepath does not exist: {task.filepath}"
+             logger.error(error_msg)
+             task.update_status(STATUS_FAILED, error_msg)
+             return
+
+         if not IMPORTS_AVAILABLE:
+             logger.warning(f"Test mode: Simulating processing of task {task.task_id}")
+             time.sleep(2)  # Simulate work
+             task.update_status(STATUS_COMPLETED)  # Complete it so the loop does not re-process it forever
+             return
+
+         logger.info(f"Processing task {task.task_id} (attempt {task.attempts + 1}/{task.max_attempts})")
+         self.processing = True
+         task.update_status(STATUS_PROCESSING)
+         task.increment_attempts()
+
+         # Log memory state for debugging
+         try:
+             import psutil
+             process = psutil.Process()
+             logger.debug(f"Memory usage before processing: {process.memory_info().rss / 1024 / 1024:.2f} MB")
+         except ImportError:
+             pass
+
+         try:
+             # Step 1: Create dataset schema
+             logger.info(f"Creating dataset schema for {task.dataset_name} with base_url: {task.base_url}")
+             response = create_dataset_schema_with_trace(
+                 dataset_name=task.dataset_name,
+                 project_name=task.project_name,
+                 base_url=task.base_url
+             )
+             logger.info(f"Dataset schema created: {response}")
+
+             # Step 2: Upload trace metrics
+             if task.filepath and os.path.exists(task.filepath):
+                 logger.info(f"Uploading trace metrics for {task.filepath}")
+                 try:
+                     response = upload_trace_metric(
+                         json_file_path=task.filepath,
+                         dataset_name=task.dataset_name,
+                         project_name=task.project_name,
+                         base_url=task.base_url
+                     )
+                     logger.info(f"Trace metrics uploaded: {response}")
+                 except Exception as e:
+                     logger.error(f"Error uploading trace metrics: {e}")
+                     # Continue with other uploads
+             else:
+                 logger.warning(f"Trace file {task.filepath} not found, skipping metrics upload")
+
+             # Step 3: Upload agentic traces
+             if task.filepath and os.path.exists(task.filepath):
+                 logger.info(f"Uploading agentic traces for {task.filepath}")
+                 try:
+                     upload_traces = UploadAgenticTraces(
+                         json_file_path=task.filepath,
+                         project_name=task.project_name,
+                         project_id=task.project_id,
+                         dataset_name=task.dataset_name,
+                         user_detail=task.user_details,
+                         base_url=task.base_url,
+                     )
+                     upload_traces.upload_agentic_traces()
+                     logger.info("Agentic traces uploaded successfully")
+                 except Exception as e:
+                     logger.error(f"Error uploading agentic traces: {e}")
+                     # Continue with code upload
+             else:
+                 logger.warning(f"Trace file {task.filepath} not found, skipping traces upload")
+
+             # Step 4: Upload code hash
+             if task.hash_id and task.zip_path and os.path.exists(task.zip_path):
+                 logger.info(f"Uploading code hash {task.hash_id}")
+                 try:
+                     response = upload_code(
+                         hash_id=task.hash_id,
+                         zip_path=task.zip_path,
+                         project_name=task.project_name,
+                         dataset_name=task.dataset_name,
+                         base_url=task.base_url
+                     )
+                     logger.info(f"Code hash uploaded: {response}")
+                 except Exception as e:
+                     logger.error(f"Error uploading code hash: {e}")
+             else:
+                 logger.warning(f"Code hash or zip {task.zip_path} not available, skipping code upload")
+
+             # Mark task as completed
+             task.update_status(STATUS_COMPLETED)
+             logger.info(f"Task {task.task_id} completed successfully")
+
+             # Clean up task file
+             task.delete()
+
+         except Exception as e:
+             logger.error(f"Error processing task {task.task_id}: {e}")
+             if task.attempts >= task.max_attempts:
+                 task.update_status(STATUS_FAILED, str(e))
+                 logger.error(f"Task {task.task_id} failed after {task.attempts} attempts")
+             else:
+                 task.update_status(STATUS_PENDING, str(e))
+                 logger.warning(f"Task {task.task_id} will be retried (attempt {task.attempts}/{task.max_attempts})")
+         finally:
+             self.processing = False
+
+     def handle_signal(self, signum, frame):
+         """Handle termination signals"""
+         logger.info(f"Received signal {signum}, shutting down gracefully")
+         self.running = False
+
+     def cleanup(self):
+         """Cleanup before exit"""
+         logger.info("Performing cleanup before exit")
+         self.running = False
+
+
+ def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url):
+     """
+     Submit a new upload task to the queue.
+     This function can be called from the main application.
+
+     Returns:
+         str: Task ID, or None if the task could not be queued
+     """
+     logger.info(f"Submitting new upload task for file: {filepath}")
+     logger.debug(f"Task details - Project: {project_name}, Dataset: {dataset_name}, Hash: {hash_id}, Base_URL: {base_url}")
+
+     # Verify the trace file exists
+     if not os.path.exists(filepath):
+         logger.error(f"Trace file not found: {filepath}")
+         return None
+
+     # Create task with absolute path to the trace file
+     filepath = os.path.abspath(filepath)
+     logger.debug(f"Using absolute filepath: {filepath}")
+
+     task = UploadTask(
+         filepath=filepath,
+         hash_id=hash_id,
+         zip_path=zip_path,
+         project_name=project_name,
+         project_id=project_id,
+         dataset_name=dataset_name,
+         user_details=user_details,
+         base_url=base_url
+     )
+
+     # Save the task with proper error handling
+     task_path = os.path.join(QUEUE_DIR, f"{task.task_id}.json")
+     logger.debug(f"Saving task to: {task_path}")
+
+     try:
+         # Ensure queue directory exists
+         os.makedirs(QUEUE_DIR, exist_ok=True)
+
+         with open(task_path, "w") as f:
+             json.dump(task.to_dict(), f, indent=2)
+
+         logger.info(f"Task {task.task_id} created successfully for trace file: {filepath}")
+     except Exception as e:
+         logger.error(f"Error creating task file: {e}", exc_info=True)
+         return None
+
+     # Ensure uploader process is running
+     logger.info("Starting uploader process...")
+     pid = ensure_uploader_running()
+     if pid:
+         logger.info(f"Uploader process running with PID {pid}")
+     else:
+         logger.warning("Failed to start uploader process, but task was queued")
+
+     return task.task_id
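+
+ # Example call from the host application (all values hypothetical, for
+ # illustration only):
+ #   task_id = submit_upload_task(
+ #       filepath="/tmp/trace.json", hash_id="abc123", zip_path="/tmp/code.zip",
+ #       project_name="my_project", project_id="42", dataset_name="run_1",
+ #       user_details={"id": "user"}, base_url="https://api.example.com")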
+
+
+ def get_task_status(task_id):
+     """
+     Get the status of a task by ID.
+     This function can be called from the main application.
+
+     Returns:
+         dict: Task status information
+     """
+     task_path = os.path.join(QUEUE_DIR, f"{task_id}.json")
+     if not os.path.exists(task_path):
+         # Check if it might be in completed directory
+         completed_path = os.path.join(QUEUE_DIR, "completed", f"{task_id}.json")
+         if os.path.exists(completed_path):
+             with open(completed_path, "r") as f:
+                 return json.load(f)
+         return {"status": "unknown", "error": "Task not found"}
+
+     with open(task_path, "r") as f:
+         return json.load(f)
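+
+ # A status lookup is a plain read of the task's JSON file, e.g. (task id made
+ # up for illustration):
+ #   get_task_status("task_1700000000_1234_5678")
+ # returns the serialized UploadTask dict while the task is queued, and
+ # {"status": "unknown", "error": "Task not found"} once the task file has
+ # been deleted on completion.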
+
+
+ def ensure_uploader_running():
+     """
+     Ensure the uploader process is running.
+     Starts it if not already running.
+     """
+     logger.info("Checking if uploader process is running...")
+
+     # Check if we can find a running process
+     pid_file = os.path.join(tempfile.gettempdir(), "trace_uploader.pid")
+     logger.debug(f"PID file location: {pid_file}")
+
+     if os.path.exists(pid_file):
+         try:
+             with open(pid_file, "r") as f:
+                 pid_str = f.read().strip()
+                 logger.debug(f"Read PID from file: {pid_str}")
+                 pid = int(pid_str)
+
+             # Check if the process is actually running, using a
+             # platform-specific check first
+             is_running = False
+             try:
+                 if os.name == 'posix':  # Unix/Linux/Mac
+                     logger.debug(f"Checking process {pid} on Unix/Mac")
+                     os.kill(pid, 0)  # Raises an exception if the process doesn't exist
+                     is_running = True
+                 else:  # Windows
+                     logger.debug(f"Checking process {pid} on Windows")
+                     import ctypes
+                     kernel32 = ctypes.windll.kernel32
+                     SYNCHRONIZE = 0x00100000
+                     process = kernel32.OpenProcess(SYNCHRONIZE, False, pid)
+                     if process:
+                         kernel32.CloseHandle(process)
+                         is_running = True
+             except (ImportError, AttributeError) as e:
+                 logger.debug(f"Platform-specific check failed: {e}, falling back to cross-platform check")
+                 # Fall back to cross-platform check
+                 try:
+                     import psutil
+                     is_running = psutil.pid_exists(pid)
+                     logger.debug(f"psutil check result: {is_running}")
+                 except ImportError:
+                     logger.debug("psutil not available, using basic process check")
+                     # If psutil is not available, make a best guess
+                     try:
+                         os.kill(pid, 0)
+                         is_running = True
+                     except Exception as e:
+                         logger.debug(f"Basic process check failed: {e}")
+                         is_running = False
+
+             if is_running:
+                 logger.debug(f"Uploader process already running with PID {pid}")
+                 return pid
+         except (ProcessLookupError, ValueError, PermissionError):
+             # Process not running or other error, remove stale PID file
+             try:
+                 os.remove(pid_file)
+             except OSError:
+                 pass
+
+     # Start a new process
+     logger.info("Starting new uploader process")
+
+     # Get the path to this script
+     script_path = os.path.abspath(__file__)
+
+     # Start a detached process in a platform-specific way
+     try:
+         # First, try the preferred method for each platform
+         if os.name == 'posix':  # Unix/Linux/Mac
+             import subprocess
+
+             # Use the double-fork method on Unix systems
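+             # (the first fork lets this caller return immediately; setsid()
+             # detaches from the controlling terminal; the second fork ensures
+             # the daemon can never re-acquire one)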
+             try:
+                 # First fork
+                 pid = os.fork()
+                 if pid > 0:
+                     # Parent process, return
+                     return pid
+
+                 # Decouple from parent environment
+                 os.chdir('/')
+                 os.setsid()
+                 os.umask(0)
+
+                 # Second fork
+                 pid = os.fork()
+                 if pid > 0:
+                     # Exit from second parent
+                     os._exit(0)
+
+                 # Redirect standard file descriptors
+                 sys.stdout.flush()
+                 sys.stderr.flush()
+                 si = open(os.devnull, 'r')
+                 so = open(os.path.join(tempfile.gettempdir(), 'trace_uploader_stdout.log'), 'a+')
+                 se = open(os.path.join(tempfile.gettempdir(), 'trace_uploader_stderr.log'), 'a+')
+                 os.dup2(si.fileno(), sys.stdin.fileno())
+                 os.dup2(so.fileno(), sys.stdout.fileno())
+                 os.dup2(se.fileno(), sys.stderr.fileno())
+
+                 # Execute the daemon process
+                 os.execl(sys.executable, sys.executable, script_path, '--daemon')
+
+             except (AttributeError, OSError):
+                 # Fork not available, try subprocess
+                 process = subprocess.Popen(
+                     [sys.executable, script_path, "--daemon"],
+                     stdout=subprocess.PIPE,
+                     stderr=subprocess.PIPE,
+                     stdin=subprocess.PIPE,
+                     start_new_session=True  # Detach from parent
+                 )
+                 pid = process.pid
+
+         else:  # Windows
+             import subprocess
+             # Use the DETACHED_PROCESS flag on Windows
+             startupinfo = subprocess.STARTUPINFO()
+             startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+             startupinfo.wShowWindow = 0  # SW_HIDE
+
+             # Windows-specific flags
+             DETACHED_PROCESS = 0x00000008
+             CREATE_NO_WINDOW = 0x08000000
+
+             process = subprocess.Popen(
+                 [sys.executable, script_path, "--daemon"],
+                 stdout=subprocess.PIPE,
+                 stderr=subprocess.PIPE,
+                 stdin=subprocess.PIPE,
+                 startupinfo=startupinfo,
+                 creationflags=DETACHED_PROCESS | CREATE_NO_WINDOW
+             )
+             pid = process.pid
+
+         # Write PID to file
+         with open(pid_file, "w") as f:
+             f.write(str(pid))
+
+         logger.info(f"Started uploader process with PID {pid}")
+         return pid
+
+     except Exception as e:
+         logger.error(f"Error starting uploader process using primary method: {e}")
+
+         # Fallback method using multiprocessing (works on most platforms)
+         try:
+             logger.info("Trying fallback method with multiprocessing")
+             import multiprocessing
+
+             def run_uploader():
+                 """Run the uploader in a separate process"""
+                 # Redirect output
+                 sys.stdout = open(os.path.join(tempfile.gettempdir(), 'trace_uploader_stdout.log'), 'a+')
+                 sys.stderr = open(os.path.join(tempfile.gettempdir(), 'trace_uploader_stderr.log'), 'a+')
+
+                 # Run daemon
+                 run_daemon()
+
+             # Start process
+             process = multiprocessing.Process(target=run_uploader)
+             process.daemon = True  # Daemonize it
+             process.start()
+             pid = process.pid
+
+             # Write PID to file
+             with open(pid_file, "w") as f:
+                 f.write(str(pid))
+
+             logger.info(f"Started uploader process with fallback method, PID {pid}")
+             return pid
+
+         except Exception as e2:
+             logger.error(f"Error starting uploader process using fallback method: {e2}")
+
+             # Super fallback - run in the current process if all else fails
+             # This is not ideal but better than failing completely
+             try:
+                 logger.warning("Using emergency fallback - running in current process thread")
+                 import threading
+
+                 thread = threading.Thread(target=run_daemon, daemon=True)
+                 thread.start()
+
+                 # No real PID since it's a thread, but we'll create a marker file
+                 with open(pid_file, "w") as f:
+                     f.write(f"thread_{id(thread)}")
+
+                 return None
+             except Exception as e3:
+                 logger.error(f"All methods failed to start uploader: {e3}")
+                 return None
+
+
+ def run_daemon():
+     """Run the uploader as a daemon process"""
+     # Write PID to file
+     pid_file = os.path.join(tempfile.gettempdir(), "trace_uploader.pid")
+     with open(pid_file, "w") as f:
+         f.write(str(os.getpid()))
+
+     try:
+         uploader = TraceUploader()
+         uploader.start()
+     finally:
+         # Clean up PID file
+         if os.path.exists(pid_file):
+             os.remove(pid_file)
+
+
+ if __name__ == "__main__":
+     parser = argparse.ArgumentParser(description="Trace uploader process")
+     parser.add_argument("--daemon", action="store_true", help="Run as daemon process")
+     parser.add_argument("--test", action="store_true", help="Submit a test task")
+     args = parser.parse_args()
+
+     if args.daemon:
+         run_daemon()
+     elif args.test:
+         # Submit a test task
+         test_file = os.path.join(tempfile.gettempdir(), "test_trace.json")
+         with open(test_file, "w") as f:
+             f.write("{}")
+
+         task_id = submit_upload_task(
+             filepath=test_file,
+             hash_id="test_hash",
+             zip_path=test_file,
+             project_name="test_project",
+             project_id="test_id",
+             dataset_name="test_dataset",
+             user_details={"id": "test_user"},
+             base_url=None  # base_url is required by the signature; None is fine in test mode
+         )
+         print(f"Submitted test task with ID: {task_id}")
+     else:
+         print("Use --daemon to run as daemon or --test to submit a test task")