superbrain-server 1.0.4 → 1.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "superbrain-server",
3
- "version": "1.0.4",
3
+ "version": "1.0.6",
4
4
  "description": "1-Line Auto-Installer and Server Execution wrapper for SuperBrain",
5
5
  "main": "index.js",
6
6
  "bin": {
package/payload/api.py CHANGED
@@ -561,46 +561,12 @@ async def analyze_instagram(request: AnalyzeRequest, token: str = Depends(verify
561
561
  logger.warning(f"⚠️ [{shortcode}] main.py stderr:\n{stderr[:1000]}")
562
562
 
563
563
  if returncode == 2:
564
- # main.py detected quota exhaustion and queued item for retry.
565
- # NOTE: Do NOT remove from queue here main.py already called
566
- # queue_for_retry() which set status='retry'. Removing would lose it.
567
- logger.info(f"⏰ [{shortcode}] Quota exhausted — queued for automatic retry")
564
+ retry_lines = [l.strip() for l in stdout.splitlines() if l.strip().startswith('⏰')]
565
+ retry_msg = retry_lines[-1].replace('⏰', '').strip() if retry_lines else "API quota exhausted or rate limited. Queued for automatic retry in 24 hours."
566
+ logger.info(f"⏰ [{shortcode}] {retry_msg}")
568
567
  raise HTTPException(
569
568
  status_code=202,
570
- detail="API quota exhausted. Your request has been queued for automatic retry in 24 hours."
571
- )
572
-
573
- if returncode != 0:
574
- # Extract last meaningful error line from stdout for the error message
575
- error_lines = [l.strip() for l in stdout.splitlines() if l.strip() and ('❌' in l or 'Error' in l or 'failed' in l.lower())]
576
- error_detail = error_lines[-1] if error_lines else (stderr.strip()[:200] or "Analysis failed")
577
- logger.error(f"❌ [{shortcode}] Analysis failed: {error_detail}")
578
- logger.debug(f"[{shortcode}] stdout tail:\n{stdout[-800:]}")
579
- raise HTTPException(
580
- status_code=400,
581
- detail=error_detail
582
- )
583
-
584
- logger.info(f"✅ [{shortcode}] Analysis complete! Fetching from database...")
585
-
586
- # Get result from database — retry up to 4 times in case the SQLite write
587
- # hasn't flushed yet (race condition between subprocess write and our read).
588
- analysis = None
589
- for _attempt in range(4):
590
- analysis = db.check_cache(shortcode)
591
- if analysis:
592
- if _attempt > 0:
593
- logger.info(f"🔄 [{shortcode}] Found in database on retry {_attempt}")
594
- break
595
- if _attempt < 3:
596
- logger.warning(f"⏳ [{shortcode}] Not in DB yet (attempt {_attempt+1}/4), retrying in 1s…")
597
- await asyncio.sleep(1)
598
-
599
- if not analysis:
600
- logger.error(f"❌ [{shortcode}] Not found in database after 4 attempts!")
601
- raise HTTPException(
602
- status_code=500,
603
- detail="Analysis completed but result not found in database"
569
+ detail=retry_msg
604
570
  )
605
571
 
606
572
  # Filter response
package/payload/start.py CHANGED
@@ -1275,7 +1275,7 @@ def launch_backend_status():
1275
1275
  token = TOKEN_FILE.read_text(encoding="utf-8").strip()
1276
1276
 
1277
1277
  url = "NOT_FOUND"
1278
- log_file = BACKEND_DIR / "config" / "localtunnel.log"
1278
+ log_file = BASE_DIR / "config" / "localtunnel.log"
1279
1279
  if log_file.exists():
1280
1280
  match = re.search(r"your url is: (https://[^\s]+)", log_file.read_text(encoding="utf-8"))
1281
1281
  if match: