@leejungkiin/awkit 1.3.8 → 1.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/awk.js +630 -52
- package/bin/claude-generators.js +122 -0
- package/core/AGENTS.md +54 -0
- package/core/CLAUDE.md +155 -0
- package/core/GEMINI.md +44 -9
- package/core/GEMINI.md.bak +126 -199
- package/package.json +1 -1
- package/skills/ai-sprite-maker/SKILL.md +81 -0
- package/skills/ai-sprite-maker/scripts/animate_sprite.py +102 -0
- package/skills/ai-sprite-maker/scripts/process_sprites.py +140 -0
- package/skills/awf-session-restore/SKILL.md +12 -2
- package/skills/brainstorm-agent/SKILL.md +11 -8
- package/skills/code-review/SKILL.md +21 -33
- package/skills/gitnexus/gitnexus-cli/SKILL.md +82 -0
- package/skills/gitnexus/gitnexus-debugging/SKILL.md +89 -0
- package/skills/gitnexus/gitnexus-exploring/SKILL.md +78 -0
- package/skills/gitnexus/gitnexus-guide/SKILL.md +64 -0
- package/skills/gitnexus/gitnexus-impact-analysis/SKILL.md +97 -0
- package/skills/gitnexus/gitnexus-refactoring/SKILL.md +121 -0
- package/skills/lucylab-tts/SKILL.md +64 -0
- package/skills/lucylab-tts/resources/voices_library.json +908 -0
- package/skills/lucylab-tts/scripts/.env +1 -0
- package/skills/lucylab-tts/scripts/lucylab_tts.py +506 -0
- package/skills/nm-memory-sync/SKILL.md +14 -1
- package/skills/orchestrator/SKILL.md +5 -38
- package/skills/ship-to-code/SKILL.md +115 -0
- package/skills/short-maker/SKILL.md +150 -0
- package/skills/short-maker/_backup/storyboard.html +106 -0
- package/skills/short-maker/_backup/video_mixer.py +296 -0
- package/skills/short-maker/outputs/fitbite-promo/background.jpg +0 -0
- package/skills/short-maker/outputs/fitbite-promo/final/promo-final.mp4 +0 -0
- package/skills/short-maker/outputs/fitbite-promo/script.md +19 -0
- package/skills/short-maker/outputs/fitbite-promo/segments/scene-01.mp4 +0 -0
- package/skills/short-maker/outputs/fitbite-promo/segments/scene-02.mp4 +0 -0
- package/skills/short-maker/outputs/fitbite-promo/segments/scene-03.mp4 +0 -0
- package/skills/short-maker/outputs/fitbite-promo/segments/scene-04.mp4 +0 -0
- package/skills/short-maker/outputs/fitbite-promo/storyboard/scene-01.png +0 -0
- package/skills/short-maker/outputs/fitbite-promo/storyboard/scene-02.png +0 -0
- package/skills/short-maker/outputs/fitbite-promo/storyboard/scene-03.png +0 -0
- package/skills/short-maker/outputs/fitbite-promo/storyboard/scene-04.png +0 -0
- package/skills/short-maker/outputs/fitbite-promo/storyboard.html +133 -0
- package/skills/short-maker/outputs/fitbite-promo/storyboard.json +38 -0
- package/skills/short-maker/outputs/fitbite-promo/temp/merged_chroma.mp4 +0 -0
- package/skills/short-maker/outputs/fitbite-promo/temp/merged_crossfaded.mp4 +0 -0
- package/skills/short-maker/outputs/fitbite-promo/temp/ready_00.mp4 +0 -0
- package/skills/short-maker/outputs/fitbite-promo/temp/ready_01.mp4 +0 -0
- package/skills/short-maker/outputs/fitbite-promo/temp/ready_02.mp4 +0 -0
- package/skills/short-maker/outputs/fitbite-promo/temp/ready_03.mp4 +0 -0
- package/skills/short-maker/outputs/fitbite-promo/tts/manifest.json +31 -0
- package/skills/short-maker/outputs/fitbite-promo/tts/scene-01.wav +0 -0
- package/skills/short-maker/outputs/fitbite-promo/tts/scene-02.wav +0 -0
- package/skills/short-maker/outputs/fitbite-promo/tts/scene-03.wav +0 -0
- package/skills/short-maker/outputs/fitbite-promo/tts/scene-04.wav +0 -0
- package/skills/short-maker/outputs/fitbite-promo/tts_script.txt +11 -0
- package/skills/short-maker/scripts/google-flow-cli/.project-identity +41 -0
- package/skills/short-maker/scripts/google-flow-cli/.trae/rules/project_rules.md +52 -0
- package/skills/short-maker/scripts/google-flow-cli/CODEBASE.md +67 -0
- package/skills/short-maker/scripts/google-flow-cli/GoogleFlowCli.code-workspace +29 -0
- package/skills/short-maker/scripts/google-flow-cli/README.md +168 -0
- package/skills/short-maker/scripts/google-flow-cli/docs/specs/PROJECT.md +12 -0
- package/skills/short-maker/scripts/google-flow-cli/docs/specs/REQUIREMENTS.md +22 -0
- package/skills/short-maker/scripts/google-flow-cli/docs/specs/ROADMAP.md +16 -0
- package/skills/short-maker/scripts/google-flow-cli/docs/specs/TECH-SPEC.md +13 -0
- package/skills/short-maker/scripts/google-flow-cli/gflow/__init__.py +3 -0
- package/skills/short-maker/scripts/google-flow-cli/gflow/api/__init__.py +19 -0
- package/skills/short-maker/scripts/google-flow-cli/gflow/api/client.py +1921 -0
- package/skills/short-maker/scripts/google-flow-cli/gflow/api/models.py +64 -0
- package/skills/short-maker/scripts/google-flow-cli/gflow/api/rpc_ids.py +98 -0
- package/skills/short-maker/scripts/google-flow-cli/gflow/auth/__init__.py +15 -0
- package/skills/short-maker/scripts/google-flow-cli/gflow/auth/browser_auth.py +692 -0
- package/skills/short-maker/scripts/google-flow-cli/gflow/auth/humanizer.py +417 -0
- package/skills/short-maker/scripts/google-flow-cli/gflow/auth/proxy_ext.py +120 -0
- package/skills/short-maker/scripts/google-flow-cli/gflow/auth/recaptcha.py +482 -0
- package/skills/short-maker/scripts/google-flow-cli/gflow/batchexecute/__init__.py +5 -0
- package/skills/short-maker/scripts/google-flow-cli/gflow/batchexecute/client.py +414 -0
- package/skills/short-maker/scripts/google-flow-cli/gflow/cli/__init__.py +1 -0
- package/skills/short-maker/scripts/google-flow-cli/gflow/cli/main.py +1075 -0
- package/skills/short-maker/scripts/google-flow-cli/pyproject.toml +36 -0
- package/skills/short-maker/scripts/google-flow-cli/script.txt +22 -0
- package/skills/short-maker/scripts/google-flow-cli/tests/__init__.py +0 -0
- package/skills/short-maker/scripts/google-flow-cli/tests/test_batchexecute.py +113 -0
- package/skills/short-maker/scripts/google-flow-cli/tests/test_client.py +190 -0
- package/skills/short-maker/templates/aida_script.md +40 -0
- package/skills/short-maker/templates/mimic_analyzer.md +29 -0
- package/skills/single-flow-task-execution/SKILL.md +412 -0
- package/skills/single-flow-task-execution/code-quality-reviewer-prompt.md +20 -0
- package/skills/single-flow-task-execution/implementer-prompt.md +78 -0
- package/skills/single-flow-task-execution/spec-reviewer-prompt.md +61 -0
- package/skills/skill-creator/SKILL.md +44 -0
- package/skills/spm-build-analysis/SKILL.md +92 -0
- package/skills/spm-build-analysis/references/build-optimization-sources.md +155 -0
- package/skills/spm-build-analysis/references/recommendation-format.md +85 -0
- package/skills/spm-build-analysis/references/spm-analysis-checks.md +105 -0
- package/skills/spm-build-analysis/scripts/check_spm_pins.py +118 -0
- package/skills/symphony-enforcer/SKILL.md +83 -97
- package/skills/symphony-orchestrator/SKILL.md +1 -1
- package/skills/trello-sync/SKILL.md +52 -45
- package/skills/verification-gate/SKILL.md +13 -2
- package/skills/xcode-build-benchmark/SKILL.md +88 -0
- package/skills/xcode-build-benchmark/references/benchmark-artifacts.md +94 -0
- package/skills/xcode-build-benchmark/references/benchmarking-workflow.md +67 -0
- package/skills/xcode-build-benchmark/schemas/build-benchmark.schema.json +230 -0
- package/skills/xcode-build-benchmark/scripts/benchmark_builds.py +308 -0
- package/skills/xcode-build-fixer/SKILL.md +218 -0
- package/skills/xcode-build-fixer/references/build-settings-best-practices.md +216 -0
- package/skills/xcode-build-fixer/references/fix-patterns.md +290 -0
- package/skills/xcode-build-fixer/references/recommendation-format.md +85 -0
- package/skills/xcode-build-fixer/scripts/benchmark_builds.py +308 -0
- package/skills/xcode-build-orchestrator/SKILL.md +156 -0
- package/skills/xcode-build-orchestrator/references/benchmark-artifacts.md +94 -0
- package/skills/xcode-build-orchestrator/references/build-settings-best-practices.md +216 -0
- package/skills/xcode-build-orchestrator/references/orchestration-report-template.md +143 -0
- package/skills/xcode-build-orchestrator/references/recommendation-format.md +85 -0
- package/skills/xcode-build-orchestrator/scripts/benchmark_builds.py +308 -0
- package/skills/xcode-build-orchestrator/scripts/diagnose_compilation.py +273 -0
- package/skills/xcode-build-orchestrator/scripts/generate_optimization_report.py +533 -0
- package/skills/xcode-compilation-analyzer/SKILL.md +89 -0
- package/skills/xcode-compilation-analyzer/references/build-optimization-sources.md +155 -0
- package/skills/xcode-compilation-analyzer/references/code-compilation-checks.md +106 -0
- package/skills/xcode-compilation-analyzer/references/recommendation-format.md +85 -0
- package/skills/xcode-compilation-analyzer/scripts/diagnose_compilation.py +273 -0
- package/skills/xcode-project-analyzer/SKILL.md +76 -0
- package/skills/xcode-project-analyzer/references/build-optimization-sources.md +155 -0
- package/skills/xcode-project-analyzer/references/build-settings-best-practices.md +216 -0
- package/skills/xcode-project-analyzer/references/project-audit-checks.md +101 -0
- package/skills/xcode-project-analyzer/references/recommendation-format.md +85 -0
- package/templates/CODEBASE.md +26 -42
- package/templates/configs/trello-config.json +2 -2
- package/templates/workflow_dual_mode_template.md +5 -5
- package/workflows/_uncategorized/conductor-codex.md +125 -0
- package/workflows/_uncategorized/conductor.md +97 -0
- package/workflows/_uncategorized/ship-to-code.md +85 -0
- package/workflows/_uncategorized/trello-sync.md +52 -0
- package/workflows/context/codebase-sync.md +10 -87
- package/workflows/quality/visual-debug.md +66 -12
|
@@ -0,0 +1,692 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Browser-based authentication for Google Flow.
|
|
3
|
+
|
|
4
|
+
Opens a real Chrome window (via subprocess, NOT Selenium) and lets the user
|
|
5
|
+
log in to their Google account. Cookies are extracted via Chrome DevTools
|
|
6
|
+
Protocol (CDP). The cookies are used to call the session endpoint
|
|
7
|
+
(labs.google/fx/api/auth/session) to get fresh OAuth2 access_tokens.
|
|
8
|
+
|
|
9
|
+
IMPORTANT: We intentionally avoid Selenium/chromedriver because reCAPTCHA
|
|
10
|
+
Enterprise v3 detects chromedriver artifacts and permanently taints the
|
|
11
|
+
browser session with a low trust score. By launching Chrome directly via
|
|
12
|
+
subprocess with --remote-debugging-port, the browser is completely clean.
|
|
13
|
+
|
|
14
|
+
After authentication, Chrome stays alive so that reCAPTCHA Enterprise tokens
|
|
15
|
+
can be obtained from the same session via CDP.
|
|
16
|
+
|
|
17
|
+
Architecture:
|
|
18
|
+
1. subprocess launches Chrome with --remote-debugging-port
|
|
19
|
+
2. CDP WebSocket connection extracts cookies
|
|
20
|
+
3. Cookies saved to ~/.gflow/env; CDP port saved to ~/.gflow/cdp-port
|
|
21
|
+
4. Chrome stays alive for reCAPTCHA token generation
|
|
22
|
+
5. At runtime, cookies -> /fx/api/auth/session -> fresh access_token
|
|
23
|
+
"""
|
|
24
|
+
|
|
25
|
+
from __future__ import annotations
|
|
26
|
+
|
|
27
|
+
import json
|
|
28
|
+
import logging
|
|
29
|
+
import os
|
|
30
|
+
import platform
|
|
31
|
+
import shutil
|
|
32
|
+
import socket
|
|
33
|
+
import subprocess
|
|
34
|
+
import time
|
|
35
|
+
from dataclasses import dataclass
|
|
36
|
+
from pathlib import Path
|
|
37
|
+
|
|
38
|
+
import requests
|
|
39
|
+
|
|
40
|
+
logger = logging.getLogger("gflow.auth")
|
|
41
|
+
|
|
42
|
+
FLOW_HOST = "labs.google"
|
|
43
|
+
FLOW_URL = "https://labs.google/fx/tools/flow"
|
|
44
|
+
SESSION_URL = "https://labs.google/fx/api/auth/session"
|
|
45
|
+
ENV_DIR = Path.home() / ".gflow"
|
|
46
|
+
ENV_FILE = ENV_DIR / "env"
|
|
47
|
+
CDP_PORT_FILE = ENV_DIR / "cdp-port"
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
@dataclass
class AuthData:
    """Credential bundle for Google Flow: raw cookies plus the derived token."""

    cookies: str  # Full cookie string (Google session cookies)
    token: str = ""  # OAuth2 access_token (refreshed from session endpoint)
    expires: str = ""  # Token expiry time

    @property
    def is_valid(self) -> bool:
        """True when a non-empty cookie string is present."""
        return len(self.cookies) > 0
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def refresh_access_token(cookies: str, debug: bool = False) -> dict:
    """
    Call the session endpoint to get a fresh access_token.

    Returns a dict with "access_token", "expires" and "user" keys.
    Raises AuthError on a 401 (expired session), a non-200 status,
    or a response that carries no access_token.
    """
    request_headers = {
        "Origin": "https://labs.google",
        "Referer": "https://labs.google/fx/tools/image-fx",
        "Cookie": cookies,
    }

    if debug:
        logger.info("Refreshing access token from %s", SESSION_URL)

    # Route through residential proxy if configured (cookies are tied to proxy IP)
    proxies = None
    try:
        from gflow.api.client import get_active_proxy
        active = get_active_proxy()
        if active:
            proxies = {"https": active, "http": active}
    except Exception:
        pass  # best-effort: no proxy support available -> direct connection

    response = requests.get(SESSION_URL, headers=request_headers, timeout=30, proxies=proxies)

    if response.status_code == 401:
        raise AuthError(
            "Session expired. Run: gflow auth --clear && gflow auth"
        )
    if response.status_code != 200:
        raise AuthError(
            f"Session endpoint returned {response.status_code}: {response.text[:300]}"
        )

    payload = response.json()

    token = payload.get("access_token", "")
    if not token:
        raise AuthError(
            "Session endpoint returned no access_token. "
            "Cookies may be expired. Run: gflow auth --clear && gflow auth"
        )

    if debug:
        account = payload.get("user", {})
        logger.info(
            "Got access_token: %s... (expires: %s, user: %s)",
            token[:20],
            payload.get("expires", "?"),
            account.get("email", "?"),
        )

    return {
        "access_token": token,
        "expires": payload.get("expires", ""),
        "user": payload.get("user", {}),
    }
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
# ------------------------------------------------------------------
|
|
123
|
+
# Chrome binary discovery
|
|
124
|
+
# ------------------------------------------------------------------
|
|
125
|
+
|
|
126
|
+
def _find_chrome() -> str:
    """Locate the Chrome executable for the host OS.

    Checks well-known install locations first, then falls back to PATH.
    Raises AuthError when no browser binary can be found.
    """
    system = platform.system()

    if system == "Windows":
        search_paths = [
            os.path.expandvars(template)
            for template in (
                r"%ProgramFiles%\Google\Chrome\Application\chrome.exe",
                r"%ProgramFiles(x86)%\Google\Chrome\Application\chrome.exe",
                r"%LocalAppData%\Google\Chrome\Application\chrome.exe",
            )
        ]
    elif system == "Darwin":
        search_paths = [
            "/Applications/Google Chrome.app/Contents/MacOS/Google Chrome",
        ]
    else:
        # Linux and everything else: common Chrome/Chromium locations.
        search_paths = [
            "/usr/bin/google-chrome",
            "/usr/bin/google-chrome-stable",
            "/usr/bin/chromium",
            "/usr/bin/chromium-browser",
        ]

    for candidate in search_paths:
        if os.path.isfile(candidate):
            return candidate

    # Fall back to whatever is on PATH, in order of preference.
    for executable in ("chrome", "google-chrome", "chromium"):
        located = shutil.which(executable)
        if located:
            return located

    raise AuthError(
        "Chrome not found. Install Google Chrome or set CHROME_PATH env var."
    )
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
def _get_chrome_path() -> str:
    """Resolve the Chrome binary, honoring the CHROME_PATH override.

    An override that points at a non-existent file is ignored.
    """
    override = os.environ.get("CHROME_PATH")
    if override and os.path.isfile(override):
        return override
    return _find_chrome()
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
def _find_free_port() -> int:
|
|
170
|
+
"""Find a free TCP port."""
|
|
171
|
+
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
|
|
172
|
+
s.bind(("127.0.0.1", 0))
|
|
173
|
+
return s.getsockname()[1]
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
# ------------------------------------------------------------------
|
|
177
|
+
# CDP port persistence
|
|
178
|
+
# ------------------------------------------------------------------
|
|
179
|
+
|
|
180
|
+
def get_saved_cdp_port() -> int | None:
    """Return the CDP port saved by a previous auth session.

    The port is only returned when something is actually listening on it
    (i.e. Chrome is still running); otherwise None.
    """
    if not CDP_PORT_FILE.exists():
        return None

    try:
        saved_port = int(CDP_PORT_FILE.read_text().strip())
    except (ValueError, OSError):
        return None

    # Probe the port: a successful connect means Chrome is still alive.
    try:
        probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            probe.settimeout(1)
            if probe.connect_ex(("127.0.0.1", saved_port)) == 0:
                return saved_port
        finally:
            probe.close()
    except OSError:
        pass

    return None
|
|
194
|
+
|
|
195
|
+
|
|
196
|
+
def save_cdp_port(port: int) -> None:
    """Persist the CDP debugging port so later invocations can reattach."""
    ENV_DIR.mkdir(parents=True, exist_ok=True)
    CDP_PORT_FILE.write_text(f"{port}")
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
def clear_cdp_port() -> None:
    """Delete the saved CDP port file, if one exists."""
    CDP_PORT_FILE.unlink(missing_ok=True)
|
|
206
|
+
|
|
207
|
+
|
|
208
|
+
# ------------------------------------------------------------------
|
|
209
|
+
# CDP helpers (no Selenium — pure WebSocket)
|
|
210
|
+
# ------------------------------------------------------------------
|
|
211
|
+
|
|
212
|
+
class _CDPConnection:
    """Minimal Chrome DevTools Protocol client over a raw WebSocket.

    Used only for cookie extraction and simple page commands — no Selenium.
    """

    def __init__(self, ws_url: str):
        # Imported lazily so the module loads even without websocket-client.
        import websocket
        self._ws = websocket.create_connection(ws_url, timeout=30)
        self._msg_id = 0

    def send(self, method: str, params: dict | None = None) -> dict:
        """Send one CDP command and block until its matching reply arrives.

        Raises AuthError on a CDP-level error or a 30-second timeout.
        """
        self._msg_id += 1
        request = {"id": self._msg_id, "method": method}
        if params:
            request["params"] = params
        self._ws.send(json.dumps(request))

        give_up_at = time.time() + 30
        while time.time() < give_up_at:
            # Receive in short slices so the overall deadline stays honored.
            try:
                self._ws.settimeout(5)
                reply = json.loads(self._ws.recv())
            except Exception as exc:
                message = str(exc).lower()
                if "timed out" in message or "timeout" in message:
                    continue  # slice expired; keep waiting for the reply
                raise
            # Ignore unsolicited events; only our reply carries our id.
            if reply.get("id") == self._msg_id:
                if "error" in reply:
                    raise AuthError(f"CDP error: {reply['error']}")
                return reply.get("result", {})
        raise AuthError("CDP command timed out")

    def close(self):
        """Close the WebSocket, ignoring teardown errors."""
        try:
            self._ws.close()
        except Exception:
            pass
|
|
251
|
+
|
|
252
|
+
|
|
253
|
+
def _wait_for_cdp_page(port: int, timeout: int = 30) -> str:
    """Wait for a page-level CDP WebSocket URL.

    Polls Chrome's /json/list endpoint every 0.5 s until a "page" target
    with a webSocketDebuggerUrl appears, or the timeout elapses.

    Args:
        port: CDP remote-debugging port Chrome was launched with.
        timeout: Seconds to keep polling before giving up.

    Returns:
        The page target's WebSocket debugger URL.

    Raises:
        AuthError: when no page target becomes available within *timeout*.
    """
    import urllib.request
    import urllib.error

    # The endpoint is loop-invariant; build it once.
    url = f"http://127.0.0.1:{port}/json/list"
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            # Close the HTTP response explicitly — the previous version
            # leaked one socket per 0.5 s poll iteration.
            with urllib.request.urlopen(url, timeout=2) as resp:
                targets = json.loads(resp.read().decode())
            for target in targets:
                if target.get("type") == "page":
                    ws_url = target.get("webSocketDebuggerUrl", "")
                    if ws_url:
                        return ws_url
        except (urllib.error.URLError, ConnectionRefusedError, OSError, json.JSONDecodeError):
            pass  # Chrome not up yet (or mid-start); retry until deadline
        time.sleep(0.5)
    raise AuthError(f"Chrome CDP not available after {timeout}s on port {port}")
|
|
273
|
+
|
|
274
|
+
|
|
275
|
+
def _get_all_cookies_cdp(cdp: _CDPConnection) -> list[dict]:
|
|
276
|
+
"""Get all cookies via CDP Network.getAllCookies."""
|
|
277
|
+
result = cdp.send("Network.getAllCookies")
|
|
278
|
+
return result.get("cookies", [])
|
|
279
|
+
|
|
280
|
+
|
|
281
|
+
def _get_current_url_cdp(cdp: _CDPConnection) -> str:
|
|
282
|
+
"""Get the current page URL via CDP."""
|
|
283
|
+
try:
|
|
284
|
+
result = cdp.send("Runtime.evaluate", {
|
|
285
|
+
"expression": "window.location.href",
|
|
286
|
+
"returnByValue": True,
|
|
287
|
+
})
|
|
288
|
+
return result.get("result", {}).get("value", "")
|
|
289
|
+
except Exception:
|
|
290
|
+
return ""
|
|
291
|
+
|
|
292
|
+
|
|
293
|
+
# ------------------------------------------------------------------
|
|
294
|
+
# Main auth class
|
|
295
|
+
# ------------------------------------------------------------------
|
|
296
|
+
|
|
297
|
+
class BrowserAuth:
    """
    Handles browser-based authentication for Google Flow.

    Launches Chrome directly (no Selenium/chromedriver!) via subprocess,
    with --remote-debugging-port for CDP access. User logs in manually,
    cookies are extracted via CDP.

    Chrome stays alive after auth for reCAPTCHA token generation.
    """

    def __init__(self, debug: bool = False):
        # When True, progress details are emitted through the module logger.
        self.debug = debug

    def get_auth(self, profile: str | None = None, interactive: bool = True) -> AuthData:
        """
        Get authentication credentials.

        Order:
        1. Environment variables (GFLOW_COOKIES)
        2. Saved credentials in ~/.gflow/env
        3. Browser login (if interactive=True)

        Args:
            profile: Chrome profile directory name to pass to the browser
                (forwarded to _login_with_browser; unused for sources 1-2).
            interactive: when False, never opens a browser — only the env
                var and saved-file sources are tried.

        Returns:
            AuthData with at least a non-empty cookie string.

        Raises:
            AuthError: when no source yields valid credentials.
        """
        # Source 1: explicit override via environment variable.
        cookies = os.environ.get("GFLOW_COOKIES", "")
        if cookies:
            if self.debug:
                logger.info("Using cookies from environment variables")
            return AuthData(cookies=cookies)

        # Source 2: credentials persisted by a previous run.
        auth = load_env()
        if auth and auth.is_valid:
            if self.debug:
                logger.info("Using saved cookies from %s", ENV_FILE)
            return auth

        # Source 3: interactive browser login (saves on success).
        if interactive:
            auth = self._login_with_browser(profile)
            if auth and auth.is_valid:
                save_env(auth)
                return auth

        raise AuthError(
            "Could not authenticate. Try one of:\n"
            " 1. Run 'gflow auth' to log in via browser\n"
            " 2. Set GFLOW_COOKIES environment variable"
        )

    def _login_with_browser(self, profile: str | None = None) -> AuthData | None:
        """
        Launch Chrome directly (no Selenium!), navigate to Flow, wait for
        the user to log in, extract cookies via CDP.

        Chrome stays alive after auth for reCAPTCHA.

        Args:
            profile: optional Chrome --profile-directory name.

        Returns:
            AuthData on success, or None on timeout / launch failure /
            unexpected error (the caller raises AuthError in that case).
        """
        # Kill any previously running auth Chrome so the saved CDP port
        # and profile directory are free for this run.
        kill_auth_browser()

        print()
        print("=" * 60)
        print(" Google Flow Authentication")
        print("=" * 60)
        print()
        print(" A Chrome window will open.")
        print(" 1. Log in with your Google account")
        print(" 2. Wait until the Flow page loads")
        print(" 3. Come back here - cookies will be captured")
        print()
        print(" The browser will stay open for image/video generation.")
        print(" Run 'gflow close' when you're done to close it.")
        print()
        print(" Timeout: 5 minutes")
        print()

        chrome_path = _get_chrome_path()
        cdp_port = _find_free_port()

        # Dedicated profile dir keeps this Chrome isolated from the
        # user's normal browser session.
        profile_dir = str(ENV_DIR / "chrome-profile")

        # Build Chrome args — NO chromedriver, NO Selenium
        args = [
            chrome_path,
            f"--remote-debugging-port={cdp_port}",
            "--remote-allow-origins=*",
            f"--user-data-dir={profile_dir}",
            "--no-first-run",
            "--no-default-browser-check",
        ]

        if profile:
            args.append(f"--profile-directory={profile}")

        # Route Chrome through residential proxy if configured
        try:
            from gflow.auth.proxy_ext import get_chrome_proxy_args
            proxy_args = get_chrome_proxy_args()
            if proxy_args:
                args.extend(proxy_args)
                print(" Using residential proxy for browser")
        except Exception:
            pass  # proxy support is optional; continue without it

        # Start with the Flow URL
        args.append(FLOW_URL)

        if self.debug:
            logger.info("Launching Chrome: %s", " ".join(args[:3]))
            logger.info("CDP port: %d", cdp_port)

        # Launch Chrome as a detached process (survives after Python exits)
        creation_flags = 0
        if platform.system() == "Windows":
            creation_flags = subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS

        try:
            subprocess.Popen(
                args,
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
                # Detach per-platform: creationflags on Windows,
                # start_new_session (setsid) on POSIX.
                creationflags=creation_flags if platform.system() == "Windows" else 0,
                start_new_session=(platform.system() != "Windows"),
            )
        except FileNotFoundError:
            raise AuthError(f"Chrome not found at: {chrome_path}")
        except Exception as e:
            raise AuthError(f"Failed to launch Chrome: {e}")

        print(" Browser opened. Waiting for login...")

        # Connect via CDP
        try:
            ws_url = _wait_for_cdp_page(cdp_port, timeout=30)
        except AuthError:
            print("\n Could not connect to Chrome. Make sure Chrome is not already running")
            print(" with this profile. Try: gflow auth --clear")
            return None

        cdp = _CDPConnection(ws_url)

        try:
            cdp.send("Network.enable")

            # Poll for authentication (up to 5 minutes: 60 polls x 5 s)
            for attempt in range(60):
                time.sleep(5)

                current_url = _get_current_url_cdp(cdp)
                if self.debug:
                    logger.info("Poll %d/60 - URL: %s", attempt + 1, current_url)

                # If still on a login/accounts page, keep waiting
                if "accounts.google" in current_url or "signin" in current_url.lower():
                    # Progress message roughly every 30 s (every 6th poll).
                    if attempt % 6 == 0:
                        print(f" Waiting for login... ({(attempt + 1) * 5}s)")
                    continue

                # Get all cookies
                all_cookies = _get_all_cookies_cdp(cdp)

                # Check for Google auth cookies (any of these names
                # indicates a signed-in Google session).
                cookie_names = {c["name"] for c in all_cookies}
                has_google_auth = bool(
                    {"SID", "HSID", "SSID", "__Secure-1PSID", "SAPISID"}.intersection(cookie_names)
                )

                if self.debug:
                    logger.info("Cookies: %d total, auth=%s", len(all_cookies), has_google_auth)

                if not has_google_auth:
                    if attempt % 6 == 0:
                        print(f" On Flow page but no auth cookies yet... ({(attempt + 1) * 5}s)")
                    continue

                # Build cookie string in standard "name=value; ..." form.
                cookie_str = "; ".join(
                    f'{c["name"]}={c["value"]}' for c in all_cookies
                )

                # Verify cookies work by exchanging them for an access token.
                try:
                    session_data = refresh_access_token(cookie_str, debug=self.debug)
                    user = session_data.get("user", {})

                    # Save CDP port for reCAPTCHA
                    save_cdp_port(cdp_port)

                    # Make sure we're on the Flow page (reCAPTCHA needs it)
                    if "flow" not in current_url.lower():
                        cdp.send("Page.enable")
                        cdp.send("Page.navigate", {"url": FLOW_URL})
                        time.sleep(3)

                    print()
                    print(" Authentication successful!")
                    print(f" User: {user.get('name', 'Unknown')} ({user.get('email', '')})")
                    print(f" Token: {session_data['access_token'][:20]}...")
                    print(f" Cookies: {len(all_cookies)} captured")
                    print(f" Saved to: {ENV_FILE}")
                    print()
                    print(" Chrome stays open for reCAPTCHA. Run 'gflow close' when done.")
                    print()

                    return AuthData(
                        cookies=cookie_str,
                        token=session_data["access_token"],
                        expires=session_data.get("expires", ""),
                    )
                except AuthError as e:
                    # Cookies present but session endpoint not ready yet —
                    # keep polling until the deadline.
                    logger.warning("Session endpoint failed: %s", e)
                    if attempt % 6 == 0:
                        print(f" Got cookies but session not ready... ({(attempt + 1) * 5}s)")
                        print(f" Reason: {e}")
                    continue

            print()
            print(" Timed out waiting for authentication.")
            print(" Make sure you log in to your Google account in the browser.")
            return None

        except Exception as e:
            logger.error("Auth error: %s", e)
            if self.debug:
                import traceback
                traceback.print_exc()
            print(f"\n Error: {e}")
            return None

        finally:
            # Close only our CDP socket — Chrome itself stays running.
            cdp.close()
|
|
525
|
+
|
|
526
|
+
|
|
527
|
+
def refresh_cookies_from_cdp() -> AuthData | None:
    """
    Silently re-extract cookies from the already-running Chrome CDP session.

    Inspired by notebooklm-mcp-cli's approach: instead of forcing the user
    to re-login when cookies rotate, just pull fresh cookies from the Chrome
    instance that's already authenticated and running.

    Google rotates some cookies on every request, but Chrome handles this
    transparently. By re-reading via CDP, we get the latest values without
    any user interaction.

    Returns:
        AuthData with fresh cookies, or None if Chrome isn't running.
    """
    # No saved (and still-listening) CDP port means no Chrome to talk to.
    port = get_saved_cdp_port()
    if not port:
        return None

    # Short timeout: Chrome is either up right now or it isn't.
    try:
        ws_url = _wait_for_cdp_page(port, timeout=5)
    except AuthError:
        return None

    cdp = _CDPConnection(ws_url)
    try:
        cdp.send("Network.enable")
        all_cookies = _get_all_cookies_cdp(cdp)

        if not all_cookies:
            return None

        # Verify Google auth cookies are still present
        cookie_names = {c["name"] for c in all_cookies}
        has_google_auth = bool(
            {"SID", "HSID", "SSID", "__Secure-1PSID", "SAPISID"}.intersection(cookie_names)
        )

        if not has_google_auth:
            logger.warning("CDP cookie refresh: Chrome running but no Google auth cookies")
            return None

        cookie_str = "; ".join(f'{c["name"]}={c["value"]}' for c in all_cookies)

        # Verify the cookies actually work before returning them
        try:
            session_data = refresh_access_token(cookie_str, debug=False)
            if session_data.get("access_token"):
                logger.info(
                    "Silent CDP cookie refresh successful (%d cookies)",
                    len(all_cookies),
                )
                auth = AuthData(
                    cookies=cookie_str,
                    token=session_data["access_token"],
                    expires=session_data.get("expires", ""),
                )
                # Persist refreshed cookies so next startup uses them
                save_env(auth)
                return auth
            # No access_token in the response: fall through and return None.
        except AuthError:
            logger.warning("CDP cookie refresh: cookies extracted but session endpoint rejected them")
            return None

    except Exception as e:
        # Any unexpected failure degrades to "no refresh available".
        logger.warning("CDP cookie refresh failed: %s", e)
        return None
    finally:
        cdp.close()
|
|
596
|
+
|
|
597
|
+
|
|
598
|
+
def kill_auth_browser() -> None:
    """Kill the Chrome browser that was kept alive for reCAPTCHA.

    Best-effort, three-step teardown:
      1. Ask the browser to close gracefully via the CDP ``Browser.close``
         command on the saved debugging port.
      2. Force-kill any leftover processes still referencing the dedicated
         chrome-profile directory, to prevent a stale process from hijacking
         the debugging port on the next launch.
      3. Clear the persisted CDP port so future lookups start fresh.
    """
    port = get_saved_cdp_port()

    if port:
        try:
            import urllib.request
            url = f"http://127.0.0.1:{port}/json/version"
            # Use a context manager so the HTTP response socket is closed
            # (the previous version leaked it).
            with urllib.request.urlopen(url, timeout=2) as resp:
                data = json.loads(resp.read().decode())
            ws_url = data.get("webSocketDebuggerUrl", "")

            if ws_url:
                import websocket
                ws = websocket.create_connection(ws_url, timeout=5)
                try:
                    ws.send(json.dumps({"id": 1, "method": "Browser.close"}))
                finally:
                    # Always release the websocket, even if send() raises.
                    ws.close()
        except Exception:
            # Best-effort: the browser may already be gone or unreachable.
            pass

    # Forcefully kill any remaining processes using this profile to prevent
    # port hijacking.
    profile_dir = str(ENV_DIR / "chrome-profile")
    try:
        if platform.system() in ("Darwin", "Linux"):
            import os
            import signal
            # Find PIDs whose command line references the profile dir.
            cmd = f"ps aux | grep '{profile_dir}' | grep -v grep | awk '{{print $2}}'"
            pids = subprocess.check_output(cmd, shell=True).decode().splitlines()
            for pid_str in pids:
                pid_str = pid_str.strip()
                if pid_str:
                    try:
                        os.kill(int(pid_str), signal.SIGKILL)
                    except OSError:
                        # Process already exited between listing and kill.
                        pass
        elif platform.system() == "Windows":
            # On Windows, taskkill based on command line is tricky, but wmic
            # can filter on CommandLine. (Plain string: the original used an
            # f-string with no placeholders.)
            cmd = 'wmic process where "name=\'chrome.exe\' and CommandLine like \'%chrome-profile%\'" call terminate'
            subprocess.run(cmd, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    except Exception as e:
        logger.warning("Failed to force kill leftover Chrome: %s", e)

    clear_cdp_port()
|
|
641
|
+
|
|
642
|
+
|
|
643
|
+
class AuthError(Exception):
    """Authentication failure (invalid, expired, or rejected credentials)."""
|
|
646
|
+
|
|
647
|
+
|
|
648
|
+
# ------------------------------------------------------------------
|
|
649
|
+
# Persistence helpers
|
|
650
|
+
# ------------------------------------------------------------------
|
|
651
|
+
|
|
652
|
+
def save_env(auth: AuthData) -> None:
    """Persist authentication credentials to ~/.gflow/env.

    Only the cookie string is written out; the file is then restricted to
    owner-only permissions where the filesystem supports it.
    """
    ENV_DIR.mkdir(parents=True, exist_ok=True)
    content = f"GFLOW_COOKIES={auth.cookies}\n"
    ENV_FILE.write_text(content)
    try:
        # Cookies are secrets: lock the file down to the owner when possible.
        ENV_FILE.chmod(0o600)
    except OSError:
        pass
|
|
662
|
+
|
|
663
|
+
|
|
664
|
+
def load_env() -> AuthData | None:
    """Load authentication credentials from ~/.gflow/env.

    Returns None when the file is missing or has no GFLOW_COOKIES entry.
    If the key appears more than once, the last occurrence wins.
    """
    if not ENV_FILE.exists():
        return None

    prefix = "GFLOW_COOKIES="
    cookies = ""
    for raw_line in ENV_FILE.read_text().splitlines():
        stripped = raw_line.strip()
        if stripped.startswith(prefix):
            cookies = stripped[len(prefix):]

    return AuthData(cookies=cookies) if cookies else None
|
|
678
|
+
|
|
679
|
+
|
|
680
|
+
def clear_env() -> None:
    """Remove saved authentication credentials and the Chrome profile dir."""
    # Shut down the keep-alive Chrome first so nothing holds the profile
    # directory open while it is being deleted.
    kill_auth_browser()

    if ENV_FILE.exists():
        ENV_FILE.unlink()

    chrome_profile = ENV_DIR / "chrome-profile"
    if chrome_profile.exists():
        import shutil
        try:
            shutil.rmtree(chrome_profile)
        except OSError:
            # Best-effort cleanup; a locked file should not crash logout.
            pass
|