claude-controller 0.1.1 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/ctl +867 -0
- package/bin/native-app.py +1 -1
- package/package.json +7 -5
- package/postinstall.sh +96 -15
- package/web/handler.py +190 -467
- package/web/handler_fs.py +152 -0
- package/web/handler_jobs.py +249 -0
- package/web/handler_sessions.py +132 -0
- package/web/jobs.py +9 -1
- package/web/pipeline.py +349 -0
- package/web/projects.py +192 -0
- package/web/static/api.js +54 -0
- package/web/static/app.js +17 -1937
- package/web/static/attachments.js +144 -0
- package/web/static/base.css +458 -0
- package/web/static/context.js +194 -0
- package/web/static/dirs.js +246 -0
- package/web/static/form.css +762 -0
- package/web/static/i18n.js +337 -0
- package/web/static/index.html +77 -11
- package/web/static/jobs.css +580 -0
- package/web/static/jobs.js +434 -0
- package/web/static/pipeline.css +31 -0
- package/web/static/pipelines.js +252 -0
- package/web/static/send.js +113 -0
- package/web/static/settings-style.css +260 -0
- package/web/static/settings.js +45 -0
- package/web/static/stream.js +311 -0
- package/web/static/utils.js +79 -0
- package/web/static/styles.css +0 -1922
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
"""
|
|
2
|
+
File System & Config HTTP 핸들러 Mixin
|
|
3
|
+
|
|
4
|
+
포함 엔드포인트:
|
|
5
|
+
- GET/POST /api/config
|
|
6
|
+
- GET/POST /api/recent-dirs
|
|
7
|
+
- GET /api/dirs
|
|
8
|
+
- POST /api/mkdir
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
import json
|
|
12
|
+
import os
|
|
13
|
+
|
|
14
|
+
from config import DATA_DIR, SETTINGS_FILE, RECENT_DIRS_FILE
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class FsHandlerMixin:
    """File-system & configuration HTTP handler mixin.

    Endpoints covered:
      - GET/POST /api/config
      - GET/POST /api/recent-dirs
      - GET  /api/dirs
      - POST /api/mkdir

    The host handler class must provide ``_read_body``, ``_json_response``
    and ``_error_response``.
    """

    def _handle_get_config(self):
        """GET /api/config — return saved settings merged over built-in defaults."""
        defaults = {
            "skip_permissions": True,
            "allowed_tools": "Bash,Read,Write,Edit,Glob,Grep,Agent,NotebookEdit,WebFetch,WebSearch",
            "model": "",
            "max_jobs": 10,
            "append_system_prompt": "",
            "target_repo": "",
            "base_branch": "main",
            "checkpoint_interval": 5,
            "locale": "ko",
        }
        try:
            if SETTINGS_FILE.exists():
                saved = json.loads(SETTINGS_FILE.read_text("utf-8"))
                # A settings file holding non-dict JSON would crash update();
                # treat it the same as a corrupt file.
                if isinstance(saved, dict):
                    defaults.update(saved)
        except (json.JSONDecodeError, OSError):
            # Best effort: an unreadable settings file falls back to defaults.
            pass
        self._json_response(defaults)

    def _handle_save_config(self):
        """POST /api/config — merge allow-listed keys into the settings file."""
        body = self._read_body()
        if not body or not isinstance(body, dict):
            return self._error_response("설정 데이터가 필요합니다")

        current = {}
        try:
            if SETTINGS_FILE.exists():
                loaded = json.loads(SETTINGS_FILE.read_text("utf-8"))
                # Guard against non-dict JSON: current[k] = v below needs a dict.
                if isinstance(loaded, dict):
                    current = loaded
        except (json.JSONDecodeError, OSError):
            pass

        # Only known keys may be persisted; everything else is dropped.
        allowed_keys = {
            "skip_permissions", "allowed_tools", "model", "max_jobs",
            "append_system_prompt", "target_repo", "base_branch",
            "checkpoint_interval", "locale",
        }
        for key, value in body.items():
            if key in allowed_keys:
                current[key] = value

        try:
            DATA_DIR.mkdir(parents=True, exist_ok=True)
            SETTINGS_FILE.write_text(
                json.dumps(current, ensure_ascii=False, indent=2), "utf-8"
            )
            self._json_response({"ok": True, "config": current})
        except OSError as e:
            self._error_response(f"설정 저장 실패: {e}", 500)

    def _handle_get_recent_dirs(self):
        """GET /api/recent-dirs — return the recent-directory list ([] on any error)."""
        try:
            if RECENT_DIRS_FILE.exists():
                data = json.loads(RECENT_DIRS_FILE.read_text("utf-8"))
            else:
                data = []
            self._json_response(data)
        except (json.JSONDecodeError, OSError):
            self._json_response([])

    def _handle_save_recent_dirs(self):
        """POST /api/recent-dirs — persist up to 8 recent directory paths."""
        body = self._read_body()
        # Guard the raw body before .get(): a None/non-dict body used to
        # raise AttributeError (raw 500) instead of a clean 400.
        if not isinstance(body, dict):
            return self._error_response("dirs 배열이 필요합니다")
        dirs = body.get("dirs")
        if not isinstance(dirs, list):
            return self._error_response("dirs 배열이 필요합니다")
        # Keep only strings, capped at 8 entries.
        dirs = [d for d in dirs if isinstance(d, str)][:8]
        try:
            DATA_DIR.mkdir(parents=True, exist_ok=True)
            RECENT_DIRS_FILE.write_text(json.dumps(dirs, ensure_ascii=False), "utf-8")
            self._json_response({"ok": True})
        except OSError as e:
            self._error_response(f"저장 실패: {e}", 500)

    def _handle_dirs(self, dir_path):
        """GET /api/dirs — list a directory's visible entries, plus a ".." parent link.

        Dotfiles are hidden; files carry a "size" field (0 when unreadable).
        """
        try:
            dir_path = os.path.abspath(os.path.expanduser(dir_path))
            if not os.path.isdir(dir_path):
                return self._error_response("디렉토리가 아닙니다", 400)

            entries = []
            try:
                items = sorted(os.listdir(dir_path))
            except PermissionError:
                return self._error_response("접근 권한 없음", 403)

            parent = os.path.dirname(dir_path)
            if parent != dir_path:  # at the filesystem root, dirname(x) == x
                entries.append({"name": "..", "path": parent, "type": "dir"})

            for item in items:
                if item.startswith("."):  # hide dotfiles
                    continue
                full = os.path.join(dir_path, item)
                entry = {"name": item, "path": full}
                if os.path.isdir(full):
                    entry["type"] = "dir"
                else:
                    entry["type"] = "file"
                    try:
                        entry["size"] = os.path.getsize(full)
                    except OSError:
                        entry["size"] = 0
                entries.append(entry)

            self._json_response({"current": dir_path, "entries": entries})
        except Exception as e:
            # Defensive boundary: never let a listing error escape as a raw 500.
            self._error_response(f"디렉토리 읽기 실패: {e}", 500)

    def _handle_mkdir(self):
        """POST /api/mkdir — create one new directory `name` under `parent`."""
        body = self._read_body()
        # Guard the raw body before .get(): a None/non-dict body used to
        # raise AttributeError (raw 500) instead of a clean 400.
        if not isinstance(body, dict):
            return self._error_response("parent와 name 필드가 필요합니다")
        parent = body.get("parent", "")
        name = body.get("name", "")
        # Non-string values would crash .strip(); report them as missing fields.
        if not isinstance(parent, str) or not isinstance(name, str):
            return self._error_response("parent와 name 필드가 필요합니다")
        parent = parent.strip()
        name = name.strip()

        if not parent or not name:
            return self._error_response("parent와 name 필드가 필요합니다")

        # Reject path separators and traversal names so `name` stays a single
        # component under `parent`.
        if "/" in name or "\\" in name or name in (".", ".."):
            return self._error_response("잘못된 디렉토리 이름입니다")

        try:
            parent = os.path.abspath(os.path.expanduser(parent))
            if not os.path.isdir(parent):
                return self._error_response("상위 디렉토리가 존재하지 않습니다", 400)

            new_dir = os.path.join(parent, name)
            if os.path.exists(new_dir):
                return self._error_response("이미 존재하는 이름입니다", 409)

            os.makedirs(new_dir)
            self._json_response({"ok": True, "path": new_dir}, 201)
        except PermissionError:
            self._error_response("접근 권한 없음", 403)
        except OSError as e:
            self._error_response(f"디렉토리 생성 실패: {e}", 500)
|
|
@@ -0,0 +1,249 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Job 관련 HTTP 핸들러 Mixin
|
|
3
|
+
|
|
4
|
+
포함 엔드포인트:
|
|
5
|
+
- GET /api/jobs, /api/jobs/:id/result, /api/jobs/:id/stream, /api/jobs/:id/checkpoints
|
|
6
|
+
- GET /api/session/:id/job
|
|
7
|
+
- POST /api/send, /api/upload, /api/service/start, /api/service/stop
|
|
8
|
+
- POST /api/jobs/:id/rewind
|
|
9
|
+
- DELETE /api/jobs, /api/jobs/:id
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
import base64
|
|
13
|
+
import json
|
|
14
|
+
import os
|
|
15
|
+
import time
|
|
16
|
+
from urllib.parse import urlparse, parse_qs
|
|
17
|
+
|
|
18
|
+
from config import LOGS_DIR, UPLOADS_DIR
|
|
19
|
+
from utils import parse_meta_file
|
|
20
|
+
|
|
21
|
+
# Extensions treated as images by the upload endpoint: they get an "img_"
# filename prefix and isImage=True in the /api/upload response.
IMAGE_EXTS = {".png", ".jpg", ".jpeg", ".gif", ".webp", ".bmp"}
# Extensions the /api/upload endpoint recognizes: images plus common
# text/code/config/document/archive types.
ALLOWED_UPLOAD_EXTS = IMAGE_EXTS | {
    ".txt", ".md", ".csv", ".json", ".xml", ".yaml", ".yml", ".toml",
    ".py", ".js", ".ts", ".jsx", ".tsx", ".html", ".css", ".scss",
    ".sh", ".bash", ".zsh", ".fish",
    ".c", ".cpp", ".h", ".hpp", ".java", ".kt", ".go", ".rs", ".rb",
    ".swift", ".m", ".r", ".sql", ".graphql",
    ".log", ".env", ".conf", ".ini", ".cfg",
    ".pdf", ".doc", ".docx", ".xls", ".xlsx", ".pptx",
    ".zip", ".tar", ".gz",
}
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class JobHandlerMixin:
    """Job-related HTTP handler mixin.

    Endpoints covered:
      - GET  /api/jobs, /api/jobs/:id/result, /api/jobs/:id/stream,
             /api/jobs/:id/checkpoints, /api/session/:id/job
      - POST /api/send, /api/upload, /api/service/start, /api/service/stop,
             /api/jobs/:id/rewind
      - DELETE /api/jobs, /api/jobs/:id

    The host handler class must provide ``_read_body``, ``_json_response``,
    ``_error_response``, ``_jobs_mod`` and ``_ckpt_mod``.
    """

    def _handle_jobs(self):
        """GET /api/jobs — return every known job."""
        self._json_response(self._jobs_mod().get_all_jobs())

    def _handle_job_result(self, job_id):
        """GET /api/jobs/:id/result — return a job's result, 404 on error."""
        result, err = self._jobs_mod().get_job_result(job_id)
        if err:
            self._error_response(err, 404)
        else:
            self._json_response(result)

    def _handle_upload(self):
        """POST /api/upload — store a base64-encoded file and return its path."""
        body = self._read_body()
        # Guard the raw body before .get(): a None/non-dict body used to
        # raise AttributeError (raw 500) instead of a clean 400.
        if not isinstance(body, dict):
            return self._error_response("data 필드가 필요합니다")
        data_b64 = body.get("data", "")
        filename = body.get("filename", "file")

        if not data_b64:
            return self._error_response("data 필드가 필요합니다")
        # Strip a data-URL prefix ("data:<mime>;base64,") if present.
        if "," in data_b64:
            data_b64 = data_b64.split(",", 1)[1]

        try:
            raw = base64.b64decode(data_b64)
        except Exception:
            return self._error_response("잘못된 base64 데이터")

        ext = os.path.splitext(filename)[1].lower()
        if ext not in ALLOWED_UPLOAD_EXTS:
            # Normalize disallowed/unknown extensions to ".bin" so the
            # allowlist actually constrains what lands on disk. (Previously
            # only an *empty* extension was replaced, which made the
            # allowlist a no-op for e.g. ".exe".)
            ext = ".bin"
        prefix = "img" if ext in IMAGE_EXTS else "file"
        # Collision-resistant name: timestamp + pid + a per-object nonce.
        safe_name = f"{prefix}_{int(time.time())}_{os.getpid()}_{id(raw) % 10000}{ext}"

        UPLOADS_DIR.mkdir(parents=True, exist_ok=True)
        filepath = UPLOADS_DIR / safe_name
        filepath.write_bytes(raw)

        is_image = ext in IMAGE_EXTS
        self._json_response({
            "path": str(filepath),
            "filename": safe_name,
            "originalName": filename,
            "size": len(raw),
            "isImage": is_image,
        }, 201)

    def _handle_send(self):
        """POST /api/send — forward a prompt to the controller FIFO."""
        body = self._read_body()
        # Guard the raw body before .get() (was a crash on None/non-dict).
        if not isinstance(body, dict):
            return self._error_response("prompt 필드가 필요합니다")
        prompt = body.get("prompt", "")
        if not isinstance(prompt, str) or not prompt.strip():
            return self._error_response("prompt 필드가 필요합니다")

        result, err = self._jobs_mod().send_to_fifo(
            prompt.strip(),
            cwd=body.get("cwd") or None,
            job_id=body.get("id") or None,
            images=body.get("images") or None,
            session=body.get("session") or None,
        )
        if err:
            self._error_response(err, 502)
        else:
            self._json_response(result, 201)

    def _handle_service_start(self):
        """POST /api/service/start — start the controller service."""
        ok, _ = self._jobs_mod().start_controller_service()
        if ok:
            self._json_response({"started": True})
        else:
            self._error_response("서비스 시작 실패", 500)

    def _handle_service_stop(self):
        """POST /api/service/stop — stop the controller service."""
        ok, err = self._jobs_mod().stop_controller_service()
        if ok:
            self._json_response({"stopped": True})
        else:
            self._error_response(err or "서비스 종료 실패", 500)

    def _handle_delete_job(self, job_id):
        """DELETE /api/jobs/:id — remove a job's meta/out files.

        Refuses with 409 while the recorded PID is still alive.
        """
        meta_file = LOGS_DIR / f"job_{job_id}.meta"
        out_file = LOGS_DIR / f"job_{job_id}.out"

        if not meta_file.exists():
            return self._error_response("작업을 찾을 수 없습니다", 404)

        meta = parse_meta_file(meta_file)
        if meta and meta.get("STATUS") == "running":
            pid = meta.get("PID")
            if pid:
                try:
                    os.kill(int(pid), 0)  # signal 0: liveness probe only
                    return self._error_response("실행 중인 작업은 삭제할 수 없습니다", 409)
                except (ProcessLookupError, ValueError, OSError):
                    pass  # stale/invalid PID — fall through and delete

        try:
            if meta_file.exists():
                meta_file.unlink()
            if out_file.exists():
                out_file.unlink()
            self._json_response({"deleted": True, "job_id": job_id})
        except OSError as e:
            self._error_response(f"삭제 실패: {e}", 500)

    def _handle_delete_completed_jobs(self):
        """DELETE /api/jobs — remove every job whose status is done/failed."""
        deleted = []
        for mf in list(LOGS_DIR.glob("job_*.meta")):
            meta = parse_meta_file(mf)
            if not meta:
                continue
            if meta.get("STATUS", "") not in ("done", "failed"):
                continue
            # NOTE: the old PID-liveness probe here was unreachable dead code
            # (it ran only for status "running", which this branch excludes).
            job_id = meta.get("JOB_ID", "")
            out_file = LOGS_DIR / f"job_{job_id}.out"
            try:
                mf.unlink()
                if out_file.exists():
                    out_file.unlink()
                deleted.append(job_id)
            except OSError:
                pass  # best effort: skip files we cannot remove
        self._json_response({"deleted": deleted, "count": len(deleted)})

    @staticmethod
    def _events_from_line(raw_line):
        """Parse one stream-json line into zero or more UI events.

        Returns [] for unparseable lines; "assistant" lines yield a merged
        text event plus one event per tool_use block, "result" lines yield
        a single result summary event.
        """
        events = []
        try:
            evt = json.loads(raw_line)
        except json.JSONDecodeError:
            return events
        evt_type = evt.get("type", "")
        if evt_type == "assistant":
            content = evt.get("message", {}).get("content", [])
            text_parts = [c.get("text", "") for c in content if c.get("type") == "text"]
            if text_parts:
                events.append({"type": "text", "text": "".join(text_parts)})
            for part in content:
                if part.get("type") == "tool_use":
                    events.append({
                        "type": "tool_use",
                        "tool": part.get("name", ""),
                        "input": str(part.get("input", ""))[:200],
                    })
        elif evt_type == "result":
            events.append({
                "type": "result",
                "result": evt.get("result", ""),
                "cost_usd": evt.get("total_cost_usd"),
                "duration_ms": evt.get("duration_ms"),
                "is_error": evt.get("is_error", False),
                "session_id": evt.get("session_id", ""),
            })
        return events

    def _handle_job_stream(self, job_id):
        """GET /api/jobs/:id/stream?offset=N — incrementally tail a job's output.

        Returns parsed events from byte `offset` onward, the new offset, and
        whether the job has finished.
        """
        out_file = LOGS_DIR / f"job_{job_id}.out"
        meta_file = LOGS_DIR / f"job_{job_id}.meta"

        if not meta_file.exists():
            return self._error_response("작업을 찾을 수 없습니다", 404)

        qs = parse_qs(urlparse(self.path).query)
        try:
            # A malformed offset used to raise ValueError (raw 500);
            # fall back to reading from the start instead.
            offset = max(0, int(qs.get("offset", [0])[0]))
        except (TypeError, ValueError):
            offset = 0

        if not out_file.exists():
            return self._json_response({"events": [], "offset": 0, "done": False})

        try:
            events = []
            with open(out_file, "r") as f:
                f.seek(offset)
                for raw_line in f:
                    # Cheap substring pre-filter before the full JSON parse.
                    if '"type":"assistant"' not in raw_line and '"type":"result"' not in raw_line:
                        continue
                    events.extend(self._events_from_line(raw_line))
                new_offset = f.tell()

            meta = parse_meta_file(meta_file)
            # parse_meta_file may return a falsy value; treat that as not-done.
            done = bool(meta) and meta.get("STATUS", "") in ("done", "failed")
            self._json_response({"events": events, "offset": new_offset, "done": done})
        except OSError as e:
            self._error_response(f"스트림 읽기 실패: {e}", 500)

    def _handle_job_checkpoints(self, job_id):
        """GET /api/jobs/:id/checkpoints — list a job's checkpoints."""
        checkpoints, err = self._ckpt_mod().get_job_checkpoints(job_id)
        if err:
            self._error_response(err, 404)
        else:
            self._json_response(checkpoints)

    def _handle_job_by_session(self, session_id):
        """GET /api/session/:id/job — find the job bound to a session id."""
        jobs = self._jobs_mod().get_all_jobs()
        matched = [j for j in jobs if j.get("session_id") == session_id]
        if not matched:
            return self._error_response(
                f"Session ID '{session_id[:8]}...'에 해당하는 작업을 찾을 수 없습니다", 404)
        self._json_response(matched[0])

    def _handle_job_rewind(self, job_id):
        """POST /api/jobs/:id/rewind — restart from a checkpoint with a new prompt."""
        body = self._read_body()
        # Guard the raw body before .get() (was a crash on None/non-dict).
        if not isinstance(body, dict):
            return self._error_response("checkpoint 필드가 필요합니다")
        checkpoint_hash = body.get("checkpoint", "")
        new_prompt = body.get("prompt", "")

        if not isinstance(checkpoint_hash, str) or not checkpoint_hash.strip():
            return self._error_response("checkpoint 필드가 필요합니다")
        if not isinstance(new_prompt, str) or not new_prompt.strip():
            return self._error_response("prompt 필드가 필요합니다")

        result, err = self._ckpt_mod().rewind_job(
            job_id, checkpoint_hash.strip(), new_prompt.strip()
        )
        if err:
            self._error_response(err, 400 if "찾을 수 없습니다" in err else 500)
        else:
            self._json_response(result, 201)
|
|
@@ -0,0 +1,132 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Session 목록 HTTP 핸들러 Mixin
|
|
3
|
+
|
|
4
|
+
Claude Code 네이티브 세션 + history.log + job meta 파일을 통합하여 세션 목록을 제공한다.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import json
|
|
8
|
+
import os
|
|
9
|
+
|
|
10
|
+
from config import LOGS_DIR, SESSIONS_DIR, CLAUDE_PROJECTS_DIR
|
|
11
|
+
from utils import parse_meta_file, cwd_to_project_dir, scan_claude_sessions
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class SessionHandlerMixin:
    """Session-list HTTP handler mixin.

    Builds a unified session list from three sources, in priority order:
    Claude Code native session files, controller job ``.meta`` files, and
    ``history.log``.
    """

    @staticmethod
    def _meta_sort_key(meta_file):
        """Numeric job id parsed from a ``job_<id>.meta`` filename.

        Returns -1 for malformed names instead of raising, so one stray
        file cannot 500 the whole session listing (the original int() call
        was unguarded).
        """
        try:
            return int(meta_file.stem.split("_", 1)[1])
        except (IndexError, ValueError):
            return -1

    @staticmethod
    def _read_job_cost(job_id):
        """Return total_cost_usd from the last "result" event in a job's
        .out file, or None when unavailable."""
        out_file = LOGS_DIR / f"job_{job_id}.out"
        if not out_file.exists():
            return None
        cost_usd = None
        try:
            # Context manager closes the handle promptly (the original
            # iterated a bare open() and leaked the file object until GC).
            with open(out_file, "r") as f:
                for line in f:
                    try:
                        obj = json.loads(line.strip())
                    except json.JSONDecodeError:
                        continue
                    if obj.get("type") == "result":
                        cost_usd = obj.get("total_cost_usd")  # last one wins
        except OSError:
            pass
        return cost_usd

    def _handle_sessions(self, filter_cwd=None):
        """GET /api/sessions — return up to 50 sessions, newest first.

        filter_cwd: when given, only sessions whose recorded cwd matches
        (history.log entries carry no cwd and are filtered out).
        """
        seen = {}

        # 0) Claude Code native sessions.
        if filter_cwd:
            proj_name = cwd_to_project_dir(filter_cwd)
            project_dirs = [CLAUDE_PROJECTS_DIR / proj_name]
        elif CLAUDE_PROJECTS_DIR.exists():
            all_dirs = sorted(
                (d for d in CLAUDE_PROJECTS_DIR.iterdir() if d.is_dir()),
                key=lambda d: d.stat().st_mtime,
                reverse=True,
            )
            project_dirs = all_dirs[:15]  # most recently touched projects only
        else:
            project_dirs = []

        for pd in project_dirs:
            for sid, info in scan_claude_sessions(pd, limit=60).items():
                seen.setdefault(sid, info)

        # 1) Enrich/supplement from job meta files (newest job first).
        if LOGS_DIR.exists():
            meta_files = sorted(
                LOGS_DIR.glob("job_*.meta"),
                key=self._meta_sort_key,
                reverse=True,
            )
            for mf in meta_files:
                meta = parse_meta_file(mf)
                if not meta:
                    continue
                sid = meta.get("SESSION_ID", "").strip()
                if not sid:
                    continue

                status = meta.get("STATUS", "unknown")
                if status == "running" and meta.get("PID"):
                    # A "running" meta whose PID is gone means the job died
                    # without updating its meta — report it as done.
                    try:
                        os.kill(int(meta["PID"]), 0)
                    except (ProcessLookupError, ValueError, OSError):
                        status = "done"

                job_id = meta.get("JOB_ID", "")
                cost_usd = (
                    self._read_job_cost(job_id)
                    if status in ("done", "failed") else None
                )

                entry = {
                    "session_id": sid,
                    "job_id": job_id,
                    "prompt": meta.get("PROMPT", ""),
                    "timestamp": meta.get("CREATED_AT", ""),
                    "status": status,
                    "cwd": meta.get("CWD", ""),
                    "cost_usd": cost_usd,
                    "slug": "",
                }

                if sid not in seen:
                    seen[sid] = entry
                else:
                    existing = seen[sid]
                    if existing.get("job_id") is None:
                        # Native/history entry without job info: graft it on.
                        existing.update({
                            "job_id": job_id,
                            "status": status,
                            "cost_usd": cost_usd,
                        })
                    else:
                        # Keep whichever entry belongs to the newer job.
                        try:
                            if int(job_id) > int(existing.get("job_id", 0)):
                                seen[sid] = entry
                        except (ValueError, TypeError):
                            pass

        # 2) Fill remaining gaps from history.log ("ts|sid|prompt" lines).
        history_file = SESSIONS_DIR / "history.log"
        if history_file.exists():
            try:
                for line in history_file.read_text("utf-8").strip().split("\n"):
                    parts = line.split("|", 2)
                    if len(parts) < 2:
                        continue
                    ts, sid = parts[0].strip(), parts[1].strip()
                    if not sid:
                        continue
                    prompt = parts[2].strip() if len(parts) > 2 else ""
                    seen.setdefault(sid, {
                        "session_id": sid, "job_id": None,
                        "prompt": prompt, "timestamp": ts,
                        "status": "done", "cwd": None,
                        "cost_usd": None, "slug": "",
                    })
            except OSError:
                pass

        # Apply the cwd filter.
        if filter_cwd:
            norm = os.path.normpath(filter_cwd)
            seen = {
                sid: s for sid, s in seen.items()
                if s.get("cwd") and os.path.normpath(s["cwd"]) == norm
            }

        sessions = sorted(
            seen.values(), key=lambda s: s.get("timestamp") or "", reverse=True
        )
        self._json_response(sessions[:50])
|
package/web/jobs.py
CHANGED
|
@@ -108,7 +108,15 @@ def get_job_result(job_id):
|
|
|
108
108
|
|
|
109
109
|
meta = parse_meta_file(meta_file)
|
|
110
110
|
if meta.get("STATUS") == "running":
|
|
111
|
-
|
|
111
|
+
# 프로세스가 실제로 살아있는지 확인 — meta가 running이지만 PID가 죽었으면 done 처리
|
|
112
|
+
pid = meta.get("PID")
|
|
113
|
+
if pid:
|
|
114
|
+
try:
|
|
115
|
+
os.kill(int(pid), 0)
|
|
116
|
+
except (ProcessLookupError, ValueError, OSError):
|
|
117
|
+
meta["STATUS"] = "done"
|
|
118
|
+
if meta.get("STATUS") == "running":
|
|
119
|
+
return {"status": "running", "result": None}, None
|
|
112
120
|
|
|
113
121
|
if not out_file.exists():
|
|
114
122
|
return None, "출력 파일이 없습니다"
|