claude-controller 0.1.2 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/bin/autoloop.sh +382 -0
- package/bin/ctl +1189 -0
- package/bin/native-app.py +6 -3
- package/bin/watchdog.sh +357 -0
- package/cognitive/__init__.py +14 -0
- package/cognitive/__pycache__/__init__.cpython-314.pyc +0 -0
- package/cognitive/__pycache__/dispatcher.cpython-314.pyc +0 -0
- package/cognitive/__pycache__/evaluator.cpython-314.pyc +0 -0
- package/cognitive/__pycache__/goal_engine.cpython-314.pyc +0 -0
- package/cognitive/__pycache__/learning.cpython-314.pyc +0 -0
- package/cognitive/__pycache__/orchestrator.cpython-314.pyc +0 -0
- package/cognitive/__pycache__/planner.cpython-314.pyc +0 -0
- package/cognitive/dispatcher.py +192 -0
- package/cognitive/evaluator.py +289 -0
- package/cognitive/goal_engine.py +232 -0
- package/cognitive/learning.py +189 -0
- package/cognitive/orchestrator.py +303 -0
- package/cognitive/planner.py +207 -0
- package/cognitive/prompts/analyst.md +31 -0
- package/cognitive/prompts/coder.md +22 -0
- package/cognitive/prompts/reviewer.md +33 -0
- package/cognitive/prompts/tester.md +21 -0
- package/cognitive/prompts/writer.md +25 -0
- package/config.sh +6 -1
- package/dag/__init__.py +5 -0
- package/dag/__pycache__/__init__.cpython-314.pyc +0 -0
- package/dag/__pycache__/graph.cpython-314.pyc +0 -0
- package/dag/graph.py +222 -0
- package/lib/jobs.sh +12 -1
- package/package.json +11 -5
- package/postinstall.sh +1 -1
- package/service/controller.sh +43 -11
- package/web/audit.py +122 -0
- package/web/checkpoint.py +80 -0
- package/web/config.py +2 -5
- package/web/handler.py +634 -473
- package/web/handler_fs.py +153 -0
- package/web/handler_goals.py +203 -0
- package/web/handler_jobs.py +372 -0
- package/web/handler_memory.py +203 -0
- package/web/handler_sessions.py +132 -0
- package/web/jobs.py +585 -13
- package/web/personas.py +419 -0
- package/web/pipeline.py +981 -0
- package/web/presets.py +506 -0
- package/web/projects.py +246 -0
- package/web/static/api.js +141 -0
- package/web/static/app.js +25 -1937
- package/web/static/attachments.js +144 -0
- package/web/static/base.css +497 -0
- package/web/static/context.js +204 -0
- package/web/static/dirs.js +246 -0
- package/web/static/form.css +763 -0
- package/web/static/goals.css +363 -0
- package/web/static/goals.js +300 -0
- package/web/static/i18n.js +625 -0
- package/web/static/index.html +215 -13
- package/web/static/{styles.css → jobs.css} +746 -1141
- package/web/static/jobs.js +1270 -0
- package/web/static/memoryview.js +117 -0
- package/web/static/personas.js +228 -0
- package/web/static/pipeline.css +338 -0
- package/web/static/pipelines.js +487 -0
- package/web/static/presets.js +244 -0
- package/web/static/send.js +135 -0
- package/web/static/settings-style.css +291 -0
- package/web/static/settings.js +81 -0
- package/web/static/stream.js +534 -0
- package/web/static/utils.js +131 -0
- package/web/webhook.py +210 -0
|
@@ -0,0 +1,372 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Job 관련 HTTP 핸들러 Mixin
|
|
3
|
+
|
|
4
|
+
포함 엔드포인트:
|
|
5
|
+
- GET /api/jobs, /api/jobs/:id/result, /api/jobs/:id/stream, /api/jobs/:id/checkpoints
|
|
6
|
+
- GET /api/session/:id/job
|
|
7
|
+
- POST /api/send, /api/upload, /api/service/start, /api/service/stop
|
|
8
|
+
- POST /api/jobs/:id/rewind
|
|
9
|
+
- DELETE /api/jobs, /api/jobs/:id
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
import base64
|
|
13
|
+
import json
|
|
14
|
+
import os
|
|
15
|
+
import time
|
|
16
|
+
from urllib.parse import urlparse, parse_qs
|
|
17
|
+
|
|
18
|
+
from config import LOGS_DIR, UPLOADS_DIR
|
|
19
|
+
from utils import parse_meta_file
|
|
20
|
+
|
|
21
|
+
# Extensions treated as images by the upload endpoint.
IMAGE_EXTS = {".png", ".jpg", ".jpeg", ".gif", ".webp", ".bmp"}

# Upload allow-list: images plus common text, code, config, document
# and archive formats.
ALLOWED_UPLOAD_EXTS = IMAGE_EXTS.union({
    # plain text / data
    ".txt", ".md", ".csv", ".json", ".xml", ".yaml", ".yml", ".toml",
    # web / scripting source
    ".py", ".js", ".ts", ".jsx", ".tsx", ".html", ".css", ".scss",
    # shells
    ".sh", ".bash", ".zsh", ".fish",
    # compiled-language source
    ".c", ".cpp", ".h", ".hpp", ".java", ".kt", ".go", ".rs", ".rb",
    ".swift", ".m", ".r", ".sql", ".graphql",
    # logs and configuration
    ".log", ".env", ".conf", ".ini", ".cfg",
    # office documents
    ".pdf", ".doc", ".docx", ".xls", ".xlsx", ".pptx",
    # archives
    ".zip", ".tar", ".gz",
})
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class JobHandlerMixin:

    def _handle_jobs(self, cwd_filter=None, page=1, limit=10):
        """Respond with a paginated job list, optionally filtered by cwd."""
        jobs = self._jobs_mod().get_all_jobs(cwd_filter=cwd_filter)
        total = len(jobs)
        # Clamp limit to [1, 100] and page to [1, pages].
        limit = min(max(limit, 1), 100)
        pages = max(1, -(-total // limit))  # ceil division
        page = min(max(page, 1), pages)
        start = limit * (page - 1)
        self._json_response({
            "jobs": jobs[start:start + limit],
            "total": total,
            "page": page,
            "limit": limit,
            "pages": pages,
        })
|
|
51
|
+
|
|
52
|
+
def _handle_job_result(self, job_id):
    """Return the stored result of one job, or 404 when it is unknown."""
    result, err = self._jobs_mod().get_job_result(job_id)
    if not err:
        self._json_response(result)
        return
    self._error_response(err, 404, code="JOB_NOT_FOUND")
|
|
58
|
+
|
|
59
|
+
def _handle_upload(self):
    """Store a base64-encoded upload under UPLOADS_DIR and describe it."""
    body = self._read_body()
    encoded = body.get("data", "")
    original_name = body.get("filename", "file")

    if not encoded:
        return self._error_response("data 필드가 필요합니다", code="MISSING_FIELD")
    # Strip a data-URL prefix ("data:...;base64,...") when present.
    if "," in encoded:
        encoded = encoded.split(",", 1)[1]

    try:
        payload = base64.b64decode(encoded)
    except Exception:
        return self._error_response("잘못된 base64 데이터", code="INVALID_DATA")

    ext = os.path.splitext(original_name)[1].lower()
    if ext not in ALLOWED_UPLOAD_EXTS:
        return self._error_response(
            f"허용되지 않는 파일 형식입니다: {ext or '(확장자 없음)'}",
            400, code="INVALID_FILE_TYPE")

    is_image = ext in IMAGE_EXTS
    # Server-generated name: timestamp + pid + a small discriminator.
    prefix = "img" if is_image else "file"
    safe_name = f"{prefix}_{int(time.time())}_{os.getpid()}_{id(payload) % 10000}{ext}"

    UPLOADS_DIR.mkdir(parents=True, exist_ok=True)
    dest = UPLOADS_DIR / safe_name
    dest.write_bytes(payload)

    self._json_response({
        "path": str(dest),
        "filename": safe_name,
        "originalName": original_name,
        "size": len(payload),
        "isImage": is_image,
    }, 201)
|
|
94
|
+
|
|
95
|
+
def _handle_send(self):
    """Queue a prompt for execution via the controller FIFO."""
    body = self._read_body()
    prompt = body.get("prompt", "").strip()
    if not prompt:
        return self._error_response("prompt 필드가 필요합니다", code="MISSING_FIELD")

    # Persona support: prepend the persona's system prompt when requested.
    persona_id = body.get("persona")
    if persona_id:
        import personas as _p
        prompt = _p.apply_persona_to_prompt(persona_id, prompt)

    # depends_on: predecessor job IDs, either a list or a "42,43" string.
    deps = body.get("depends_on")
    if isinstance(deps, str):
        deps = [part.strip() for part in deps.split(",") if part.strip()]

    result, err = self._jobs_mod().send_to_fifo(
        prompt,
        cwd=body.get("cwd") or None,
        job_id=body.get("id") or None,
        images=body.get("images") or None,
        session=body.get("session") or None,
        depends_on=deps or None,
    )
    if err:
        self._error_response(err, 502, code="SEND_FAILED")
    else:
        self._json_response(result, 201)
|
|
124
|
+
|
|
125
|
+
def _handle_service_start(self):
    """Start the controller service and report success or failure."""
    ok, _ = self._jobs_mod().start_controller_service()
    if ok:
        self._json_response({"started": True})
        return
    self._error_response("서비스 시작 실패", 500, code="SERVICE_START_FAILED")
|
|
131
|
+
|
|
132
|
+
def _handle_service_stop(self):
    """Stop the controller service and report success or failure."""
    ok, err = self._jobs_mod().stop_controller_service()
    if ok:
        self._json_response({"stopped": True})
        return
    self._error_response(err or "서비스 종료 실패", 500, code="SERVICE_STOP_FAILED")
|
|
138
|
+
|
|
139
|
+
def _handle_delete_job(self, job_id):
    """Delete a job's meta/out files; refuse while the job is still running."""
    meta_path = LOGS_DIR / f"job_{job_id}.meta"
    out_path = LOGS_DIR / f"job_{job_id}.out"

    if not meta_path.exists():
        return self._error_response("작업을 찾을 수 없습니다", 404, code="JOB_NOT_FOUND")

    meta = parse_meta_file(meta_path)
    if meta and meta.get("STATUS") == "running":
        pid = meta.get("PID")
        if pid:
            try:
                os.kill(int(pid), 0)  # signal 0: existence probe only
                return self._error_response("실행 중인 작업은 삭제할 수 없습니다", 409, code="JOB_RUNNING")
            except (ProcessLookupError, ValueError, OSError):
                pass  # stale PID — the process is gone, allow deletion

    try:
        for path in (meta_path, out_path):
            if path.exists():
                path.unlink()
        self._json_response({"deleted": True, "job_id": job_id})
    except OSError as e:
        self._error_response(f"삭제 실패: {e}", 500, code="DELETE_FAILED")
|
|
164
|
+
|
|
165
|
+
def _handle_delete_completed_jobs(self):
    """Remove meta/out files for every job whose status is done or failed."""
    deleted = []
    for meta_path in list(LOGS_DIR.glob("job_*.meta")):
        meta = parse_meta_file(meta_path)
        if not meta or meta.get("STATUS", "") not in ("done", "failed"):
            continue
        job_id = meta.get("JOB_ID", "")
        out_path = LOGS_DIR / f"job_{job_id}.out"
        try:
            meta_path.unlink()
            if out_path.exists():
                out_path.unlink()
            deleted.append(job_id)
        except OSError:
            pass  # best effort — skip files we cannot remove
    self._json_response({"deleted": deleted, "count": len(deleted)})
|
|
183
|
+
|
|
184
|
+
@staticmethod
|
|
185
|
+
def _parse_stream_events(out_file, offset):
|
|
186
|
+
"""out 파일에서 offset 이후의 스트림 이벤트를 파싱한다. (events, new_offset) 반환."""
|
|
187
|
+
events = []
|
|
188
|
+
new_offset = offset
|
|
189
|
+
if not out_file.exists():
|
|
190
|
+
return events, new_offset
|
|
191
|
+
try:
|
|
192
|
+
with open(out_file, "r") as f:
|
|
193
|
+
f.seek(offset)
|
|
194
|
+
for raw_line in f:
|
|
195
|
+
if '"type":"assistant"' not in raw_line and '"type":"result"' not in raw_line:
|
|
196
|
+
continue
|
|
197
|
+
try:
|
|
198
|
+
evt = json.loads(raw_line)
|
|
199
|
+
evt_type = evt.get("type", "")
|
|
200
|
+
if evt_type == "assistant":
|
|
201
|
+
msg = evt.get("message", {})
|
|
202
|
+
content = msg.get("content", [])
|
|
203
|
+
text_parts = [c.get("text", "") for c in content if c.get("type") == "text"]
|
|
204
|
+
if text_parts:
|
|
205
|
+
events.append({"type": "text", "text": "".join(text_parts)})
|
|
206
|
+
for tp in content:
|
|
207
|
+
if tp.get("type") == "tool_use":
|
|
208
|
+
events.append({
|
|
209
|
+
"type": "tool_use",
|
|
210
|
+
"tool": tp.get("name", ""),
|
|
211
|
+
"input": str(tp.get("input", ""))[:200]
|
|
212
|
+
})
|
|
213
|
+
elif evt_type == "result":
|
|
214
|
+
result_evt = {
|
|
215
|
+
"type": "result",
|
|
216
|
+
"result": evt.get("result", ""),
|
|
217
|
+
"cost_usd": evt.get("total_cost_usd"),
|
|
218
|
+
"duration_ms": evt.get("duration_ms"),
|
|
219
|
+
"is_error": evt.get("is_error", False),
|
|
220
|
+
"session_id": evt.get("session_id", "")
|
|
221
|
+
}
|
|
222
|
+
if result_evt["is_error"]:
|
|
223
|
+
from jobs import classify_error
|
|
224
|
+
result_evt["user_error"] = classify_error(evt.get("result", ""))
|
|
225
|
+
events.append(result_evt)
|
|
226
|
+
except json.JSONDecodeError:
|
|
227
|
+
continue
|
|
228
|
+
new_offset = f.tell()
|
|
229
|
+
except OSError:
|
|
230
|
+
pass
|
|
231
|
+
return events, new_offset
|
|
232
|
+
|
|
233
|
+
def _handle_job_stream(self, job_id):
    """Stream job output.

    Content-negotiates on the Accept header: SSE push when the client
    accepts text/event-stream, otherwise a JSON poll response of the
    form {"events": [...], "offset": int, "done": bool}.
    """
    if "text/event-stream" in self.headers.get("Accept", ""):
        return self._handle_job_stream_sse(job_id)

    out_file = LOGS_DIR / f"job_{job_id}.out"
    meta_file = LOGS_DIR / f"job_{job_id}.meta"

    if not meta_file.exists():
        return self._error_response("작업을 찾을 수 없습니다", 404, code="JOB_NOT_FOUND")

    parsed = urlparse(self.path)
    qs = parse_qs(parsed.query)
    offset = self._safe_int(qs.get("offset", [0])[0], 0)

    if not out_file.exists():
        # Job accepted but no output yet — tell the client to keep polling.
        return self._json_response({"events": [], "offset": 0, "done": False})

    try:
        events, new_offset = self._parse_stream_events(out_file, offset)
        # parse_meta_file can return None on a corrupt/partial meta file
        # (other handlers in this module guard for that); fall back to an
        # empty dict instead of raising AttributeError.
        meta = parse_meta_file(meta_file) or {}
        done = meta.get("STATUS", "") in ("done", "failed")
        self._json_response({"events": events, "offset": new_offset, "done": done})
    except OSError as e:
        self._error_response(f"스트림 읽기 실패: {e}", 500, code="STREAM_READ_ERROR")
|
|
259
|
+
|
|
260
|
+
def _handle_job_stream_sse(self, job_id):
    """Push job stream events to the client over SSE until the job ends.

    Emits one `data:` frame per event, an `event: done` frame on
    completion, and comment heartbeats during quiet periods.
    """
    import time as _time

    out_file = LOGS_DIR / f"job_{job_id}.out"
    meta_file = LOGS_DIR / f"job_{job_id}.meta"

    if not meta_file.exists():
        return self._error_response("작업을 찾을 수 없습니다", 404, code="JOB_NOT_FOUND")

    self.send_response(200)
    self.send_header("Content-Type", "text/event-stream; charset=utf-8")
    self.send_header("Cache-Control", "no-cache")
    self.send_header("X-Accel-Buffering", "no")  # disable proxy buffering
    self._set_cors_headers()
    self.end_headers()

    offset = 0
    last_activity = _time.time()

    try:
        while True:
            events, offset = self._parse_stream_events(out_file, offset)
            for evt in events:
                data = json.dumps(evt, ensure_ascii=False)
                self.wfile.write(f"data: {data}\n\n".encode("utf-8"))
            if events:
                self.wfile.flush()
                last_activity = _time.time()

            # Completion check. parse_meta_file can return None on a
            # corrupt/partial read — treat that as "no status yet"
            # instead of raising AttributeError mid-stream.
            meta = parse_meta_file(meta_file) or {}
            status = meta.get("STATUS", "")
            if status == "running" and meta.get("PID"):
                try:
                    os.kill(int(meta["PID"]), 0)  # signal 0: existence probe
                except (ProcessLookupError, ValueError, OSError):
                    # Worker vanished without updating meta — close out.
                    status = "done"

            if status in ("done", "failed"):
                # Drain events written after the last read, then signal done.
                final_events, _ = self._parse_stream_events(out_file, offset)
                for evt in final_events:
                    data = json.dumps(evt, ensure_ascii=False)
                    self.wfile.write(f"data: {data}\n\n".encode("utf-8"))
                self.wfile.write(f"event: done\ndata: {{\"status\":\"{status}\"}}\n\n".encode("utf-8"))
                self.wfile.flush()
                break

            # Heartbeat — keepalive comment after 15s of silence.
            now = _time.time()
            if now - last_activity > 15:
                self.wfile.write(b": heartbeat\n\n")
                self.wfile.flush()
                last_activity = now

            _time.sleep(0.3)

    except (BrokenPipeError, ConnectionResetError, OSError):
        pass  # client disconnected
|
|
323
|
+
|
|
324
|
+
def _handle_job_checkpoints(self, job_id):
    """List the checkpoints recorded for a job."""
    checkpoints, err = self._ckpt_mod().get_job_checkpoints(job_id)
    if not err:
        self._json_response(checkpoints)
        return
    self._error_response(err, 404, code="JOB_NOT_FOUND")
|
|
330
|
+
|
|
331
|
+
def _handle_job_by_session(self, session_id):
    """Return the first job bound to *session_id*, or 404."""
    for job in self._jobs_mod().get_all_jobs():
        if job.get("session_id") == session_id:
            self._json_response(job)
            return
    self._error_response(
        f"Session ID '{session_id[:8]}...'에 해당하는 작업을 찾을 수 없습니다", 404, code="SESSION_NOT_FOUND")
|
|
338
|
+
|
|
339
|
+
def _handle_job_diff(self, job_id):
    """Diff two checkpoints of a job (query params: from, to)."""
    qs = parse_qs(urlparse(self.path).query)
    from_hash = qs.get("from", [""])[0].strip()
    to_hash = qs.get("to", [""])[0].strip()

    if not from_hash:
        return self._error_response("from 파라미터가 필요합니다", code="MISSING_FIELD")

    result, err = self._ckpt_mod().diff_checkpoints(job_id, from_hash, to_hash or None)
    if not err:
        self._json_response(result)
        return
    # 404 when a checkpoint is missing, 500 for anything else.
    self._error_response(err, 404 if "찾을 수 없습니다" in err else 500, code="DIFF_FAILED")
|
|
354
|
+
|
|
355
|
+
def _handle_job_rewind(self, job_id):
    """Rewind a job to a checkpoint and re-run it with a new prompt."""
    body = self._read_body()
    checkpoint_hash = body.get("checkpoint", "").strip()
    new_prompt = body.get("prompt", "").strip()

    # Both fields are mandatory; report the first one that is missing.
    for value, message in ((checkpoint_hash, "checkpoint 필드가 필요합니다"),
                           (new_prompt, "prompt 필드가 필요합니다")):
        if not value:
            return self._error_response(message, code="MISSING_FIELD")

    result, err = self._ckpt_mod().rewind_job(job_id, checkpoint_hash, new_prompt)
    if not err:
        self._json_response(result, 201)
    elif "찾을 수 없습니다" in err:
        self._error_response(err, 400, code="CHECKPOINT_NOT_FOUND")
    else:
        self._error_response(err, 500, code="REWIND_FAILED")
|
|
@@ -0,0 +1,203 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Memory 관련 HTTP 핸들러 Mixin
|
|
3
|
+
|
|
4
|
+
포함 엔드포인트:
|
|
5
|
+
- GET /api/memory # 메모리 검색 (query, type, tags, project 파라미터)
|
|
6
|
+
- GET /api/memory/:id # 메모리 상세
|
|
7
|
+
- POST /api/memory # 메모리 추가
|
|
8
|
+
- PUT /api/memory/:id/update # 메모리 수정 (POST로 처리)
|
|
9
|
+
- DELETE /api/memory/:id # 메모리 삭제
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
import sys
|
|
13
|
+
from urllib.parse import parse_qs
|
|
14
|
+
|
|
15
|
+
from config import CONTROLLER_DIR, DATA_DIR
|
|
16
|
+
|
|
17
|
+
# Make the memory package importable from the controller directory.
if str(CONTROLLER_DIR) not in sys.path:
    sys.path.insert(0, str(CONTROLLER_DIR))

from memory.store import MemoryStore, MemoryType

# Process-wide singleton MemoryStore, created lazily by _get_store().
_memory_store = None

# All accepted MemoryType string values, used for request validation.
_VALID_TYPES = [t.value for t in MemoryType]
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def _get_store():
    """Return the process-wide MemoryStore, creating it on first use."""
    global _memory_store
    if _memory_store is None:
        store_dir = DATA_DIR / "memory"
        _memory_store = MemoryStore(str(store_dir))
    return _memory_store
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class MemoryHandlerMixin:

    def _handle_list_memory(self, parsed):
        """GET /api/memory — search or list memories.

        Query parameters:
        - query: keyword search string
        - type: memory type filter (decision, pattern, failure, context)
        - tags: comma-separated tag filter
        - project: project scope filter
        - limit: maximum number of results (default 20)
        """
        qs = parse_qs(parsed.query)
        query, type_str, tags_str, project = (
            qs.get(key, [None])[0] for key in ("query", "type", "tags", "project"))

        try:
            limit = int(qs.get("limit", [20])[0])
        except (ValueError, TypeError):
            limit = 20
        if limit < 1:
            limit = 20

        # Validate the type filter before converting it to the enum.
        mem_type = None
        if type_str:
            if type_str not in _VALID_TYPES:
                return self._error_response(
                    f"유효하지 않은 type: {type_str}. 가능한 값: {_VALID_TYPES}",
                    400, code="INVALID_PARAM")
            mem_type = MemoryType(type_str)

        tags = [t.strip() for t in tags_str.split(",") if t.strip()] if tags_str else None

        store = _get_store()
        if query:
            results = store.search(
                query=query, memory_type=mem_type,
                tags=tags, project=project, limit=limit)
        else:
            results = store.list_all(memory_type=mem_type, limit=limit)
            # list_all has no project/tag filters — apply both manually.
            if project:
                results = [m for m in results if not m.get("project") or m["project"] == project]
            if tags:
                wanted = set(tags)
                results = [m for m in results if wanted & set(m.get("tags", []))]

        self._json_response({"memories": results, "count": len(results)})
|
|
88
|
+
|
|
89
|
+
def _handle_get_memory(self, mem_id):
    """GET /api/memory/:id — memory detail."""
    mem = _get_store().get(mem_id)
    if mem is not None:
        self._json_response(mem)
        return
    self._error_response(
        "메모리를 찾을 수 없습니다", 404, code="MEMORY_NOT_FOUND")
|
|
96
|
+
|
|
97
|
+
def _handle_create_memory(self):
    """POST /api/memory — create a memory entry.

    Request body:
    - type: string (required) — decision, pattern, failure, context
    - title: string (required)
    - content: string (required)
    - tags: string[] (optional, default [])
    - project: string (optional)
    - goal_id: string (optional)
    """
    body = self._read_body()

    # Required field: type, and it must be a known MemoryType value.
    type_str = body.get("type", "").strip()
    if not type_str:
        return self._error_response(
            "type 필드가 필요합니다", 400, code="MISSING_FIELD")
    if type_str not in _VALID_TYPES:
        return self._error_response(
            f"유효하지 않은 type: {type_str}. 가능한 값: {_VALID_TYPES}",
            400, code="INVALID_PARAM")

    # Required fields: title and content (reported in that order).
    title = body.get("title", "").strip()
    if not title:
        return self._error_response(
            "title 필드가 필요합니다", 400, code="MISSING_FIELD")
    content = body.get("content", "").strip()
    if not content:
        return self._error_response(
            "content 필드가 필요합니다", 400, code="MISSING_FIELD")

    tags = body.get("tags", [])
    if not isinstance(tags, list):
        return self._error_response(
            "tags는 문자열 배열이어야 합니다", 400, code="INVALID_PARAM")

    mem = _get_store().add(
        memory_type=MemoryType(type_str),
        title=title,
        content=content,
        tags=tags,
        project=body.get("project"),
        goal_id=body.get("goal_id"),
    )
    self._json_response(mem, 201)
|
|
147
|
+
|
|
148
|
+
def _handle_update_memory(self, mem_id):
    """POST /api/memory/:id/update — partially update a memory.

    Request body (all optional): title, content, tags, project.
    At least one field must be supplied.
    """
    store = _get_store()
    if store.get(mem_id) is None:
        return self._error_response(
            "메모리를 찾을 수 없습니다", 404, code="MEMORY_NOT_FOUND")

    body = self._read_body()
    changes = {}

    # title/content must be non-empty strings when provided.
    if "title" in body:
        title = body["title"].strip() if isinstance(body["title"], str) else ""
        if not title:
            return self._error_response(
                "title은 빈 문자열일 수 없습니다", 400, code="INVALID_PARAM")
        changes["title"] = title

    if "content" in body:
        content = body["content"].strip() if isinstance(body["content"], str) else ""
        if not content:
            return self._error_response(
                "content는 빈 문자열일 수 없습니다", 400, code="INVALID_PARAM")
        changes["content"] = content

    if "tags" in body:
        if not isinstance(body["tags"], list):
            return self._error_response(
                "tags는 문자열 배열이어야 합니다", 400, code="INVALID_PARAM")
        changes["tags"] = body["tags"]

    if "project" in body:
        changes["project"] = body["project"]

    if not changes:
        return self._error_response(
            "변경할 필드가 없습니다. title, content, tags, project 중 하나를 지정하세요.",
            400, code="NO_CHANGES")

    self._json_response(store.update(mem_id, **changes))
|
|
196
|
+
|
|
197
|
+
def _handle_delete_memory(self, mem_id):
    """DELETE /api/memory/:id — remove a memory entry."""
    if _get_store().delete(mem_id):
        self._json_response({"deleted": True, "id": mem_id})
        return
    self._error_response(
        "메모리를 찾을 수 없습니다", 404, code="MEMORY_NOT_FOUND")
|