claude-controller 0.1.2 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/bin/autoloop.sh +382 -0
- package/bin/ctl +1189 -0
- package/bin/native-app.py +6 -3
- package/bin/watchdog.sh +357 -0
- package/cognitive/__init__.py +14 -0
- package/cognitive/__pycache__/__init__.cpython-314.pyc +0 -0
- package/cognitive/__pycache__/dispatcher.cpython-314.pyc +0 -0
- package/cognitive/__pycache__/evaluator.cpython-314.pyc +0 -0
- package/cognitive/__pycache__/goal_engine.cpython-314.pyc +0 -0
- package/cognitive/__pycache__/learning.cpython-314.pyc +0 -0
- package/cognitive/__pycache__/orchestrator.cpython-314.pyc +0 -0
- package/cognitive/__pycache__/planner.cpython-314.pyc +0 -0
- package/cognitive/dispatcher.py +192 -0
- package/cognitive/evaluator.py +289 -0
- package/cognitive/goal_engine.py +232 -0
- package/cognitive/learning.py +189 -0
- package/cognitive/orchestrator.py +303 -0
- package/cognitive/planner.py +207 -0
- package/cognitive/prompts/analyst.md +31 -0
- package/cognitive/prompts/coder.md +22 -0
- package/cognitive/prompts/reviewer.md +33 -0
- package/cognitive/prompts/tester.md +21 -0
- package/cognitive/prompts/writer.md +25 -0
- package/config.sh +6 -1
- package/dag/__init__.py +5 -0
- package/dag/__pycache__/__init__.cpython-314.pyc +0 -0
- package/dag/__pycache__/graph.cpython-314.pyc +0 -0
- package/dag/graph.py +222 -0
- package/lib/jobs.sh +12 -1
- package/package.json +11 -5
- package/postinstall.sh +1 -1
- package/service/controller.sh +43 -11
- package/web/audit.py +122 -0
- package/web/checkpoint.py +80 -0
- package/web/config.py +2 -5
- package/web/handler.py +634 -473
- package/web/handler_fs.py +153 -0
- package/web/handler_goals.py +203 -0
- package/web/handler_jobs.py +372 -0
- package/web/handler_memory.py +203 -0
- package/web/handler_sessions.py +132 -0
- package/web/jobs.py +585 -13
- package/web/personas.py +419 -0
- package/web/pipeline.py +981 -0
- package/web/presets.py +506 -0
- package/web/projects.py +246 -0
- package/web/static/api.js +141 -0
- package/web/static/app.js +25 -1937
- package/web/static/attachments.js +144 -0
- package/web/static/base.css +497 -0
- package/web/static/context.js +204 -0
- package/web/static/dirs.js +246 -0
- package/web/static/form.css +763 -0
- package/web/static/goals.css +363 -0
- package/web/static/goals.js +300 -0
- package/web/static/i18n.js +625 -0
- package/web/static/index.html +215 -13
- package/web/static/{styles.css → jobs.css} +746 -1141
- package/web/static/jobs.js +1270 -0
- package/web/static/memoryview.js +117 -0
- package/web/static/personas.js +228 -0
- package/web/static/pipeline.css +338 -0
- package/web/static/pipelines.js +487 -0
- package/web/static/presets.js +244 -0
- package/web/static/send.js +135 -0
- package/web/static/settings-style.css +291 -0
- package/web/static/settings.js +81 -0
- package/web/static/stream.js +534 -0
- package/web/static/utils.js +131 -0
- package/web/webhook.py +210 -0
package/web/pipeline.py
ADDED
|
@@ -0,0 +1,981 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Pipeline Engine — 자기 진화형 자동화
|
|
3
|
+
|
|
4
|
+
핵심 기능:
|
|
5
|
+
1. on/off 자동화 (기존)
|
|
6
|
+
2. 컨텍스트 주입 — 실행 전 git log + 이전 결과를 프롬프트에 자동 삽입
|
|
7
|
+
3. 결과 히스토리 — 최근 N개 결과 저장으로 패턴 감지
|
|
8
|
+
4. 적응형 인터벌 — 결과 기반 실행 주기 자동 조절
|
|
9
|
+
5. 파이프라인 체이닝 — 완료 시 다른 파이프라인 트리거
|
|
10
|
+
|
|
11
|
+
상태: active / stopped
|
|
12
|
+
저장: data/pipelines.json
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
import fcntl
|
|
16
|
+
import json
|
|
17
|
+
import os
|
|
18
|
+
import re
|
|
19
|
+
import subprocess
|
|
20
|
+
import time
|
|
21
|
+
from contextlib import contextmanager
|
|
22
|
+
|
|
23
|
+
from config import DATA_DIR, LOGS_DIR
|
|
24
|
+
from jobs import send_to_fifo, get_job_result
|
|
25
|
+
from utils import parse_meta_file
|
|
26
|
+
|
|
27
|
+
PIPELINES_FILE = DATA_DIR / "pipelines.json"
|
|
28
|
+
|
|
29
|
+
# 히스토리 최대 보관 수
|
|
30
|
+
_MAX_HISTORY = 10
|
|
31
|
+
|
|
32
|
+
# 적응형 인터벌 범위 (초)
|
|
33
|
+
_MIN_INTERVAL_SEC = 60 # 최소 1분
|
|
34
|
+
_MAX_INTERVAL_SEC = 14400 # 최대 4시간 (이전: 1시간 — 너무 보수적)
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
# ══════════════════════════════════════════════════════════════
|
|
38
|
+
# 유틸리티
|
|
39
|
+
# ══════════════════════════════════════════════════════════════
|
|
40
|
+
|
|
41
|
+
_LOCK_FILE = DATA_DIR / "pipelines.lock"
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
@contextmanager
def _pipeline_lock():
    """Exclusive file-level lock guarding pipelines.json.

    Several threads/processes doing load→modify→save concurrently would let
    a later write clobber an earlier one; serialising through flock prevents
    that data loss.
    """
    DATA_DIR.mkdir(parents=True, exist_ok=True)
    with open(_LOCK_FILE, "w") as handle:
        try:
            fcntl.flock(handle, fcntl.LOCK_EX)
            yield
        finally:
            fcntl.flock(handle, fcntl.LOCK_UN)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def _load_pipelines() -> list[dict]:
    """Read pipelines.json from disk; return [] when missing or unreadable."""
    try:
        if not PIPELINES_FILE.exists():
            return []
        return json.loads(PIPELINES_FILE.read_text("utf-8"))
    except (json.JSONDecodeError, OSError):
        return []
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
def _save_pipelines(pipelines: list[dict]):
    """Atomically persist *pipelines*: write to a temp file, then rename.

    Safety net: if the new list is shorter than what is currently on disk,
    a .bak copy of the old file is kept and a warning is printed to stderr,
    so an accidental mass-deletion can be recovered. (A legitimate delete
    also trips this; it is deliberately noisy rather than blocking.)
    """
    # Hoisted from mid-branch: keep the function's dependencies visible at
    # the top instead of buried inside the warning path.
    import shutil
    import sys

    DATA_DIR.mkdir(parents=True, exist_ok=True)

    # Detect a shrinking pipeline count before overwriting.
    existing_count = 0
    backup_path = PIPELINES_FILE.with_suffix(".bak")
    if PIPELINES_FILE.exists():
        try:
            existing_count = len(json.loads(PIPELINES_FILE.read_text("utf-8")))
        except (json.JSONDecodeError, OSError):
            pass

    if existing_count > 0 and len(pipelines) < existing_count:
        # Back up the current file and warn (guards against abnormal
        # shrinkage that is not an explicit delete-API call).
        shutil.copy2(PIPELINES_FILE, backup_path)
        print(
            f"[pipeline] WARNING: 파이프라인 수 감소 {existing_count} → {len(pipelines)}, "
            f"백업 저장: {backup_path}",
            file=sys.stderr,
        )

    # Atomic replace: write the temp file fully, then rename over the target.
    tmp = PIPELINES_FILE.with_suffix(".tmp")
    tmp.write_text(
        json.dumps(pipelines, ensure_ascii=False, indent=2), "utf-8"
    )
    tmp.rename(PIPELINES_FILE)
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
_id_counter = 0
|
|
106
|
+
|
|
107
|
+
def _generate_id() -> str:
|
|
108
|
+
global _id_counter
|
|
109
|
+
_id_counter += 1
|
|
110
|
+
return f"pipe-{int(time.time())}-{os.getpid() % 10000}-{_id_counter}"
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def _parse_interval(interval: str | None) -> int | None:
|
|
114
|
+
if not interval:
|
|
115
|
+
return None
|
|
116
|
+
m = re.match(r"^(\d+)\s*(s|m|h)$", interval.strip())
|
|
117
|
+
if not m:
|
|
118
|
+
return None
|
|
119
|
+
val, unit = int(m.group(1)), m.group(2)
|
|
120
|
+
return val * {"s": 1, "m": 60, "h": 3600}[unit]
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
def _uuid_to_job_id(uuid: str) -> str | None:
    """Map a job UUID back to its JOB_ID by scanning newest meta files first."""
    if not LOGS_DIR.exists():
        return None
    for meta_path in sorted(LOGS_DIR.glob("job_*.meta"), reverse=True):
        info = parse_meta_file(meta_path)
        if info and info.get("UUID") == uuid:
            return info.get("JOB_ID")
    return None
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
_UUID_RESOLVE_TIMEOUT = 300
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
def _resolve_job(job_id: str, resolved_cache: str | None = None) -> tuple[str | None, str | None, str]:
    """Look up a job's result. Returns (result_text, error, resolved_id)."""
    # Prefer the caller-supplied cached resolution; otherwise try mapping a
    # UUID to a JOB_ID, falling back to the raw id.
    resolved = resolved_cache or _uuid_to_job_id(job_id) or job_id
    result, err = get_job_result(resolved)
    if err:
        if "-web-" in job_id:
            try:
                # Web-submitted ids are assumed to start with an epoch
                # timestamp; once resolution has failed for longer than
                # _UUID_RESOLVE_TIMEOUT, treat the job as lost.
                if time.time() - int(job_id.split("-")[0]) > _UUID_RESOLVE_TIMEOUT:
                    return None, "작업 유실", resolved
            except (ValueError, IndexError):
                pass
            # Still within the grace period: report as running.
            return None, "running", resolved
        return None, err, resolved
    if result and result.get("status") == "running":
        return None, "running", resolved
    if result:
        return result.get("result", ""), None, resolved
    return None, "결과 없음", resolved
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
def _update_pipeline(pipe_id: str, updater):
    """Apply *updater* to the matching pipeline record under the file lock.

    Returns (pipeline, None) on success, or (None, error-message) when the
    id is unknown.
    """
    with _pipeline_lock():
        entries = _load_pipelines()
        for entry in entries:
            if entry["id"] != pipe_id:
                continue
            updater(entry)
            entry["updated_at"] = time.strftime("%Y-%m-%dT%H:%M:%S")
            _save_pipelines(entries)
            return entry, None
    return None, "파이프라인을 찾을 수 없습니다"
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
def _parse_timestamp(ts: str) -> float:
|
|
169
|
+
try:
|
|
170
|
+
return time.mktime(time.strptime(ts, "%Y-%m-%dT%H:%M:%S"))
|
|
171
|
+
except (ValueError, TypeError):
|
|
172
|
+
return 0
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def _next_run_str(interval_sec: int) -> str:
|
|
176
|
+
return time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime(time.time() + interval_sec))
|
|
177
|
+
|
|
178
|
+
|
|
179
|
+
# ══════════════════════════════════════════════════════════════
|
|
180
|
+
# Pre-dispatch 스킵 가드 — 변경 없으면 실행 자체를 건너뜀
|
|
181
|
+
# ══════════════════════════════════════════════════════════════
|
|
182
|
+
|
|
183
|
+
def _get_git_head_sha(project_path: str) -> str:
|
|
184
|
+
"""프로젝트의 현재 HEAD SHA를 반환한다."""
|
|
185
|
+
try:
|
|
186
|
+
result = subprocess.run(
|
|
187
|
+
["git", "rev-parse", "HEAD"],
|
|
188
|
+
cwd=project_path, capture_output=True, text=True, timeout=5
|
|
189
|
+
)
|
|
190
|
+
if result.returncode == 0:
|
|
191
|
+
return result.stdout.strip()
|
|
192
|
+
except (subprocess.SubprocessError, OSError):
|
|
193
|
+
pass
|
|
194
|
+
return ""
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def _get_git_dirty_hash(project_path: str) -> str:
|
|
198
|
+
"""uncommitted 변경사항의 해시를 반환한다 (변경 없으면 빈 문자열)."""
|
|
199
|
+
try:
|
|
200
|
+
result = subprocess.run(
|
|
201
|
+
["git", "diff", "HEAD", "--stat"],
|
|
202
|
+
cwd=project_path, capture_output=True, text=True, timeout=5
|
|
203
|
+
)
|
|
204
|
+
if result.returncode == 0 and result.stdout.strip():
|
|
205
|
+
import hashlib
|
|
206
|
+
return hashlib.md5(result.stdout.encode()).hexdigest()[:12]
|
|
207
|
+
except (subprocess.SubprocessError, OSError):
|
|
208
|
+
pass
|
|
209
|
+
return ""
|
|
210
|
+
|
|
211
|
+
|
|
212
|
+
def _should_skip_dispatch(pipe: dict) -> tuple[bool, str]:
    """Pre-dispatch skip decision. Returns (skip?, reason).

    Skip when both hold:
    1. git HEAD and dirty-hash match the previous run (no code change), and
    2. the history ends with 2 or more consecutive no_change/unknown results.
    """
    project_path = pipe["project_path"]

    # Snapshot of the current git state
    head_sha = _get_git_head_sha(project_path)
    dirty_hash = _get_git_dirty_hash(project_path)
    current_snapshot = f"{head_sha}:{dirty_hash}"

    # Compare with the snapshot recorded at the previous dispatch
    last_snapshot = pipe.get("last_git_snapshot", "")

    if not last_snapshot or current_snapshot != last_snapshot:
        return False, ""  # code changed → run

    # No code change — consult previous results
    history = pipe.get("history", [])
    if not history:
        return False, ""  # no history → first run, allow it

    # Count the trailing streak of no_change/unknown results
    consecutive_idle = 0
    for h in reversed(history):
        cls = h.get("classification", "unknown")
        if cls in ("no_change", "unknown"):
            consecutive_idle += 1
        else:
            break

    if consecutive_idle >= 2:
        return True, f"git 변경 없음 + 연속 {consecutive_idle}회 무변경"

    return False, ""
|
|
250
|
+
|
|
251
|
+
|
|
252
|
+
# ══════════════════════════════════════════════════════════════
|
|
253
|
+
# 컨텍스트 주입 — 프롬프트를 풍부하게 만든다
|
|
254
|
+
# ══════════════════════════════════════════════════════════════
|
|
255
|
+
|
|
256
|
+
def _get_git_context(project_path: str, max_commits: int = 5) -> str:
|
|
257
|
+
"""프로젝트의 최근 git 변경사항을 요약한다."""
|
|
258
|
+
try:
|
|
259
|
+
result = subprocess.run(
|
|
260
|
+
["git", "log", f"--oneline", f"-{max_commits}", "--no-decorate"],
|
|
261
|
+
cwd=project_path, capture_output=True, text=True, timeout=5
|
|
262
|
+
)
|
|
263
|
+
if result.returncode == 0 and result.stdout.strip():
|
|
264
|
+
return result.stdout.strip()
|
|
265
|
+
except (subprocess.SubprocessError, OSError):
|
|
266
|
+
pass
|
|
267
|
+
return ""
|
|
268
|
+
|
|
269
|
+
|
|
270
|
+
def _get_git_diff_stat(project_path: str) -> str:
|
|
271
|
+
"""현재 uncommitted 변경사항의 stat을 가져온다."""
|
|
272
|
+
try:
|
|
273
|
+
result = subprocess.run(
|
|
274
|
+
["git", "diff", "--stat", "HEAD"],
|
|
275
|
+
cwd=project_path, capture_output=True, text=True, timeout=5
|
|
276
|
+
)
|
|
277
|
+
if result.returncode == 0 and result.stdout.strip():
|
|
278
|
+
return result.stdout.strip()
|
|
279
|
+
except (subprocess.SubprocessError, OSError):
|
|
280
|
+
pass
|
|
281
|
+
return ""
|
|
282
|
+
|
|
283
|
+
|
|
284
|
+
def _build_enriched_prompt(pipe: dict) -> str:
    """Build the dispatch prompt: the pipeline's command prefixed with
    auto-injected context (recent git activity, previous result, run stats).

    Returns the raw command unchanged when no context is available.
    """
    command = pipe["command"]
    project_path = pipe["project_path"]
    sections = []

    # 1. Git context
    git_log = _get_git_context(project_path)
    git_diff = _get_git_diff_stat(project_path)
    if git_log:
        sections.append(f"[최근 커밋]\n{git_log}")
    if git_diff:
        sections.append(f"[현재 uncommitted 변경]\n{git_diff}")

    # 2. Previous run's result
    history = pipe.get("history", [])
    if history:
        last = history[-1]
        last_summary = (last.get("result", "") or "")[:500]
        last_cost = last.get("cost_usd")
        last_time = last.get("completed_at", "")
        if last_summary:
            # `is not None` instead of truthiness: a legitimate $0.0000 run
            # (e.g. fully cached) should still report its cost.
            cost_info = f" (비용: ${last_cost:.4f})" if last_cost is not None else ""
            sections.append(
                f"[이전 실행 결과 — {last_time}{cost_info}]\n{last_summary}"
            )

    # 3. Run statistics
    run_count = pipe.get("run_count", 0)
    current_interval = pipe.get("effective_interval_sec") or pipe.get("interval_sec")
    if run_count > 0 and current_interval:
        interval_str = f"{current_interval // 60}분" if current_interval >= 60 else f"{current_interval}초"
        sections.append(
            f"[실행 통계] {run_count}회 실행됨 | 현재 간격: {interval_str}"
        )

    if not sections:
        return command

    context_block = "\n\n".join(sections)
    return f"""=== 자동 주입 컨텍스트 (이전 실행 기반) ===
{context_block}
=== 컨텍스트 끝 ===

{command}"""
|
|
329
|
+
|
|
330
|
+
|
|
331
|
+
# ══════════════════════════════════════════════════════════════
|
|
332
|
+
# 적응형 인터벌 — 결과 기반 주기 조절
|
|
333
|
+
# ══════════════════════════════════════════════════════════════
|
|
334
|
+
|
|
335
|
+
# Patterns indicating the run produced no change (Korean + English phrasings).
_NO_CHANGE_PATTERNS = [
    r"변경.*없",
    r"이슈.*없",
    r"문제.*없",
    r"no\s+(issues?|changes?|problems?)",
    r"nothing\s+to",
    r"all\s+ok",
    r"삭제\s*대상\s*없",
    r"개선.*없",
    r"이미.*해결",
    r"already",
    r"회귀.*없",
    r"오류\s*없",
    r"고임팩트.*없",
    # Test-result patterns (all passed, 0 failures)
    r"(?:test|테스트).*(?:pass|통과|성공)",
    r"0\s*(?:fail|error|오류)",
    r"(?:all|모든).*(?:pass|통과|ok|정상)",
    r"ran\s+\d+\s+test.*\nok",
    # Maintenance-result patterns
    r"정리.*(?:없|0개|완료)",
    r"(?:디스크|disk).*(?:ok|정상|양호)",
    r"(?:상태|status).*(?:정상|양호|ok|healthy)",
    r"(?:점검|확인).*(?:완료|이상\s*없)",
    r"불필요.*없",
    r"(?:특이|이상)\s*(?:사항|점)\s*없",
    # Code-analysis result patterns
    r"(?:품질|quality).*(?:양호|good|ok)",
    r"(?:취약|vuln).*(?:없|0)",
    r"(?:개선|수정)\s*(?:사항|할\s*것)\s*없",
    r"(?:추가|변경)\s*(?:불필요|사항\s*없)",
    # Generic "nothing changed" phrasings
    r"현재\s*(?:상태|코드).*(?:적절|양호|충분)",
    r"(?:작업|할\s*것).*없",
]

# Kept as separate entries — each keyword contributes one independent point,
# so the `change_score >= 2` threshold behaves as intended.
_CHANGE_PATTERNS = [
    r"수정",
    r"변경",
    r"추가",
    r"개선",
    r"구현",
    r"삭제",
    r"교체",
    r"리팩",
    r"fix|change|add|remov|improv|implement|refactor",
    r"Edit|Write",  # traces of tool use
    r"작성.*완료",
    r"생성.*완료",
    r"파일.*(?:생성|작성|수정)",
    r"커밋|commit",
]
|
|
389
|
+
|
|
390
|
+
|
|
391
|
+
def _classify_result(result_text: str) -> str:
    """Classify a run's output as 'no_change', 'has_change', or 'unknown'."""
    if not result_text:
        return "unknown"
    text = result_text[:2000].lower()

    # "Nothing changed" patterns win outright.
    if any(re.search(p, text, re.IGNORECASE) for p in _NO_CHANGE_PATTERNS):
        return "no_change"

    # Otherwise require at least two distinct change signals.
    hits = sum(1 for p in _CHANGE_PATTERNS if re.search(p, text, re.IGNORECASE))
    return "has_change" if hits >= 2 else "unknown"
|
|
411
|
+
|
|
412
|
+
|
|
413
|
+
def _adapt_interval(pipe: dict, result_text: str) -> int | None:
    """Compute the adaptive interval from the latest result.

    v2 tuning:
    - no_change: grow aggressively, 1.5x–2.0x (previously capped at 1.3x)
    - unknown: slowed down like no_change (holding steady wasted runs)
    - has_change: snap back to the base interval
    """
    base = pipe.get("interval_sec")
    if not base:
        return None

    current = pipe.get("effective_interval_sec") or base
    verdict = _classify_result(result_text)

    # Trailing streak of idle (no_change/unknown) entries in the history
    idle_streak = 0
    for entry in reversed(pipe.get("history", [])):
        if entry.get("classification", "unknown") in ("no_change", "unknown"):
            idle_streak += 1
        else:
            break

    if verdict in ("no_change", "unknown"):
        idle_streak += 1
        # 1.5x base factor, +0.1 per consecutive idle run, capped at 2.0x
        factor = min(2.0, 1.5 + idle_streak * 0.1)
        candidate = int(current * factor)
    elif verdict == "has_change":
        candidate = base
    else:
        candidate = current

    # Clamp into the allowed range.
    return max(_MIN_INTERVAL_SEC, min(_MAX_INTERVAL_SEC, candidate))
|
|
449
|
+
|
|
450
|
+
|
|
451
|
+
# ══════════════════════════════════════════════════════════════
|
|
452
|
+
# CRUD
|
|
453
|
+
# ══════════════════════════════════════════════════════════════
|
|
454
|
+
|
|
455
|
+
def list_pipelines() -> list[dict]:
    """Return every stored pipeline record."""
    return _load_pipelines()
|
|
457
|
+
|
|
458
|
+
|
|
459
|
+
def get_pipeline(pipe_id: str) -> tuple[dict | None, str | None]:
    """Find a pipeline by id; returns (pipeline, None) or (None, error)."""
    found = next((p for p in _load_pipelines() if p["id"] == pipe_id), None)
    if found is None:
        return None, "파이프라인을 찾을 수 없습니다"
    return found, None
|
|
464
|
+
|
|
465
|
+
|
|
466
|
+
def create_pipeline(
    project_path: str, command: str, interval: str = "",
    name: str = "", on_complete: str = "",
) -> tuple[dict | None, str | None]:
    """Create and persist a new pipeline record.

    Returns (pipeline, None) on success or (None, error-message).
    """
    project_path = os.path.abspath(os.path.expanduser(project_path))
    if not command.strip():
        return None, "명령어(command)를 입력하세요"
    name = name or os.path.basename(project_path)

    interval_sec = _parse_interval(interval) if interval else None
    now = time.strftime("%Y-%m-%dT%H:%M:%S")
    # NOTE: key order is preserved deliberately — it determines the layout of
    # the serialized pipelines.json.
    record = {
        "id": _generate_id(),
        "name": name,
        "project_path": project_path,
        "command": command,
        "interval": interval or None,
        "interval_sec": interval_sec,
        "effective_interval_sec": interval_sec,  # adaptive interval
        "status": "active",
        "job_id": None,
        "next_run": None,
        "last_run": None,
        "last_result": None,
        "last_error": None,
        "run_count": 0,
        "history": [],  # result history
        "on_complete": on_complete or None,  # chaining: pipe_id triggered on completion
        "created_at": now,
        "updated_at": now,
    }
    with _pipeline_lock():
        stored = _load_pipelines()
        stored.append(record)
        _save_pipelines(stored)
    return record, None
|
|
503
|
+
|
|
504
|
+
|
|
505
|
+
def update_pipeline(
    pipe_id: str, command: str | None = None, interval: str | None = None,
    name: str | None = None, on_complete: str | None = None,
) -> tuple[dict | None, str | None]:
    """Partially update a pipeline; a None argument means "leave unchanged".

    interval="" clears the schedule entirely; any other value is re-parsed
    and both the base and effective intervals are reset to it.
    Returns (pipeline, None) on success or (None, error-message).
    """
    def updater(p):
        if command is not None:
            p["command"] = command
        if name is not None:
            p["name"] = name
        if on_complete is not None:
            # Empty string clears chaining.
            p["on_complete"] = on_complete if on_complete else None
        if interval is not None:
            if interval == "":
                p["interval"] = None
                p["interval_sec"] = None
                p["effective_interval_sec"] = None
                p["next_run"] = None
            else:
                new_sec = _parse_interval(interval)
                p["interval"] = interval
                p["interval_sec"] = new_sec
                p["effective_interval_sec"] = new_sec
                # Active with no job in flight: recompute next_run immediately
                if p.get("status") == "active" and not p.get("job_id") and new_sec:
                    p["next_run"] = _next_run_str(new_sec)
    return _update_pipeline(pipe_id, updater)
|
|
531
|
+
|
|
532
|
+
|
|
533
|
+
def delete_pipeline(pipe_id: str) -> tuple[dict | None, str | None]:
    """Remove a pipeline by id; returns (removed, None) or (None, error)."""
    with _pipeline_lock():
        remaining = _load_pipelines()
        for idx, entry in enumerate(remaining):
            if entry["id"] != pipe_id:
                continue
            del remaining[idx]
            _save_pipelines(remaining)
            return entry, None
    return None, "파이프라인을 찾을 수 없습니다"
|
|
542
|
+
|
|
543
|
+
|
|
544
|
+
# ══════════════════════════════════════════════════════════════
|
|
545
|
+
# 핵심: dispatch + tick
|
|
546
|
+
# ══════════════════════════════════════════════════════════════
|
|
547
|
+
|
|
548
|
+
# 자동 일시정지 임계값: 연속 idle 이 횟수 이상이면 자동 pause
|
|
549
|
+
_AUTO_PAUSE_THRESHOLD = 5
|
|
550
|
+
|
|
551
|
+
|
|
552
|
+
def dispatch(pipe_id: str, force: bool = False) -> tuple[dict | None, str | None]:
    """Send the pipeline's job to the FIFO, with context injection and the
    adaptive interval applied.

    Double-fire guard: job_id is checked inside the lock before dispatching.
    Pre-dispatch skip guard: skipped when git is unchanged and recent runs
    were repeatedly idle (bypassed with force=True).
    Auto-pause: 5+ consecutive idle runs switch the pipeline off.
    Returns (action-dict, None) or (None, error-message).
    """
    with _pipeline_lock():
        pipelines = _load_pipelines()
        pipe = None
        for p in pipelines:
            if p["id"] == pipe_id:
                pipe = p
                break
        if not pipe:
            return None, "파이프라인을 찾을 수 없습니다"
        if pipe["status"] != "active":
            return None, "파이프라인이 꺼져 있습니다"
        # Double-fire guard: a job is already in flight
        if pipe.get("job_id"):
            return {"action": "already_running", "job_id": pipe["job_id"]}, None

        # ── Pre-dispatch skip guard (bypassed when force=True) ──
        if not force:
            skip, reason = _should_skip_dispatch(pipe)
            if skip:
                # Only reschedule next_run; do not execute
                effective = pipe.get("effective_interval_sec") or pipe.get("interval_sec")
                if effective:
                    pipe["next_run"] = _next_run_str(effective)
                pipe["skip_count"] = pipe.get("skip_count", 0) + 1
                pipe["updated_at"] = time.strftime("%Y-%m-%dT%H:%M:%S")
                _save_pipelines(pipelines)
                return {"action": "skipped", "reason": reason, "name": pipe["name"]}, None

        # ── Auto-pause: 5+ consecutive idle (no_change/unknown) runs ──
        history = pipe.get("history", [])
        consecutive_idle = 0
        for h in reversed(history):
            cls = h.get("classification", "unknown")
            if cls in ("no_change", "unknown"):
                consecutive_idle += 1
            else:
                break
        if consecutive_idle >= _AUTO_PAUSE_THRESHOLD:
            pipe["status"] = "stopped"
            pipe["last_error"] = f"자동 일시정지: 연속 {consecutive_idle}회 무변경"
            pipe["next_run"] = None
            pipe["updated_at"] = time.strftime("%Y-%m-%dT%H:%M:%S")
            _save_pipelines(pipelines)
            return {
                "action": "auto_paused",
                "reason": f"연속 {consecutive_idle}회 무변경 → 자동 일시정지",
                "name": pipe["name"],
            }, None

        # Set the dispatching marker while still holding the lock, so no
        # other process can dispatch the same pipeline concurrently.
        pipe["job_id"] = "__dispatching__"
        pipe["updated_at"] = time.strftime("%Y-%m-%dT%H:%M:%S")
        _save_pipelines(pipelines)

    # Actual send happens outside the lock (may take a while). `pipe` is the
    # snapshot loaded above and is used read-only from here on.
    enriched_prompt = _build_enriched_prompt(pipe)
    result, send_err = send_to_fifo(enriched_prompt, cwd=pipe["project_path"])

    if send_err:
        # Send failed: clear the dispatching marker
        def clear_marker(p):
            p["job_id"] = None
            p["last_error"] = f"FIFO 전송 실패: {send_err}"
        _update_pipeline(pipe_id, clear_marker)
        return None, f"FIFO 전송 실패: {send_err}"

    job_id = result["job_id"]
    effective = pipe.get("effective_interval_sec") or pipe.get("interval_sec")
    nr = _next_run_str(effective) if effective else None

    # Record the git snapshot for comparison at the next dispatch
    head_sha = _get_git_head_sha(pipe["project_path"])
    dirty_hash = _get_git_dirty_hash(pipe["project_path"])
    git_snapshot = f"{head_sha}:{dirty_hash}"

    def do_dispatch(p, _snapshot=git_snapshot):
        p["job_id"] = job_id
        p["last_run"] = time.strftime("%Y-%m-%dT%H:%M:%S")
        p["last_error"] = None
        p["next_run"] = nr
        p["last_git_snapshot"] = _snapshot
    _update_pipeline(pipe_id, do_dispatch)

    return {
        "action": "dispatched",
        "job_id": job_id,
        "name": pipe["name"],
        "next_run": nr,
        "context_injected": bool(enriched_prompt != pipe["command"]),
    }, None
|
|
649
|
+
|
|
650
|
+
|
|
651
|
+
def tick(pipe_id: str) -> tuple[dict | None, str | None]:
    """Poll an active pipeline: dispatch when due, otherwise check the
    running job for completion (history, adaptive interval, chaining).
    Returns (action-dict, None) or (None, error-message).
    """
    pipe, err = get_pipeline(pipe_id)
    if err:
        return None, err
    if pipe["status"] != "active":
        return {"action": "off"}, None

    job_id = pipe.get("job_id")
    if not job_id:
        # No job in flight → check next_run, then dispatch
        # NOTE(review): `effective` is computed but unused in this branch.
        effective = pipe.get("effective_interval_sec") or pipe.get("interval_sec")
        if pipe.get("next_run") and _parse_timestamp(pipe["next_run"]) > time.time():
            remaining = int(_parse_timestamp(pipe["next_run"]) - time.time())
            return {"action": "waiting", "remaining_sec": remaining}, None
        return dispatch(pipe_id)

    # Dispatching marker: another process is mid-dispatch
    if job_id == "__dispatching__":
        return {"action": "dispatching"}, None

    # Job in flight → check for completion
    resolved_cache = pipe.get("resolved_job_id")
    result_text, result_err, resolved = _resolve_job(job_id, resolved_cache)

    # Cache the UUID resolution so future ticks skip the meta-file scan
    if resolved != job_id and not resolved_cache:
        def cache(p, _r=resolved):
            p["resolved_job_id"] = _r
        _update_pipeline(pipe_id, cache)

    if result_err == "running":
        return {"action": "running", "job_id": resolved}, None

    if result_err:
        def set_err(p, _e=result_err):
            p["last_error"] = _e
            p["job_id"] = None
            p.pop("resolved_job_id", None)
        _update_pipeline(pipe_id, set_err)
        return {"action": "error", "error": result_err}, None

    # ── Completed: record history + adapt interval + chain ──
    summary = (result_text or "")[:500]
    classification = _classify_result(result_text or "")

    # Extract cost/duration info from the full job result
    cost_usd = None
    duration_ms = None
    if resolved:
        full_result, _ = get_job_result(resolved)
        if full_result:
            cost_usd = full_result.get("cost_usd")
            duration_ms = full_result.get("duration_ms")

    # Compute the adaptive interval
    new_interval = _adapt_interval(pipe, result_text or "")

    # Chaining target, if any
    chain_target = pipe.get("on_complete")

    def complete(p, _s=summary, _c=classification, _ni=new_interval,
                 _cost=cost_usd, _dur=duration_ms):
        p["last_result"] = _s
        p["run_count"] = p.get("run_count", 0) + 1
        p["job_id"] = None
        p.pop("resolved_job_id", None)

        # Append to history
        history = p.get("history", [])
        history.append({
            "result": _s,
            "classification": _c,
            "cost_usd": _cost,
            "duration_ms": _dur,
            "completed_at": time.strftime("%Y-%m-%dT%H:%M:%S"),
        })
        # Trim oldest entries beyond the retention cap
        if len(history) > _MAX_HISTORY:
            history = history[-_MAX_HISTORY:]
        p["history"] = history

        # Apply the adaptive interval
        if _ni is not None:
            p["effective_interval_sec"] = _ni
            p["next_run"] = _next_run_str(_ni)
        elif p.get("interval_sec"):
            p["next_run"] = _next_run_str(p["interval_sec"])

    _update_pipeline(pipe_id, complete)

    # Chaining: trigger the pipeline named in on_complete
    chain_result = None
    if chain_target:
        chain_result, chain_err = dispatch(chain_target)
        if chain_err:
            chain_result = {"chain_error": chain_err}

    response = {
        "action": "completed",
        "run_count": pipe.get("run_count", 0) + 1,
        "classification": classification,
    }
    if new_interval and pipe.get("interval_sec") and new_interval != pipe.get("interval_sec"):
        base = pipe["interval_sec"]
        response["interval_adapted"] = {
            "base": base,
            "new": new_interval,
            "change": f"{'+' if new_interval > base else ''}{int((new_interval - base) / base * 100)}%",
        }
    if chain_result:
        response["chain"] = chain_result

    return response, None
|
|
765
|
+
|
|
766
|
+
|
|
767
|
+
# ══════════════════════════════════════════════════════════════
|
|
768
|
+
# 액션 함수
|
|
769
|
+
# ══════════════════════════════════════════════════════════════
|
|
770
|
+
|
|
771
|
+
def run_next(pipe_id: str) -> tuple[dict | None, str | None]:
    """Switch the pipeline ON and dispatch immediately (bypasses the skip guard)."""
    def activate(pipeline):
        # Clear any stale scheduling/error state before the forced run.
        pipeline.update(status="active", job_id=None, next_run=None, last_error=None)
    _update_pipeline(pipe_id, activate)
    return dispatch(pipe_id, force=True)
|
|
780
|
+
|
|
781
|
+
|
|
782
|
+
def force_run(pipe_id: str) -> tuple[dict | None, str | None]:
    """Alias kept for API compatibility — identical to run_next()."""
    return run_next(pipe_id)
|
|
784
|
+
|
|
785
|
+
|
|
786
|
+
def stop_pipeline(pipe_id: str) -> tuple[dict | None, str | None]:
    """Turn the pipeline OFF."""
    def deactivate(pipeline):
        # Stopping also drops the pending job, schedule and last error.
        pipeline.update(status="stopped", job_id=None, next_run=None, last_error=None)
    return _update_pipeline(pipe_id, deactivate)
|
|
794
|
+
|
|
795
|
+
|
|
796
|
+
def reset_phase(pipe_id: str, phase: str | None = None) -> tuple[dict | None, str | None]:
    """Reset pipeline state — restores the adaptive interval to its base value.

    Args:
        pipe_id: Pipeline identifier.
        phase: Accepted for API compatibility; currently unused by this
            implementation.

    Returns:
        The (result, error) tuple from the immediate re-run.
    """
    def reset(p):
        # Drop any adaptive adjustment back to the configured base interval.
        p["effective_interval_sec"] = p.get("interval_sec")
    _update_pipeline(pipe_id, reset)
    return run_next(pipe_id)
|
|
802
|
+
|
|
803
|
+
|
|
804
|
+
def get_pipeline_status(pipe_id: str) -> tuple[dict | None, str | None]:
    """Assemble a full status snapshot for one pipeline."""
    pipe, err = get_pipeline(pipe_id)
    if err:
        return None, err

    # Resolve the backing job (if any) and pull its latest result.
    job_status = None
    raw_job_id = pipe.get("job_id")
    if raw_job_id:
        resolved = pipe.get("resolved_job_id") or _uuid_to_job_id(raw_job_id) or raw_job_id
        result, _ = get_job_result(resolved)
        if result:
            job_status = {
                "job_id": resolved,
                "status": result.get("status"),
                "cost_usd": result.get("cost_usd"),
                "duration_ms": result.get("duration_ms"),
            }

    # Seconds until the next scheduled run (clamped to zero).
    next_run = pipe.get("next_run")
    remaining_sec = max(0, int(_parse_timestamp(next_run) - time.time())) if next_run else None

    # History aggregates.
    history = pipe.get("history", [])
    total_cost = sum(entry.get("cost_usd", 0) or 0 for entry in history)
    classifications = [entry.get("classification", "unknown") for entry in history]

    snapshot = {
        "id": pipe["id"],
        "name": pipe["name"],
        "project_path": pipe["project_path"],
        "command": pipe["command"],
        "interval": pipe.get("interval"),
        "effective_interval_sec": pipe.get("effective_interval_sec"),
        "status": pipe["status"],
        "job_id": pipe.get("job_id"),
        "job_status": job_status,
        "next_run": pipe.get("next_run"),
        "remaining_sec": remaining_sec,
        "last_run": pipe.get("last_run"),
        "last_result": pipe.get("last_result"),
        "last_error": pipe.get("last_error"),
        "run_count": pipe.get("run_count", 0),
        "on_complete": pipe.get("on_complete"),
        "history_count": len(history),
        "total_cost_usd": round(total_cost, 4) if total_cost else None,
        "classifications": classifications[-5:],  # last 5 classifications only
        "created_at": pipe["created_at"],
        "updated_at": pipe["updated_at"],
    }
    return snapshot, None
|
|
853
|
+
|
|
854
|
+
|
|
855
|
+
def get_pipeline_history(pipe_id: str) -> tuple[dict | None, str | None]:
    """Return the pipeline's execution history, newest entry first."""
    pipe, err = get_pipeline(pipe_id)
    if err:
        return None, err
    entries = pipe.get("history", [])
    cost_total = sum(e.get("cost_usd", 0) or 0 for e in entries)
    return {
        "id": pipe["id"],
        "name": pipe["name"],
        "run_count": pipe.get("run_count", 0),
        "total_cost_usd": round(cost_total, 4),
        "entries": list(reversed(entries)),  # newest first
    }, None
|
|
868
|
+
|
|
869
|
+
|
|
870
|
+
# ══════════════════════════════════════════════════════════════
|
|
871
|
+
# Self-Evolution: 메타 분석
|
|
872
|
+
# ══════════════════════════════════════════════════════════════
|
|
873
|
+
|
|
874
|
+
def get_evolution_summary() -> dict:
    """Summarize the self-evolution state of the whole pipeline system."""
    pipelines = _load_pipelines()
    active = [p for p in pipelines if p["status"] == "active"]
    total_runs = sum(p.get("run_count", 0) for p in pipelines)

    # Aggregate cost and result classifications across every history entry.
    total_cost = 0
    all_classifications = {"has_change": 0, "no_change": 0, "unknown": 0}
    for pipeline in pipelines:
        for entry in pipeline.get("history", []):
            spent = entry.get("cost_usd")
            if spent:
                total_cost += spent
            label = entry.get("classification", "unknown")
            all_classifications[label] = all_classifications.get(label, 0) + 1

    # Efficiency score: share of runs that produced a change.
    total_classified = sum(all_classifications.values())
    if total_classified > 0:
        efficiency = round(all_classifications["has_change"] / total_classified * 100, 1)
    else:
        efficiency = 0

    # Pipelines whose adaptive interval currently differs from the base.
    interval_adaptations = []
    for pipeline in pipelines:
        base = pipeline.get("interval_sec")
        effective = pipeline.get("effective_interval_sec")
        if base and effective and base != effective:
            interval_adaptations.append({
                "name": pipeline["name"],
                "base_sec": base,
                "effective_sec": effective,
                "change_pct": int((effective - base) / base * 100),
            })

    # Pipelines that were automatically paused (marker string set by the
    # scheduler in last_error — must match byte-for-byte).
    auto_paused = [
        {"name": pipeline["name"], "reason": pipeline.get("last_error", "")}
        for pipeline in pipelines
        if pipeline["status"] == "stopped"
        and (pipeline.get("last_error") or "").startswith("자동 일시정지")
    ]

    # Skipped executions (savings relative to total_runs).
    total_skips = sum(p.get("skip_count", 0) for p in pipelines)

    return {
        "active_count": len(active),
        "total_pipelines": len(pipelines),
        "total_runs": total_runs,
        "total_skips": total_skips,
        "total_cost_usd": round(total_cost, 4),
        "classifications": all_classifications,
        "efficiency_pct": efficiency,
        "interval_adaptations": interval_adaptations,
        "auto_paused": auto_paused,
    }
|
|
931
|
+
|
|
932
|
+
|
|
933
|
+
# ══════════════════════════════════════════════════════════════
|
|
934
|
+
# Tick All
|
|
935
|
+
# ══════════════════════════════════════════════════════════════
|
|
936
|
+
|
|
937
|
+
# Lock file used to serialize tick_all() across processes (held via flock).
_TICK_ALL_LOCK = DATA_DIR / ".tick_all.lock"
_TICK_ALL_DEBOUNCE_SEC = 3  # ignore duplicate tick_all calls within 3 seconds
|
|
939
|
+
|
|
940
|
+
|
|
941
|
+
def tick_all() -> list[dict]:
    """Tick every active pipeline.

    Debounce: even if the autoloop.sh cron and the frontend poll call this
    concurrently, duplicate invocations within 3 seconds are ignored.

    Returns:
        A list of per-pipeline tick results, or a single skip entry when
        another tick_all holds the lock / the call was debounced.
    """
    DATA_DIR.mkdir(parents=True, exist_ok=True)
    fd = None
    try:
        fd = open(_TICK_ALL_LOCK, "a+")
        fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except (BlockingIOError, OSError):
        # Another process is already running tick_all.
        if fd is not None:
            fd.close()  # bug fix: don't leak the descriptor when only flock() failed
        return [{"skip": True, "reason": "another tick_all in progress"}]

    try:
        # Debounce: check the timestamp recorded by the last tick_all.
        fd.seek(0)
        last_tick_str = fd.read().strip()
        if last_tick_str:
            try:
                last_tick = float(last_tick_str)
                if time.time() - last_tick < _TICK_ALL_DEBOUNCE_SEC:
                    return [{"skip": True, "reason": "debounced"}]
            except ValueError:
                pass  # corrupt timestamp — fall through and overwrite it

        # Record the current time for the next debounce check.
        fd.seek(0)
        fd.truncate()
        fd.write(str(time.time()))
        fd.flush()

        results = []
        for p in _load_pipelines():
            if p["status"] == "active":
                result, err = tick(p["id"])
                results.append({"pipeline_id": p["id"], "name": p["name"], "result": result, "error": err})
        return results
    finally:
        fcntl.flock(fd, fcntl.LOCK_UN)
        fd.close()
|