claude-controller 0.2.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. package/README.md +2 -2
  2. package/bin/autoloop.sh +382 -0
  3. package/bin/ctl +327 -5
  4. package/bin/native-app.py +5 -2
  5. package/bin/watchdog.sh +357 -0
  6. package/cognitive/__init__.py +14 -0
  7. package/cognitive/__pycache__/__init__.cpython-314.pyc +0 -0
  8. package/cognitive/__pycache__/dispatcher.cpython-314.pyc +0 -0
  9. package/cognitive/__pycache__/evaluator.cpython-314.pyc +0 -0
  10. package/cognitive/__pycache__/goal_engine.cpython-314.pyc +0 -0
  11. package/cognitive/__pycache__/learning.cpython-314.pyc +0 -0
  12. package/cognitive/__pycache__/orchestrator.cpython-314.pyc +0 -0
  13. package/cognitive/__pycache__/planner.cpython-314.pyc +0 -0
  14. package/cognitive/dispatcher.py +192 -0
  15. package/cognitive/evaluator.py +289 -0
  16. package/cognitive/goal_engine.py +232 -0
  17. package/cognitive/learning.py +189 -0
  18. package/cognitive/orchestrator.py +303 -0
  19. package/cognitive/planner.py +207 -0
  20. package/cognitive/prompts/analyst.md +31 -0
  21. package/cognitive/prompts/coder.md +22 -0
  22. package/cognitive/prompts/reviewer.md +33 -0
  23. package/cognitive/prompts/tester.md +21 -0
  24. package/cognitive/prompts/writer.md +25 -0
  25. package/config.sh +6 -1
  26. package/dag/__init__.py +5 -0
  27. package/dag/__pycache__/__init__.cpython-314.pyc +0 -0
  28. package/dag/__pycache__/graph.cpython-314.pyc +0 -0
  29. package/dag/graph.py +222 -0
  30. package/lib/jobs.sh +12 -1
  31. package/package.json +5 -1
  32. package/postinstall.sh +1 -1
  33. package/service/controller.sh +43 -11
  34. package/web/audit.py +122 -0
  35. package/web/checkpoint.py +80 -0
  36. package/web/config.py +2 -5
  37. package/web/handler.py +464 -26
  38. package/web/handler_fs.py +15 -14
  39. package/web/handler_goals.py +203 -0
  40. package/web/handler_jobs.py +165 -42
  41. package/web/handler_memory.py +203 -0
  42. package/web/jobs.py +576 -12
  43. package/web/personas.py +419 -0
  44. package/web/pipeline.py +682 -50
  45. package/web/presets.py +506 -0
  46. package/web/projects.py +58 -4
  47. package/web/static/api.js +90 -3
  48. package/web/static/app.js +8 -0
  49. package/web/static/base.css +51 -12
  50. package/web/static/context.js +14 -4
  51. package/web/static/form.css +3 -2
  52. package/web/static/goals.css +363 -0
  53. package/web/static/goals.js +300 -0
  54. package/web/static/i18n.js +288 -0
  55. package/web/static/index.html +142 -6
  56. package/web/static/jobs.css +951 -4
  57. package/web/static/jobs.js +890 -54
  58. package/web/static/memoryview.js +117 -0
  59. package/web/static/personas.js +228 -0
  60. package/web/static/pipeline.css +308 -1
  61. package/web/static/pipelines.js +249 -14
  62. package/web/static/presets.js +244 -0
  63. package/web/static/send.js +26 -4
  64. package/web/static/settings-style.css +34 -3
  65. package/web/static/settings.js +37 -1
  66. package/web/static/stream.js +242 -19
  67. package/web/static/utils.js +54 -2
  68. package/web/webhook.py +210 -0
package/web/pipeline.py CHANGED
@@ -1,19 +1,24 @@
1
1
  """
2
- Pipeline Engine — on/off 자동화
2
+ Pipeline Engine — 자기 진화형 자동화
3
3
 
4
- 상태: active / stopped
5
- active + job_id → 작업 실행 중
6
- active + !job_id 대기 (next_run 카운트다운)
7
- stopped 꺼짐
4
+ 핵심 기능:
5
+ 1. on/off 자동화 (기존)
6
+ 2. 컨텍스트 주입 — 실행 전 git log + 이전 결과를 프롬프트에 자동 삽입
7
+ 3. 결과 히스토리 — 최근 N개 결과 저장으로 패턴 감지
8
+ 4. 적응형 인터벌 — 결과 기반 실행 주기 자동 조절
9
+ 5. 파이프라인 체이닝 — 완료 시 다른 파이프라인 트리거
8
10
 
11
+ 상태: active / stopped
9
12
  저장: data/pipelines.json
10
13
  """
11
14
 
15
+ import fcntl
12
16
  import json
13
17
  import os
14
18
  import re
19
+ import subprocess
15
20
  import time
16
- from pathlib import Path
21
+ from contextlib import contextmanager
17
22
 
18
23
  from config import DATA_DIR, LOGS_DIR
19
24
  from jobs import send_to_fifo, get_job_result
@@ -21,11 +26,38 @@ from utils import parse_meta_file
21
26
 
22
27
  PIPELINES_FILE = DATA_DIR / "pipelines.json"
23
28
 
29
# Maximum number of result-history entries kept per pipeline.
_MAX_HISTORY = 10

# Adaptive-interval clamp range (seconds).
_MIN_INTERVAL_SEC = 60  # floor: 1 minute
_MAX_INTERVAL_SEC = 14400  # ceiling: 4 hours (was 1 hour — too conservative)


# ══════════════════════════════════════════════════════════════
# Utilities
# ══════════════════════════════════════════════════════════════

_LOCK_FILE = DATA_DIR / "pipelines.lock"
42
+
43
+
44
@contextmanager
def _pipeline_lock():
    """Exclusive file-level lock guarding pipelines.json.

    Concurrent load→modify→save cycles would let a later writer clobber
    an earlier writer's changes; serializing them through flock prevents
    that data loss.
    """
    DATA_DIR.mkdir(parents=True, exist_ok=True)
    handle = open(_LOCK_FILE, "w")
    try:
        fcntl.flock(handle, fcntl.LOCK_EX)
        yield
    finally:
        fcntl.flock(handle, fcntl.LOCK_UN)
        handle.close()
59
+
60
+
29
61
  def _load_pipelines() -> list[dict]:
30
62
  try:
31
63
  if PIPELINES_FILE.exists():
@@ -36,14 +68,46 @@ def _load_pipelines() -> list[dict]:
36
68
 
37
69
 
38
70
def _save_pipelines(pipelines: list[dict]):
    """Atomically persist *pipelines*: write a temp file, then swap it in.

    Safety net: if the new list is shorter than what is currently on disk,
    a backup copy is made and a warning is printed to stderr — this guards
    against accidental wholesale data loss that is not an explicit delete.
    """
    DATA_DIR.mkdir(parents=True, exist_ok=True)

    # Safety net: detect a shrinking pipeline count.
    existing_count = 0
    backup_path = PIPELINES_FILE.with_suffix(".bak")
    if PIPELINES_FILE.exists():
        try:
            existing = json.loads(PIPELINES_FILE.read_text("utf-8"))
            existing_count = len(existing)
        except (json.JSONDecodeError, OSError):
            pass

    if existing_count > 0 and len(pipelines) < existing_count:
        # Back up, then warn (prevents abnormal shrinkage that is not an
        # explicit delete-API call from silently destroying entries).
        import shutil
        import sys
        shutil.copy2(PIPELINES_FILE, backup_path)
        print(
            f"[pipeline] WARNING: 파이프라인 수 감소 {existing_count} → {len(pipelines)}, "
            f"백업 저장: {backup_path}",
            file=sys.stderr,
        )

    tmp = PIPELINES_FILE.with_suffix(".tmp")
    tmp.write_text(
        json.dumps(pipelines, ensure_ascii=False, indent=2), "utf-8"
    )
    # os.replace is an atomic overwrite on every platform; Path.rename
    # fails on Windows when the target already exists.
    os.replace(tmp, PIPELINES_FILE)
103
+
43
104
 
105
+ _id_counter = 0
44
106
 
45
107
  def _generate_id() -> str:
46
- return f"pipe-{int(time.time())}-{os.getpid() % 10000}"
108
+ global _id_counter
109
+ _id_counter += 1
110
+ return f"pipe-{int(time.time())}-{os.getpid() % 10000}-{_id_counter}"
47
111
 
48
112
 
49
113
  def _parse_interval(interval: str | None) -> int | None:
@@ -90,13 +154,14 @@ def _resolve_job(job_id: str, resolved_cache: str = None) -> tuple[str | None, s
90
154
 
91
155
 
92
156
def _update_pipeline(pipe_id: str, updater):
    """Apply *updater* to the pipeline with id *pipe_id* under the file lock.

    Returns (pipeline, None) on success, or (None, error-message) when the
    id is unknown.
    """
    with _pipeline_lock():
        pipelines = _load_pipelines()
        for entry in pipelines:
            if entry["id"] != pipe_id:
                continue
            updater(entry)
            entry["updated_at"] = time.strftime("%Y-%m-%dT%H:%M:%S")
            _save_pipelines(pipelines)
            return entry, None
    return None, "파이프라인을 찾을 수 없습니다"
101
166
 
102
167
 
@@ -111,6 +176,278 @@ def _next_run_str(interval_sec: int) -> str:
111
176
  return time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime(time.time() + interval_sec))
112
177
 
113
178
 
179
+ # ══════════════════════════════════════════════════════════════
180
+ # Pre-dispatch 스킵 가드 — 변경 없으면 실행 자체를 건너뜀
181
+ # ══════════════════════════════════════════════════════════════
182
+
183
+ def _get_git_head_sha(project_path: str) -> str:
184
+ """프로젝트의 현재 HEAD SHA를 반환한다."""
185
+ try:
186
+ result = subprocess.run(
187
+ ["git", "rev-parse", "HEAD"],
188
+ cwd=project_path, capture_output=True, text=True, timeout=5
189
+ )
190
+ if result.returncode == 0:
191
+ return result.stdout.strip()
192
+ except (subprocess.SubprocessError, OSError):
193
+ pass
194
+ return ""
195
+
196
+
197
+ def _get_git_dirty_hash(project_path: str) -> str:
198
+ """uncommitted 변경사항의 해시를 반환한다 (변경 없으면 빈 문자열)."""
199
+ try:
200
+ result = subprocess.run(
201
+ ["git", "diff", "HEAD", "--stat"],
202
+ cwd=project_path, capture_output=True, text=True, timeout=5
203
+ )
204
+ if result.returncode == 0 and result.stdout.strip():
205
+ import hashlib
206
+ return hashlib.md5(result.stdout.encode()).hexdigest()[:12]
207
+ except (subprocess.SubprocessError, OSError):
208
+ pass
209
+ return ""
210
+
211
+
212
def _should_skip_dispatch(pipe: dict) -> tuple[bool, str]:
    """Decide whether to skip this dispatch. Returns (skip, reason).

    Skips only when BOTH hold:
    1. the git HEAD + dirty-diff snapshot equals the one taken at the
       previous run (i.e. no code change since then), and
    2. the most recent results form a streak of at least 2 consecutive
       no_change/unknown classifications.
    """
    project_path = pipe["project_path"]

    # Snapshot of the current git state.
    snapshot = f"{_get_git_head_sha(project_path)}:{_get_git_dirty_hash(project_path)}"
    previous = pipe.get("last_git_snapshot", "")

    if not previous or snapshot != previous:
        return False, ""  # code changed → run

    # No code change — inspect recent results.
    history = pipe.get("history", [])
    if not history:
        return False, ""  # no history yet → first run, allow it

    # Count the trailing streak of idle (no_change/unknown) results.
    idle_streak = 0
    for entry in reversed(history):
        if entry.get("classification", "unknown") in ("no_change", "unknown"):
            idle_streak += 1
        else:
            break

    if idle_streak >= 2:
        return True, f"git 변경 없음 + 연속 {idle_streak}회 무변경"

    return False, ""
250
+
251
+
252
+ # ══════════════════════════════════════════════════════════════
253
+ # 컨텍스트 주입 — 프롬프트를 풍부하게 만든다
254
+ # ══════════════════════════════════════════════════════════════
255
+
256
+ def _get_git_context(project_path: str, max_commits: int = 5) -> str:
257
+ """프로젝트의 최근 git 변경사항을 요약한다."""
258
+ try:
259
+ result = subprocess.run(
260
+ ["git", "log", f"--oneline", f"-{max_commits}", "--no-decorate"],
261
+ cwd=project_path, capture_output=True, text=True, timeout=5
262
+ )
263
+ if result.returncode == 0 and result.stdout.strip():
264
+ return result.stdout.strip()
265
+ except (subprocess.SubprocessError, OSError):
266
+ pass
267
+ return ""
268
+
269
+
270
+ def _get_git_diff_stat(project_path: str) -> str:
271
+ """현재 uncommitted 변경사항의 stat을 가져온다."""
272
+ try:
273
+ result = subprocess.run(
274
+ ["git", "diff", "--stat", "HEAD"],
275
+ cwd=project_path, capture_output=True, text=True, timeout=5
276
+ )
277
+ if result.returncode == 0 and result.stdout.strip():
278
+ return result.stdout.strip()
279
+ except (subprocess.SubprocessError, OSError):
280
+ pass
281
+ return ""
282
+
283
+
284
def _build_enriched_prompt(pipe: dict) -> str:
    """Prepend auto-gathered context to the pipeline's original command.

    Sections injected (each only when available): recent git log, current
    uncommitted diff stat, the previous run's result, and run statistics.
    Returns the command unchanged when there is no context to add.
    """
    command = pipe["command"]
    project_path = pipe["project_path"]

    blocks = []

    # 1. Git context.
    recent_log = _get_git_context(project_path)
    if recent_log:
        blocks.append(f"[최근 커밋]\n{recent_log}")
    pending_diff = _get_git_diff_stat(project_path)
    if pending_diff:
        blocks.append(f"[현재 uncommitted 변경]\n{pending_diff}")

    # 2. Previous run result.
    history = pipe.get("history", [])
    if history:
        last = history[-1]
        last_summary = (last.get("result", "") or "")[:500]
        if last_summary:
            last_cost = last.get("cost_usd")
            # NOTE: a zero cost is treated the same as "no cost recorded".
            cost_info = f" (비용: ${last_cost:.4f})" if last_cost else ""
            last_time = last.get("completed_at", "")
            blocks.append(
                f"[이전 실행 결과 — {last_time}{cost_info}]\n{last_summary}"
            )

    # 3. Run statistics.
    run_count = pipe.get("run_count", 0)
    current_interval = pipe.get("effective_interval_sec") or pipe.get("interval_sec")
    if run_count > 0 and current_interval:
        if current_interval >= 60:
            interval_str = f"{current_interval // 60}분"
        else:
            interval_str = f"{current_interval}초"
        blocks.append(
            f"[실행 통계] {run_count}회 실행됨 | 현재 간격: {interval_str}"
        )

    if not blocks:
        return command

    context_block = "\n\n".join(blocks)
    return f"""=== 자동 주입 컨텍스트 (이전 실행 기반) ===
{context_block}
=== 컨텍스트 끝 ===

{command}"""
329
+
330
+
331
+ # ══════════════════════════════════════════════════════════════
332
+ # 적응형 인터벌 — 결과 기반 주기 조절
333
+ # ══════════════════════════════════════════════════════════════
334
+
335
+ _NO_CHANGE_PATTERNS = [
336
+ r"변경.*없",
337
+ r"이슈.*없",
338
+ r"문제.*없",
339
+ r"no\s+(issues?|changes?|problems?)",
340
+ r"nothing\s+to",
341
+ r"all\s+ok",
342
+ r"삭제\s*대상\s*없",
343
+ r"개선.*없",
344
+ r"이미.*해결",
345
+ r"already",
346
+ r"회귀.*없",
347
+ r"오류\s*없",
348
+ r"고임팩트.*없",
349
+ # 테스트 결과 패턴 (all passed, 0 failures)
350
+ r"(?:test|테스트).*(?:pass|통과|성공)",
351
+ r"0\s*(?:fail|error|오류)",
352
+ r"(?:all|모든).*(?:pass|통과|ok|정상)",
353
+ r"ran\s+\d+\s+test.*\nok",
354
+ # 유지보수 결과 패턴
355
+ r"정리.*(?:없|0개|완료)",
356
+ r"(?:디스크|disk).*(?:ok|정상|양호)",
357
+ r"(?:상태|status).*(?:정상|양호|ok|healthy)",
358
+ r"(?:점검|확인).*(?:완료|이상\s*없)",
359
+ r"불필요.*없",
360
+ r"(?:특이|이상)\s*(?:사항|점)\s*없",
361
+ # 코드 분석 결과 패턴
362
+ r"(?:품질|quality).*(?:양호|good|ok)",
363
+ r"(?:취약|vuln).*(?:없|0)",
364
+ r"(?:개선|수정)\s*(?:사항|할\s*것)\s*없",
365
+ r"(?:추가|변경)\s*(?:불필요|사항\s*없)",
366
+ # 일반적 무변경 표현
367
+ r"현재\s*(?:상태|코드).*(?:적절|양호|충분)",
368
+ r"(?:작업|할\s*것).*없",
369
+ ]
370
+
371
+ # 개별 분리 — 각 키워드가 독립적으로 1점씩 기여하여
372
+ # change_score >= 2 조건이 정확하게 동작한다.
373
+ _CHANGE_PATTERNS = [
374
+ r"수정",
375
+ r"변경",
376
+ r"추가",
377
+ r"개선",
378
+ r"구현",
379
+ r"삭제",
380
+ r"교체",
381
+ r"리팩",
382
+ r"fix|change|add|remov|improv|implement|refactor",
383
+ r"Edit|Write", # 도구 사용 흔적
384
+ r"작성.*완료",
385
+ r"생성.*완료",
386
+ r"파일.*(?:생성|작성|수정)",
387
+ r"커밋|commit",
388
+ ]
389
+
390
+
391
+ def _classify_result(result_text: str) -> str:
392
+ """결과를 분류한다: 'no_change', 'has_change', 'unknown'"""
393
+ if not result_text:
394
+ return "unknown"
395
+ text = result_text[:2000].lower()
396
+
397
+ # 변경 없음 패턴 우선 체크
398
+ for pat in _NO_CHANGE_PATTERNS:
399
+ if re.search(pat, text, re.IGNORECASE):
400
+ return "no_change"
401
+
402
+ # 변경 있음 패턴
403
+ change_score = 0
404
+ for pat in _CHANGE_PATTERNS:
405
+ if re.search(pat, text, re.IGNORECASE):
406
+ change_score += 1
407
+ if change_score >= 2:
408
+ return "has_change"
409
+
410
+ return "unknown"
411
+
412
+
413
def _adapt_interval(pipe: dict, result_text: str) -> int | None:
    """Compute the adaptive interval for the next run, or None if unscheduled.

    Policy (v2):
    - no_change / unknown: back off aggressively — 1.5x of the current
      interval plus 0.1x per consecutive idle run, capped at 2.0x
      (unknown is slowed like no_change to avoid wasted runs)
    - has_change: snap back to the configured base interval
    The result is clamped to [_MIN_INTERVAL_SEC, _MAX_INTERVAL_SEC].
    """
    base_interval = pipe.get("interval_sec")
    if not base_interval:
        return None

    current = pipe.get("effective_interval_sec") or base_interval
    verdict = _classify_result(result_text)

    # Trailing streak of idle (no_change/unknown) results in the history.
    idle_streak = 0
    for entry in reversed(pipe.get("history", [])):
        if entry.get("classification", "unknown") in ("no_change", "unknown"):
            idle_streak += 1
        else:
            break

    if verdict in ("no_change", "unknown"):
        idle_streak += 1
        factor = min(2.0, 1.5 + idle_streak * 0.1)
        candidate = int(current * factor)
    else:
        # 'has_change' is the only remaining classification.
        candidate = base_interval

    return max(_MIN_INTERVAL_SEC, min(_MAX_INTERVAL_SEC, candidate))
449
+
450
+
114
451
  # ══════════════════════════════════════════════════════════════
115
452
  # CRUD
116
453
  # ══════════════════════════════════════════════════════════════
@@ -126,7 +463,10 @@ def get_pipeline(pipe_id: str) -> tuple[dict | None, str | None]:
126
463
  return None, "파이프라인을 찾을 수 없습니다"
127
464
 
128
465
 
129
- def create_pipeline(project_path: str, command: str, interval: str = "", name: str = "") -> tuple[dict | None, str | None]:
466
+ def create_pipeline(
467
+ project_path: str, command: str, interval: str = "",
468
+ name: str = "", on_complete: str = "",
469
+ ) -> tuple[dict | None, str | None]:
130
470
  project_path = os.path.abspath(os.path.expanduser(project_path))
131
471
  if not command.strip():
132
472
  return None, "명령어(command)를 입력하세요"
@@ -134,7 +474,6 @@ def create_pipeline(project_path: str, command: str, interval: str = "", name: s
134
474
  name = os.path.basename(project_path)
135
475
 
136
476
  interval_sec = _parse_interval(interval) if interval else None
137
- pipelines = _load_pipelines()
138
477
  now = time.strftime("%Y-%m-%dT%H:%M:%S")
139
478
  pipe = {
140
479
  "id": _generate_id(),
@@ -143,6 +482,7 @@ def create_pipeline(project_path: str, command: str, interval: str = "", name: s
143
482
  "command": command,
144
483
  "interval": interval or None,
145
484
  "interval_sec": interval_sec,
485
+ "effective_interval_sec": interval_sec, # 적응형 인터벌
146
486
  "status": "active",
147
487
  "job_id": None,
148
488
  "next_run": None,
@@ -150,37 +490,54 @@ def create_pipeline(project_path: str, command: str, interval: str = "", name: s
150
490
  "last_result": None,
151
491
  "last_error": None,
152
492
  "run_count": 0,
493
+ "history": [], # 결과 히스토리
494
+ "on_complete": on_complete or None, # 체이닝: 완료 시 트리거할 pipe_id
153
495
  "created_at": now,
154
496
  "updated_at": now,
155
497
  }
156
- pipelines.append(pipe)
157
- _save_pipelines(pipelines)
498
+ with _pipeline_lock():
499
+ pipelines = _load_pipelines()
500
+ pipelines.append(pipe)
501
+ _save_pipelines(pipelines)
158
502
  return pipe, None
159
503
 
160
504
 
161
- def update_pipeline(pipe_id: str, command: str = None, interval: str = None, name: str = None) -> tuple[dict | None, str | None]:
505
def update_pipeline(
    pipe_id: str, command: str = None, interval: str = None,
    name: str = None, on_complete: str = None,
) -> tuple[dict | None, str | None]:
    """Patch mutable fields of a pipeline; a None argument leaves that
    field unchanged. An empty-string interval clears the schedule."""
    def updater(p):
        if command is not None:
            p["command"] = command
        if name is not None:
            p["name"] = name
        if on_complete is not None:
            p["on_complete"] = on_complete or None
        if interval is None:
            return
        if interval == "":
            # Clear the schedule entirely.
            p["interval"] = None
            p["interval_sec"] = None
            p["effective_interval_sec"] = None
            p["next_run"] = None
        else:
            parsed = _parse_interval(interval)
            p["interval"] = interval
            p["interval_sec"] = parsed
            p["effective_interval_sec"] = parsed
            # Active and idle → reschedule next_run immediately.
            if p.get("status") == "active" and not p.get("job_id") and parsed:
                p["next_run"] = _next_run_str(parsed)
    return _update_pipeline(pipe_id, updater)
175
531
 
176
532
 
177
533
def delete_pipeline(pipe_id: str) -> tuple[dict | None, str | None]:
    """Remove the pipeline with *pipe_id*; returns the removed entry,
    or (None, error-message) when the id is unknown."""
    with _pipeline_lock():
        pipelines = _load_pipelines()
        for index, entry in enumerate(pipelines):
            if entry["id"] == pipe_id:
                del pipelines[index]
                _save_pipelines(pipelines)
                return entry, None
    return None, "파이프라인을 찾을 수 없습니다"
185
542
 
186
543
 
@@ -188,29 +545,107 @@ def delete_pipeline(pipe_id: str) -> tuple[dict | None, str | None]:
188
545
  # 핵심: dispatch + tick
189
546
  # ══════════════════════════════════════════════════════════════
190
547
 
191
- def dispatch(pipe_id: str) -> tuple[dict | None, str | None]:
192
- """작업을 FIFO로 전송하고 next_run을 설정한다."""
193
- pipe, err = get_pipeline(pipe_id)
194
- if err:
195
- return None, err
196
- if pipe["status"] != "active":
197
- return None, "파이프라인이 꺼져 있습니다"
548
# Auto-pause threshold: after this many consecutive idle results the
# pipeline is paused automatically.
_AUTO_PAUSE_THRESHOLD = 5


def dispatch(pipe_id: str, force: bool = False) -> tuple[dict | None, str | None]:
    """Send the pipeline's command to the FIFO as a new job.

    Applies context injection (_build_enriched_prompt) and schedules the
    next run from the adaptive interval.

    Double-fire guard: the job_id check runs while holding the file lock,
    so two concurrent callers cannot both dispatch.
    Pre-dispatch skip guard: skipped when git shows no change and recent
    results were all idle (bypassed by force=True).
    Auto-pause: after _AUTO_PAUSE_THRESHOLD consecutive idle results the
    pipeline is switched to "stopped".

    Returns (info-dict, None) on success/skip/pause, or (None, error).
    """
    with _pipeline_lock():
        pipelines = _load_pipelines()
        pipe = None
        for p in pipelines:
            if p["id"] == pipe_id:
                pipe = p
                break
        if not pipe:
            return None, "파이프라인을 찾을 수 없습니다"
        if pipe["status"] != "active":
            return None, "파이프라인이 꺼져 있습니다"
        # Double-fire guard: a job is already in flight.
        if pipe.get("job_id"):
            return {"action": "already_running", "job_id": pipe["job_id"]}, None

        # ── Pre-dispatch skip guard (bypassed when force=True) ──
        if not force:
            skip, reason = _should_skip_dispatch(pipe)
            if skip:
                # Only reschedule next_run; do not execute this round.
                effective = pipe.get("effective_interval_sec") or pipe.get("interval_sec")
                if effective:
                    pipe["next_run"] = _next_run_str(effective)
                pipe["skip_count"] = pipe.get("skip_count", 0) + 1
                pipe["updated_at"] = time.strftime("%Y-%m-%dT%H:%M:%S")
                _save_pipelines(pipelines)
                return {"action": "skipped", "reason": reason, "name": pipe["name"]}, None

            # ── Auto-pause: 5+ consecutive idle results ──
            # NOTE(review): placed inside the `not force` branch so a manual
            # run (force=True) always executes — confirm against intent.
            history = pipe.get("history", [])
            consecutive_idle = 0
            for h in reversed(history):
                cls = h.get("classification", "unknown")
                if cls in ("no_change", "unknown"):
                    consecutive_idle += 1
                else:
                    break
            if consecutive_idle >= _AUTO_PAUSE_THRESHOLD:
                pipe["status"] = "stopped"
                pipe["last_error"] = f"자동 일시정지: 연속 {consecutive_idle}회 무변경"
                pipe["next_run"] = None
                pipe["updated_at"] = time.strftime("%Y-%m-%dT%H:%M:%S")
                _save_pipelines(pipelines)
                return {
                    "action": "auto_paused",
                    "reason": f"연속 {consecutive_idle}회 무변경 → 자동 일시정지",
                    "name": pipe["name"],
                }, None

        # Set the dispatching marker while still holding the lock so no
        # other process can dispatch concurrently.
        pipe["job_id"] = "__dispatching__"
        pipe["updated_at"] = time.strftime("%Y-%m-%dT%H:%M:%S")
        _save_pipelines(pipelines)

    # The actual send happens outside the lock (it may take a while).
    enriched_prompt = _build_enriched_prompt(pipe)
    result, send_err = send_to_fifo(enriched_prompt, cwd=pipe["project_path"])

    if send_err:
        # Send failed: clear the dispatching marker.
        def clear_marker(p):
            p["job_id"] = None
            p["last_error"] = f"FIFO 전송 실패: {send_err}"
        _update_pipeline(pipe_id, clear_marker)
        return None, f"FIFO 전송 실패: {send_err}"

    job_id = result["job_id"]
    effective = pipe.get("effective_interval_sec") or pipe.get("interval_sec")
    nr = _next_run_str(effective) if effective else None

    # Store the git snapshot for the next dispatch's comparison.
    head_sha = _get_git_head_sha(pipe["project_path"])
    dirty_hash = _get_git_dirty_hash(pipe["project_path"])
    git_snapshot = f"{head_sha}:{dirty_hash}"

    def do_dispatch(p, _snapshot=git_snapshot):
        p["job_id"] = job_id
        p["last_run"] = time.strftime("%Y-%m-%dT%H:%M:%S")
        p["last_error"] = None
        p["next_run"] = nr
        p["last_git_snapshot"] = _snapshot
    _update_pipeline(pipe_id, do_dispatch)

    return {
        "action": "dispatched",
        "job_id": job_id,
        "name": pipe["name"],
        "next_run": nr,
        "context_injected": bool(enriched_prompt != pipe["command"]),
    }, None
214
649
 
215
650
 
216
651
  def tick(pipe_id: str) -> tuple[dict | None, str | None]:
@@ -224,11 +659,16 @@ def tick(pipe_id: str) -> tuple[dict | None, str | None]:
224
659
  job_id = pipe.get("job_id")
225
660
  if not job_id:
226
661
  # job 없음 → next_run 확인 후 dispatch
662
+ effective = pipe.get("effective_interval_sec") or pipe.get("interval_sec")
227
663
  if pipe.get("next_run") and _parse_timestamp(pipe["next_run"]) > time.time():
228
664
  remaining = int(_parse_timestamp(pipe["next_run"]) - time.time())
229
665
  return {"action": "waiting", "remaining_sec": remaining}, None
230
666
  return dispatch(pipe_id)
231
667
 
668
+ # dispatching 마커: 다른 프로세스가 dispatch 중
669
+ if job_id == "__dispatching__":
670
+ return {"action": "dispatching"}, None
671
+
232
672
  # job 실행 중 → 완료 확인
233
673
  resolved_cache = pipe.get("resolved_job_id")
234
674
  result_text, result_err, resolved = _resolve_job(job_id, resolved_cache)
@@ -250,16 +690,78 @@ def tick(pipe_id: str) -> tuple[dict | None, str | None]:
250
690
  _update_pipeline(pipe_id, set_err)
251
691
  return {"action": "error", "error": result_err}, None
252
692
 
253
- # 완료
254
- summary = (result_text or "")[:200]
255
- def complete(p, _s=summary):
693
+ # ── 완료: 히스토리 기록 + 적응형 인터벌 + 체이닝 ──
694
+ summary = (result_text or "")[:500]
695
+ classification = _classify_result(result_text or "")
696
+
697
+ # 비용/시간 정보 추출
698
+ cost_usd = None
699
+ duration_ms = None
700
+ if resolved:
701
+ full_result, _ = get_job_result(resolved)
702
+ if full_result:
703
+ cost_usd = full_result.get("cost_usd")
704
+ duration_ms = full_result.get("duration_ms")
705
+
706
+ # 적응형 인터벌 계산
707
+ new_interval = _adapt_interval(pipe, result_text or "")
708
+
709
+ # 체이닝 대상 확인
710
+ chain_target = pipe.get("on_complete")
711
+
712
+ def complete(p, _s=summary, _c=classification, _ni=new_interval,
713
+ _cost=cost_usd, _dur=duration_ms):
256
714
  p["last_result"] = _s
257
715
  p["run_count"] = p.get("run_count", 0) + 1
258
716
  p["job_id"] = None
259
717
  p.pop("resolved_job_id", None)
718
+
719
+ # 히스토리 추가
720
+ history = p.get("history", [])
721
+ history.append({
722
+ "result": _s,
723
+ "classification": _c,
724
+ "cost_usd": _cost,
725
+ "duration_ms": _dur,
726
+ "completed_at": time.strftime("%Y-%m-%dT%H:%M:%S"),
727
+ })
728
+ # 최대 보관 수 초과 시 오래된 것 제거
729
+ if len(history) > _MAX_HISTORY:
730
+ history = history[-_MAX_HISTORY:]
731
+ p["history"] = history
732
+
733
+ # 적응형 인터벌 적용
734
+ if _ni is not None:
735
+ p["effective_interval_sec"] = _ni
736
+ p["next_run"] = _next_run_str(_ni)
737
+ elif p.get("interval_sec"):
738
+ p["next_run"] = _next_run_str(p["interval_sec"])
739
+
260
740
  _update_pipeline(pipe_id, complete)
261
741
 
262
- return {"action": "completed", "run_count": pipe.get("run_count", 0) + 1}, None
742
+ # 체이닝: on_complete에 지정된 파이프라인 트리거
743
+ chain_result = None
744
+ if chain_target:
745
+ chain_result, chain_err = dispatch(chain_target)
746
+ if chain_err:
747
+ chain_result = {"chain_error": chain_err}
748
+
749
+ response = {
750
+ "action": "completed",
751
+ "run_count": pipe.get("run_count", 0) + 1,
752
+ "classification": classification,
753
+ }
754
+ if new_interval and pipe.get("interval_sec") and new_interval != pipe.get("interval_sec"):
755
+ base = pipe["interval_sec"]
756
+ response["interval_adapted"] = {
757
+ "base": base,
758
+ "new": new_interval,
759
+ "change": f"{'+' if new_interval > base else ''}{int((new_interval - base) / base * 100)}%",
760
+ }
761
+ if chain_result:
762
+ response["chain"] = chain_result
763
+
764
+ return response, None
263
765
 
264
766
 
265
767
  # ══════════════════════════════════════════════════════════════
@@ -267,14 +769,14 @@ def tick(pipe_id: str) -> tuple[dict | None, str | None]:
267
769
  # ══════════════════════════════════════════════════════════════
268
770
 
269
771
def run_next(pipe_id: str) -> tuple[dict | None, str | None]:
    """Switch the pipeline ON and dispatch immediately (bypasses the skip guard)."""
    def activate(p):
        p.update(status="active", job_id=None, next_run=None, last_error=None)
    _update_pipeline(pipe_id, activate)
    return dispatch(pipe_id, force=True)
278
780
 
279
781
 
280
782
  def force_run(pipe_id: str) -> tuple[dict | None, str | None]:
@@ -292,6 +794,10 @@ def stop_pipeline(pipe_id: str) -> tuple[dict | None, str | None]:
292
794
 
293
795
 
294
796
def reset_phase(pipe_id: str, phase: str = None) -> tuple[dict | None, str | None]:
    """Reset pipeline state — restores the adaptive interval to its base,
    then re-activates and dispatches. (*phase* is currently unused.)"""
    def restore_interval(p):
        p["effective_interval_sec"] = p.get("interval_sec")
    _update_pipeline(pipe_id, restore_interval)
    return run_next(pipe_id)
296
802
 
297
803
 
@@ -316,12 +822,18 @@ def get_pipeline_status(pipe_id: str) -> tuple[dict | None, str | None]:
316
822
  if pipe.get("next_run"):
317
823
  remaining_sec = max(0, int(_parse_timestamp(pipe["next_run"]) - time.time()))
318
824
 
825
+ # 히스토리 통계
826
+ history = pipe.get("history", [])
827
+ total_cost = sum(h.get("cost_usd", 0) or 0 for h in history)
828
+ classifications = [h.get("classification", "unknown") for h in history]
829
+
319
830
  return {
320
831
  "id": pipe["id"],
321
832
  "name": pipe["name"],
322
833
  "project_path": pipe["project_path"],
323
834
  "command": pipe["command"],
324
835
  "interval": pipe.get("interval"),
836
+ "effective_interval_sec": pipe.get("effective_interval_sec"),
325
837
  "status": pipe["status"],
326
838
  "job_id": pipe.get("job_id"),
327
839
  "job_status": job_status,
@@ -331,19 +843,139 @@ def get_pipeline_status(pipe_id: str) -> tuple[dict | None, str | None]:
331
843
  "last_result": pipe.get("last_result"),
332
844
  "last_error": pipe.get("last_error"),
333
845
  "run_count": pipe.get("run_count", 0),
846
+ "on_complete": pipe.get("on_complete"),
847
+ "history_count": len(history),
848
+ "total_cost_usd": round(total_cost, 4) if total_cost else None,
849
+ "classifications": classifications[-5:], # 최근 5개 분류
334
850
  "created_at": pipe["created_at"],
335
851
  "updated_at": pipe["updated_at"],
336
852
  }, None
337
853
 
338
854
 
855
def get_pipeline_history(pipe_id: str) -> tuple[dict | None, str | None]:
    """Return the pipeline's execution history, newest entry first."""
    pipe, err = get_pipeline(pipe_id)
    if err:
        return None, err
    entries = pipe.get("history", [])
    spent = sum(e.get("cost_usd", 0) or 0 for e in entries)
    return {
        "id": pipe["id"],
        "name": pipe["name"],
        "run_count": pipe.get("run_count", 0),
        "total_cost_usd": round(spent, 4),
        "entries": list(reversed(entries)),  # newest first
    }, None
868
+
869
+
870
+ # ══════════════════════════════════════════════════════════════
871
+ # Self-Evolution: 메타 분석
872
+ # ══════════════════════════════════════════════════════════════
873
+
874
def get_evolution_summary() -> dict:
    """Summarize the self-evolution state of the whole pipeline system:
    counts, aggregate cost, classification mix, interval adaptations,
    auto-paused pipelines, and skipped dispatches."""
    pipelines = _load_pipelines()
    active = [p for p in pipelines if p["status"] == "active"]
    total_runs = sum(p.get("run_count", 0) for p in pipelines)

    total_cost = 0
    all_classifications = {"has_change": 0, "no_change": 0, "unknown": 0}
    for p in pipelines:
        for entry in p.get("history", []):
            if entry.get("cost_usd"):
                total_cost += entry["cost_usd"]
            label = entry.get("classification", "unknown")
            all_classifications[label] = all_classifications.get(label, 0) + 1

    # Efficiency score: runs that produced a change / all classified runs.
    total_classified = sum(all_classifications.values())
    if total_classified > 0:
        efficiency = round(
            all_classifications["has_change"] / total_classified * 100, 1
        )
    else:
        efficiency = 0

    # Pipelines whose adaptive interval has drifted from the base.
    interval_adaptations = []
    for p in pipelines:
        base = p.get("interval_sec")
        effective = p.get("effective_interval_sec")
        if base and effective and base != effective:
            interval_adaptations.append({
                "name": p["name"],
                "base_sec": base,
                "effective_sec": effective,
                "change_pct": int((effective - base) / base * 100),
            })

    # Pipelines stopped by the auto-pause mechanism.
    auto_paused = [
        {"name": p["name"], "reason": p.get("last_error", "")}
        for p in pipelines
        if p["status"] == "stopped"
        and (p.get("last_error") or "").startswith("자동 일시정지")
    ]

    # Dispatches skipped by the pre-dispatch guard.
    total_skips = sum(p.get("skip_count", 0) for p in pipelines)

    return {
        "active_count": len(active),
        "total_pipelines": len(pipelines),
        "total_runs": total_runs,
        "total_skips": total_skips,
        "total_cost_usd": round(total_cost, 4),
        "classifications": all_classifications,
        "efficiency_pct": efficiency,
        "interval_adaptations": interval_adaptations,
        "auto_paused": auto_paused,
    }
931
+
932
+
339
933
  # ══════════════════════════════════════════════════════════════
340
934
  # Tick All
341
935
  # ══════════════════════════════════════════════════════════════
342
936
 
937
_TICK_ALL_LOCK = DATA_DIR / ".tick_all.lock"
_TICK_ALL_DEBOUNCE_SEC = 3  # ignore duplicate calls within 3 seconds


def tick_all() -> list[dict]:
    """Tick every active pipeline.

    Debounce: the autoloop.sh cron job and the frontend poll may call this
    concurrently; a non-blocking flock ensures only one tick_all runs at a
    time, and calls within 3 seconds of the last run are ignored.
    """
    DATA_DIR.mkdir(parents=True, exist_ok=True)
    try:
        fd = open(_TICK_ALL_LOCK, "a+")
    except OSError:
        return [{"skip": True, "reason": "another tick_all in progress"}]
    try:
        fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except (BlockingIOError, OSError):
        # Another process is already running tick_all. Close the fd here:
        # the original leaked it on this contended path.
        fd.close()
        return [{"skip": True, "reason": "another tick_all in progress"}]

    try:
        # Debounce: check the timestamp of the last tick_all.
        fd.seek(0)
        last_tick_str = fd.read().strip()
        if last_tick_str:
            try:
                last_tick = float(last_tick_str)
                if time.time() - last_tick < _TICK_ALL_DEBOUNCE_SEC:
                    return [{"skip": True, "reason": "debounced"}]
            except ValueError:
                pass

        # Record the current time for the next debounce check.
        fd.seek(0)
        fd.truncate()
        fd.write(str(time.time()))
        fd.flush()

        results = []
        for p in _load_pipelines():
            if p["status"] == "active":
                result, err = tick(p["id"])
                results.append({"pipeline_id": p["id"], "name": p["name"], "result": result, "error": err})
        return results
    finally:
        fcntl.flock(fd, fcntl.LOCK_UN)
        fd.close()