llmcode-cli 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- llm_code/__init__.py +2 -0
- llm_code/analysis/__init__.py +6 -0
- llm_code/analysis/cache.py +33 -0
- llm_code/analysis/engine.py +256 -0
- llm_code/analysis/go_rules.py +114 -0
- llm_code/analysis/js_rules.py +84 -0
- llm_code/analysis/python_rules.py +311 -0
- llm_code/analysis/rules.py +140 -0
- llm_code/analysis/rust_rules.py +108 -0
- llm_code/analysis/universal_rules.py +111 -0
- llm_code/api/__init__.py +0 -0
- llm_code/api/client.py +90 -0
- llm_code/api/errors.py +73 -0
- llm_code/api/openai_compat.py +390 -0
- llm_code/api/provider.py +35 -0
- llm_code/api/sse.py +52 -0
- llm_code/api/types.py +140 -0
- llm_code/cli/__init__.py +0 -0
- llm_code/cli/commands.py +70 -0
- llm_code/cli/image.py +122 -0
- llm_code/cli/render.py +214 -0
- llm_code/cli/status_line.py +79 -0
- llm_code/cli/streaming.py +92 -0
- llm_code/cli/tui_main.py +220 -0
- llm_code/computer_use/__init__.py +11 -0
- llm_code/computer_use/app_detect.py +49 -0
- llm_code/computer_use/app_tier.py +57 -0
- llm_code/computer_use/coordinator.py +99 -0
- llm_code/computer_use/input_control.py +71 -0
- llm_code/computer_use/screenshot.py +93 -0
- llm_code/cron/__init__.py +13 -0
- llm_code/cron/parser.py +145 -0
- llm_code/cron/scheduler.py +135 -0
- llm_code/cron/storage.py +126 -0
- llm_code/enterprise/__init__.py +1 -0
- llm_code/enterprise/audit.py +59 -0
- llm_code/enterprise/auth.py +26 -0
- llm_code/enterprise/oidc.py +95 -0
- llm_code/enterprise/rbac.py +65 -0
- llm_code/harness/__init__.py +5 -0
- llm_code/harness/config.py +33 -0
- llm_code/harness/engine.py +129 -0
- llm_code/harness/guides.py +41 -0
- llm_code/harness/sensors.py +68 -0
- llm_code/harness/templates.py +84 -0
- llm_code/hida/__init__.py +1 -0
- llm_code/hida/classifier.py +187 -0
- llm_code/hida/engine.py +49 -0
- llm_code/hida/profiles.py +95 -0
- llm_code/hida/types.py +28 -0
- llm_code/ide/__init__.py +1 -0
- llm_code/ide/bridge.py +80 -0
- llm_code/ide/detector.py +76 -0
- llm_code/ide/server.py +169 -0
- llm_code/logging.py +29 -0
- llm_code/lsp/__init__.py +0 -0
- llm_code/lsp/client.py +298 -0
- llm_code/lsp/detector.py +42 -0
- llm_code/lsp/manager.py +56 -0
- llm_code/lsp/tools.py +288 -0
- llm_code/marketplace/__init__.py +0 -0
- llm_code/marketplace/builtin_registry.py +102 -0
- llm_code/marketplace/installer.py +162 -0
- llm_code/marketplace/plugin.py +78 -0
- llm_code/marketplace/registry.py +360 -0
- llm_code/mcp/__init__.py +0 -0
- llm_code/mcp/bridge.py +87 -0
- llm_code/mcp/client.py +117 -0
- llm_code/mcp/health.py +120 -0
- llm_code/mcp/manager.py +214 -0
- llm_code/mcp/oauth.py +219 -0
- llm_code/mcp/transport.py +254 -0
- llm_code/mcp/types.py +53 -0
- llm_code/remote/__init__.py +0 -0
- llm_code/remote/client.py +136 -0
- llm_code/remote/protocol.py +22 -0
- llm_code/remote/server.py +275 -0
- llm_code/remote/ssh_proxy.py +56 -0
- llm_code/runtime/__init__.py +0 -0
- llm_code/runtime/auto_commit.py +56 -0
- llm_code/runtime/auto_diagnose.py +62 -0
- llm_code/runtime/checkpoint.py +70 -0
- llm_code/runtime/checkpoint_recovery.py +142 -0
- llm_code/runtime/compaction.py +35 -0
- llm_code/runtime/compressor.py +415 -0
- llm_code/runtime/config.py +533 -0
- llm_code/runtime/context.py +49 -0
- llm_code/runtime/conversation.py +921 -0
- llm_code/runtime/cost_tracker.py +126 -0
- llm_code/runtime/dream.py +127 -0
- llm_code/runtime/file_protection.py +150 -0
- llm_code/runtime/hardware.py +85 -0
- llm_code/runtime/hooks.py +223 -0
- llm_code/runtime/indexer.py +230 -0
- llm_code/runtime/knowledge_compiler.py +232 -0
- llm_code/runtime/memory.py +132 -0
- llm_code/runtime/memory_layers.py +467 -0
- llm_code/runtime/memory_lint.py +252 -0
- llm_code/runtime/model_aliases.py +37 -0
- llm_code/runtime/ollama.py +93 -0
- llm_code/runtime/overlay.py +124 -0
- llm_code/runtime/permissions.py +200 -0
- llm_code/runtime/plan.py +45 -0
- llm_code/runtime/prompt.py +238 -0
- llm_code/runtime/repo_map.py +174 -0
- llm_code/runtime/sandbox.py +116 -0
- llm_code/runtime/session.py +268 -0
- llm_code/runtime/skill_resolver.py +61 -0
- llm_code/runtime/skills.py +133 -0
- llm_code/runtime/speculative.py +75 -0
- llm_code/runtime/streaming_executor.py +216 -0
- llm_code/runtime/telemetry.py +196 -0
- llm_code/runtime/token_budget.py +26 -0
- llm_code/runtime/vcr.py +142 -0
- llm_code/runtime/vision.py +102 -0
- llm_code/swarm/__init__.py +1 -0
- llm_code/swarm/backend_subprocess.py +108 -0
- llm_code/swarm/backend_tmux.py +103 -0
- llm_code/swarm/backend_worktree.py +306 -0
- llm_code/swarm/checkpoint.py +74 -0
- llm_code/swarm/coordinator.py +236 -0
- llm_code/swarm/mailbox.py +88 -0
- llm_code/swarm/manager.py +202 -0
- llm_code/swarm/memory_sync.py +80 -0
- llm_code/swarm/recovery.py +21 -0
- llm_code/swarm/team.py +67 -0
- llm_code/swarm/types.py +31 -0
- llm_code/task/__init__.py +16 -0
- llm_code/task/diagnostics.py +93 -0
- llm_code/task/manager.py +162 -0
- llm_code/task/types.py +112 -0
- llm_code/task/verifier.py +104 -0
- llm_code/tools/__init__.py +0 -0
- llm_code/tools/agent.py +145 -0
- llm_code/tools/agent_roles.py +82 -0
- llm_code/tools/base.py +94 -0
- llm_code/tools/bash.py +565 -0
- llm_code/tools/computer_use_tools.py +278 -0
- llm_code/tools/coordinator_tool.py +75 -0
- llm_code/tools/cron_create.py +90 -0
- llm_code/tools/cron_delete.py +49 -0
- llm_code/tools/cron_list.py +51 -0
- llm_code/tools/deferred.py +92 -0
- llm_code/tools/dump.py +116 -0
- llm_code/tools/edit_file.py +282 -0
- llm_code/tools/git_tools.py +531 -0
- llm_code/tools/glob_search.py +112 -0
- llm_code/tools/grep_search.py +144 -0
- llm_code/tools/ide_diagnostics.py +59 -0
- llm_code/tools/ide_open.py +58 -0
- llm_code/tools/ide_selection.py +52 -0
- llm_code/tools/memory_tools.py +138 -0
- llm_code/tools/multi_edit.py +143 -0
- llm_code/tools/notebook_edit.py +107 -0
- llm_code/tools/notebook_read.py +81 -0
- llm_code/tools/parsing.py +63 -0
- llm_code/tools/read_file.py +154 -0
- llm_code/tools/registry.py +58 -0
- llm_code/tools/search_backends/__init__.py +56 -0
- llm_code/tools/search_backends/brave.py +56 -0
- llm_code/tools/search_backends/duckduckgo.py +129 -0
- llm_code/tools/search_backends/searxng.py +71 -0
- llm_code/tools/search_backends/tavily.py +73 -0
- llm_code/tools/swarm_create.py +109 -0
- llm_code/tools/swarm_delete.py +95 -0
- llm_code/tools/swarm_list.py +44 -0
- llm_code/tools/swarm_message.py +109 -0
- llm_code/tools/task_close.py +79 -0
- llm_code/tools/task_plan.py +79 -0
- llm_code/tools/task_verify.py +90 -0
- llm_code/tools/tool_search.py +65 -0
- llm_code/tools/web_common.py +258 -0
- llm_code/tools/web_fetch.py +223 -0
- llm_code/tools/web_search.py +280 -0
- llm_code/tools/write_file.py +118 -0
- llm_code/tui/__init__.py +1 -0
- llm_code/tui/app.py +2432 -0
- llm_code/tui/chat_view.py +82 -0
- llm_code/tui/chat_widgets.py +309 -0
- llm_code/tui/header_bar.py +46 -0
- llm_code/tui/input_bar.py +349 -0
- llm_code/tui/keybindings.py +142 -0
- llm_code/tui/marketplace.py +210 -0
- llm_code/tui/status_bar.py +72 -0
- llm_code/tui/theme.py +96 -0
- llm_code/utils/__init__.py +0 -0
- llm_code/utils/diff.py +111 -0
- llm_code/utils/errors.py +70 -0
- llm_code/utils/hyperlink.py +73 -0
- llm_code/utils/notebook.py +179 -0
- llm_code/utils/search.py +69 -0
- llm_code/utils/text_normalize.py +28 -0
- llm_code/utils/version_check.py +62 -0
- llm_code/vim/__init__.py +4 -0
- llm_code/vim/engine.py +51 -0
- llm_code/vim/motions.py +172 -0
- llm_code/vim/operators.py +183 -0
- llm_code/vim/text_objects.py +139 -0
- llm_code/vim/transitions.py +279 -0
- llm_code/vim/types.py +68 -0
- llm_code/voice/__init__.py +1 -0
- llm_code/voice/languages.py +43 -0
- llm_code/voice/recorder.py +136 -0
- llm_code/voice/stt.py +36 -0
- llm_code/voice/stt_anthropic.py +66 -0
- llm_code/voice/stt_google.py +32 -0
- llm_code/voice/stt_whisper.py +52 -0
- llmcode_cli-1.0.0.dist-info/METADATA +524 -0
- llmcode_cli-1.0.0.dist-info/RECORD +212 -0
- llmcode_cli-1.0.0.dist-info/WHEEL +4 -0
- llmcode_cli-1.0.0.dist-info/entry_points.txt +2 -0
- llmcode_cli-1.0.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
"""Cron scheduling module for llm-code."""
|
|
2
|
+
from llm_code.cron.parser import CronExpression, next_fire_time, parse_cron
|
|
3
|
+
from llm_code.cron.storage import CronStorage, CronTask
|
|
4
|
+
from llm_code.cron.scheduler import CronScheduler
|
|
5
|
+
|
|
6
|
+
__all__ = [
|
|
7
|
+
"CronExpression",
|
|
8
|
+
"CronScheduler",
|
|
9
|
+
"CronStorage",
|
|
10
|
+
"CronTask",
|
|
11
|
+
"next_fire_time",
|
|
12
|
+
"parse_cron",
|
|
13
|
+
]
|
llm_code/cron/parser.py
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
"""Cron expression parser — 5-field standard format (local time).
|
|
2
|
+
|
|
3
|
+
Fields: minute(0-59) hour(0-23) day-of-month(1-31) month(1-12) day-of-week(0-6, 0=Sunday)
|
|
4
|
+
Syntax: * (all), N (single), N-M (range), N,M (list), */N or N-M/N (step)
|
|
5
|
+
"""
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
import dataclasses
|
|
9
|
+
import datetime
|
|
10
|
+
|
|
11
|
+
# Inclusive (low, high) bounds for each of the five cron fields.
_FIELD_RANGES = {
    "minute": (0, 59),
    "hour": (0, 23),
    "day_of_month": (1, 31),
    "month": (1, 12),
    "day_of_week": (0, 6),  # 0 = Sunday (cron convention)
}

# Positional order of the fields in a 5-field cron expression string.
_FIELD_ORDER = ("minute", "hour", "day_of_month", "month", "day_of_week")
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
@dataclasses.dataclass(frozen=True)
class CronExpression:
    """Parsed 5-field cron expression.

    Each attribute holds the fully expanded set of matching values for
    its field (e.g. ``*`` in the minute field becomes ``(0, 1, ..., 59)``),
    so matching a datetime is a simple membership test.
    """

    minute: tuple[int, ...]        # matching minutes, 0-59
    hour: tuple[int, ...]          # matching hours, 0-23
    day_of_month: tuple[int, ...]  # matching days of month, 1-31
    month: tuple[int, ...]         # matching months, 1-12
    day_of_week: tuple[int, ...]   # matching weekdays, 0=Sunday
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def _parse_field(token: str, field_name: str) -> tuple[int, ...]:
    """Parse a single cron field token into a sorted tuple of valid integers.

    Supported syntax: ``*`` (all values), ``N`` (single value), ``N-M``
    (inclusive range), ``*/S`` and ``N-M/S`` (steps), and comma lists whose
    elements may themselves be values, ranges, or steps (e.g. ``1-5,10,*/20``
    — the original implementation only accepted plain values in lists).

    Raises:
        ValueError: if the token is malformed or any value falls outside
            the legal bounds for *field_name*.
    """
    lo, hi = _FIELD_RANGES[field_name]

    if token == "*":
        return tuple(range(lo, hi + 1))

    # List: union of the parsed sub-tokens. Each element may be a value,
    # range, or step expression (crontab(5) allows this); duplicates collapse.
    if "," in token:
        values: set[int] = set()
        for part in token.split(","):
            values.update(_parse_field(part, field_name))
        return tuple(sorted(values))

    # Step: */S or N-M/S (bare N/S means "from N to the field max, step S").
    if "/" in token:
        base, step_str = token.split("/", 1)
        step = int(step_str)
        if step <= 0:
            raise ValueError(f"Invalid step value in {field_name}: {step}")
        if base == "*":
            return tuple(range(lo, hi + 1, step))
        if "-" in base:
            rlo, rhi = (int(x) for x in base.split("-", 1))
        else:
            rlo, rhi = int(base), hi
        _validate_range(rlo, rhi, lo, hi, field_name)
        return tuple(range(rlo, rhi + 1, step))

    # Range: N-M
    if "-" in token:
        rlo, rhi = (int(x) for x in token.split("-", 1))
        _validate_range(rlo, rhi, lo, hi, field_name)
        return tuple(range(rlo, rhi + 1))

    # Single value
    val = int(token)
    if val < lo or val > hi:
        raise ValueError(f"Value {val} out of range for {field_name} ({lo}-{hi})")
    return (val,)
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def _validate_range(rlo: int, rhi: int, lo: int, hi: int, field_name: str) -> None:
|
|
75
|
+
if rlo < lo or rhi > hi or rlo > rhi:
|
|
76
|
+
raise ValueError(f"Invalid range {rlo}-{rhi} for {field_name} ({lo}-{hi})")
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def parse_cron(expr: str) -> CronExpression:
    """Parse a 5-field cron expression string into a CronExpression.

    Raises:
        ValueError: if the expression does not have exactly 5 fields, or
            any field fails to parse. The message always names the field
            and the offending token.
    """
    tokens = expr.strip().split()
    if len(tokens) != 5:
        raise ValueError(f"Cron expression must have 5 fields, got {len(tokens)}: '{expr}'")

    fields: dict[str, tuple[int, ...]] = {}
    for token, field_name in zip(tokens, _FIELD_ORDER):
        try:
            fields[field_name] = _parse_field(token, field_name)
        except Exception as exc:
            # The original re-raised ValueError unwrapped, so int() parse
            # failures (the common case, e.g. "a * * * *") escaped without
            # naming the field. Wrap every failure with field context.
            raise ValueError(f"Invalid {field_name} field '{token}': {exc}") from exc

    return CronExpression(**fields)
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def _python_weekday_to_cron(py_wd: int) -> int:
|
|
98
|
+
"""Convert Python weekday (0=Mon) to cron weekday (0=Sun)."""
|
|
99
|
+
return (py_wd + 1) % 7
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
def next_fire_time(
    expr: CronExpression,
    after: datetime.datetime,
) -> datetime.datetime:
    """Return the next datetime matching the cron expression, strictly after `after`.

    Uses local (naive) time. Raises ValueError if no match found within 1 year.

    NOTE(review): day_of_month and day_of_week are ANDed here; classic Vixie
    cron ORs them when both fields are restricted — confirm this is intended.
    """
    # Start from the next minute boundary
    candidate = after.replace(second=0, microsecond=0) + datetime.timedelta(minutes=1)
    # 366 days covers leap years; beyond that the expression is unsatisfiable
    # (e.g. Feb 30).
    limit = after + datetime.timedelta(days=366)

    while candidate <= limit:
        cron_dow = _python_weekday_to_cron(candidate.weekday())

        # A candidate fires only when every field matches.
        if (
            candidate.month in expr.month
            and candidate.day in expr.day_of_month
            and cron_dow in expr.day_of_week
            and candidate.hour in expr.hour
            and candidate.minute in expr.minute
        ):
            return candidate

        # Advance: skip non-matching months, days, hours, minutes efficiently
        if candidate.month not in expr.month:
            # Jump to first day of next month
            if candidate.month == 12:
                candidate = candidate.replace(year=candidate.year + 1, month=1, day=1, hour=0, minute=0)
            else:
                candidate = candidate.replace(month=candidate.month + 1, day=1, hour=0, minute=0)
            continue

        # Wrong day (by date or weekday): jump to midnight of the next day.
        if candidate.day not in expr.day_of_month or cron_dow not in expr.day_of_week:
            candidate = (candidate + datetime.timedelta(days=1)).replace(hour=0, minute=0)
            continue

        # Wrong hour: jump to the top of the next hour.
        if candidate.hour not in expr.hour:
            candidate = (candidate + datetime.timedelta(hours=1)).replace(minute=0)
            continue

        # Only the minute mismatches: step one minute.
        candidate += datetime.timedelta(minutes=1)

    raise ValueError(f"Cron expression has no matching time within 1 year after {after}")
|
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
"""Asyncio-based cron scheduler with file locking and auto-expiry."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import asyncio
|
|
5
|
+
import datetime
|
|
6
|
+
import fcntl
|
|
7
|
+
import logging
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import Awaitable, Callable
|
|
10
|
+
|
|
11
|
+
from llm_code.cron.parser import next_fire_time, parse_cron
|
|
12
|
+
from llm_code.cron.storage import CronStorage, CronTask
|
|
13
|
+
|
|
14
|
+
logger = logging.getLogger(__name__)
|
|
15
|
+
|
|
16
|
+
_EXPIRY_DAYS = 30
|
|
17
|
+
_DEFAULT_POLL_SECONDS = 60
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class CronScheduler:
    """Background cron scheduler that polls storage every N seconds.

    A non-blocking ``fcntl.flock`` on *lock_path* guards each tick so at
    most one process fires tasks at a time.
    """

    def __init__(
        self,
        storage: CronStorage,
        lock_path: Path,
        on_fire: Callable[[str], Awaitable[None]],
    ) -> None:
        self._storage = storage
        self._lock_path = Path(lock_path)
        self._on_fire = on_fire
        self._running = False
        # Open lock-file handle while the lock is held; None otherwise.
        self._lock_fd = None

    async def start(self, poll_interval: float = _DEFAULT_POLL_SECONDS) -> None:
        """Run the scheduler loop until stop() is called."""
        self._running = True
        while self._running:
            try:
                await self._tick(now=datetime.datetime.now())
            except Exception:
                # A broken tick must not kill the loop; log and keep polling.
                logger.exception("Error in cron scheduler tick")
            await asyncio.sleep(poll_interval)

    def stop(self) -> None:
        """Signal the start() loop to exit after its current sleep."""
        self._running = False

    def check_missed(self, now: datetime.datetime) -> list[CronTask]:
        """Return tasks that have missed fire times since their last_fired_at."""
        missed: list[CronTask] = []
        for task in self._storage.list_all():
            # Reference point: last fire, or creation time if never fired.
            if task.last_fired_at is None:
                ref = task.created_at
            else:
                ref = task.last_fired_at
            try:
                expr = parse_cron(task.cron)
                nxt = next_fire_time(expr, ref)
                if nxt <= now:
                    missed.append(task)
            except ValueError:
                # Unparseable expression or no fire time within a year.
                continue
        return missed

    async def _tick(self, now: datetime.datetime) -> None:
        """Run one scheduling cycle (no-op if another process holds the lock)."""
        if not self._try_lock():
            return

        try:
            await self._process_tasks(now)
        finally:
            self._release_lock()

    async def _process_tasks(self, now: datetime.datetime) -> None:
        """Expire stale tasks, fire due ones, and drop completed one-shots."""
        tasks = self._storage.list_all()
        to_remove: list[str] = []

        for task in tasks:
            # Auto-expire non-permanent recurring tasks older than 30 days
            if not task.permanent and task.recurring:
                age = now - task.created_at
                if age.days > _EXPIRY_DAYS:
                    logger.info("Expiring task %s (age: %d days)", task.id, age.days)
                    to_remove.append(task.id)
                    continue

            # Determine if task should fire
            try:
                expr = parse_cron(task.cron)
            except ValueError:
                logger.warning("Invalid cron expression for task %s: %s", task.id, task.cron)
                continue

            ref = task.last_fired_at or task.created_at
            try:
                nxt = next_fire_time(expr, ref)
            except ValueError:
                continue

            if nxt <= now:
                logger.info("Firing task %s: %s", task.id, task.prompt)
                await self._on_fire(task.prompt)
                self._storage.update_last_fired(task.id, now)

                # One-shot tasks are removed after their single firing.
                if not task.recurring:
                    to_remove.append(task.id)

        for tid in to_remove:
            self._storage.remove(tid)

    def _try_lock(self) -> bool:
        """Acquire the file lock; return True if successful.

        Non-blocking: returns False immediately when another process
        already holds the lock.
        """
        try:
            self._lock_path.parent.mkdir(parents=True, exist_ok=True)
            # Close any stale FD from a previous attempt to prevent a leak.
            if self._lock_fd is not None:
                try:
                    self._lock_fd.close()
                except Exception:
                    pass
                self._lock_fd = None
            fd = open(self._lock_path, "w")
            try:
                fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
            except (OSError, IOError):
                # Lock held elsewhere: close the freshly opened handle here.
                # (The original left it dangling until the next attempt.)
                fd.close()
                return False
            self._lock_fd = fd
            return True
        except (OSError, IOError):
            return False

    def _release_lock(self) -> None:
        """Release the file lock and close its handle (best-effort)."""
        try:
            if self._lock_fd is not None:
                fcntl.flock(self._lock_fd, fcntl.LOCK_UN)
                self._lock_fd.close()
        except (OSError, IOError):
            pass
        finally:
            self._lock_fd = None
|
llm_code/cron/storage.py
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
"""Persistent storage for scheduled cron tasks."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import dataclasses
|
|
5
|
+
import datetime
|
|
6
|
+
import json
|
|
7
|
+
import uuid
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
|
|
10
|
+
_MAX_TASKS = 50
|
|
11
|
+
_ISO_FORMAT = "%Y-%m-%dT%H:%M:%S"
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
@dataclasses.dataclass(frozen=True)
class CronTask:
    """A single scheduled prompt, persisted by CronStorage."""

    id: str       # short unique identifier (12 hex chars from uuid4)
    cron: str     # 5-field cron expression string
    prompt: str   # prompt text dispatched when the task fires
    recurring: bool  # False = one-shot; scheduler removes it after first fire
    permanent: bool  # True = exempt from the scheduler's auto-expiry
    created_at: datetime.datetime                    # local naive creation time
    last_fired_at: datetime.datetime | None = None   # local naive; None if never fired
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class CronStorage:
    """Load/save cron tasks from a JSON file under .llm-code/.

    Tasks are held in memory and flushed to disk after every mutation.
    Saves are atomic (write to a temp file, then rename) so a crash
    mid-write cannot corrupt the store.
    """

    def __init__(self, path: Path) -> None:
        self._path = Path(path)
        self._tasks: list[CronTask] = self._load()

    def _load(self) -> list[CronTask]:
        """Read tasks from disk; unreadable files or corrupt entries are skipped."""
        if not self._path.exists():
            return []
        try:
            data = json.loads(self._path.read_text(encoding="utf-8"))
        except (json.JSONDecodeError, OSError):
            return []
        tasks: list[CronTask] = []
        for raw in data.get("tasks", []):
            try:
                last_fired = None
                if raw.get("last_fired_at"):
                    last_fired = datetime.datetime.strptime(raw["last_fired_at"], _ISO_FORMAT)
                tasks.append(CronTask(
                    id=raw["id"],
                    cron=raw["cron"],
                    prompt=raw["prompt"],
                    recurring=raw.get("recurring", True),
                    permanent=raw.get("permanent", False),
                    created_at=datetime.datetime.strptime(raw["created_at"], _ISO_FORMAT),
                    last_fired_at=last_fired,
                ))
            except (KeyError, TypeError, ValueError):
                # One malformed entry must not discard the whole store
                # (the original raised and lost everything).
                continue
        return tasks

    def _save(self) -> None:
        """Persist all tasks atomically (temp file + rename)."""
        self._path.parent.mkdir(parents=True, exist_ok=True)
        data = {
            "tasks": [
                {
                    "id": t.id,
                    "cron": t.cron,
                    "prompt": t.prompt,
                    "recurring": t.recurring,
                    "permanent": t.permanent,
                    "created_at": t.created_at.strftime(_ISO_FORMAT),
                    "last_fired_at": t.last_fired_at.strftime(_ISO_FORMAT) if t.last_fired_at else None,
                }
                for t in self._tasks
            ]
        }
        tmp = self._path.with_name(self._path.name + ".tmp")
        tmp.write_text(json.dumps(data, indent=2), encoding="utf-8")
        # Path.replace is atomic on POSIX: readers see old or new, never partial.
        tmp.replace(self._path)

    def add(
        self,
        cron: str,
        prompt: str,
        recurring: bool,
        permanent: bool,
    ) -> CronTask:
        """Add a new task. Raises ValueError if at capacity (50)."""
        if len(self._tasks) >= _MAX_TASKS:
            raise ValueError(f"Maximum {_MAX_TASKS} scheduled tasks reached")
        task = CronTask(
            id=uuid.uuid4().hex[:12],
            cron=cron,
            prompt=prompt,
            recurring=recurring,
            permanent=permanent,
            created_at=datetime.datetime.now(),
        )
        self._tasks = [*self._tasks, task]
        self._save()
        return task

    def remove(self, task_id: str) -> bool:
        """Remove a task by ID. Returns True if found and removed."""
        new_tasks = [t for t in self._tasks if t.id != task_id]
        if len(new_tasks) == len(self._tasks):
            return False
        self._tasks = new_tasks
        self._save()
        return True

    def list_all(self) -> list[CronTask]:
        """Return all tasks (immutable copies via frozen dataclass)."""
        return list(self._tasks)

    def update_last_fired(
        self,
        task_id: str,
        fired_at: datetime.datetime,
    ) -> CronTask | None:
        """Update last_fired_at for a task. Returns updated task or None."""
        new_tasks: list[CronTask] = []
        updated: CronTask | None = None
        for t in self._tasks:
            if t.id == task_id:
                updated = dataclasses.replace(t, last_fired_at=fired_at)
                new_tasks.append(updated)
            else:
                new_tasks.append(t)
        if updated is None:
            return None
        self._tasks = new_tasks
        self._save()
        return updated
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""Enterprise features — auth, RBAC, audit."""
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
"""Audit logging — JSONL file logger with composite support."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import json
|
|
5
|
+
import logging
|
|
6
|
+
from abc import ABC, abstractmethod
|
|
7
|
+
from dataclasses import dataclass, field
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
|
|
10
|
+
_log = logging.getLogger(__name__)
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@dataclass(frozen=True)
class AuditEvent:
    """A single audit-trail record, serialized to JSONL by the loggers."""

    timestamp: str   # ISO-8601 string; its date prefix selects the log file
    event_type: str  # category of the event
    user_id: str     # identity of the acting user
    tool_name: str = ""   # tool involved, if any
    action: str = ""      # what was attempted
    outcome: str = ""     # result of the action
    metadata: dict = field(default_factory=dict)  # extra context; must stay JSON-serializable
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class AuditLogger(ABC):
    """Abstract sink for audit events."""

    @abstractmethod
    async def log(self, event: AuditEvent) -> None:
        """Record a single audit event."""
        ...
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class FileAuditLogger(AuditLogger):
    """Audit sink that appends events to per-day ``.jsonl`` files."""

    def __init__(self, audit_dir: Path) -> None:
        self._audit_dir = audit_dir

    async def log(self, event: AuditEvent) -> None:
        """Append *event* as one JSON line to the file named for its date."""
        self._audit_dir.mkdir(parents=True, exist_ok=True)
        # One file per calendar day, keyed by the timestamp's date prefix.
        target = self._audit_dir / f"{event.timestamp[:10]}.jsonl"
        record = {
            "timestamp": event.timestamp,
            "event_type": event.event_type,
            "user_id": event.user_id,
            "tool_name": event.tool_name,
            "action": event.action,
            "outcome": event.outcome,
            "metadata": event.metadata,
        }
        with open(target, "a", encoding="utf-8") as handle:
            handle.write(json.dumps(record) + "\n")
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class CompositeAuditLogger(AuditLogger):
    """Fan-out audit sink: forwards each event to every child logger.

    A failing child is logged and skipped, so one broken sink cannot
    block the others.
    """

    def __init__(self, loggers: list[AuditLogger]) -> None:
        self._loggers = loggers

    async def log(self, event: AuditEvent) -> None:
        """Dispatch *event* to all children, tolerating individual failures."""
        for child in self._loggers:
            try:
                await child.log(event)
            except Exception as exc:
                _log.warning("Audit logger failed: %s", exc)
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
"""Authentication provider abstraction and identity model."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
from abc import ABC, abstractmethod
|
|
5
|
+
from dataclasses import dataclass, field
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
@dataclass(frozen=True)
class AuthIdentity:
    """Represents an authenticated user."""

    user_id: str       # stable unique identifier for the user
    email: str
    display_name: str
    groups: tuple[str, ...] = ()  # group memberships; consumed by RBAC role mapping
    raw_claims: dict = field(default_factory=dict)  # full provider claim payload, unprocessed
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class AuthProvider(ABC):
    """Abstract authentication backend (login / refresh / logout)."""

    @abstractmethod
    async def authenticate(self) -> AuthIdentity:
        """Perform a fresh login and return the resulting identity."""
        ...

    @abstractmethod
    async def refresh(self) -> AuthIdentity | None:
        """Refresh stored credentials; return None when refresh is impossible."""
        ...

    @abstractmethod
    async def revoke(self) -> None:
        """Discard any stored credentials."""
        ...
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
"""OIDC authentication provider with PKCE flow."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import hashlib
|
|
5
|
+
import json
|
|
6
|
+
import logging
|
|
7
|
+
import secrets
|
|
8
|
+
from base64 import urlsafe_b64encode
|
|
9
|
+
from dataclasses import dataclass
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
|
|
12
|
+
import httpx
|
|
13
|
+
|
|
14
|
+
from llm_code.enterprise.auth import AuthIdentity, AuthProvider
|
|
15
|
+
|
|
16
|
+
_log = logging.getLogger(__name__)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@dataclass(frozen=True)
class OIDCConfig:
    """Static configuration for an OIDC identity provider."""

    issuer: str      # base issuer URL; discovery document is fetched under it
    client_id: str
    client_secret: str = ""  # optional; public PKCE clients leave this empty
    scopes: tuple[str, ...] = ("openid", "email", "profile")
    # Local port for the auth-code redirect — presumably used by the CLI
    # login flow; not referenced in this module. TODO confirm.
    redirect_port: int = 9877
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class OIDCProvider(AuthProvider):
    """OIDC auth provider: endpoint discovery, PKCE helpers, token storage.

    Interactive login itself lives in the CLI; this class only discovers
    endpoints, refreshes tokens, and manages the on-disk token cache.
    """

    def __init__(self, config: OIDCConfig, token_dir: Path | None = None) -> None:
        self._config = config
        self._token_dir = token_dir or Path.home() / ".llm-code" / "auth"
        self._token_path = self._token_dir / "oidc_tokens.json"
        # Cached discovery document; fetched lazily by _discover().
        self._endpoints: dict[str, str] | None = None

    async def _discover(self) -> dict[str, str]:
        """Fetch and cache the issuer's OIDC discovery document."""
        if self._endpoints is not None:
            return self._endpoints
        url = f"{self._config.issuer.rstrip('/')}/.well-known/openid-configuration"
        async with httpx.AsyncClient() as client:
            resp = await client.get(url)
            resp.raise_for_status()
            self._endpoints = resp.json()
        return self._endpoints

    @staticmethod
    def _generate_pkce() -> tuple[str, str]:
        """Return a (verifier, challenge) pair for the PKCE S256 method."""
        verifier = secrets.token_urlsafe(64)
        digest = hashlib.sha256(verifier.encode()).digest()
        # Base64url without padding, per RFC 7636.
        challenge = urlsafe_b64encode(digest).rstrip(b"=").decode()
        return verifier, challenge

    def _save_tokens(self, tokens: dict) -> None:
        """Persist the raw token payload to disk."""
        self._token_dir.mkdir(parents=True, exist_ok=True)
        self._token_path.write_text(json.dumps(tokens), encoding="utf-8")

    def _load_tokens(self) -> dict | None:
        """Read the stored token payload; None if absent or unreadable."""
        if not self._token_path.exists():
            return None
        try:
            return json.loads(self._token_path.read_text(encoding="utf-8"))
        except (json.JSONDecodeError, OSError):
            return None

    async def authenticate(self) -> AuthIdentity:
        """Not implemented here: the browser-based flow lives in the CLI."""
        await self._discover()
        raise NotImplementedError(
            "Full OIDC PKCE flow requires browser interaction. "
            "Use 'llm-code auth login' command."
        )

    async def refresh(self) -> AuthIdentity | None:
        """Exchange the stored refresh token for new tokens.

        Returns None when no refresh token is stored or the exchange fails.
        """
        tokens = self._load_tokens()
        if not tokens or "refresh_token" not in tokens:
            return None
        endpoints = await self._discover()
        token_url = endpoints.get("token_endpoint", "")
        async with httpx.AsyncClient() as client:
            resp = await client.post(token_url, data={
                "grant_type": "refresh_token",
                "client_id": self._config.client_id,
                "refresh_token": tokens["refresh_token"],
            })
        if resp.status_code != 200:
            return None
        new_tokens = resp.json()
        self._save_tokens(new_tokens)
        # NOTE(review): sub/email/name are read from the token response
        # itself; OIDC providers normally carry these in the ID-token claims,
        # so these fields may come back empty — verify against the provider.
        return AuthIdentity(
            user_id=new_tokens.get("sub", ""),
            email=new_tokens.get("email", ""),
            display_name=new_tokens.get("name", ""),
        )

    async def revoke(self) -> None:
        """Delete the on-disk token cache (no server-side revocation here)."""
        if self._token_path.exists():
            self._token_path.unlink()
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
"""Role-based access control engine."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import fnmatch
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
|
|
7
|
+
from llm_code.enterprise.auth import AuthIdentity
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@dataclass(frozen=True)
class Role:
    """A named bundle of permissions and tool patterns."""

    name: str
    # Permission strings such as "tool:read"; "x:*" acts as a prefix
    # wildcard and a lone "*" grants everything (see RBACEngine.is_allowed).
    permissions: frozenset[str]
    # fnmatch patterns — NOTE(review): not consulted anywhere in this chunk.
    tool_allow: tuple[str, ...] = ()
    # fnmatch patterns checked by RBACEngine.is_denied_by_pattern.
    tool_deny: tuple[str, ...] = ()
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
# Built-in roles; custom roles passed to RBACEngine override these by name.
DEFAULT_ROLES: dict[str, Role] = {
    # Full access to everything.
    "admin": Role("admin", frozenset({"*"})),
    # All tools and sessions, but destructive bash patterns are denied.
    "developer": Role(
        "developer",
        frozenset({"tool:*", "swarm:create", "session:*", "skill:*"}),
        tool_deny=("tool:bash:rm -rf *",),
    ),
    # Read-only tooling.
    "viewer": Role(
        "viewer",
        frozenset({"tool:read", "tool:glob", "tool:grep", "session:read"}),
    ),
}
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class RBACEngine:
    """Maps identities to roles and answers permission/deny queries."""

    def __init__(self, group_role_mapping: dict[str, str], custom_roles: dict[str, Role] | None = None) -> None:
        self._group_role_mapping = group_role_mapping
        self._roles = {**DEFAULT_ROLES, **(custom_roles or {})}

    def _get_roles(self, identity: AuthIdentity | None) -> list[Role]:
        """Resolve roles for an identity; anonymous (None) gets full admin."""
        if identity is None:
            return [self._roles["admin"]]
        resolved: list[Role] = []
        for group in identity.groups:
            mapped = self._group_role_mapping.get(group)
            if mapped and mapped in self._roles:
                resolved.append(self._roles[mapped])
        return resolved

    def is_allowed(self, identity: AuthIdentity | None, permission: str) -> bool:
        """True when any of the identity's roles grants *permission*.

        A lone "*" grants everything; "x:*" matches any permission with
        the "x:" prefix; otherwise an exact string match is required.
        """
        roles = self._get_roles(identity)
        if not roles:
            return False
        for role in roles:
            if "*" in role.permissions:
                return True
            granted = any(
                perm == permission
                or (perm.endswith(":*") and permission.startswith(perm[:-1]))
                for perm in role.permissions
            )
            if granted:
                return True
        return False

    def is_denied_by_pattern(self, identity: AuthIdentity | None, action: str) -> bool:
        """True when any role's tool_deny fnmatch pattern matches *action*."""
        return any(
            fnmatch.fnmatch(action, pattern)
            for role in self._get_roles(identity)
            for pattern in role.tool_deny
        )
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
"""Harness Engine — unified quality control framework."""
|
|
2
|
+
from llm_code.harness.config import HarnessConfig, HarnessControl, HarnessFinding
|
|
3
|
+
from llm_code.harness.engine import HarnessEngine
|
|
4
|
+
|
|
5
|
+
__all__ = ["HarnessConfig", "HarnessControl", "HarnessFinding", "HarnessEngine"]
|