python-infrakit-dev 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51) hide show
  1. infrakit/__init__.py +0 -0
  2. infrakit/cli/__init__.py +1 -0
  3. infrakit/cli/commands/__init__.py +1 -0
  4. infrakit/cli/commands/deps.py +530 -0
  5. infrakit/cli/commands/init.py +129 -0
  6. infrakit/cli/commands/llm.py +295 -0
  7. infrakit/cli/commands/logger.py +160 -0
  8. infrakit/cli/commands/module.py +342 -0
  9. infrakit/cli/commands/time.py +81 -0
  10. infrakit/cli/main.py +65 -0
  11. infrakit/core/__init__.py +0 -0
  12. infrakit/core/config/__init__.py +0 -0
  13. infrakit/core/config/converter.py +480 -0
  14. infrakit/core/config/exporter.py +304 -0
  15. infrakit/core/config/loader.py +713 -0
  16. infrakit/core/config/validator.py +389 -0
  17. infrakit/core/logger/__init__.py +21 -0
  18. infrakit/core/logger/formatters.py +143 -0
  19. infrakit/core/logger/handlers.py +322 -0
  20. infrakit/core/logger/retention.py +176 -0
  21. infrakit/core/logger/setup.py +314 -0
  22. infrakit/deps/__init__.py +239 -0
  23. infrakit/deps/clean.py +141 -0
  24. infrakit/deps/depfile.py +405 -0
  25. infrakit/deps/health.py +357 -0
  26. infrakit/deps/optimizer.py +642 -0
  27. infrakit/deps/scanner.py +550 -0
  28. infrakit/llm/__init__.py +35 -0
  29. infrakit/llm/batch.py +165 -0
  30. infrakit/llm/client.py +575 -0
  31. infrakit/llm/key_manager.py +728 -0
  32. infrakit/llm/llm_readme.md +306 -0
  33. infrakit/llm/models.py +148 -0
  34. infrakit/llm/providers/__init__.py +5 -0
  35. infrakit/llm/providers/base.py +112 -0
  36. infrakit/llm/providers/gemini.py +164 -0
  37. infrakit/llm/providers/openai.py +168 -0
  38. infrakit/llm/rate_limiter.py +54 -0
  39. infrakit/scaffolder/__init__.py +31 -0
  40. infrakit/scaffolder/ai.py +508 -0
  41. infrakit/scaffolder/backend.py +555 -0
  42. infrakit/scaffolder/cli_tool.py +386 -0
  43. infrakit/scaffolder/generator.py +338 -0
  44. infrakit/scaffolder/pipeline.py +562 -0
  45. infrakit/scaffolder/registry.py +121 -0
  46. infrakit/time/__init__.py +60 -0
  47. infrakit/time/profiler.py +511 -0
  48. python_infrakit_dev-0.1.0.dist-info/METADATA +124 -0
  49. python_infrakit_dev-0.1.0.dist-info/RECORD +51 -0
  50. python_infrakit_dev-0.1.0.dist-info/WHEEL +4 -0
  51. python_infrakit_dev-0.1.0.dist-info/entry_points.txt +3 -0
@@ -0,0 +1,322 @@
1
+ """
2
+ infrakit.core.logger.handlers
3
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
4
+ Maps a strategy + stream combination to a list of configured handlers.
5
+
6
+ File strategies control folder structure:
7
+ file logs/app.log
8
+ date logs/app.2025-03-22.log
9
+ level logs/debug/debug.log
10
+ logs/info/info.log
11
+ logs/warning/warning.log
12
+ logs/error/error.log
13
+ date_level logs/debug/debug.2025-03-22.log
14
+ logs/info/info.2025-03-22.log
15
+ logs/warning/warning.2025-03-22.log
16
+ logs/error/error.2025-03-22.log
17
+ date_size logs/app.2025-03-22.log (+ rotates at max_bytes)
18
+
19
+ stream= adds a stream handler on top of any file strategy:
20
+ stream="stdout" also write to stdout
21
+ stream="stderr" also write to stderr
22
+ stream=None no stream output (default)
23
+
24
+ Stream-only (no files):
25
+ strategy=None, stream="stdout"
26
+ strategy=None, stream="stderr"
27
+
28
+ Session isolation:
29
+ log_dir is already resolved by setup() before build_handlers() is called.
30
+ Handlers just write to whatever log_dir they receive.
31
+ """
32
+
33
+ from __future__ import annotations
34
+
35
+ import logging
36
+ import logging.handlers
37
+ import sys
38
+ from datetime import datetime, timezone
39
+ from pathlib import Path
40
+ from typing import Any
41
+
42
+ from infrakit.core.logger.formatters import HumanFormatter, JsonFormatter
43
+
44
+
45
+ # ---------------------------------------------------------------------------
46
+ # Public constants
47
+ # ---------------------------------------------------------------------------
48
+
49
+ FILE_STRATEGIES = {
50
+ "file",
51
+ "date",
52
+ "level",
53
+ "date_level",
54
+ "date_size",
55
+ }
56
+
57
+ STREAM_OPTIONS = {"stdout", "stderr", None}
58
+
59
+ _FILE_LEVELS = [
60
+ logging.DEBUG,
61
+ logging.INFO,
62
+ logging.WARNING,
63
+ logging.ERROR,
64
+ ]
65
+
66
+ _DEFAULT_MAX_BYTES = 10 * 1024 * 1024 # 10 MB
67
+ _DEFAULT_BACKUP_COUNT = 5
68
+
69
+
70
+ # ---------------------------------------------------------------------------
71
+ # Public API
72
+ # ---------------------------------------------------------------------------
73
+
74
def build_handlers(
    *,
    strategy: str | None,
    stream: str | None,
    log_dir: Path,
    fmt: str = "human",
    file_fmt: str = "json",
    max_bytes: int = _DEFAULT_MAX_BYTES,
    level: int = logging.DEBUG,
) -> list[logging.Handler]:
    """Assemble the handler list for one strategy/stream combination.

    Parameters
    ----------
    strategy:
        One of FILE_STRATEGIES, or None to write no files at all.
    stream:
        Mirror target for log output: ``"stdout"``, ``"stderr"``, or ``None``.
    log_dir:
        Base directory, already resolved by setup() (session subfolder
        included, if any). Created on demand for file strategies.
    fmt:
        Formatter for stream output — ``"human"`` or ``"json"``.
    file_fmt:
        Formatter for file output — ``"json"`` or ``"human"``.
    max_bytes:
        Rotation threshold for the size-based strategies
        (``file`` and ``date_size``).
    level:
        Minimum level accepted by every handler built here.

    Returns
    -------
    list[logging.Handler]

    Raises
    ------
    ValueError
        If *strategy* or *stream* is not a recognised value.
    """
    if strategy is not None and strategy not in FILE_STRATEGIES:
        raise ValueError(
            f"Unknown file strategy '{strategy}'. "
            f"Valid strategies: {', '.join(sorted(FILE_STRATEGIES))} or None."
        )
    if stream not in STREAM_OPTIONS:
        raise ValueError(
            f"Unknown stream '{stream}'. Valid options: 'stdout', 'stderr', None."
        )

    for_stream = _make_formatter(fmt, is_stream=True)
    for_files = _make_formatter(file_fmt, is_stream=False)

    built: list[logging.Handler] = []

    # Stream mirroring is orthogonal to the file strategy: stream was
    # validated above, so anything non-None is "stdout" or "stderr".
    if stream is not None:
        target = sys.stdout if stream == "stdout" else sys.stderr
        built.append(_stream_handler(target, for_stream, level))

    if strategy == "file":
        _ensure_dir(log_dir)
        built.append(
            _rotating_handler(log_dir / "app.log", for_files, level, max_bytes)
        )
    elif strategy == "date":
        _ensure_dir(log_dir)
        built.append(
            _timed_handler(log_dir / _dated_name("app"), for_files, level)
        )
    elif strategy in ("level", "date_level"):
        # Both level-split strategies share one builder; only the file
        # naming (dated or not) differs.
        built.extend(
            _level_handlers(
                log_dir,
                for_files,
                dated=(strategy == "date_level"),
                max_bytes=max_bytes,
                level=level,
            )
        )
    elif strategy == "date_size":
        _ensure_dir(log_dir)
        built.append(
            _timed_size_handler(
                log_dir / _dated_name("app"), for_files, level, max_bytes
            )
        )
    # strategy is None -> file output disabled entirely.

    return built
169
+
170
+
171
+ # ---------------------------------------------------------------------------
172
+ # Handler factories
173
+ # ---------------------------------------------------------------------------
174
+
175
+ def _stream_handler(
176
+ stream: Any,
177
+ formatter: logging.Formatter,
178
+ level: int,
179
+ ) -> logging.StreamHandler:
180
+ h = logging.StreamHandler(stream)
181
+ h.setFormatter(formatter)
182
+ h.setLevel(level)
183
+ return h
184
+
185
+
186
def _rotating_handler(
    path: Path,
    formatter: logging.Formatter,
    level: int,
    max_bytes: int,
) -> logging.handlers.RotatingFileHandler:
    """Size-rotating file handler writing UTF-8 to *path*."""
    _ensure_dir(path.parent)
    handler = logging.handlers.RotatingFileHandler(
        path,
        maxBytes=max_bytes,
        backupCount=_DEFAULT_BACKUP_COUNT,
        encoding="utf-8",
    )
    handler.setLevel(level)
    handler.setFormatter(formatter)
    return handler
202
+
203
+
204
def _timed_handler(
    path: Path,
    formatter: logging.Formatter,
    level: int,
) -> logging.handlers.TimedRotatingFileHandler:
    """Midnight-rotating file handler writing UTF-8 to *path*."""
    _ensure_dir(path.parent)
    handler = logging.handlers.TimedRotatingFileHandler(
        path,
        when="midnight",
        backupCount=_DEFAULT_BACKUP_COUNT,
        encoding="utf-8",
    )
    handler.setLevel(level)
    handler.setFormatter(formatter)
    return handler
219
+
220
+
221
def _timed_size_handler(
    path: Path,
    formatter: logging.Formatter,
    level: int,
    max_bytes: int,
) -> logging.handlers.RotatingFileHandler:
    """Size-rotating handler on a date-stamped filename.

    The stdlib has no combined timed+size rotator, so we size-rotate a
    file whose name already carries today's date (supplied by the caller
    via _dated_name). Within one day, size rotation caps growth.

    NOTE(review): the date is baked into the filename when the handler
    is built — a process that stays up past midnight keeps writing to
    the old date's file until handlers are rebuilt. Confirm this is the
    intended trade-off.
    """
    _ensure_dir(path.parent)
    handler = logging.handlers.RotatingFileHandler(
        path,
        maxBytes=max_bytes,
        backupCount=_DEFAULT_BACKUP_COUNT,
        encoding="utf-8",
    )
    handler.setLevel(level)
    handler.setFormatter(formatter)
    return handler
243
+
244
+
245
def _level_handlers(
    log_dir: Path,
    formatter: logging.Formatter,
    *,
    dated: bool,
    max_bytes: int,
    level: int,
) -> list[logging.Handler]:
    """Build one per-level handler, each writing into its own subfolder.

    Folder layout:
        logs/debug/debug.log                (dated=False)
        logs/debug/debug.2025-03-22.log     (dated=True)

    Each handler carries an _ExactLevelFilter, so only records at that
    precise level reach its file — WARNING never lands in info.log.
    Levels below the configured minimum produce no handler at all.
    """
    built: list[logging.Handler] = []

    for lvl in (x for x in _FILE_LEVELS if x >= level):
        name = logging.getLevelName(lvl).lower()
        folder = log_dir / name
        _ensure_dir(folder)

        file_name = _dated_name(name) if dated else f"{name}.log"
        handler = logging.handlers.RotatingFileHandler(
            folder / file_name,
            maxBytes=max_bytes,
            backupCount=_DEFAULT_BACKUP_COUNT,
            encoding="utf-8",
        )
        handler.setLevel(lvl)
        handler.setFormatter(formatter)
        handler.addFilter(_ExactLevelFilter(lvl))
        built.append(handler)

    return built
288
+
289
+
290
+ # ---------------------------------------------------------------------------
291
+ # Filters
292
+ # ---------------------------------------------------------------------------
293
+
294
+ class _ExactLevelFilter(logging.Filter):
295
+ """Pass only records at exactly *level* — not above, not below."""
296
+
297
+ def __init__(self, level: int) -> None:
298
+ super().__init__()
299
+ self.level = level
300
+
301
+ def filter(self, record: logging.LogRecord) -> bool:
302
+ return record.levelno == self.level
303
+
304
+
305
+ # ---------------------------------------------------------------------------
306
+ # Helpers
307
+ # ---------------------------------------------------------------------------
308
+
309
def _make_formatter(fmt: str, *, is_stream: bool) -> logging.Formatter:
    """Return a JsonFormatter for ``"json"``, otherwise a HumanFormatter.

    Colour is enabled only when formatting for a stream; file output is
    always colour-free.
    """
    if fmt != "json":
        return HumanFormatter(use_colour=is_stream)
    return JsonFormatter()
313
+
314
+
315
+ def _ensure_dir(path: Path) -> None:
316
+ path.mkdir(parents=True, exist_ok=True)
317
+
318
+
319
+ def _dated_name(stem: str) -> str:
320
+ """Return 'stem.YYYY-MM-DD.log' for today's UTC date."""
321
+ date = datetime.now(tz=timezone.utc).strftime("%Y-%m-%d")
322
+ return f"{stem}.{date}.log"
@@ -0,0 +1,176 @@
1
+ """
2
+ infrakit.core.logger.retention
3
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
4
+ Sweep the log directory on startup and delete files older than N days.
5
+
6
+ Retention runs once inside setup() — not on every log write.
7
+ Each storage strategy produces a different folder layout; the sweeper
8
+ handles all of them by walking the entire log_dir tree.
9
+
10
+ Deletion rules:
11
+ - Only files matching known log patterns are deleted (*.log, *.log.*)
12
+ - Folders are never deleted, even if empty after sweep
13
+ - Files modified within the retention window are always kept
14
+ - Dry-run mode returns what would be deleted without touching anything
15
+ """
16
+
17
+ from __future__ import annotations
18
+
19
+ import logging
20
+ import os
21
+ from dataclasses import dataclass
22
+ from datetime import datetime, timedelta, timezone
23
+ from pathlib import Path
24
+
25
+
26
# Module-level logger used by sweep() for per-file diagnostics.
log = logging.getLogger(__name__)

# Suffixes considered log files — anything else is left alone
# NOTE(review): this set appears unused — _is_log_file() hardcodes ".log"
# instead of consulting it; consider wiring it through or removing it.
_LOG_SUFFIXES = {".log"}
30
+
31
+
32
+ # ---------------------------------------------------------------------------
33
+ # Public types
34
+ # ---------------------------------------------------------------------------
35
+
36
@dataclass
class RetentionResult:
    """Outcome of one retention sweep."""

    # Files removed (or, in dry-run mode, files that would be removed).
    deleted: list[Path]
    # Files inspected and left in place.
    kept: list[Path]
    # (path, exception) pairs for files that could not be processed.
    errors: list[tuple[Path, Exception]]

    @property
    def deleted_count(self) -> int:
        return len(self.deleted)

    @property
    def kept_count(self) -> int:
        return len(self.kept)

    def __str__(self) -> str:
        summary = f"Deleted {self.deleted_count} file(s), kept {self.kept_count}."
        if not self.errors:
            return summary
        return f"{summary} {len(self.errors)} error(s) during sweep."
56
+
57
+
58
+ # ---------------------------------------------------------------------------
59
+ # Public API
60
+ # ---------------------------------------------------------------------------
61
+
62
def sweep(
    log_dir: str | Path,
    *,
    retention_days: int,
    dry_run: bool = False,
) -> RetentionResult:
    """Remove log files under *log_dir* older than *retention_days* days.

    Parameters
    ----------
    log_dir:
        Root directory to sweep; every subdirectory is visited.
    retention_days:
        Files last modified more than this many days ago are deleted.
        Must be >= 0. A value of 0 makes the cutoff "now", so effectively
        every existing file is deleted — use with care.
    dry_run:
        When True, candidate files are identified and reported but never
        actually removed.

    Returns
    -------
    RetentionResult
        What was deleted, what was kept, and any per-file errors.

    Raises
    ------
    ValueError
        If *retention_days* is negative.
    """
    root = Path(log_dir)

    if retention_days < 0:
        raise ValueError(
            f"retention_days must be >= 0, got {retention_days}."
        )

    if not root.exists():
        # A missing directory simply means there is nothing to clean up.
        return RetentionResult(deleted=[], kept=[], errors=[])

    cutoff = datetime.now(tz=timezone.utc) - timedelta(days=retention_days)

    removed: list[Path] = []
    survivors: list[Path] = []
    failures: list[tuple[Path, Exception]] = []

    for candidate in _iter_log_files(root):
        try:
            modified = _mtime_utc(candidate)
            if modified >= cutoff:
                survivors.append(candidate)
                continue
            if not dry_run:
                candidate.unlink()
            removed.append(candidate)
            log.debug(
                "%s %s (modified %s, older than %d days)",
                "Would delete" if dry_run else "Deleted",
                candidate,
                modified.strftime("%Y-%m-%d"),
                retention_days,
            )
        except Exception as exc:
            # Best-effort sweep: one unreadable file must not abort the rest.
            failures.append((candidate, exc))
            log.warning("Could not process log file '%s': %s", candidate, exc)

    result = RetentionResult(deleted=removed, kept=survivors, errors=failures)

    if removed or failures:
        log.info(
            "Log retention sweep%s: %s",
            " (dry run)" if dry_run else "",
            result,
        )

    return result
138
+
139
+
140
+ # ---------------------------------------------------------------------------
141
+ # Helpers
142
+ # ---------------------------------------------------------------------------
143
+
144
def _iter_log_files(root: Path):
    """Recursively yield every log-like file beneath *root*."""
    for dirpath, _subdirs, filenames in os.walk(root):
        folder = Path(dirpath)
        for filename in filenames:
            candidate = folder / filename
            # _is_log_file decides ownership; foreign files are skipped.
            if _is_log_file(candidate):
                yield candidate
151
+
152
+
153
+ def _is_log_file(path: Path) -> bool:
154
+ """Return True if *path* looks like a log file we own.
155
+
156
+ Matches:
157
+ app.log
158
+ app.2025-03-22.log
159
+ error.log.1 (rotation backup)
160
+ error.2025-03-22.log.3 (rotation backup)
161
+ """
162
+ name = path.name
163
+ # Direct .log extension
164
+ if path.suffix == ".log":
165
+ return True
166
+ # Rotation backups: .log.1, .log.2, etc.
167
+ parts = name.rsplit(".", 2)
168
+ if len(parts) >= 2 and parts[-2] == "log" and parts[-1].isdigit():
169
+ return True
170
+ return False
171
+
172
+
173
+ def _mtime_utc(path: Path) -> datetime:
174
+ """Return the last-modified time of *path* as a UTC-aware datetime."""
175
+ mtime = path.stat().st_mtime
176
+ return datetime.fromtimestamp(mtime, tz=timezone.utc)