python-infrakit-dev 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51) hide show
  1. infrakit/__init__.py +0 -0
  2. infrakit/cli/__init__.py +1 -0
  3. infrakit/cli/commands/__init__.py +1 -0
  4. infrakit/cli/commands/deps.py +530 -0
  5. infrakit/cli/commands/init.py +129 -0
  6. infrakit/cli/commands/llm.py +295 -0
  7. infrakit/cli/commands/logger.py +160 -0
  8. infrakit/cli/commands/module.py +342 -0
  9. infrakit/cli/commands/time.py +81 -0
  10. infrakit/cli/main.py +65 -0
  11. infrakit/core/__init__.py +0 -0
  12. infrakit/core/config/__init__.py +0 -0
  13. infrakit/core/config/converter.py +480 -0
  14. infrakit/core/config/exporter.py +304 -0
  15. infrakit/core/config/loader.py +713 -0
  16. infrakit/core/config/validator.py +389 -0
  17. infrakit/core/logger/__init__.py +21 -0
  18. infrakit/core/logger/formatters.py +143 -0
  19. infrakit/core/logger/handlers.py +322 -0
  20. infrakit/core/logger/retention.py +176 -0
  21. infrakit/core/logger/setup.py +314 -0
  22. infrakit/deps/__init__.py +239 -0
  23. infrakit/deps/clean.py +141 -0
  24. infrakit/deps/depfile.py +405 -0
  25. infrakit/deps/health.py +357 -0
  26. infrakit/deps/optimizer.py +642 -0
  27. infrakit/deps/scanner.py +550 -0
  28. infrakit/llm/__init__.py +35 -0
  29. infrakit/llm/batch.py +165 -0
  30. infrakit/llm/client.py +575 -0
  31. infrakit/llm/key_manager.py +728 -0
  32. infrakit/llm/llm_readme.md +306 -0
  33. infrakit/llm/models.py +148 -0
  34. infrakit/llm/providers/__init__.py +5 -0
  35. infrakit/llm/providers/base.py +112 -0
  36. infrakit/llm/providers/gemini.py +164 -0
  37. infrakit/llm/providers/openai.py +168 -0
  38. infrakit/llm/rate_limiter.py +54 -0
  39. infrakit/scaffolder/__init__.py +31 -0
  40. infrakit/scaffolder/ai.py +508 -0
  41. infrakit/scaffolder/backend.py +555 -0
  42. infrakit/scaffolder/cli_tool.py +386 -0
  43. infrakit/scaffolder/generator.py +338 -0
  44. infrakit/scaffolder/pipeline.py +562 -0
  45. infrakit/scaffolder/registry.py +121 -0
  46. infrakit/time/__init__.py +60 -0
  47. infrakit/time/profiler.py +511 -0
  48. python_infrakit_dev-0.1.0.dist-info/METADATA +124 -0
  49. python_infrakit_dev-0.1.0.dist-info/RECORD +51 -0
  50. python_infrakit_dev-0.1.0.dist-info/WHEEL +4 -0
  51. python_infrakit_dev-0.1.0.dist-info/entry_points.txt +3 -0
@@ -0,0 +1,60 @@
1
+ """
2
+ infrakit.time - Lightweight timing and profiling system
3
+
4
+ Provides both CLI-based and decorator-based profiling for Python projects.
5
+
6
+ CLI Usage:
7
+ ik time run script.py [--max-functions 30] [--min-time 1.0] [--include-stdlib]
8
+
9
+ Decorator Usage:
10
+ from infrakit.time import pipeline_profiler, track
11
+
12
+ @pipeline_profiler("Data Processing Pipeline")
13
+ def main():
14
+ load_data()
15
+ transform_data()
16
+ save_results()
17
+
18
+ @track
19
+ def load_data():
20
+ # Your code here
21
+ pass
22
+
23
+ @track(name="Transform Step")
24
+ def transform_data():
25
+ # Your code here
26
+ pass
27
+ """
28
+
29
+ from .profiler import (
30
+ # Main functions
31
+ profile_script,
32
+ pipeline_profiler,
33
+ track,
34
+
35
+ # Classes
36
+ CLIProfiler,
37
+ PipelineProfiler,
38
+
39
+ # Data structures
40
+ ProfileResult,
41
+ PipelineResult,
42
+ FunctionStats,
43
+ PipelineStep,
44
+
45
+ # Utilities
46
+ format_time,
47
+ )
48
+
49
# Explicit public API: controls `from infrakit.time import *` and documents
# the names re-exported from .profiler above.
__all__ = [
    'profile_script',
    'pipeline_profiler',
    'track',
    'CLIProfiler',
    'PipelineProfiler',
    'ProfileResult',
    'PipelineResult',
    'FunctionStats',
    'PipelineStep',
    'format_time',
]
@@ -0,0 +1,511 @@
1
+ """
2
+ infrakit.time - Lightweight timing and profiling system
3
+
4
+ This module provides:
5
+ 1. CLI-based profiler (ik time run) - Zero-setup profiling using cProfile
6
+ 2. Decorator-based profiler (@pipeline_profiler + @track) - Fine-grained pipeline timing
7
+ """
8
+
9
+ import cProfile
10
+ import pstats
11
+ import io
12
+ import time
13
+ import functools
14
+ from typing import Dict, List, Optional, Callable, Any
15
+ from dataclasses import dataclass, field
16
+ from contextlib import contextmanager
17
+ from pathlib import Path
18
+ import sys
19
+
20
+
21
+ # ============================================================================
22
+ # DATA STRUCTURES
23
+ # ============================================================================
24
+
25
@dataclass
class FunctionStats:
    """Aggregated timing statistics for a single profiled function.

    Built by CLIProfiler._extract_function_stats from raw cProfile data,
    after stdlib/min-time filtering and top-N truncation.
    """
    name: str          # "filename.py:function_name" (directories stripped)
    total_time: float  # cumulative seconds spent in this function
    call_count: int    # number of calls recorded by cProfile
    avg_time: float    # total_time / call_count (0 when call_count is 0)
    percentage: float  # share of the script's wall-clock time, 0-100
33
+
34
+
35
@dataclass
class PipelineStep:
    """Timing record for one named step of a pipeline.

    Every measured execution is appended to ``execution_times`` so the
    aggregate statistics (average / min / max) can be derived on demand.
    """
    name: str
    execution_times: List[float] = field(default_factory=list)
    call_count: int = 0
    total_time: float = 0.0

    def add_execution(self, duration: float):
        """Fold one measured execution of ``duration`` seconds into the stats."""
        self.execution_times.append(duration)
        self.call_count += 1
        self.total_time += duration

    @property
    def avg_time(self) -> float:
        """Mean duration per call; 0.0 before anything was recorded."""
        if not self.call_count:
            return 0.0
        return self.total_time / self.call_count

    @property
    def min_time(self) -> float:
        """Fastest recorded execution; 0.0 when nothing was recorded."""
        return min(self.execution_times, default=0.0)

    @property
    def max_time(self) -> float:
        """Slowest recorded execution; 0.0 when nothing was recorded."""
        return max(self.execution_times, default=0.0)
63
+
64
+
65
@dataclass
class ProfileResult:
    """Outcome of profiling one script via CLIProfiler.profile_script()."""
    script_path: str                          # path of the profiled script
    total_time: float                         # wall-clock runtime of the whole script, seconds
    function_stats: List[FunctionStats]       # filtered, time-sorted per-function statistics
    raw_stats: Optional[pstats.Stats] = None  # underlying pstats object for custom analysis
72
+
73
+
74
@dataclass
class PipelineResult:
    """Outcome of a decorator-based pipeline profiling run."""
    pipeline_name: str
    total_time: float
    steps: Dict[str, "PipelineStep"]

    def get_sorted_steps(self) -> List[tuple[str, "PipelineStep"]]:
        """Return (name, step) pairs ordered by total time, slowest first."""
        def by_total_time(item):
            return item[1].total_time

        return sorted(self.steps.items(), key=by_total_time, reverse=True)
84
+
85
+
86
+ # ============================================================================
87
+ # CLI-BASED PROFILER
88
+ # ============================================================================
89
+
90
class CLIProfiler:
    """CLI-based profiler using cProfile.

    Executes a target script under cProfile and reduces the raw statistics
    to a filtered, time-sorted list of FunctionStats.
    """

    def __init__(self,
                 max_functions: int = 30,
                 min_time_ms: float = 1.0,
                 exclude_stdlib: bool = True):
        """
        Initialize CLI profiler

        Args:
            max_functions: Maximum number of functions to display
            min_time_ms: Minimum execution time (ms) to include
            exclude_stdlib: Filter out Python standard library calls
        """
        self.max_functions = max_functions
        self.min_time_ms = min_time_ms
        self.exclude_stdlib = exclude_stdlib

    def profile_script(self, script_path: str) -> "ProfileResult":
        """
        Profile a Python script using cProfile

        Args:
            script_path: Path to the Python script to profile

        Returns:
            ProfileResult containing timing statistics

        Raises:
            FileNotFoundError: If script_path does not exist.
            RuntimeError: If the script raises an exception or exits with a
                non-zero status (chained to the original exception).
        """
        if not Path(script_path).exists():
            raise FileNotFoundError(f"Script not found: {script_path}")

        # Create profiler
        profiler = cProfile.Profile()

        # Run the script as if it were __main__ so `if __name__ == "__main__"`
        # guards inside it fire normally.
        script_globals = {
            '__name__': '__main__',
            '__file__': script_path,
        }

        # perf_counter is monotonic; time.time can jump on clock adjustments.
        start_time = time.perf_counter()

        try:
            with open(script_path, encoding='utf-8') as f:
                code = compile(f.read(), script_path, 'exec')
            profiler.runctx(code, script_globals, script_globals)
        except SystemExit as e:
            # Scripts commonly finish via sys.exit(); only a non-zero status
            # is an error. Without this, the stats gathered so far were lost.
            if e.code not in (None, 0):
                raise RuntimeError(f"Script exited with status {e.code}") from e
        except Exception as e:
            # Chain the original exception so the script's traceback survives.
            raise RuntimeError(f"Error executing script: {e}") from e

        total_time = time.perf_counter() - start_time

        # Extract statistics
        stats = pstats.Stats(profiler)
        stats.strip_dirs()

        # Convert to structured data
        function_stats = self._extract_function_stats(stats, total_time)

        return ProfileResult(
            script_path=script_path,
            total_time=total_time,
            function_stats=function_stats,
            raw_stats=stats
        )

    def _extract_function_stats(self, stats: pstats.Stats, total_time: float) -> List["FunctionStats"]:
        """Filter, sort and convert raw pstats entries into FunctionStats.

        Args:
            stats: Stats object (already strip_dirs()'d by the caller).
            total_time: Wall-clock runtime used for the percentage column.
        """
        raw_stats = []
        # stats.stats maps (filename, line, func) -> (cc, nc, tt, ct, callers);
        # tt is the function's own time, nc its total call count.
        for (filename, line, func_name), (cc, nc, tt, ct, callers) in stats.stats.items():
            # Drop stdlib / installed-package frames if requested
            if self.exclude_stdlib and self._is_stdlib(filename):
                continue

            # Drop functions below the minimum-time threshold (tt is seconds)
            if tt * 1000 < self.min_time_ms:
                continue

            raw_stats.append({
                'name': f"{Path(filename).name}:{func_name}",
                'total_time': tt,
                'call_count': nc,
                'avg_time': tt / nc if nc > 0 else 0,
            })

        # Sort by total time and keep only the top N entries
        raw_stats.sort(key=lambda x: x['total_time'], reverse=True)
        raw_stats = raw_stats[:self.max_functions]

        # Attach the percentage of overall wall-clock time
        result = []
        for stat in raw_stats:
            percentage = (stat['total_time'] / total_time * 100) if total_time > 0 else 0
            result.append(FunctionStats(
                name=stat['name'],
                total_time=stat['total_time'],
                call_count=stat['call_count'],
                avg_time=stat['avg_time'],
                percentage=percentage
            ))

        return result

    @staticmethod
    def _is_stdlib(filename: str) -> bool:
        """Heuristic: does ``filename`` come from the stdlib or an installed package?

        NOTE: 'site-packages' also matches third-party libraries; in practice
        "exclude_stdlib" means "show only the project's own code".
        """
        stdlib_markers = [
            '/lib/python',
            '\\lib\\python',
            'site-packages',
            '<frozen',
            '<built-in',
        ]
        return any(marker in filename for marker in stdlib_markers)
210
+
211
+
212
+ # ============================================================================
213
+ # DECORATOR-BASED PIPELINE PROFILER
214
+ # ============================================================================
215
+
216
class PipelineProfiler:
    """Collects per-step timings for a named pipeline.

    Works both as a decorator factory (``track``) and as a context manager
    (``profile``): steps are only recorded while a ``profile()`` context is
    active; outside of it, tracked functions run untouched.
    """

    def __init__(self, name: str = "Pipeline"):
        """
        Args:
            name: Human-readable name of the pipeline.
        """
        self.name = name
        self.steps: Dict[str, PipelineStep] = {}
        self._start_time: Optional[float] = None
        self._total_time: float = 0.0
        self._active = False

    def track(self, func: Optional[Callable] = None, *, name: Optional[str] = None):
        """
        Decorator that records a function's execution time as a pipeline step.

        Supports both usages:

            @profiler.track
            def my_function(): ...

            @profiler.track(name="Custom Name")
            def my_function(): ...
        """
        def apply(target: Callable) -> Callable:
            label = name or target.__name__

            @functools.wraps(target)
            def timed(*args, **kwargs):
                # Outside an active profile() context: plain passthrough.
                if not self._active:
                    return target(*args, **kwargs)

                # Lazily create the step record on first use.
                step = self.steps.get(label)
                if step is None:
                    step = self.steps[label] = PipelineStep(name=label)

                began = time.perf_counter()
                try:
                    return target(*args, **kwargs)
                finally:
                    # Record the duration even when the step raises.
                    step.add_execution(time.perf_counter() - began)

            return timed

        # Bare @track vs. parameterized @track(...)
        return apply if func is None else apply(func)

    @contextmanager
    def profile(self):
        """
        Context manager that switches step recording on.

            with profiler.profile():
                step1()
                step2()
        """
        self._active = True
        self._start_time = time.perf_counter()
        try:
            yield self
        finally:
            self._total_time = time.perf_counter() - self._start_time
            self._active = False

    def get_results(self) -> "PipelineResult":
        """Snapshot the collected timings as a PipelineResult."""
        return PipelineResult(
            pipeline_name=self.name,
            total_time=self._total_time,
            steps=self.steps,
        )

    def reset(self):
        """Discard all collected statistics and deactivate the profiler."""
        self.steps.clear()
        self._start_time = None
        self._total_time = 0.0
        self._active = False
308
+
309
+
310
+ # Convenience decorator for simple use cases
311
+ _global_profiler: Optional[PipelineProfiler] = None
312
+
313
+
314
def pipeline_profiler(name: str = "Pipeline"):
    """
    Decorator that profiles a pipeline entry point.

    Creates a fresh PipelineProfiler, installs it as the module-global
    profiler so bare @track decorators report to it, runs the function
    inside an active profiling context, prints a summary table, and
    returns the function's result.

    The previously installed global profiler is restored afterwards, so
    repeated or nested pipeline runs no longer leave a stale profiler
    installed (previously the last profiler leaked forever).

    Args:
        name: Pipeline name shown in the printed report.

    Usage:
        @pipeline_profiler("My Pipeline")
        def main():
            step1()
            step2()
        # Results printed automatically at end
    """
    def decorator(func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            global _global_profiler
            profiler = PipelineProfiler(name=name)
            previous = _global_profiler
            _global_profiler = profiler

            try:
                with profiler.profile():
                    result = func(*args, **kwargs)
            finally:
                # Restore whatever profiler was active before this pipeline,
                # even if the pipeline body raised.
                _global_profiler = previous

            # Print results (only reached on success, as before)
            _print_pipeline_results(profiler.get_results())

            return result

        return wrapper

    return decorator
343
+
344
+
345
def track(func: Optional[Callable] = None, *, name: Optional[str] = None):
    """
    Module-level step-tracking decorator.

    Reports timings to the global profiler installed by @pipeline_profiler;
    when no profiler is active the decorated function runs untouched.

    Supports both usages:

        @track
        def my_function(): ...

        @track(name="Custom Name")
        def my_function(): ...
    """
    def apply(target: Callable) -> Callable:
        label = name or target.__name__

        @functools.wraps(target)
        def timed(*args, **kwargs):
            global _global_profiler

            # Bind once; no pipeline running means plain passthrough.
            profiler = _global_profiler
            if profiler is None or not profiler._active:
                return target(*args, **kwargs)

            # Lazily create the step record on first use.
            if label not in profiler.steps:
                profiler.steps[label] = PipelineStep(name=label)

            began = time.perf_counter()
            try:
                return target(*args, **kwargs)
            finally:
                # Record the duration even when the step raises.
                profiler.steps[label].add_execution(time.perf_counter() - began)

        return timed

    # Bare @track vs. parameterized @track(...)
    return apply if func is None else apply(func)
389
+
390
+
391
+ # ============================================================================
392
+ # OUTPUT FORMATTERS
393
+ # ============================================================================
394
+
395
def format_time(seconds: float) -> str:
    """Render a duration using the most readable unit: µs, ms or s."""
    if seconds < 0.001:
        value, unit, spec = seconds * 1_000_000, "µs", ".2f"
    elif seconds < 1:
        value, unit, spec = seconds * 1000, "ms", ".2f"
    else:
        value, unit, spec = seconds, "s", ".3f"
    return f"{value:{spec}}{unit}"
403
+
404
+
405
def _print_cli_results(result: "ProfileResult"):
    """Render a ProfileResult as a fixed-width table on stdout."""
    rule = "=" * 80
    print(f"\n{rule}")
    print(f"Profile Results: {result.script_path}")
    print(f"Total execution time: {format_time(result.total_time)}")
    print(f"{rule}\n")

    if not result.function_stats:
        print("No functions met the filtering criteria.")
        return

    # Header and separator
    print(f"{'Function':<50} {'Time':<12} {'Calls':<10} {'Avg':<12} {'%':<8}")
    print(f"{'-' * 50} {'-' * 12} {'-' * 10} {'-' * 12} {'-' * 8}")

    # One row per surviving function, columns space-separated
    for row in result.function_stats:
        cells = (
            f"{row.name:<50}",
            f"{format_time(row.total_time):<12}",
            f"{row.call_count:<10}",
            f"{format_time(row.avg_time):<12}",
            f"{row.percentage:>6.2f}%",
        )
        print(" ".join(cells))

    print(f"\n{rule}\n")
429
+
430
+
431
def _print_pipeline_results(result: "PipelineResult"):
    """Render a PipelineResult as a fixed-width table on stdout."""
    rule = "=" * 80
    print(f"\n{rule}")
    print(f"Pipeline Profile: {result.pipeline_name}")
    print(f"Total execution time: {format_time(result.total_time)}")
    print(f"{rule}\n")

    if not result.steps:
        print("No tracked steps found.")
        return

    # Header and separator
    print(f"{'Step':<40} {'Total':<12} {'Calls':<8} {'Avg':<12} {'Min':<12} {'Max':<12} {'%':<8}")
    print(f"{'-' * 40} {'-' * 12} {'-' * 8} {'-' * 12} {'-' * 12} {'-' * 12} {'-' * 8}")

    # One row per step, slowest first, columns space-separated
    total = result.total_time
    for step_name, step in result.get_sorted_steps():
        share = (step.total_time / total * 100) if total > 0 else 0
        cells = (
            f"{step_name:<40}",
            f"{format_time(step.total_time):<12}",
            f"{step.call_count:<8}",
            f"{format_time(step.avg_time):<12}",
            f"{format_time(step.min_time):<12}",
            f"{format_time(step.max_time):<12}",
            f"{share:>6.2f}%",
        )
        print(" ".join(cells))

    print(f"\n{rule}\n")
461
+
462
+
463
+ # ============================================================================
464
+ # PUBLIC API
465
+ # ============================================================================
466
+
467
def profile_script(script_path: str,
                   max_functions: int = 30,
                   min_time_ms: float = 1.0,
                   exclude_stdlib: bool = True) -> "ProfileResult":
    """
    Profile a Python script with cProfile and print a summary table.

    Args:
        script_path: Path to the Python script to profile
        max_functions: Maximum number of functions to display
        min_time_ms: Minimum execution time (ms) to include
        exclude_stdlib: Filter out Python standard library calls

    Returns:
        ProfileResult containing timing statistics
    """
    runner = CLIProfiler(
        max_functions=max_functions,
        min_time_ms=min_time_ms,
        exclude_stdlib=exclude_stdlib,
    )
    outcome = runner.profile_script(script_path)
    _print_cli_results(outcome)
    return outcome
491
+
492
+
493
# Explicit public API for `from infrakit.time.profiler import *`;
# mirrors the re-export list in infrakit/time/__init__.py.
__all__ = [
    # Main functions
    'profile_script',
    'pipeline_profiler',
    'track',

    # Classes
    'CLIProfiler',
    'PipelineProfiler',

    # Data structures
    'ProfileResult',
    'PipelineResult',
    'FunctionStats',
    'PipelineStep',

    # Utilities
    'format_time',
]
@@ -0,0 +1,124 @@
1
+ Metadata-Version: 2.4
2
+ Name: python-infrakit-dev
3
+ Version: 0.1.0
4
+ Summary: A comprehensive Python developer infrastructure toolkit
5
+ Project-URL: Homepage, https://github.com/chiragg21/infrakit
6
+ Project-URL: Repository, https://github.com/chiragg21/infrakit
7
+ Requires-Python: >=3.13
8
+ Requires-Dist: google-genai>=1.69.0
9
+ Requires-Dist: isort>=8.0.1
10
+ Requires-Dist: openai>=2.30.0
11
+ Requires-Dist: pydantic>=2.12.5
12
+ Requires-Dist: python-dotenv>=1.2.2
13
+ Requires-Dist: pyyaml>=6.0.3
14
+ Requires-Dist: tqdm>=4.67.3
15
+ Requires-Dist: typer>=0.24.1
16
+ Description-Content-Type: text/markdown
17
+
18
+ # Infrakit
19
+
20
+ Infrakit is a comprehensive, modular developer infrastructure toolkit for Python projects. It provides CLI utilities, rich project scaffolding, a robust multi-provider LLM client, dependency management, profiling, and core utilities for logging and configuration.
21
+
22
+ ## Installation
23
+
24
+ Install via pip:
25
+
26
+ ```bash
27
+ pip install python-infrakit-dev
28
+ # or if using uv
29
+ uv pip install python-infrakit-dev
30
+ ```
31
+
32
+ The CLI is exposed as `infrakit` and conveniently aliased as `ik`.
33
+
34
+ ## Key Features
35
+
36
+ ### 1. Project Scaffolding (`ik init`)
37
+
38
+ Quickly bootstrap new Python projects with standardized layouts, ready-to-use boilerplate, and optional LLM integration. Existing files are safely skipped if you re-run over a directory.
39
+
40
+ ```bash
41
+ ik init my-project -t basic
42
+ ik init my-fastapi-app -t backend -v 0.1.0
43
+ ik init my-ai-tool -t ai --include-llm
44
+ ```
45
+
46
+ **Available Templates**:
47
+ - **`basic`**: Minimal template (src, utils, tests).
48
+ - **`backend`**: FastAPI service (app, routes, middleware, Dockerfile, docker-compose).
49
+ - **`cli_tool`**: Distributable CLI application using Typer.
50
+ - **`pipeline`**: Data pipeline / ETL template (extract, transform, load, enrich).
51
+ - **`ai`**: AI/ML project optimized for notebooks and pipelines.
52
+
53
+ ### 2. Multi-provider LLM Client (`infrakit.llm`)
54
+
55
+ A unified LLM client interface supporting **OpenAI** and **Gemini**. Built for robust production use, particularly when navigating free-tier API quotas.
56
+
57
+ **Features:**
58
+ - Seamless key rotation and persistent storage tracking.
59
+ - Local rate limiting (RPM/TPM gates).
60
+ - Async and multi-threaded batch generation.
61
+ - Structured output parsing and schema validation using `pydantic.BaseModel`.
62
+
63
+ **Quick Start:**
64
+ ```python
65
+ from infrakit.llm import LLMClient, Prompt
66
+
67
+ client = LLMClient(keys={"openai_keys": ["sk-..."], "gemini_keys": ["AIza..."]}, storage_dir="./logs")
68
+ response = client.generate(Prompt(user="What is the capital of France?"), provider="openai")
69
+ print(response.content)
70
+ ```
71
+
72
+ **LLM CLI tools:**
73
+ Monitor and control limits right from the CLI.
74
+ - `ik llm status --storage-dir ./logs`
75
+ - `ik llm quota set --provider openai --key sk-abc --rpm 60 --storage-dir ./logs`
76
+
77
+ ### 3. Dependency Management (`infrakit.deps`)
78
+
79
+ In-depth Python dependency tools available under `ik deps` (or programmatically via `infrakit.deps`) to clean, health-check, scan, and optimize dependencies.
80
+
81
+ **Features:**
82
+ - `scan(root)`: Scan your project for actual Python dependencies used in code.
83
+ - `export()`: Export used dependencies to update `requirements.txt` or `pyproject.toml` automatically.
84
+ - `check(packages)`: Run health, security, and outdated checks on packages.
85
+ - `clean()`: Find and remove unused packages from the virtual environment.
86
+ - `optimise()`: Optimize and format imports across the project (integrates with `isort`).
87
+
88
+ ### 4. Code Profiling (`infrakit.time`)
89
+
90
+ Lightweight timing and execution profiling for your Python projects.
91
+
92
+ **CLI Usage:**
93
+ Profile any script instantly to detect performance bottlenecks.
94
+ ```bash
95
+ ik time run script.py --max-functions 30 --min-time 1.0
96
+ ```
97
+
98
+ **Decorator Usage:**
99
+ Track specific pipeline steps inside your code:
100
+ ```python
101
+ from infrakit.time import pipeline_profiler, track
102
+
103
+ @pipeline_profiler("Data Processing Pipeline")
104
+ def main():
105
+ load_data()
106
+ transform_data()
107
+
108
+ @track(name="Load Step")
109
+ def load_data(): pass
110
+ ```
111
+
112
+ ### 5. Core Utilities (`infrakit.core`)
113
+
114
+ Convenient implementations for common application needs.
115
+
116
+ - **Config Loader** (`infrakit.core.config.loader`): Multi-format configuration loader supporting JSON, YAML, INI, and `.env`. Automatically resolves environment variables using `python-dotenv` and converts value types.
117
+ - **Logger** (`infrakit.core.logger`): Unified logging setups for consistent output across applications.
118
+ ```python
119
+ from infrakit.core.logger import setup, get_logger
120
+
121
+ setup(level="INFO", strategy="date_level", stream="stdout")
122
+ log = get_logger(__name__)
123
+ log.info("Ready")
124
+ ```