logtap 0.2.2__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
logtap/core/runs.py ADDED
@@ -0,0 +1,393 @@
1
+ """Run store for ingested log streams.
2
+
3
+ Provides append-only storage with in-memory tail cache and cursor management.
4
+ """
5
+
6
+ import threading
7
+ import time
8
+ from collections import deque
9
+ from dataclasses import dataclass, field
10
+ from datetime import datetime, timezone
11
+ from pathlib import Path
12
+ from typing import Dict, Iterator, List, Optional
13
+
14
+ TAG_KEY_PATTERN = r"^[a-zA-Z0-9_.-]+$"
15
+ TAG_VALUE_MAX_LEN = 256
16
+
17
+
18
@dataclass
class RunLine:
    """One ingested log line together with its position in the run.

    Attributes:
        cursor: Monotonically increasing position of this line within its run.
        line: Raw text of the log line (stored without a trailing newline).
        ts: UTC timestamp recorded when the line was ingested.
    """

    cursor: int
    line: str
    ts: datetime
25
+
26
+
27
@dataclass
class RunMetadata:
    """Persistent bookkeeping for a run (mirrors ``meta.json`` on disk).

    Attributes:
        id: Run identifier (also the run's directory name).
        tags: Free-form key/value tags attached to the run.
        created_at: UTC time the run was first created.
        last_activity: UTC time of the most recent append.
        cursor_start: First cursor ever assigned (immutable once set).
        cursor_latest: Most recently assigned cursor; -1 means no lines yet.
        lines_count: Total number of lines ever appended.
        bytes_on_disk: Size of the run's log file in bytes.
        active: False once the run has been closed.
    """

    id: str
    tags: Dict[str, str] = field(default_factory=dict)
    created_at: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
    last_activity: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
    cursor_start: int = 0  # First cursor ever (immutable)
    cursor_latest: int = -1  # Latest cursor written
    lines_count: int = 0
    bytes_on_disk: int = 0
    active: bool = True
40
+
41
+
42
class Run:
    """A single run with append-only file storage and in-memory tail cache.

    Every appended line gets a monotonically increasing integer cursor
    (starting at 0).  All lines go to ``<run_dir>/log.txt``; the most recent
    ``buffer_lines`` of them are also kept in an in-memory deque so tail and
    resume reads do not touch disk.  All public methods take ``self._lock``
    (an RLock, so they may call each other safely).
    """

    def __init__(self, run_id: str, data_dir: Path, buffer_lines: int = 100_000):
        self.id = run_id
        self.data_dir = data_dir
        self.buffer_lines = buffer_lines
        self._lock = threading.RLock()

        # In-memory tail cache.  The deque's maxlen makes it self-evicting:
        # appending while full drops the oldest entry in O(1).
        self._cache: deque[RunLine] = deque(maxlen=buffer_lines)
        self._cache_start_cursor: int = 0  # Cursor of first item in cache

        # Run directory and files
        self.run_dir = data_dir / run_id
        self.log_file = self.run_dir / "log.txt"
        self.meta_file = self.run_dir / "meta.json"

        # Metadata (assigned by _create or _load)
        self.metadata: RunMetadata

        # Initialize a new run or reload an existing one from disk.
        if self.run_dir.exists():
            self._load()
        else:
            self._create()

    def _create(self) -> None:
        """Create the run directory and fresh metadata for a new run."""
        self.run_dir.mkdir(parents=True, exist_ok=True)
        self.metadata = RunMetadata(id=self.id)
        self._save_metadata()

    def _load(self) -> None:
        """Load an existing run (metadata + tail cache) from disk."""
        import json

        # Load metadata; fall back to fresh metadata if meta.json is missing.
        if self.meta_file.exists():
            with open(self.meta_file, "r", encoding="utf-8") as f:
                data = json.load(f)
            self.metadata = RunMetadata(
                id=data["id"],
                tags=data.get("tags", {}),
                created_at=datetime.fromisoformat(data["created_at"]),
                last_activity=datetime.fromisoformat(data["last_activity"]),
                cursor_start=data.get("cursor_start", 0),
                cursor_latest=data.get("cursor_latest", -1),
                lines_count=data.get("lines_count", 0),
                bytes_on_disk=data.get("bytes_on_disk", 0),
                active=data.get("active", True),
            )
        else:
            self.metadata = RunMetadata(id=self.id)

        # Rebuild the in-memory tail cache from the end of the log file.
        if self.log_file.exists():
            self._populate_cache_from_disk()

    def _populate_cache_from_disk(self) -> None:
        """Load the last ``buffer_lines`` lines of the log file into the cache.

        Cursor positions are reconstructed from ``metadata.cursor_latest``;
        per-line timestamps are not stored on disk, so cached entries reuse
        ``metadata.last_activity`` as an approximation.
        """
        if not self.log_file.exists():
            return

        lines: List[str] = []
        with open(self.log_file, "r", encoding="utf-8", errors="replace") as f:
            for raw in f:
                lines.append(raw.rstrip("\n"))

        # Only the last buffer_lines lines can fit in the cache.
        if len(lines) > self.buffer_lines:
            lines = lines[-self.buffer_lines :]
            # Clamp at 0: if metadata is stale (e.g. crash before the last
            # metadata save) the subtraction could otherwise go negative.
            start_cursor = max(0, self.metadata.cursor_latest - len(lines) + 1)
        else:
            start_cursor = 0

        self._cache_start_cursor = start_cursor
        self._cache.clear()
        for offset, text in enumerate(lines):
            self._cache.append(
                RunLine(
                    cursor=start_cursor + offset,
                    line=text,
                    ts=self.metadata.last_activity,  # Approximate
                )
            )

    def _save_metadata(self) -> None:
        """Persist metadata to ``meta.json`` atomically.

        Writes to a temporary file first and then renames it over the real
        file, so a crash mid-write can never leave a truncated/corrupt
        meta.json behind (os.replace is atomic on POSIX).
        """
        import json
        import os

        tmp_file = self.meta_file.with_name(self.meta_file.name + ".tmp")
        with open(tmp_file, "w", encoding="utf-8") as f:
            json.dump(
                {
                    "id": self.metadata.id,
                    "tags": self.metadata.tags,
                    "created_at": self.metadata.created_at.isoformat(),
                    "last_activity": self.metadata.last_activity.isoformat(),
                    "cursor_start": self.metadata.cursor_start,
                    "cursor_latest": self.metadata.cursor_latest,
                    "lines_count": self.metadata.lines_count,
                    "bytes_on_disk": self.metadata.bytes_on_disk,
                    "active": self.metadata.active,
                },
                f,
            )
        os.replace(tmp_file, self.meta_file)

    def _append_locked(self, f, line: str, now: datetime) -> RunLine:
        """Write one line to an already-open log file handle.

        Caller must hold ``self._lock``.  Updates cache and in-memory
        metadata but does NOT persist metadata to disk.
        """
        cursor = self.metadata.cursor_latest + 1
        run_line = RunLine(cursor=cursor, line=line, ts=now)

        f.write(line + "\n")
        # Count encoded bytes, not characters: TextIO.write returns a
        # character count, which undercounts multi-byte UTF-8 lines and
        # would skew the disk-limit accounting.
        self.metadata.bytes_on_disk += len((line + "\n").encode("utf-8"))

        # The deque evicts the oldest entry itself once full; we only need
        # to advance the bookkeeping cursor when that happens.
        if len(self._cache) >= self.buffer_lines:
            self._cache_start_cursor += 1
        self._cache.append(run_line)

        self.metadata.cursor_latest = cursor
        self.metadata.lines_count += 1
        self.metadata.last_activity = now
        return run_line

    def append(self, line: str) -> RunLine:
        """Append a line to the run. Returns the line with assigned cursor.

        Note: metadata is updated in memory only; callers that need it
        persisted should use append_batch (which saves once per batch).
        """
        with self._lock:
            with open(self.log_file, "a", encoding="utf-8") as f:
                return self._append_locked(f, line, datetime.now(timezone.utc))

    def append_batch(self, lines: List[str]) -> List[RunLine]:
        """Append multiple lines atomically and persist metadata once.

        Opens the log file a single time for the whole batch (the previous
        implementation reopened it per line).  All lines in the batch share
        one ingestion timestamp.
        """
        with self._lock:
            now = datetime.now(timezone.utc)
            with open(self.log_file, "a", encoding="utf-8") as f:
                result = [self._append_locked(f, line, now) for line in lines]
            self._save_metadata()
            return result

    def set_tags(self, tags: Dict[str, str]) -> Optional[str]:
        """Set tags, merging with existing. Returns error message on conflict, None on success.

        Validation is all-or-nothing: no tag is applied unless every entry
        passes key/value validation and conflict checks.
        """
        import re

        with self._lock:
            for key, value in tags.items():
                # Validate key against the allowed character set.
                if not re.match(TAG_KEY_PATTERN, key):
                    return f"Invalid tag key: {key}"
                # Validate value length
                if len(value) > TAG_VALUE_MAX_LEN:
                    return f"Tag value too long: {key}"
                # Re-tagging with the same value is idempotent; a different
                # value for an existing key is rejected.
                if key in self.metadata.tags and self.metadata.tags[key] != value:
                    existing = self.metadata.tags[key]
                    return f"Tag conflict for key '{key}': existing='{existing}', new='{value}'"

            # Merge tags
            self.metadata.tags.update(tags)
            self._save_metadata()
            return None

    @property
    def cursor_earliest(self) -> int:
        """Earliest cursor still available in the in-memory cache.

        Returns 0 when the cache is empty (i.e. nothing ingested yet, or
        only lines on disk that were never cached).
        """
        with self._lock:
            if self._cache:
                return self._cache[0].cursor
            return 0

    @property
    def cursor_latest(self) -> int:
        """Latest cursor written (-1 if no lines have been appended)."""
        with self._lock:
            return self.metadata.cursor_latest

    def get_lines(
        self,
        since: Optional[int] = None,
        tail: int = 50,
        limit: int = 1000,
    ) -> tuple[List[RunLine], bool]:
        """
        Get lines from run.

        Args:
            since: Cursor to start from (exclusive). If None, returns last `tail` lines.
            tail: Number of recent lines if since is None. tail <= 0 yields no lines.
            limit: Maximum lines to return; applied from the front so resume
                reads stay in cursor order.

        Returns:
            Tuple of (lines, gap_detected).
            gap_detected is True if since < cursor_earliest (requested data
            has already been evicted from the cache).
        """
        with self._lock:
            gap = False

            if since is not None:
                # Resume from cursor
                if since < self.cursor_earliest:
                    gap = True
                    # Start from earliest available
                    start_cursor = self.cursor_earliest
                else:
                    start_cursor = since + 1  # Exclusive

                # Get lines from cache
                lines = [ln for ln in self._cache if ln.cursor >= start_cursor]
            else:
                # Tail mode - last N lines.  Guard tail <= 0 explicitly:
                # a plain [-0:] slice would return the ENTIRE cache.
                lines = list(self._cache)[-tail:] if tail > 0 else []

            # Apply limit
            if len(lines) > limit:
                lines = lines[:limit]

            return lines, gap

    def tail_iter(self, since: Optional[int] = None) -> Iterator[RunLine]:
        """
        Iterator that yields new lines as they arrive.

        Blocks forever, polling the cache every 100ms; the caller is
        expected to abandon the iterator to stop it.

        Args:
            since: Cursor to start from (exclusive). If None, starts from latest.
        """
        last_cursor = since if since is not None else self.cursor_latest

        while True:
            with self._lock:
                new_lines = [ln for ln in self._cache if ln.cursor > last_cursor]

            for line in new_lines:
                last_cursor = line.cursor
                yield line

            if not new_lines:
                time.sleep(0.1)  # Poll interval

    def close(self) -> None:
        """Mark run as inactive and save metadata."""
        with self._lock:
            self.metadata.active = False
            self._save_metadata()
287
+
288
+
289
class RunStore:
    """Owns the collection of runs and their on-disk persistence.

    Thread-safe: every public method acquires the store lock before touching
    the run map.  Retention and disk-limit enforcement delete entire runs,
    oldest activity first.
    """

    def __init__(
        self,
        data_dir: Path,
        buffer_lines: int = 100_000,
        max_disk_mb: int = 1000,
        retention_hours: int = 72,
    ):
        self.data_dir = Path(data_dir).expanduser()
        self.buffer_lines = buffer_lines
        self.max_disk_bytes = max_disk_mb * 1024 * 1024
        self.retention_seconds = retention_hours * 3600
        self._runs: Dict[str, Run] = {}
        self._lock = threading.RLock()

        # Ensure the storage root exists, then pick up any runs left over
        # from a previous process.
        self.data_dir.mkdir(parents=True, exist_ok=True)
        self._load_existing_runs()

    def _load_existing_runs(self) -> None:
        """Rehydrate runs found under the data directory."""
        if not self.data_dir.exists():
            return

        for entry in self.data_dir.iterdir():
            if not entry.is_dir():
                continue
            if not (entry / "meta.json").exists():
                continue
            try:
                self._runs[entry.name] = Run(entry.name, self.data_dir, self.buffer_lines)
            except Exception:
                # Best-effort: a corrupted run directory must not prevent startup.
                continue

    def get_or_create(self, run_id: str) -> tuple[Run, bool]:
        """Get existing run or create new one. Returns (run, created)."""
        with self._lock:
            existing = self._runs.get(run_id)
            if existing is not None:
                return existing, False

            created = Run(run_id, self.data_dir, self.buffer_lines)
            self._runs[run_id] = created
            return created, True

    def get(self, run_id: str) -> Optional[Run]:
        """Get run by ID, or None if not found."""
        with self._lock:
            return self._runs.get(run_id)

    def list_runs(self, since_hours: Optional[int] = None) -> List[Run]:
        """List all runs, most recently active first.

        Args:
            since_hours: When given, only runs with activity within the last
                ``since_hours`` hours are returned.
        """
        with self._lock:
            candidates = list(self._runs.values())

        if since_hours is not None:
            threshold = datetime.now(timezone.utc).timestamp() - since_hours * 3600
            candidates = [
                run for run in candidates if run.metadata.last_activity.timestamp() >= threshold
            ]

        candidates.sort(key=lambda run: run.metadata.last_activity, reverse=True)
        return candidates

    def total_disk_usage(self) -> int:
        """Total disk usage across all runs, in bytes."""
        with self._lock:
            return sum(run.metadata.bytes_on_disk for run in self._runs.values())

    def enforce_retention(self) -> None:
        """Delete every run whose last activity predates the retention window."""
        with self._lock:
            deadline = datetime.now(timezone.utc).timestamp() - self.retention_seconds
            expired = [
                run_id
                for run_id, run in self._runs.items()
                if run.metadata.last_activity.timestamp() < deadline
            ]
            for run_id in expired:
                self._delete_run(run_id)

    def enforce_disk_limit(self) -> None:
        """Delete least-recently-active runs until usage fits the disk budget."""
        with self._lock:
            while self._runs and self.total_disk_usage() > self.max_disk_bytes:
                victim = min(self._runs.values(), key=lambda run: run.metadata.last_activity)
                self._delete_run(victim.id)

    def _delete_run(self, run_id: str) -> None:
        """Remove a run from memory and wipe its directory from disk."""
        import shutil

        run = self._runs.pop(run_id, None)
        if run is None:
            return
        if run.run_dir.exists():
            shutil.rmtree(run.run_dir)

    def check_storage(self) -> Optional[str]:
        """Check if storage is available. Returns error message if not."""
        if self.total_disk_usage() >= self.max_disk_bytes:
            return "insufficient_storage"
        return None
logtap/core/search.py CHANGED
@@ -2,11 +2,13 @@
2
2
  Search and filtering functionality for logtap.
3
3
 
4
4
  Provides substring, regex-based, and severity-based filtering of log lines.
5
+ Uses google-re2 for regex matching to prevent ReDoS attacks.
5
6
  """
6
7
 
7
- import re
8
8
  from typing import List, Optional, Set
9
9
 
10
+ import re2
11
+
10
12
  from logtap.core.parsers.base import LogLevel, ParsedLogEntry
11
13
 
12
14
 
@@ -19,6 +21,9 @@ def filter_lines(
19
21
  """
20
22
  Filter lines by substring or regex pattern.
21
23
 
24
+ Uses google-re2 for regex matching, which guarantees linear time
25
+ complexity and is immune to ReDoS attacks.
26
+
22
27
  Args:
23
28
  lines: List of log lines to filter.
24
29
  term: Substring to search for. If provided, only lines containing
@@ -35,11 +40,12 @@ def filter_lines(
35
40
  return lines
36
41
 
37
42
  if regex:
38
- flags = 0 if case_sensitive else re.IGNORECASE
39
43
  try:
40
- pattern = re.compile(regex, flags)
44
+ options = re2.Options()
45
+ options.case_sensitive = case_sensitive
46
+ pattern = re2.compile(regex, options)
41
47
  return [line for line in lines if pattern.search(line)]
42
- except re.error:
48
+ except re2.error:
43
49
  # Invalid regex, return empty list
44
50
  return []
45
51
 
@@ -85,10 +91,7 @@ def filter_by_level(
85
91
 
86
92
  if min_level:
87
93
  min_severity = min_level.severity
88
- return [
89
- e for e in entries
90
- if e.level is not None and e.level.severity <= min_severity
91
- ]
94
+ return [e for e in entries if e.level is not None and e.level.severity <= min_severity]
92
95
 
93
96
  return entries
94
97
 
@@ -126,11 +129,12 @@ def filter_entries(
126
129
  # Apply text filter
127
130
  if term or regex:
128
131
  if regex:
129
- flags = 0 if case_sensitive else re.IGNORECASE
130
132
  try:
131
- pattern = re.compile(regex, flags)
133
+ options = re2.Options()
134
+ options.case_sensitive = case_sensitive
135
+ pattern = re2.compile(regex, options)
132
136
  result = [e for e in result if pattern.search(e.message)]
133
- except re.error:
137
+ except re2.error:
134
138
  result = []
135
139
  elif term:
136
140
  if case_sensitive:
@@ -1,6 +1,7 @@
1
1
  """Response models for logtap API."""
2
2
 
3
- from typing import List, Optional
3
+ from datetime import datetime
4
+ from typing import Dict, List, Optional
4
5
 
5
6
  from pydantic import BaseModel, Field
6
7
 
@@ -63,3 +64,55 @@ class HealthResponse(BaseModel):
63
64
 
64
65
  status: str = Field(default="healthy", description="Service status")
65
66
  version: str = Field(description="logtap version")
67
+ mode: Optional[str] = Field(default=None, description="Server mode: serve, collect, or both")
68
+ features: Optional[List[str]] = Field(default=None, description="Available features")
69
+ runs: Optional[int] = Field(default=None, description="Number of active runs (collect mode)")
70
+ uptime_seconds: Optional[int] = Field(default=None, description="Server uptime in seconds")
71
+
72
+
73
+ # Run-related models for collector mode
74
+
75
+
76
class RunInfo(BaseModel):
    """API-facing summary of a single run (identity, cursors, tags, activity)."""

    id: str = Field(description="Run identifier")
    lines: int = Field(description="Total lines ingested")
    cursor_earliest: int = Field(description="Earliest available cursor")
    cursor_latest: int = Field(description="Latest cursor")
    tags: Dict[str, str] = Field(default_factory=dict, description="Run tags")
    created_at: datetime = Field(description="When the run was created")
    last_activity: datetime = Field(description="Last activity timestamp")
    active: bool = Field(description="Whether the run is actively receiving data")
    bytes_on_disk: Optional[int] = Field(default=None, description="Disk usage in bytes")
88
+
89
+
90
class RunListResponse(BaseModel):
    """Envelope for the run-listing endpoint."""

    runs: List[RunInfo] = Field(description="List of runs")
94
+
95
+
96
class IngestResponse(BaseModel):
    """Acknowledgement returned once an ingest request has been stored."""

    run_id: str = Field(description="Run identifier")
    lines_ingested: int = Field(description="Number of lines ingested in this request")
    cursor_end: int = Field(description="Final cursor after ingest")
102
+
103
+
104
class StreamMetaEvent(BaseModel):
    """First event of a stream: cursor bounds plus gap/missed-line info."""

    cursor_earliest: int = Field(description="Earliest available cursor")
    cursor_latest: int = Field(description="Latest cursor")
    gap: bool = Field(default=False, description="Whether a gap was detected")
    missed: Optional[int] = Field(default=None, description="Number of missed lines if gap")
111
+
112
+
113
class StreamLineEvent(BaseModel):
    """Per-line event in a stream: the line, its cursor, and ingest time."""

    cursor: int = Field(description="Line cursor")
    line: str = Field(description="Log line content")
    ts: datetime = Field(description="Timestamp when line was ingested")
@@ -1,13 +1,13 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: logtap
3
- Version: 0.2.2
3
+ Version: 0.4.0
4
4
  Summary: A CLI-first log access tool for Unix systems. Remote log file access without SSH.
5
+ Project-URL: Homepage, https://github.com/cainky/logtap
6
+ Project-URL: Repository, https://github.com/cainky/logtap
7
+ Author-email: cainky <kylecain.me@gmail.com>
5
8
  License: GPL-3.0-or-later
6
9
  License-File: LICENSE
7
- Keywords: logs,monitoring,cli,devops,sysadmin
8
- Author: cainky
9
- Author-email: kylecain.me@gmail.com
10
- Requires-Python: >=3.10,<4.0
10
+ Keywords: cli,devops,logs,monitoring,sysadmin
11
11
  Classifier: Development Status :: 4 - Beta
12
12
  Classifier: Environment :: Console
13
13
  Classifier: Intended Audience :: Developers
@@ -15,27 +15,30 @@ Classifier: Intended Audience :: System Administrators
15
15
  Classifier: License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
16
16
  Classifier: Operating System :: MacOS
17
17
  Classifier: Operating System :: POSIX :: Linux
18
- Classifier: Programming Language :: Python :: 3
19
18
  Classifier: Programming Language :: Python :: 3.10
20
19
  Classifier: Programming Language :: Python :: 3.11
21
20
  Classifier: Programming Language :: Python :: 3.12
22
- Classifier: Programming Language :: Python :: 3.13
23
- Classifier: Programming Language :: Python :: 3.14
24
21
  Classifier: Topic :: System :: Logging
25
22
  Classifier: Topic :: System :: Monitoring
26
23
  Classifier: Topic :: System :: Systems Administration
27
- Requires-Dist: aiofiles (>=23.2.1)
28
- Requires-Dist: fastapi (>=0.109.0)
29
- Requires-Dist: httpx (>=0.26.0)
30
- Requires-Dist: pydantic (>=2.5.0)
31
- Requires-Dist: pydantic-settings (>=2.1.0)
32
- Requires-Dist: python-dotenv (>=1.0.1)
33
- Requires-Dist: rich (>=13.7.0)
34
- Requires-Dist: typer[all] (>=0.9.0)
35
- Requires-Dist: uvicorn[standard] (>=0.27.0)
36
- Requires-Dist: websockets (>=12.0)
37
- Project-URL: Homepage, https://github.com/cainky/logtap
38
- Project-URL: Repository, https://github.com/cainky/logtap
24
+ Requires-Python: >=3.10
25
+ Requires-Dist: aiofiles>=23.2.1
26
+ Requires-Dist: fastapi>=0.109.0
27
+ Requires-Dist: google-re2>=1.1
28
+ Requires-Dist: httpx>=0.26.0
29
+ Requires-Dist: pydantic-settings>=2.1.0
30
+ Requires-Dist: pydantic>=2.5.0
31
+ Requires-Dist: python-dotenv>=1.0.1
32
+ Requires-Dist: rich>=13.7.0
33
+ Requires-Dist: typer>=0.9.0
34
+ Requires-Dist: uvicorn[standard]>=0.27.0
35
+ Requires-Dist: websockets>=12.0
36
+ Provides-Extra: dev
37
+ Requires-Dist: pre-commit>=4.5.1; extra == 'dev'
38
+ Requires-Dist: pytest-asyncio>=0.23.0; extra == 'dev'
39
+ Requires-Dist: pytest-cov>=4.1.0; extra == 'dev'
40
+ Requires-Dist: pytest>=7.4.0; extra == 'dev'
41
+ Requires-Dist: ruff>=0.1.0; extra == 'dev'
39
42
  Description-Content-Type: text/markdown
40
43
 
41
44
  # logtap
@@ -314,4 +317,3 @@ Contributions are welcome! Please open an issue to discuss potential changes bef
314
317
  ## Author
315
318
 
316
319
  Kyle Cain - [@cainky](https://github.com/cainky)
317
-
@@ -1,36 +1,41 @@
1
- logtap/__init__.py,sha256=AZzOTH4EFgrz3x5g_i6HELBi5Cg8pOXqCWbJbCWnl2A,179
1
+ logtap/__init__.py,sha256=kPYm2mOUmJEBnDtZ78K-fva7PcnK1J7yjQToY6bEqyc,179
2
2
  logtap/__main__.py,sha256=vqJPz3Zf-ICn_4P3B1o7U4NxcCo0qWgfAGEE_j13t-c,138
3
3
  logtap/api/__init__.py,sha256=80bP-eIxtAzidgv5nzcfyCEdl8EI2QjVY_eyxjvvhA0,98
4
- logtap/api/app.py,sha256=BBphxLKvk7yIiLyQ3tdiagqtjc7xFixd_FUzNFjlC0c,1176
4
+ logtap/api/app.py,sha256=6TIwQFjwyW0VYYo3ayRiUO2IuHnPKsK5kAIPO9xzPvo,3094
5
5
  logtap/api/dependencies.py,sha256=1cx1qrp0O6v1fHXA2JdEhC8P4caG2oUSCfMk2-8zmGs,1620
6
6
  logtap/api/routes/__init__.py,sha256=XYvFyTP4zKywRZH0v97k0EZCYgxdL2PSUaNet20znPE,29
7
7
  logtap/api/routes/files.py,sha256=bqZYrX6jrF5-7GzBpUIXXoPVdxUwm6o0LTcJBLtaJUE,991
8
- logtap/api/routes/health.py,sha256=Ak-z2ChqZZ7FgHdu1JDo3v5aDBPR3VIICyXTLDBf75E,462
9
- logtap/api/routes/logs.py,sha256=rIc9pkn7YtVjf0mAluJztVocJ_P9THR280KTEDN1zqE,8431
8
+ logtap/api/routes/health.py,sha256=s117Hr1E8OcBGPOWq2WwHLZSq35hS7wmLPk6BYq3dq4,1112
9
+ logtap/api/routes/logs.py,sha256=XpRAd4fZmVyylz6bHCHm4y0Y2GofSquH6j5WJP3Jyao,8467
10
10
  logtap/api/routes/parsed.py,sha256=XVvkKBE_hQvfJyrDBBPR_PpVxvof-y4B77xKe9Rr0Qk,3367
11
+ logtap/api/routes/runs.py,sha256=Fxb6joJ5FPXPCKTfgD41i0H4UQ4U4fmFxk08SFUxt_s,11355
11
12
  logtap/cli/__init__.py,sha256=U4zaUJ1rm0qHXqeArpzC45S5N-5SBdd8K6foe513msk,31
13
+ logtap/cli/main.py,sha256=jfzN-S6dn3bg6yuQ3ovJtaLYb7LnCDg_cl7vqRWTBxw,1230
12
14
  logtap/cli/commands/__init__.py,sha256=U4zaUJ1rm0qHXqeArpzC45S5N-5SBdd8K6foe513msk,31
15
+ logtap/cli/commands/collect.py,sha256=8x6LyMrzI79wYtfLZcbQdgpy5nxPZuQOEillE9IfwwE,3002
13
16
  logtap/cli/commands/files.py,sha256=WFr8kA0SdgQHz3ZyONTaljxHMcD-nQlndp3UIOwZATc,2455
17
+ logtap/cli/commands/ingest.py,sha256=JaItHHYV3fBmPkseYpubyHryNbuEuxyjRBk-EiiEwyU,4054
14
18
  logtap/cli/commands/query.py,sha256=uD9nH5E-7EqJryLf3hHkDbJSQo4kWFGmzzHgTfAKFwk,3418
19
+ logtap/cli/commands/runs.py,sha256=Dweswku19Dj2KOFhT0kaega9KSKmUrvya3eLn0-5lXo,3632
15
20
  logtap/cli/commands/serve.py,sha256=9OvfII21q6cel3zZfSsAsiERKwKFt0ZFTXmUd2Psthg,1910
16
- logtap/cli/commands/tail.py,sha256=dwPRXub1dcRwKulDt_qNa2waQm1YPOxIg0QokAK6Gyw,3648
17
- logtap/cli/main.py,sha256=fWSuQdin9G-RC7Oqzesfp93WZI1-v7227P-WWTsxtIQ,1045
21
+ logtap/cli/commands/tail.py,sha256=w7P3_1o0OtVtos3kV8w4goShWXzzUDo4ekSye3VSpGo,10015
18
22
  logtap/core/__init__.py,sha256=tsoL0XuDrPd5xHEu975WqFHoA7EQgloxrum7CjsWHuk,450
23
+ logtap/core/reader.py,sha256=BuBrEAbS2naCBTtuBNc0Un6thbekzabaHTBzYE1SwKg,5277
24
+ logtap/core/runs.py,sha256=t4JnQvZTi-YB2II8maBIcaJD77gp_CjKVcTGYwHhuU8,13488
25
+ logtap/core/search.py,sha256=rtq8WP96RYUvRkX_R5x_mdD_dw1syDuNkHx3uP_diOg,4574
26
+ logtap/core/validation.py,sha256=Nk86jHqEfI4H96fk-1rjbC5sBwfzls43hyOhnRV6rxI,1359
19
27
  logtap/core/parsers/__init__.py,sha256=5f3hFxf_DgNScRDchRT8ocFVgi7Md4xuMN-ShvlssBo,575
20
28
  logtap/core/parsers/apache.py,sha256=JjuQ4v-b7HJvTCcjbOMgv5_dSdiNVPX_EUyplc3f5Qw,5332
21
29
  logtap/core/parsers/auto.py,sha256=OLLuX7XIxS0Upnv9FQ-_B0sGAyZmfNxjnMDGdZtUIO4,3565
22
- logtap/core/parsers/base.py,sha256=HK886mB2mOuRwsVit_U8UgSfrrQOYwMt_XQm-bgso40,4829
23
- logtap/core/parsers/json_parser.py,sha256=_3uMGT0EW0wwKzXk1Uc9A7iNvey0Ijqr0uRnJuU1_XM,3728
30
+ logtap/core/parsers/base.py,sha256=AVTk64djuIxih2mav3N25V_ldLXYTc68JIchH7ZVd3g,4894
31
+ logtap/core/parsers/json_parser.py,sha256=AEWKfKnFdMVmtImcJqtXYZjyW2TKnla6YwXomGFpXr4,4169
24
32
  logtap/core/parsers/nginx.py,sha256=j_oILELOM0azDPLc41wXrLu5o_LhnPs9fT0_iaOqqAQ,3526
25
33
  logtap/core/parsers/syslog.py,sha256=gBNQ39QXsigOpfnq3cEdmvFa8NLp_wmiSMDlTt0SIbs,2430
26
- logtap/core/reader.py,sha256=YY3wyV9NvYiskTh2nrl1tZAZNY11fr33vo85OhW1qHc,5247
27
- logtap/core/search.py,sha256=-P2KLkjTWANyLQhdxqZc3I-0UpbhWjQQK_yHHxwYJaA,4350
28
- logtap/core/validation.py,sha256=Nk86jHqEfI4H96fk-1rjbC5sBwfzls43hyOhnRV6rxI,1359
29
34
  logtap/models/__init__.py,sha256=tce3Q0QjPhnlAYG8IcwxPedyh1ibBlKIF3CjXe5wwgo,280
30
35
  logtap/models/config.py,sha256=8x6OR_y2ZB8SSoQWQGwDB7DXH30UyMNXUcRWOctjUn8,927
31
- logtap/models/responses.py,sha256=45J-Xw1Gb35uP5188wxy2QZlyy3Fh18fpAFizZnpi3A,1850
32
- logtap-0.2.2.dist-info/METADATA,sha256=3WqwebLrTMIChXa8c0k2-2brpXnxk_olw7tHQb7b394,7372
33
- logtap-0.2.2.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
34
- logtap-0.2.2.dist-info/entry_points.txt,sha256=NsN54PsyqB2TA7b8W9-nH4_CNl2_biG4i4FhOQAU9rQ,46
35
- logtap-0.2.2.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
36
- logtap-0.2.2.dist-info/RECORD,,
36
+ logtap/models/responses.py,sha256=xKdKdS85soxMYGNad3WfF0pOG0Pb5Z7XwVrwK-TCnHs,4084
37
+ logtap-0.4.0.dist-info/METADATA,sha256=_Y9ZSz2BwIF2SOOHYPQZ8YrLIxmpB31tOaabDty1BDY,7466
38
+ logtap-0.4.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
39
+ logtap-0.4.0.dist-info/entry_points.txt,sha256=tuAit8kt97yjtACQKvN35wWozp4KhSju_gfDhSS1IrM,47
40
+ logtap-0.4.0.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
41
+ logtap-0.4.0.dist-info/RECORD,,
@@ -1,4 +1,4 @@
1
1
  Wheel-Version: 1.0
2
- Generator: poetry-core 2.2.1
2
+ Generator: hatchling 1.28.0
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ logtap = logtap.cli.main:app
@@ -1,3 +0,0 @@
1
- [console_scripts]
2
- logtap=logtap.cli.main:app
3
-