cosma-backend 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. cosma_backend/__init__.py +14 -0
  2. cosma_backend/__main__.py +4 -0
  3. cosma_backend/api/__init__.py +29 -0
  4. cosma_backend/api/files.py +154 -0
  5. cosma_backend/api/index.py +114 -0
  6. cosma_backend/api/models.py +28 -0
  7. cosma_backend/api/search.py +166 -0
  8. cosma_backend/api/status.py +28 -0
  9. cosma_backend/api/updates.py +67 -0
  10. cosma_backend/api/watch.py +156 -0
  11. cosma_backend/app.py +192 -0
  12. cosma_backend/db/__init__.py +2 -0
  13. cosma_backend/db/database.py +638 -0
  14. cosma_backend/discoverer/__init__.py +1 -0
  15. cosma_backend/discoverer/discoverer.py +34 -0
  16. cosma_backend/embedder/__init__.py +1 -0
  17. cosma_backend/embedder/embedder.py +637 -0
  18. cosma_backend/logging.py +73 -0
  19. cosma_backend/models/__init__.py +3 -0
  20. cosma_backend/models/file.py +169 -0
  21. cosma_backend/models/status.py +10 -0
  22. cosma_backend/models/update.py +202 -0
  23. cosma_backend/models/watch.py +132 -0
  24. cosma_backend/pipeline/__init__.py +2 -0
  25. cosma_backend/pipeline/pipeline.py +222 -0
  26. cosma_backend/schema.sql +319 -0
  27. cosma_backend/searcher/__init__.py +1 -0
  28. cosma_backend/searcher/searcher.py +397 -0
  29. cosma_backend/summarizer/__init__.py +44 -0
  30. cosma_backend/summarizer/summarizer.py +1075 -0
  31. cosma_backend/utils/bundled.py +24 -0
  32. cosma_backend/utils/pubsub.py +31 -0
  33. cosma_backend/utils/sse.py +92 -0
  34. cosma_backend/watcher/__init__.py +1 -0
  35. cosma_backend/watcher/awatchdog.py +80 -0
  36. cosma_backend/watcher/watcher.py +257 -0
  37. cosma_backend-0.1.0.dist-info/METADATA +23 -0
  38. cosma_backend-0.1.0.dist-info/RECORD +39 -0
  39. cosma_backend-0.1.0.dist-info/WHEEL +4 -0
@@ -0,0 +1,73 @@
1
+ import json
2
+ import logging
3
+ from datetime import datetime, date, time, timedelta
4
+ from decimal import Decimal
5
+ from pathlib import Path
6
+ from uuid import UUID
7
+ import numpy as np
8
+
9
+
10
class Encoder(json.JSONEncoder):
    """JSON encoder that degrades gracefully for non-JSON-native types.

    Used for structured log output, so it must never raise: any object it
    does not recognize is stringified rather than rejected.
    """

    def default(self, o):
        # Sets are not JSON-serializable; emit them as arrays.
        if isinstance(o, set):
            return tuple(o)

        # Datetime family. NOTE: the datetime check must precede the date
        # check because datetime is a subclass of date.
        if isinstance(o, datetime):
            return o.isoformat()
        if isinstance(o, date):
            return o.isoformat()
        if isinstance(o, time):
            return o.isoformat()
        if isinstance(o, timedelta):
            return o.total_seconds()

        # Exact decimals become floats (lossy, but acceptable for logging).
        if isinstance(o, Decimal):
            return float(o)

        # UUIDs (common database key type) in canonical string form.
        if isinstance(o, UUID):
            return str(o)

        # Filesystem paths as plain strings.
        if isinstance(o, Path):
            return str(o)

        # Raw bytes: decode as UTF-8, substituting undecodable sequences.
        if isinstance(o, bytes):
            return o.decode('utf-8', errors='replace')

        # numpy arrays can be huge; log a compact summary, not the data.
        if isinstance(o, np.ndarray):
            return f"<ndarray shape={o.shape} dtype={o.dtype}>"

        # Duck-typed handling of the backend File model, matched by class
        # name to avoid importing it here (prevents a circular import).
        if o.__class__.__name__ == 'File':
            status = getattr(o, 'status', None)
            return {
                'id': getattr(o, 'id', None),
                'filename': getattr(o, 'filename', None),
                'file_path': getattr(o, 'file_path', None),
                # Enum statuses serialize as their name; anything else is
                # stringified.
                'status': status.name if hasattr(status, 'name') else str(status),
                'content_hash': getattr(o, 'content_hash', None),
            }

        # Last resort: stringify. The previous `hasattr(o, '__str__')`
        # guard was always true (every object has __str__), which made the
        # old `return repr(o)` fallback unreachable dead code.
        return str(o)
61
+
62
+
63
class StructuredMessage:
    """Lazily rendered structured log message.

    Stores a human-readable message plus arbitrary keyword data; the data
    is JSON-encoded (via Encoder) only when the message is stringified,
    i.e. only if the log record is actually emitted.
    """

    def __init__(self, message, /, **kwargs):
        self.message = message
        self.kwargs = kwargs

    def __str__(self):
        return f'{self.message} {Encoder().encode(self.kwargs)}'


# Short alias, following the stdlib logging-cookbook convention:
#   logger.info(sm("upload complete", user=user_id, size=n))
sm = StructuredMessage
@@ -0,0 +1,3 @@
1
+ from .file import File as File
2
+ from .status import ProcessingStatus as ProcessingStatus
3
+ from .watch import WatchedDirectory as WatchedDirectory
@@ -0,0 +1,169 @@
1
+ from dataclasses import dataclass
2
+ from datetime import datetime
3
+ from pathlib import Path
4
+ import sqlite3
5
+ from typing import Any, Optional, List, Self, TYPE_CHECKING
6
+ import logging
7
+
8
+ import numpy as np
9
+
10
+ from backend.models.status import ProcessingStatus
11
+
12
+ if TYPE_CHECKING:
13
+ from backend.api.models import FileResponse
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+
18
@dataclass
class File:
    """
    A unified file model that progresses through the pipeline stages.

    Each stage (discovery -> parsing -> summarization -> embedding) fills
    in more of the optional fields below; ``status`` tracks how far the
    file has progressed.
    """
    # Stage 0: Discovery (file system metadata)
    path: Path
    file_path: str  # str(path), duplicated for database round-tripping
    filename: str
    extension: str  # includes the leading dot, e.g. ".pdf"
    file_size: int
    created: datetime
    modified: datetime
    accessed: datetime

    # Stage 1: Parsing (content extraction)
    id: Optional[int] = None
    content_type: Optional[str] = None
    content: Optional[str] = None
    content_hash: Optional[str] = None
    parsed_at: Optional[datetime] = None

    # Stage 2: Summarization (AI processing)
    summary: Optional[str] = None
    title: Optional[str] = None
    keywords: Optional[List[str]] = None
    summarized_at: Optional[datetime] = None

    # Stage 3: Embedding (vector representation)
    embedding: Optional[np.ndarray] = None
    embedding_model: Optional[str] = None
    embedding_dimensions: Optional[int] = None
    embedded_at: Optional[datetime] = None

    # Meta
    status: ProcessingStatus = ProcessingStatus.DISCOVERED
    processing_error: Optional[str] = None

    @classmethod
    def from_path(cls, path: Path) -> Self:
        """Create a stage-0 File from filesystem metadata.

        Args:
            path: Path to the file; resolved to an absolute path.

        Returns:
            A File populated with discovery-stage metadata only.

        Raises:
            OSError: If the file cannot be stat'ed.
        """
        path = path.resolve()
        file_stats = path.stat()

        # NOTE: st_ctime is platform-dependent (inode-change time on Unix,
        # creation time on Windows).
        modified_at = datetime.fromtimestamp(file_stats.st_mtime)
        created_at = datetime.fromtimestamp(file_stats.st_ctime)
        accessed_at = datetime.fromtimestamp(file_stats.st_atime)

        return cls(
            path=path,
            file_path=str(path),
            filename=path.name,
            extension=path.suffix,
            file_size=file_stats.st_size,
            created=created_at,
            modified=modified_at,
            accessed=accessed_at,
        )

    @classmethod
    def from_row(cls, row: sqlite3.Row) -> Self:
        """
        Create a File instance from a database row.

        Args:
            row: A database row (dict-like object with column names as keys)

        Returns:
            A File instance populated with data from the row
        """
        # Helper function to safely get a value from a Row object.
        # sqlite3.Row raises IndexError (not only KeyError) for unknown
        # column names, hence both are caught.
        def get_value(key: str) -> Optional[Any]:
            try:
                return row[key]
            except (KeyError, IndexError):
                return None

        # Helper function to parse unix timestamps from database.
        def parse_timestamp(value) -> Optional[datetime]:
            # Falsy values (NULL, 0, "") are treated as "no timestamp".
            if not value:
                return None

            # Already a datetime (e.g. from a converter-enabled connection)
            if isinstance(value, datetime):
                return value

            # Stored as a UNIX timestamp. fromtimestamp() raises TypeError
            # for non-numeric input and OSError/OverflowError for values
            # outside the platform's supported range; the previous
            # (ValueError, AttributeError) tuple missed those, so a
            # malformed row crashed instead of logging and returning None.
            try:
                return datetime.fromtimestamp(value)
            except (TypeError, ValueError, OSError, OverflowError):
                logger.warning(f"Failed to parse timestamp: {value}")
                return None

        # Parse status from its stored name to the enum; missing/NULL
        # status defaults to DISCOVERED.
        status = ProcessingStatus[row["status"]] if row["status"] else ProcessingStatus.DISCOVERED

        # Parse timestamps (they're stored as UNIX timestamps in the database)
        created = parse_timestamp(row["created"])
        modified = parse_timestamp(row["modified"])
        accessed = parse_timestamp(row["accessed"])
        parsed_at = parse_timestamp(get_value("parsed_at"))
        summarized_at = parse_timestamp(get_value("summarized_at"))
        embedded_at = parse_timestamp(get_value("embedded_at"))

        # Parse keywords if present (stored as comma or || separated string)
        keywords = None
        keywords_value = get_value("keywords") or get_value("keywords_str")
        if keywords_value:
            # Normalize || separators to commas, then split once.
            keywords = [k.strip() for k in keywords_value.replace("||", ",").split(",") if k.strip()]

        return cls(
            id=get_value("id"),
            path=Path(row["file_path"]),
            file_path=row["file_path"],
            filename=row["filename"],
            extension=row["extension"],
            file_size=row["file_size"],
            created=created,
            modified=modified,
            accessed=accessed,
            content_type=get_value("content_type"),
            content_hash=get_value("content_hash"),
            parsed_at=parsed_at,
            summary=get_value("summary"),
            title=get_value("title"),
            keywords=keywords,
            summarized_at=summarized_at,
            embedded_at=embedded_at,
            status=status,
            processing_error=get_value("processing_error"),
        )

    def to_response(self) -> "FileResponse":
        """
        Convert this File instance to a FileResponse for API serialization.

        Returns:
            A FileResponse instance with the relevant fields from this File
        """
        # Imported locally to avoid a circular import with the API layer.
        # NOTE(review): the wheel ships the `cosma_backend` package, yet
        # imports here reference `backend.*` — verify the distribution
        # provides that top-level name.
        from backend.api.models import FileResponse

        return FileResponse(
            file_path=self.file_path,
            filename=self.filename,
            extension=self.extension,
            created=self.created,
            modified=self.modified,
            accessed=self.accessed,
            title=self.title,
            summary=self.summary,
        )
@@ -0,0 +1,10 @@
1
+ import enum
2
+
3
+
4
# Pipeline progress marker for a file. Values are ordered stage indices;
# FAILED is a terminal, out-of-band state. Built with the functional enum
# API — identical members and values to a class-form definition.
ProcessingStatus = enum.Enum(
    "ProcessingStatus",
    [
        ("DISCOVERED", 0),
        ("PARSED", 1),
        ("SUMMARIZED", 2),
        ("COMPLETE", 3),
        ("FAILED", 4),
    ],
)
@@ -0,0 +1,202 @@
1
+ from dataclasses import dataclass, field
2
+ from typing import Any, Dict, Self
3
+ import enum
4
+
5
+ from backend.utils.sse import ServerSentEvent
6
+
7
+
8
# Opcodes for backend updates pushed to the frontend via SSE. Built with
# the functional enum API for a compact member table; values are the
# wire-format strings — identical members/values to a class-form enum.
UpdateOpcode = enum.Enum(
    "UpdateOpcode",
    {
        # File processing pipeline stages
        "FILE_PARSING": "file_parsing",
        "FILE_PARSED": "file_parsed",
        "FILE_SUMMARIZING": "file_summarizing",
        "FILE_SUMMARIZED": "file_summarized",
        "FILE_EMBEDDING": "file_embedding",
        "FILE_EMBEDDED": "file_embedded",
        "FILE_COMPLETE": "file_complete",
        "FILE_FAILED": "file_failed",
        "FILE_SKIPPED": "file_skipped",
        # File system events (from the watcher)
        "FILE_CREATED": "file_created",
        "FILE_MODIFIED": "file_modified",
        "FILE_DELETED": "file_deleted",
        "FILE_MOVED": "file_moved",
        # Watched-directory lifecycle
        "WATCH_ADDED": "watch_added",
        "WATCH_REMOVED": "watch_removed",
        "WATCH_STARTED": "watch_started",
        # Directory-scan progress
        "DIRECTORY_PROCESSING_STARTED": "directory_processing_started",
        "DIRECTORY_PROCESSING_COMPLETED": "directory_processing_completed",
        # General updates
        "STATUS_UPDATE": "status_update",
        "ERROR": "error",
        "INFO": "info",
        "SHUTTING_DOWN": "shutting_down",
    },
)
UpdateOpcode.__doc__ = """
    Opcodes for different types of backend updates sent to the frontend via SSE.
    """
44
+
45
+
46
@dataclass
class Update:
    """
    A model representing a backend update to be sent to the frontend via SSE.

    Each update has an opcode (message type) and optional data payload.
    """
    opcode: UpdateOpcode
    # default_factory avoids sharing one mutable dict across instances.
    data: Dict[str, Any] = field(default_factory=dict)

    @classmethod
    def create(cls, opcode: UpdateOpcode, **kwargs) -> Self:
        """
        Create an Update instance with the given opcode and data.

        Args:
            opcode: The type of update (UpdateOpcode enum value)
            **kwargs: Arbitrary keyword arguments that will be stored in the data dict

        Returns:
            An Update instance

        Example:
            >>> update = Update.create(UpdateOpcode.FILE_CREATED, path="/docs/file.pdf", size=1024)
            >>> update.opcode
            <UpdateOpcode.FILE_CREATED: 'file_created'>
            >>> update.data
            {'path': '/docs/file.pdf', 'size': 1024}
        """
        # NOTE: the previous docstring example referenced
        # UpdateOpcode.FILE_DISCOVERED, which does not exist.
        return cls(opcode=opcode, data=kwargs)

    @classmethod
    def file_parsing(cls, path: str, filename: str, **kwargs) -> Self:
        """Convenience method for creating a FILE_PARSING update."""
        return cls.create(UpdateOpcode.FILE_PARSING, path=path, filename=filename, **kwargs)

    @classmethod
    def file_parsed(cls, path: str, filename: str, **kwargs) -> Self:
        """Convenience method for creating a FILE_PARSED update."""
        return cls.create(UpdateOpcode.FILE_PARSED, path=path, filename=filename, **kwargs)

    @classmethod
    def file_summarizing(cls, path: str, filename: str, **kwargs) -> Self:
        """Convenience method for creating a FILE_SUMMARIZING update."""
        return cls.create(UpdateOpcode.FILE_SUMMARIZING, path=path, filename=filename, **kwargs)

    @classmethod
    def file_summarized(cls, path: str, filename: str, **kwargs) -> Self:
        """Convenience method for creating a FILE_SUMMARIZED update."""
        return cls.create(UpdateOpcode.FILE_SUMMARIZED, path=path, filename=filename, **kwargs)

    @classmethod
    def file_embedding(cls, path: str, filename: str, **kwargs) -> Self:
        """Convenience method for creating a FILE_EMBEDDING update."""
        return cls.create(UpdateOpcode.FILE_EMBEDDING, path=path, filename=filename, **kwargs)

    @classmethod
    def file_embedded(cls, path: str, filename: str, **kwargs) -> Self:
        """Convenience method for creating a FILE_EMBEDDED update."""
        return cls.create(UpdateOpcode.FILE_EMBEDDED, path=path, filename=filename, **kwargs)

    @classmethod
    def file_complete(cls, path: str, filename: str, **kwargs) -> Self:
        """Convenience method for creating a FILE_COMPLETE update."""
        return cls.create(UpdateOpcode.FILE_COMPLETE, path=path, filename=filename, **kwargs)

    @classmethod
    def file_skipped(cls, path: str, filename: str, reason: str, **kwargs) -> Self:
        """Convenience method for creating a FILE_SKIPPED update."""
        return cls.create(UpdateOpcode.FILE_SKIPPED, path=path, filename=filename, reason=reason, **kwargs)

    @classmethod
    def file_failed(cls, path: str, filename: str, error: str, **kwargs) -> Self:
        """Convenience method for creating a FILE_FAILED update."""
        return cls.create(UpdateOpcode.FILE_FAILED, path=path, filename=filename, error=error, **kwargs)

    @classmethod
    def file_created(cls, path: str, **kwargs) -> Self:
        """Convenience method for creating a FILE_CREATED update."""
        return cls.create(UpdateOpcode.FILE_CREATED, path=path, **kwargs)

    @classmethod
    def file_modified(cls, path: str, **kwargs) -> Self:
        """Convenience method for creating a FILE_MODIFIED update."""
        return cls.create(UpdateOpcode.FILE_MODIFIED, path=path, **kwargs)

    @classmethod
    def file_deleted(cls, path: str, **kwargs) -> Self:
        """Convenience method for creating a FILE_DELETED update."""
        return cls.create(UpdateOpcode.FILE_DELETED, path=path, **kwargs)

    @classmethod
    def file_moved(cls, src_path: str, dest_path: str, **kwargs) -> Self:
        """Convenience method for creating a FILE_MOVED update."""
        return cls.create(UpdateOpcode.FILE_MOVED, src_path=src_path, dest_path=dest_path, **kwargs)

    @classmethod
    def directory_processing_started(cls, path: str, **kwargs) -> Self:
        """Convenience method for creating a DIRECTORY_PROCESSING_STARTED update."""
        return cls.create(UpdateOpcode.DIRECTORY_PROCESSING_STARTED, path=path, **kwargs)

    @classmethod
    def directory_processing_completed(cls, path: str, **kwargs) -> Self:
        """Convenience method for creating a DIRECTORY_PROCESSING_COMPLETED update."""
        return cls.create(UpdateOpcode.DIRECTORY_PROCESSING_COMPLETED, path=path, **kwargs)

    @classmethod
    def error(cls, message: str, **kwargs) -> Self:
        """Convenience method for creating an ERROR update."""
        return cls.create(UpdateOpcode.ERROR, message=message, **kwargs)

    @classmethod
    def info(cls, message: str, **kwargs) -> Self:
        """Convenience method for creating an INFO update."""
        return cls.create(UpdateOpcode.INFO, message=message, **kwargs)

    @classmethod
    def shutting_down(cls, **kwargs) -> Self:
        """Convenience method for creating a SHUTTING_DOWN update."""
        return cls.create(UpdateOpcode.SHUTTING_DOWN, **kwargs)

    def to_dict(self) -> Dict[str, Any]:
        """
        Convert the Update to a dictionary for serialization.

        Returns:
            A dictionary with 'opcode' and 'data' keys
        """
        return {
            "opcode": self.opcode.value,
            "data": self.data
        }

    def to_sse(self, event_id: str | None = None) -> ServerSentEvent:
        """
        Convert the Update to a ServerSentEvent for SSE transmission.

        Args:
            event_id: Optional event ID for SSE reconnection support

        Returns:
            A ServerSentEvent instance ready to be encoded and sent

        Example:
            >>> update = Update.file_parsed("/docs/file.pdf", "file.pdf")
            >>> sse = update.to_sse()
            >>> message = sse.encode()  # Ready to send via SSE endpoint
        """
        return ServerSentEvent(
            data=self.to_dict(),
            event="update",  # All updates use the same event type
            id=event_id,
        )

    def __str__(self) -> str:
        """Return a string representation of the update."""
        return f"Update(opcode={self.opcode.value}, data={self.data})"
@@ -0,0 +1,132 @@
1
+ from dataclasses import dataclass
2
+ from datetime import datetime
3
+ from pathlib import Path
4
+ import sqlite3
5
+ from typing import Any, Optional, Self
6
+ import logging
7
+
8
+ logger = logging.getLogger(__name__)
9
+
10
+
11
+ @dataclass
12
+ class WatchedDirectory:
13
+ """
14
+ A model representing a directory being watched for file changes.
15
+ """
16
+ # Core fields
17
+ path: Path
18
+ is_active: bool = True
19
+ recursive: bool = True
20
+ file_pattern: Optional[str] = None
21
+
22
+ # Database fields
23
+ id: Optional[int] = None
24
+ last_scan: Optional[datetime] = None
25
+ created_at: Optional[datetime] = None
26
+ updated_at: Optional[datetime] = None
27
+
28
+ @classmethod
29
+ def from_path(cls, path: Path, recursive: bool = True, file_pattern: Optional[str] = None) -> Self:
30
+ """
31
+ Create a WatchedDirectory instance from a path.
32
+
33
+ Args:
34
+ path: Path to the directory to watch
35
+ recursive: Whether to watch subdirectories recursively
36
+ file_pattern: Optional glob pattern for filtering files (e.g., "*.pdf")
37
+
38
+ Returns:
39
+ A WatchedDirectory instance
40
+ """
41
+ path = path.resolve()
42
+
43
+ if not path.exists():
44
+ raise ValueError(f"Path does not exist: {path}")
45
+
46
+ if not path.is_dir():
47
+ raise ValueError(f"Path is not a directory: {path}")
48
+
49
+ return cls(
50
+ path=path,
51
+ recursive=recursive,
52
+ file_pattern=file_pattern,
53
+ )
54
+
55
+ @classmethod
56
+ def from_row(cls, row: sqlite3.Row) -> Self:
57
+ """
58
+ Create a WatchedDirectory instance from a database row.
59
+
60
+ Args:
61
+ row: A database row (dict-like object with column names as keys)
62
+
63
+ Returns:
64
+ A WatchedDirectory instance populated with data from the row
65
+ """
66
+ # Helper function to safely get a value from a Row object
67
+ def get_value(key: str) -> Optional[Any]:
68
+ try:
69
+ return row[key]
70
+ except (KeyError, IndexError):
71
+ return None
72
+
73
+ # Helper function to parse unix timestamps from database
74
+ def parse_timestamp(value) -> Optional[datetime]:
75
+ if not value:
76
+ return None
77
+
78
+ # If already a datetime object, return it
79
+ if isinstance(value, datetime):
80
+ return value
81
+
82
+ # Parse unix timestamp
83
+ try:
84
+ return datetime.fromtimestamp(value)
85
+ except (ValueError, AttributeError):
86
+ logger.warning(f"Failed to parse timestamp: {value}")
87
+ return None
88
+
89
+ # Parse timestamps
90
+ last_scan = parse_timestamp(get_value("last_scan"))
91
+ created_at = parse_timestamp(get_value("created_at"))
92
+ updated_at = parse_timestamp(get_value("updated_at"))
93
+
94
+ return cls(
95
+ id=get_value("id"),
96
+ path=Path(row["path"]),
97
+ is_active=bool(row["is_active"]),
98
+ recursive=bool(row["recursive"]),
99
+ file_pattern=get_value("file_pattern"),
100
+ last_scan=last_scan,
101
+ created_at=created_at,
102
+ updated_at=updated_at,
103
+ )
104
+
105
+ @property
106
+ def path_str(self) -> str:
107
+ """Return the path as a string."""
108
+ return str(self.path)
109
+
110
+ def to_response(self) -> "JobResponse":
111
+ """
112
+ Convert this WatchedDirectory instance to a JobResponse for API serialization.
113
+
114
+ Returns:
115
+ A JobResponse instance with the relevant fields from this WatchedDirectory
116
+ """
117
+ from backend.api.models import JobResponse
118
+
119
+ return JobResponse(
120
+ id=self.id,
121
+ path=self.path_str,
122
+ is_active=self.is_active,
123
+ recursive=self.recursive,
124
+ file_pattern=self.file_pattern,
125
+ last_scan=self.last_scan,
126
+ created_at=self.created_at,
127
+ updated_at=self.updated_at,
128
+ )
129
+
130
+ def __str__(self) -> str:
131
+ """Return a string representation of the watched directory."""
132
+ return f"WatchedDirectory(id={self.id}, path={self.path}, active={self.is_active})"
@@ -0,0 +1,2 @@
1
+ from .pipeline import Pipeline as Pipeline
2
+ from .pipeline import PipelineResult as PipelineResult