basic-memory 0.7.0__py3-none-any.whl → 0.8.0__py3-none-any.whl
- basic_memory/__init__.py +1 -1
- basic_memory/alembic/alembic.ini +119 -0
- basic_memory/alembic/env.py +23 -1
- basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink.py +51 -0
- basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py +44 -0
- basic_memory/api/app.py +0 -4
- basic_memory/api/routers/knowledge_router.py +1 -1
- basic_memory/api/routers/memory_router.py +16 -16
- basic_memory/api/routers/resource_router.py +105 -4
- basic_memory/cli/app.py +0 -2
- basic_memory/cli/commands/status.py +9 -21
- basic_memory/cli/commands/sync.py +12 -16
- basic_memory/cli/commands/tools.py +36 -13
- basic_memory/cli/main.py +0 -1
- basic_memory/config.py +15 -1
- basic_memory/file_utils.py +6 -4
- basic_memory/markdown/entity_parser.py +3 -3
- basic_memory/mcp/async_client.py +1 -1
- basic_memory/mcp/main.py +25 -0
- basic_memory/mcp/prompts/__init__.py +15 -0
- basic_memory/mcp/prompts/ai_assistant_guide.py +28 -0
- basic_memory/mcp/prompts/continue_conversation.py +172 -0
- basic_memory/mcp/prompts/json_canvas_spec.py +25 -0
- basic_memory/mcp/prompts/recent_activity.py +46 -0
- basic_memory/mcp/prompts/search.py +127 -0
- basic_memory/mcp/prompts/utils.py +98 -0
- basic_memory/mcp/server.py +3 -7
- basic_memory/mcp/tools/__init__.py +6 -4
- basic_memory/mcp/tools/canvas.py +99 -0
- basic_memory/mcp/tools/memory.py +12 -5
- basic_memory/mcp/tools/notes.py +1 -2
- basic_memory/mcp/tools/resource.py +192 -0
- basic_memory/mcp/tools/utils.py +2 -1
- basic_memory/models/knowledge.py +27 -11
- basic_memory/repository/repository.py +1 -1
- basic_memory/repository/search_repository.py +14 -4
- basic_memory/schemas/__init__.py +0 -11
- basic_memory/schemas/base.py +4 -1
- basic_memory/schemas/memory.py +11 -2
- basic_memory/schemas/search.py +2 -1
- basic_memory/services/entity_service.py +19 -12
- basic_memory/services/file_service.py +69 -2
- basic_memory/services/link_resolver.py +12 -9
- basic_memory/services/search_service.py +56 -12
- basic_memory/sync/__init__.py +3 -2
- basic_memory/sync/sync_service.py +294 -123
- basic_memory/sync/watch_service.py +125 -129
- basic_memory/utils.py +24 -9
- {basic_memory-0.7.0.dist-info → basic_memory-0.8.0.dist-info}/METADATA +2 -1
- basic_memory-0.8.0.dist-info/RECORD +91 -0
- basic_memory/alembic/README +0 -1
- basic_memory/schemas/discovery.py +0 -28
- basic_memory/sync/file_change_scanner.py +0 -158
- basic_memory/sync/utils.py +0 -31
- basic_memory-0.7.0.dist-info/RECORD +0 -82
- {basic_memory-0.7.0.dist-info → basic_memory-0.8.0.dist-info}/WHEEL +0 -0
- {basic_memory-0.7.0.dist-info → basic_memory-0.8.0.dist-info}/entry_points.txt +0 -0
- {basic_memory-0.7.0.dist-info → basic_memory-0.8.0.dist-info}/licenses/LICENSE +0 -0
basic_memory/schemas/memory.py
CHANGED

```diff
@@ -9,7 +9,7 @@ from pydantic import BaseModel, Field, BeforeValidator, TypeAdapter
 from basic_memory.schemas.search import SearchItemType
 
 
-def normalize_memory_url(url: str) -> str:
+def normalize_memory_url(url: str | None) -> str:
     """Normalize a MemoryUrl string.
 
     Args:
@@ -24,6 +24,9 @@ def normalize_memory_url(url: str) -> str:
     >>> normalize_memory_url("memory://specs/search")
     'memory://specs/search'
     """
+    if not url:
+        return ""
+
     clean_path = url.removeprefix("memory://")
     return f"memory://{clean_path}"
 
@@ -59,7 +62,7 @@ class EntitySummary(BaseModel):
     """Simplified entity representation."""
 
     type: str = "entity"
-    permalink: str
+    permalink: Optional[str]
     title: str
     file_path: str
     created_at: datetime
@@ -69,19 +72,25 @@ class RelationSummary(BaseModel):
     """Simplified relation representation."""
 
     type: str = "relation"
+    title: str
+    file_path: str
     permalink: str
     relation_type: str
     from_id: str
     to_id: Optional[str] = None
+    created_at: datetime
 
 
 class ObservationSummary(BaseModel):
     """Simplified observation representation."""
 
     type: str = "observation"
+    title: str
+    file_path: str
     permalink: str
     category: str
     content: str
+    created_at: datetime
 
 
 class MemoryMetadata(BaseModel):
```
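The `normalize_memory_url` change makes the helper safe for `None` and empty input, and pairs with `EntitySummary.permalink` becoming `Optional[str]` (non-markdown files can now surface without a permalink). A standalone sketch of the new behavior, with illustrative asserts (needs Python 3.10+ for `str | None`):

```python
def normalize_memory_url(url: str | None) -> str:
    """Normalize a MemoryUrl string, now tolerating missing input."""
    if not url:
        # 0.8.0: None or "" short-circuits to "" instead of raising
        return ""
    clean_path = url.removeprefix("memory://")
    return f"memory://{clean_path}"

assert normalize_memory_url(None) == ""
assert normalize_memory_url("specs/search") == "memory://specs/search"
assert normalize_memory_url("memory://specs/search") == "memory://specs/search"
```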
basic_memory/services/entity_service.py
CHANGED

```diff
@@ -124,17 +124,19 @@ class EntityService(BaseService[EntityModel]):
         entity_markdown = await self.entity_parser.parse_file(file_path)
 
         # create entity
-        await self.create_entity_from_markdown(file_path, entity_markdown)
+        created = await self.create_entity_from_markdown(file_path, entity_markdown)
 
         # add relations
-        entity = await self.update_entity_relations(file_path, entity_markdown)
+        entity = await self.update_entity_relations(created.file_path, entity_markdown)
 
         # Set final checksum to mark complete
         return await self.repository.update(entity.id, {"checksum": checksum})
 
     async def update_entity(self, entity: EntityModel, schema: EntitySchema) -> EntityModel:
         """Update an entity's content and metadata."""
-        logger.debug(
+        logger.debug(
+            f"Updating entity with permalink: {entity.permalink} content-type: {schema.content_type}"
+        )
 
         # Convert file path string to Path
         file_path = Path(entity.file_path)
@@ -152,20 +154,25 @@
         entity = await self.update_entity_and_observations(file_path, entity_markdown)
 
         # add relations
-        await self.update_entity_relations(file_path, entity_markdown)
+        await self.update_entity_relations(str(file_path), entity_markdown)
 
         # Set final checksum to match file
         entity = await self.repository.update(entity.id, {"checksum": checksum})
 
         return entity
 
-    async def delete_entity(self,
+    async def delete_entity(self, permalink_or_id: str | int) -> bool:
         """Delete entity and its file."""
-        logger.debug(f"Deleting entity: {
+        logger.debug(f"Deleting entity: {permalink_or_id}")
 
         try:
             # Get entity first for file deletion
-
+            if isinstance(permalink_or_id, str):
+                entity = await self.get_by_permalink(permalink_or_id)
+            else:
+                entities = await self.get_entities_by_id([permalink_or_id])
+                assert len(entities) == 1, f"Expected 1 entity, got {len(entities)}"
+                entity = entities[0]
 
             # Delete file first
             await self.file_service.delete_entity_file(entity)
@@ -174,7 +181,7 @@
             return await self.repository.delete(entity.id)
 
         except EntityNotFoundError:
-            logger.info(f"Entity not found: {
+            logger.info(f"Entity not found: {permalink_or_id}")
             return True  # Already deleted
 
     async def get_by_permalink(self, permalink: str) -> EntityModel:
@@ -256,13 +263,13 @@
 
     async def update_entity_relations(
         self,
-
+        path: str,
         markdown: EntityMarkdown,
     ) -> EntityModel:
         """Update relations for entity"""
-        logger.debug(f"Updating relations for entity: {
+        logger.debug(f"Updating relations for entity: {path}")
 
-        db_entity = await self.repository.get_by_file_path(
+        db_entity = await self.repository.get_by_file_path(path)
 
         # Clear existing relations first
         await self.relation_repository.delete_outgoing_relations_from_entity(db_entity.id)
@@ -296,4 +303,4 @@
             )
             continue
 
-        return await self.repository.get_by_file_path(
+        return await self.repository.get_by_file_path(path)
```
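The widened `delete_entity(permalink_or_id: str | int)` lets callers pass either a permalink or a numeric database id. A hypothetical caller-side sketch (`entity_service` wiring elided; `"specs/search"` and `42` are made-up values):

```python
async def cleanup(entity_service) -> None:
    # By permalink (str branch: resolved via get_by_permalink)
    await entity_service.delete_entity("specs/search")
    # By database id (int branch: resolved via get_entities_by_id)
    await entity_service.delete_entity(42)
    # Per the except branch above, deleting a missing entity logs and
    # returns True rather than raising EntityNotFoundError.
```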
basic_memory/services/file_service.py
CHANGED

```diff
@@ -1,11 +1,14 @@
 """Service for file operations with checksum tracking."""
 
+import mimetypes
+from os import stat_result
 from pathlib import Path
-from typing import Tuple, Union
+from typing import Tuple, Union, Dict, Any
 
 from loguru import logger
 
 from basic_memory import file_utils
+from basic_memory.file_utils import FileError
 from basic_memory.markdown.markdown_processor import MarkdownProcessor
 from basic_memory.models import Entity as EntityModel
 from basic_memory.schemas import Entity as EntitySchema
@@ -134,6 +137,7 @@
             logger.error(f"Failed to write file {full_path}: {e}")
             raise FileOperationError(f"Failed to write file: {e}")
 
+    # TODO remove read_file
     async def read_file(self, path: Union[Path, str]) -> Tuple[str, str]:
         """Read file and compute checksum.
 
@@ -153,7 +157,7 @@
         full_path = path if path.is_absolute() else self.base_path / path
 
         try:
-            content =
+            content = full_path.read_text()
             checksum = await file_utils.compute_checksum(content)
             logger.debug(f"read file: {full_path}, checksum: {checksum}")
             return content, checksum
@@ -174,3 +178,66 @@
         path = Path(path)
         full_path = path if path.is_absolute() else self.base_path / path
         full_path.unlink(missing_ok=True)
+
+    async def update_frontmatter(self, path: Union[Path, str], updates: Dict[str, Any]) -> str:
+        """
+        Update frontmatter fields in a file while preserving all content.
+        """
+
+        path = Path(path)
+        full_path = path if path.is_absolute() else self.base_path / path
+        return await file_utils.update_frontmatter(full_path, updates)
+
+    async def compute_checksum(self, path: Union[str, Path]) -> str:
+        """Compute checksum for a file."""
+        path = Path(path)
+        full_path = path if path.is_absolute() else self.base_path / path
+        try:
+            if self.is_markdown(path):
+                # read str
+                content = full_path.read_text()
+            else:
+                # read bytes
+                content = full_path.read_bytes()
+            return await file_utils.compute_checksum(content)
+
+        except Exception as e:  # pragma: no cover
+            logger.error(f"Failed to compute checksum for {path}: {e}")
+            raise FileError(f"Failed to compute checksum for {path}: {e}")
+
+    def file_stats(self, path: Union[Path, str]) -> stat_result:
+        """
+        Return file stats for a given path.
+        :param path:
+        :return:
+        """
+        path = Path(path)
+        full_path = path if path.is_absolute() else self.base_path / path
+        # get file timestamps
+        return full_path.stat()
+
+    def content_type(self, path: Union[Path, str]) -> str:
+        """
+        Return content_type for a given path.
+        :param path:
+        :return:
+        """
+        path = Path(path)
+        full_path = path if path.is_absolute() else self.base_path / path
+        # get file timestamps
+        mime_type, _ = mimetypes.guess_type(full_path.name)
+
+        # .canvas files are json
+        if full_path.suffix == ".canvas":
+            mime_type = "application/json"
+
+        content_type = mime_type or "text/plain"
+        return content_type
+
+    def is_markdown(self, path: Union[Path, str]) -> bool:
+        """
+        Return content_type for a given path.
+        :param path:
+        :return:
+        """
+        return self.content_type(path) == "text/markdown"
```
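The new `content_type` / `is_markdown` helpers are thin wrappers over the standard library, and `compute_checksum` uses `is_markdown` to pick `read_text` vs `read_bytes`, which is what lets 0.8.0 checksum binary files. A standalone sketch of the same lookup, assuming the running Python's `mimetypes` maps `.md` to `text/markdown` (true on recent CPython); the paths are illustrative:

```python
import mimetypes
from pathlib import Path

def content_type(path: Path) -> str:
    mime_type, _ = mimetypes.guess_type(path.name)
    if path.suffix == ".canvas":
        mime_type = "application/json"  # Obsidian .canvas files are JSON on disk
    return mime_type or "text/plain"    # unknown extensions fall back to text/plain

assert content_type(Path("notes/idea.md")) == "text/markdown"
assert content_type(Path("boards/plan.canvas")) == "application/json"
assert content_type(Path("README")) == "text/plain"
```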
basic_memory/services/link_resolver.py
CHANGED

```diff
@@ -4,11 +4,11 @@ from typing import Optional, Tuple, List
 
 from loguru import logger
 
+from basic_memory.models import Entity
 from basic_memory.repository.entity_repository import EntityRepository
 from basic_memory.repository.search_repository import SearchIndexRow
-from basic_memory.services.search_service import SearchService
-from basic_memory.models import Entity
 from basic_memory.schemas.search import SearchQuery, SearchItemType
+from basic_memory.services.search_service import SearchService
 
 
 class LinkResolver:
@@ -58,7 +58,8 @@
             logger.debug(
                 f"Selected best match from {len(results)} results: {best_match.permalink}"
             )
-
+            if best_match.permalink:
+                return await self.entity_repository.get_by_permalink(best_match.permalink)
 
         # if we couldn't find anything then return None
         return None
@@ -103,12 +104,14 @@
         scored_results = []
         for result in results:
             # Start with base score (lower is better)
-            score = result.score
-
-
-
-
-
+            score = result.score or 0
+
+            if result.permalink:
+                # Parse path components
+                path_parts = result.permalink.lower().split("/")
+                last_part = path_parts[-1] if path_parts else ""
+            else:
+                last_part = ""  # pragma: no cover
 
             # Title word match boosts
             term_matches = [term for term in terms if term in last_part]
```
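In the scoring loop, both `result.score` and `result.permalink` can now be `None` (e.g. rows for non-markdown files), so the resolver falls back to a neutral score and an empty path part instead of raising. A minimal sketch of the guard; `SearchRow` is a stand-in dataclass, not the package's actual type:

```python
from dataclasses import dataclass
from typing import Optional

@dataclass
class SearchRow:  # stand-in for the repository's search result row
    permalink: Optional[str]
    score: Optional[float]

def base_score_and_last_part(result: SearchRow) -> tuple[float, str]:
    score = result.score or 0  # None score -> neutral 0
    if result.permalink:
        path_parts = result.permalink.lower().split("/")
        last_part = path_parts[-1] if path_parts else ""
    else:
        last_part = ""  # no permalink: nothing for term matching to boost
    return score, last_part

assert base_score_and_last_part(SearchRow("specs/Search", 1.5)) == (1.5, "search")
assert base_score_and_last_part(SearchRow(None, None)) == (0, "")
```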
basic_memory/services/search_service.py
CHANGED

```diff
@@ -3,6 +3,7 @@
 from datetime import datetime
 from typing import List, Optional, Set
 
+from dateparser import parse
 from fastapi import BackgroundTasks
 from loguru import logger
 
@@ -69,7 +70,7 @@
                 (
                     query.after_date
                     if isinstance(query.after_date, datetime)
-                    else
+                    else parse(query.after_date)
                 )
                 if query.after_date
                 else None
@@ -118,6 +119,46 @@
         self,
         entity: Entity,
         background_tasks: Optional[BackgroundTasks] = None,
+    ) -> None:
+        if background_tasks:
+            background_tasks.add_task(self.index_entity_data, entity)
+        else:
+            await self.index_entity_data(entity)
+
+    async def index_entity_data(
+        self,
+        entity: Entity,
+    ) -> None:
+        # delete all search index data associated with entity
+        await self.repository.delete_by_entity_id(entity_id=entity.id)
+
+        # reindex
+        await self.index_entity_markdown(
+            entity
+        ) if entity.is_markdown else await self.index_entity_file(entity)
+
+    async def index_entity_file(
+        self,
+        entity: Entity,
+    ) -> None:
+        # Index entity file with no content
+        await self.repository.index_item(
+            SearchIndexRow(
+                id=entity.id,
+                type=SearchItemType.ENTITY.value,
+                title=entity.title,
+                file_path=entity.file_path,
+                metadata={
+                    "entity_type": entity.entity_type,
+                },
+                created_at=entity.created_at,
+                updated_at=entity.updated_at,
+            )
+        )
+
+    async def index_entity_markdown(
+        self,
+        entity: Entity,
     ) -> None:
         """Index an entity and all its observations and relations.
 
@@ -136,16 +177,10 @@
 
         Each type gets its own row in the search index with appropriate metadata.
         """
-        if background_tasks:
-            background_tasks.add_task(self.index_entity_data, entity)
-        else:
-            await self.index_entity_data(entity)
 
-
-
-
-        ) -> None:
-        """Actually perform the indexing."""
+        assert entity.permalink is not None, (
+            "entity.permalink should not be None for markdown entities"
+        )
 
         content_parts = []
         title_variants = self._generate_variants(entity.title)
@@ -160,6 +195,9 @@
 
         entity_content = "\n".join(p for p in content_parts if p and p.strip())
 
+        assert entity.permalink is not None, (
+            "entity.permalink should not be None for markdown entities"
+        )
         # Index entity
         await self.repository.index_item(
             SearchIndexRow(
@@ -169,6 +207,7 @@
                 content=entity_content,
                 permalink=entity.permalink,
                 file_path=entity.file_path,
+                entity_id=entity.id,
                 metadata={
                     "entity_type": entity.entity_type,
                 },
@@ -214,6 +253,7 @@
                     permalink=rel.permalink,
                     file_path=entity.file_path,
                     type=SearchItemType.RELATION.value,
+                    entity_id=entity.id,
                     from_id=rel.from_id,
                     to_id=rel.to_id,
                     relation_type=rel.relation_type,
@@ -222,6 +262,10 @@
             )
         )
 
-    async def delete_by_permalink(self,
+    async def delete_by_permalink(self, permalink: str):
+        """Delete an item from the search index."""
+        await self.repository.delete_by_permalink(permalink)
+
+    async def delete_by_entity_id(self, entity_id: int):
         """Delete an item from the search index."""
-        await self.repository.
+        await self.repository.delete_by_entity_id(entity_id)
```
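Indexing is now split by file kind: markdown entities get full-content rows (with the asserted non-None permalink), while other files get a title-and-metadata-only row. A runnable sketch of the dispatch; `Indexer` and `Note` are stand-ins for `SearchService` and the entity model:

```python
import asyncio
from dataclasses import dataclass

@dataclass
class Note:  # stand-in for the Entity model
    title: str
    is_markdown: bool

class Indexer:  # stand-in for SearchService
    async def index_entity_markdown(self, entity: Note) -> None:
        print(f"full index (content + observations + relations): {entity.title}")

    async def index_entity_file(self, entity: Note) -> None:
        print(f"file-only index (no content column): {entity.title}")

    async def index_entity_data(self, entity: Note) -> None:
        # 0.8.0 first wipes the entity's old index rows, then reindexes;
        # shown here as the same markdown-vs-file dispatch used above.
        if entity.is_markdown:
            await self.index_entity_markdown(entity)
        else:
            await self.index_entity_file(entity)

asyncio.run(Indexer().index_entity_data(Note("search spec", True)))
asyncio.run(Indexer().index_entity_data(Note("diagram.png", False)))
```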
basic_memory/sync/__init__.py
CHANGED

```diff
@@ -1,5 +1,6 @@
-
+"""Basic Memory sync services."""
+
 from .sync_service import SyncService
 from .watch_service import WatchService
 
-__all__ = ["SyncService", "
+__all__ = ["SyncService", "WatchService"]
```