basic-memory 0.11.0__py3-none-any.whl → 0.12.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of basic-memory might be problematic.
- basic_memory/__init__.py +1 -1
- basic_memory/api/app.py +11 -3
- basic_memory/cli/app.py +12 -7
- basic_memory/cli/commands/mcp.py +18 -9
- basic_memory/cli/commands/sync.py +9 -8
- basic_memory/cli/commands/tool.py +28 -15
- basic_memory/cli/main.py +12 -44
- basic_memory/config.py +30 -6
- basic_memory/db.py +3 -1
- basic_memory/file_utils.py +3 -0
- basic_memory/markdown/entity_parser.py +16 -7
- basic_memory/markdown/utils.py +21 -13
- basic_memory/mcp/prompts/continue_conversation.py +4 -4
- basic_memory/mcp/prompts/search.py +2 -2
- basic_memory/mcp/server.py +29 -3
- basic_memory/mcp/tools/read_note.py +2 -3
- basic_memory/mcp/tools/search.py +64 -28
- basic_memory/mcp/tools/write_note.py +3 -1
- basic_memory/repository/repository.py +0 -4
- basic_memory/repository/search_repository.py +11 -11
- basic_memory/schemas/search.py +2 -2
- basic_memory/services/context_service.py +1 -1
- basic_memory/services/entity_service.py +10 -10
- basic_memory/services/file_service.py +1 -1
- basic_memory/services/initialization.py +143 -0
- basic_memory/services/link_resolver.py +8 -1
- basic_memory/services/search_service.py +3 -23
- basic_memory/sync/sync_service.py +120 -191
- basic_memory/sync/watch_service.py +49 -30
- basic_memory/utils.py +10 -2
- {basic_memory-0.11.0.dist-info → basic_memory-0.12.1.dist-info}/METADATA +42 -11
- {basic_memory-0.11.0.dist-info → basic_memory-0.12.1.dist-info}/RECORD +35 -34
- {basic_memory-0.11.0.dist-info → basic_memory-0.12.1.dist-info}/WHEEL +0 -0
- {basic_memory-0.11.0.dist-info → basic_memory-0.12.1.dist-info}/entry_points.txt +0 -0
- {basic_memory-0.11.0.dist-info → basic_memory-0.12.1.dist-info}/licenses/LICENSE +0 -0
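The hunks rendered below are all from basic_memory/sync/sync_service.py, the largest change in this release (+120/-191). Three themes run through it: SyncService gains a required ProjectConfig dependency, the rich-based progress UI is stripped out of sync() in favor of a plain sequential pass, and structured keyword-argument log calls are rewritten as interpolated f-strings. Per the new signature in the diff, sync() now takes only a directory and returns a SyncReport. A minimal usage sketch, assuming an already-wired SyncService instance named sync_service:

```python
# Sketch only: sync() now takes just a directory and returns a SyncReport
# (the show_progress/rich plumbing visible in the removed lines is gone).
import asyncio
from pathlib import Path

async def run_sync(sync_service, directory: Path) -> None:
    report = await sync_service.sync(directory)
    # SyncReport tracks new/modified/deleted/moved files, per the diff below
    print(f"total changes: {report.total}")

# asyncio.run(run_sync(sync_service, Path("~/basic-memory").expanduser()))
```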
@@ -11,13 +11,14 @@ from typing import Dict, Optional, Set, Tuple
 from loguru import logger
 from sqlalchemy.exc import IntegrityError
 
+from basic_memory.config import ProjectConfig
+from basic_memory.file_utils import has_frontmatter
 from basic_memory.markdown import EntityParser
 from basic_memory.models import Entity
 from basic_memory.repository import EntityRepository, RelationRepository
 from basic_memory.services import EntityService, FileService
 from basic_memory.services.search_service import SearchService
 import time
-from rich.progress import Progress, TextColumn, BarColumn, TaskProgressColumn
 
 
 @dataclass
@@ -65,6 +66,7 @@ class SyncService:
 
     def __init__(
         self,
+        config: ProjectConfig,
        entity_service: EntityService,
        entity_parser: EntityParser,
        entity_repository: EntityRepository,
@@ -72,6 +74,7 @@ class SyncService:
        search_service: SearchService,
        file_service: FileService,
     ):
+        self.config = config
        self.entity_service = entity_service
        self.entity_parser = entity_parser
        self.entity_repository = entity_repository
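Callers that construct SyncService directly must now pass the project config first. A hedged wiring sketch follows; the constructor parameter between entity_repository and search_service is not visible in this diff, so relation_repository is an inference from the file's imports, not a confirmed name:

```python
# Assumed wiring for 0.12.x: config is now the first constructor argument.
from basic_memory.sync.sync_service import SyncService

def build_sync_service(config, entity_service, entity_parser, entity_repository,
                       relation_repository, search_service, file_service):
    return SyncService(
        config=config,                # new required dependency (ProjectConfig)
        entity_service=entity_service,
        entity_parser=entity_parser,
        entity_repository=entity_repository,
        relation_repository=relation_repository,  # assumption, not shown in the hunks
        search_service=search_service,
        file_service=file_service,
    )
```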
@@ -79,145 +82,51 @@ class SyncService:
        self.search_service = search_service
        self.file_service = file_service
 
-    async def sync(self, directory: Path
+    async def sync(self, directory: Path) -> SyncReport:
        """Sync all files with database."""
 
        start_time = time.time()
-
-        progress = None  # Will be initialized if show_progress is True
-
-        logger.info("Sync operation started", directory=str(directory))
+        logger.info(f"Sync operation started for directory: {directory}")
 
        # initial paths from db to sync
        # path -> checksum
-        if show_progress:
-            from rich.console import Console
-
-            console = Console()
-            console.print(f"Scanning directory: {directory}")
-
        report = await self.scan(directory)
 
        # Initialize progress tracking if requested
-        if show_progress and report.total > 0:
-            progress = Progress(
-                TextColumn("[bold blue]{task.description}"),
-                BarColumn(),
-                TaskProgressColumn(),
-                console=console,
-                expand=True,
-            )
-
        # order of sync matters to resolve relations effectively
        logger.info(
-            "Sync changes detected"
-            modified_files=len(report.modified),
-            deleted_files=len(report.deleted),
-            moved_files=len(report.moves),
+            f"Sync changes detected: new_files={len(report.new)}, modified_files={len(report.modified)}, "
+            + f"deleted_files={len(report.deleted)}, moved_files={len(report.moves)}"
        )
 
-        if report.deleted:  # pragma: no cover
-            delete_task = progress.add_task(  # pyright: ignore
-                "[red]Deleting files...", total=len(report.deleted)
-            )
-
-        new_task = None
-        if report.new:
-            new_task = progress.add_task(  # pyright: ignore
-                "[green]Adding new files...", total=len(report.new)
-            )
-
-        modify_task = None
-        if report.modified:  # pragma: no cover
-            modify_task = progress.add_task(  # pyright: ignore
-                "[yellow]Updating modified files...", total=len(report.modified)
-            )
+        # sync moves first
+        for old_path, new_path in report.moves.items():
+            # in the case where a file has been deleted and replaced by another file
+            # it will show up in the move and modified lists, so handle it in modified
+            if new_path in report.modified:
+                report.modified.remove(new_path)
+                logger.debug(
+                    f"File marked as moved and modified: old_path={old_path}, new_path={new_path}"
+                )
+            else:
+                await self.handle_move(old_path, new_path)
 
-        # it will show up in the move and modified lists, so handle it in modified
-        if new_path in report.modified:  # pragma: no cover
-            report.modified.remove(new_path)
-            logger.debug(
-                "File marked as moved and modified",
-                old_path=old_path,
-                new_path=new_path,
-                action="processing as modified",
-            )
-        else:  # pragma: no cover
-            await self.handle_move(old_path, new_path)
-
-            if move_task is not None:  # pragma: no cover
-                progress.update(move_task, advance=1)  # pyright: ignore
-
-        # deleted next
-        for i, path in enumerate(report.deleted):  # pragma: no cover
-            await self.handle_delete(path)
-            if delete_task is not None:  # pragma: no cover
-                progress.update(delete_task, advance=1)  # pyright: ignore
-
-        # then new and modified
-        for i, path in enumerate(report.new):
-            await self.sync_file(path, new=True)
-            if new_task is not None:
-                progress.update(new_task, advance=1)  # pyright: ignore
-
-        for i, path in enumerate(report.modified):  # pragma: no cover
-            await self.sync_file(path, new=False)
-            if modify_task is not None:  # pragma: no cover
-                progress.update(modify_task, advance=1)  # pyright: ignore
-
-        # Final step - resolving relations
-        if report.total > 0:
-            relation_task = progress.add_task("[cyan]Resolving relations...", total=1)  # pyright: ignore
-            await self.resolve_relations()
-            progress.update(relation_task, advance=1)  # pyright: ignore
-        else:
-            # No progress display - proceed with normal sync
-            # sync moves first
-            for old_path, new_path in report.moves.items():
-                # in the case where a file has been deleted and replaced by another file
-                # it will show up in the move and modified lists, so handle it in modified
-                if new_path in report.modified:
-                    report.modified.remove(new_path)
-                    logger.debug(
-                        "File marked as moved and modified",
-                        old_path=old_path,
-                        new_path=new_path,
-                        action="processing as modified",
-                    )
-                else:
-                    await self.handle_move(old_path, new_path)
-
-            # deleted next
-            for path in report.deleted:
-                await self.handle_delete(path)
+        # deleted next
+        for path in report.deleted:
+            await self.handle_delete(path)
 
+        # then new and modified
+        for path in report.new:
+            await self.sync_file(path, new=True)
 
+        for path in report.modified:
+            await self.sync_file(path, new=False)
 
+        await self.resolve_relations()
 
        duration_ms = int((time.time() - start_time) * 1000)
        logger.info(
-            "Sync operation completed"
-            directory=str(directory),
-            total_changes=report.total,
-            duration_ms=duration_ms,
+            f"Sync operation completed: directory={directory}, total_changes={report.total}, duration_ms={duration_ms}"
        )
 
        return report
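The rewritten sync() keeps the processing order the old code used on its no-progress path: moves, then deletions, then new files, then modified files, then a final resolve_relations() pass. It also keeps the edge case where a deleted file replaced by another appears in both report.moves and report.modified and must be processed as modified only. A toy, self-contained restatement of that de-duplication rule:

```python
# Toy illustration (plain data, no services) of the move/modified overlap rule.
moves = {"notes/old.md": "notes/new.md"}
modified = {"notes/new.md"}  # the file at the new path also changed content

for old_path, new_path in moves.items():
    if new_path in modified:
        # already queued as modified; skip move handling, sync as modified later
        print(f"{new_path}: moved and modified, handled as modified")
    else:
        print(f"{old_path} -> {new_path}: handled as move")
```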
@@ -226,6 +135,7 @@ class SyncService:
        """Scan directory for changes compared to database state."""
 
        db_paths = await self.get_db_file_state()
+        logger.debug(f"Found {len(db_paths)} db paths")
 
        # Track potentially moved files by checksum
        scan_result = await self.scan_directory(directory)
@@ -276,6 +186,7 @@ class SyncService:
        :param db_records: the data from the db
        """
        db_records = await self.entity_repository.find_all()
+        logger.info(f"Found {len(db_records)} db records")
        return {r.file_path: r.checksum or "" for r in db_records}
 
    async def sync_file(
@@ -292,10 +203,7 @@ class SyncService:
        """
        try:
            logger.debug(
-                "Syncing file"
-                path=path,
-                is_new=new,
-                is_markdown=self.file_service.is_markdown(path),
+                f"Syncing file path={path} is_new={new} is_markdown={self.file_service.is_markdown(path)}"
            )
 
            if self.file_service.is_markdown(path):
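This hunk shows the logging rewrite that recurs through the rest of the file: calls of the form logger.debug("msg", key=value) become single interpolated f-strings. With loguru, a structured alternative would be logger.bind(); the sketch below only contrasts the two styles and makes no claim about why the package chose one over the other:

```python
from loguru import logger

path = "notes/example.md"

# 0.12.x style, as adopted throughout this diff:
logger.debug(f"Syncing file path={path} is_new=True")

# A structured alternative loguru also supports (not what the package chose):
logger.bind(path=path, is_new=True).debug("Syncing file")
```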
@@ -307,7 +215,7 @@ class SyncService:
                await self.search_service.index_entity(entity)
 
            logger.debug(
-                "File sync completed
+                f"File sync completed, path={path}, entity_id={entity.id}, checksum={checksum[:8]}"
            )
            return entity, checksum
 
@@ -326,54 +234,59 @@ class SyncService:
            Tuple of (entity, checksum)
        """
        # Parse markdown first to get any existing permalink
-        logger.debug("Parsing markdown file
+        logger.debug(f"Parsing markdown file, path: {path}, new: {new}")
+
+        file_path = self.entity_parser.base_path / path
+        file_content = file_path.read_text()
+        file_contains_frontmatter = has_frontmatter(file_content)
+
+        # entity markdown will always contain front matter, so it can be used up create/update the entity
        entity_markdown = await self.entity_parser.parse_file(path)
 
-        #
+        # if the file contains frontmatter, resolve a permalink
+        if file_contains_frontmatter:
+            # Resolve permalink - this handles all the cases including conflicts
+            permalink = await self.entity_service.resolve_permalink(path, markdown=entity_markdown)
 
-            old_permalink=entity_markdown.frontmatter.permalink,
-            new_permalink=permalink,
-        )
+            # If permalink changed, update the file
+            if permalink != entity_markdown.frontmatter.permalink:
+                logger.info(
+                    f"Updating permalink for path: {path}, old_permalink: {entity_markdown.frontmatter.permalink}, new_permalink: {permalink}"
+                )
 
-        else:
-            checksum = await self.file_service.compute_checksum(path)
+                entity_markdown.frontmatter.metadata["permalink"] = permalink
+                await self.file_service.update_frontmatter(path, {"permalink": permalink})
 
        # if the file is new, create an entity
        if new:
            # Create entity with final permalink
-            logger.debug("Creating new entity from markdown
+            logger.debug(f"Creating new entity from markdown, path={path}")
            await self.entity_service.create_entity_from_markdown(Path(path), entity_markdown)
 
        # otherwise we need to update the entity and observations
        else:
-            logger.debug("Updating entity from markdown
+            logger.debug(f"Updating entity from markdown, path={path}")
            await self.entity_service.update_entity_and_observations(Path(path), entity_markdown)
 
        # Update relations and search index
        entity = await self.entity_service.update_entity_relations(path, entity_markdown)
 
+        # After updating relations, we need to compute the checksum again
+        # This is necessary for files with wikilinks to ensure consistent checksums
+        # after relation processing is complete
+        final_checksum = await self.file_service.compute_checksum(path)
+
        # set checksum
-        await self.entity_repository.update(entity.id, {"checksum":
+        await self.entity_repository.update(entity.id, {"checksum": final_checksum})
 
        logger.debug(
-            "Markdown sync completed"
-            observation_count=len(entity.observations),
-            relation_count=len(entity.relations),
+            f"Markdown sync completed: path={path}, entity_id={entity.id}, "
+            f"observation_count={len(entity.observations)}, relation_count={len(entity.relations)}, "
+            f"checksum={final_checksum[:8]}"
        )
 
-
+        # Return the final checksum to ensure everything is consistent
+        return entity, final_checksum
 
    async def sync_regular_file(self, path: str, new: bool = True) -> Tuple[Optional[Entity], str]:
        """Sync a non-markdown file with basic tracking.
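The markdown sync path now reads the file up front and only resolves and rewrites the permalink when the file already carries frontmatter, then recomputes the checksum after relation processing so files whose wikilinks were rewritten still store a consistent checksum. has_frontmatter comes from basic_memory.file_utils (also touched in this release), but its real implementation is not shown on this page, so the stand-in below is an assumption for illustration only:

```python
# Hypothetical stand-in for basic_memory.file_utils.has_frontmatter,
# assuming "frontmatter" means a leading "---" YAML block.
def has_frontmatter(content: str) -> bool:
    return content.lstrip().startswith("---")

print(has_frontmatter("---\npermalink: notes/example\n---\n# Title"))  # True
print(has_frontmatter("# Title\nNo frontmatter here"))                 # False
```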
@@ -414,7 +327,7 @@ class SyncService:
        else:
            entity = await self.entity_repository.get_by_file_path(path)
            if entity is None:  # pragma: no cover
-                logger.error("Entity not found for existing file
+                logger.error(f"Entity not found for existing file, path={path}")
                raise ValueError(f"Entity not found for existing file: {path}")
 
        updated = await self.entity_repository.update(
@@ -422,7 +335,7 @@ class SyncService:
        )
 
        if updated is None:  # pragma: no cover
-            logger.error("Failed to update entity
+            logger.error(f"Failed to update entity, entity_id={entity.id}, path={path}")
            raise ValueError(f"Failed to update entity with ID {entity.id}")
 
        return updated, checksum
@@ -434,10 +347,7 @@ class SyncService:
        entity = await self.entity_repository.get_by_file_path(file_path)
        if entity:
            logger.info(
-                "Deleting entity"
-                file_path=file_path,
-                entity_id=entity.id,
-                permalink=entity.permalink,
+                f"Deleting entity with file_path={file_path}, entity_id={entity.id}, permalink={entity.permalink}"
            )
 
            # Delete from db (this cascades to observations/relations)
@@ -451,10 +361,8 @@ class SyncService:
            )
 
            logger.debug(
-                "Cleaning up search index"
-                file_path=file_path,
-                index_entries=len(permalinks),
+                f"Cleaning up search index for entity_id={entity.id}, file_path={file_path}, "
+                f"index_entries={len(permalinks)}"
            )
 
            for permalink in permalinks:
@@ -464,28 +372,49 @@ class SyncService:
            await self.search_service.delete_by_entity_id(entity.id)
 
    async def handle_move(self, old_path, new_path):
-        logger.
+        logger.debug("Moving entity", old_path=old_path, new_path=new_path)
 
        entity = await self.entity_repository.get_by_file_path(old_path)
        if entity:
-            # Update file_path
-
+            # Update file_path in all cases
+            updates = {"file_path": new_path}
+
+            # If configured, also update permalink to match new path
+            if self.config.update_permalinks_on_move:
+                # generate new permalink value
+                new_permalink = await self.entity_service.resolve_permalink(new_path)
+
+                # write to file and get new checksum
+                new_checksum = await self.file_service.update_frontmatter(
+                    new_path, {"permalink": new_permalink}
+                )
+
+                updates["permalink"] = new_permalink
+                updates["checksum"] = new_checksum
+
+                logger.info(
+                    f"Updating permalink on move,old_permalink={entity.permalink}"
+                    f"new_permalink={new_permalink}"
+                    f"new_checksum={new_checksum}"
+                )
+
+            updated = await self.entity_repository.update(entity.id, updates)
 
            if updated is None:  # pragma: no cover
                logger.error(
-                    "Failed to update entity path"
-                    entity_id=entity.id
-                    old_path=old_path
-                    new_path=new_path
+                    "Failed to update entity path"
+                    f"entity_id={entity.id}"
+                    f"old_path={old_path}"
+                    f"new_path={new_path}"
                )
                raise ValueError(f"Failed to update entity path for ID {entity.id}")
 
            logger.debug(
-                "Entity path updated"
-                entity_id=entity.id
-                permalink=entity.permalink
-                old_path=old_path
-                new_path=new_path
+                "Entity path updated"
+                f"entity_id={entity.id} "
+                f"permalink={entity.permalink} "
+                f"old_path={old_path} "
+                f"new_path={new_path} "
            )
 
            # update search index
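handle_move() now accumulates its repository update in a dict and, when the new config flag update_permalinks_on_move is set, also regenerates the permalink and rewrites the file's frontmatter, which changes the stored checksum. A synchronous toy restatement of that update-planning logic, with a stand-in for the real async entity_service.resolve_permalink:

```python
# Toy restatement of the new move-update logic; resolve_permalink_stub is an
# assumption standing in for the real async entity_service.resolve_permalink.
def resolve_permalink_stub(path: str) -> str:
    return path.removesuffix(".md").lower().replace(" ", "-")

def plan_move_updates(new_path: str, update_permalinks_on_move: bool) -> dict:
    updates = {"file_path": new_path}
    if update_permalinks_on_move:
        updates["permalink"] = resolve_permalink_stub(new_path)
        # the real code also rewrites frontmatter and stores the new checksum
    return updates

print(plan_move_updates("notes/New Name.md", True))
```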
@@ -500,10 +429,10 @@ class SyncService:
 
        for relation in unresolved_relations:
            logger.debug(
-                "Attempting to resolve relation"
-                relation_id=relation.id
-                from_id=relation.from_id
-                to_name=relation.to_name
+                "Attempting to resolve relation "
+                f"relation_id={relation.id} "
+                f"from_id={relation.from_id} "
+                f"to_name={relation.to_name}"
            )
 
            resolved_entity = await self.entity_service.link_resolver.resolve_link(relation.to_name)
@@ -511,12 +440,12 @@ class SyncService:
            # ignore reference to self
            if resolved_entity and resolved_entity.id != relation.from_id:
                logger.debug(
-                    "Resolved forward reference"
-                    relation_id=relation.id
-                    from_id=relation.from_id
-                    to_name=relation.to_name
-                    resolved_id=resolved_entity.id
-                    resolved_title=resolved_entity.title,
+                    "Resolved forward reference "
+                    f"relation_id={relation.id} "
+                    f"from_id={relation.from_id} "
+                    f"to_name={relation.to_name} "
+                    f"resolved_id={resolved_entity.id} "
+                    f"resolved_title={resolved_entity.title}",
                )
                try:
                    await self.relation_repository.update(
@@ -528,10 +457,10 @@ class SyncService:
                    )
                except IntegrityError:  # pragma: no cover
                    logger.debug(
-                        "Ignoring duplicate relation"
-                        relation_id=relation.id
-                        from_id=relation.from_id
-                        to_name=relation.to_name
+                        "Ignoring duplicate relation "
+                        f"relation_id={relation.id} "
+                        f"from_id={relation.from_id} "
+                        f"to_name={relation.to_name}"
                    )
 
            # update search index
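resolve_relations() runs last because files synced early in a pass may reference notes that are only indexed later in the same pass; once everything is synced, each unresolved wikilink is retried via link_resolver.resolve_link(), self-references are skipped, and duplicates that violate a constraint raise IntegrityError and are ignored. A toy model of that retry pass over plain data:

```python
# Toy model of forward-reference resolution (plain dicts, not the package's API).
entities_by_name = {"target-note": 2}
unresolved = [
    {"id": 1, "from_id": 5, "to_name": "target-note"},
    {"id": 2, "from_id": 2, "to_name": "target-note"},  # self-reference, skipped
]

for rel in unresolved:
    resolved_id = entities_by_name.get(rel["to_name"])
    if resolved_id is not None and resolved_id != rel["from_id"]:
        rel["to_id"] = resolved_id  # the real code persists via relation_repository
```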
@@ -549,7 +478,7 @@ class SyncService:
        """
        start_time = time.time()
 
-        logger.debug("Scanning directory
+        logger.debug(f"Scanning directory {directory}")
        result = ScanResult()
 
        for root, dirnames, filenames in os.walk(str(directory)):
@@ -571,10 +500,10 @@ class SyncService:
 
        duration_ms = int((time.time() - start_time) * 1000)
        logger.debug(
-            "
-            directory=str(directory)
-            files_found=len(result.files)
-            duration_ms=duration_ms
+            f"{directory} scan completed "
+            f"directory={str(directory)} "
+            f"files_found={len(result.files)} "
+            f"duration_ms={duration_ms}"
        )
 
        return result