basic-memory 0.14.3__py3-none-any.whl → 0.14.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- basic_memory/__init__.py +1 -1
- basic_memory/alembic/versions/a1b2c3d4e5f6_fix_project_foreign_keys.py +53 -0
- basic_memory/api/routers/resource_router.py +3 -3
- basic_memory/cli/commands/project.py +9 -10
- basic_memory/config.py +20 -8
- basic_memory/file_utils.py +65 -0
- basic_memory/importers/chatgpt_importer.py +1 -1
- basic_memory/importers/utils.py +2 -2
- basic_memory/markdown/entity_parser.py +2 -2
- basic_memory/markdown/markdown_processor.py +2 -2
- basic_memory/markdown/plugins.py +42 -26
- basic_memory/markdown/utils.py +1 -1
- basic_memory/mcp/tools/build_context.py +12 -2
- basic_memory/mcp/tools/project_management.py +22 -7
- basic_memory/mcp/tools/read_note.py +16 -13
- basic_memory/models/knowledge.py +13 -2
- basic_memory/models/project.py +2 -2
- basic_memory/repository/entity_repository.py +2 -2
- basic_memory/repository/project_repository.py +1 -1
- basic_memory/repository/search_repository.py +7 -3
- basic_memory/schemas/base.py +40 -10
- basic_memory/schemas/memory.py +23 -11
- basic_memory/services/context_service.py +12 -2
- basic_memory/services/directory_service.py +7 -0
- basic_memory/services/entity_service.py +8 -8
- basic_memory/services/project_service.py +11 -11
- basic_memory/sync/sync_service.py +3 -3
- basic_memory/sync/watch_service.py +31 -8
- basic_memory/utils.py +169 -107
- {basic_memory-0.14.3.dist-info → basic_memory-0.14.4.dist-info}/METADATA +20 -91
- {basic_memory-0.14.3.dist-info → basic_memory-0.14.4.dist-info}/RECORD +34 -33
- {basic_memory-0.14.3.dist-info → basic_memory-0.14.4.dist-info}/WHEEL +0 -0
- {basic_memory-0.14.3.dist-info → basic_memory-0.14.4.dist-info}/entry_points.txt +0 -0
- {basic_memory-0.14.3.dist-info → basic_memory-0.14.4.dist-info}/licenses/LICENSE +0 -0
basic_memory/models/knowledge.py
CHANGED
@@ -1,6 +1,7 @@
 """Knowledge graph models."""
 
 from datetime import datetime
+from basic_memory.utils import ensure_timezone_aware
 from typing import Optional
 
 from sqlalchemy import (
@@ -73,8 +74,8 @@ class Entity(Base):
     checksum: Mapped[Optional[str]] = mapped_column(String, nullable=True)
 
     # Metadata and tracking
-    created_at: Mapped[datetime] = mapped_column(DateTime)
-    updated_at: Mapped[datetime] = mapped_column(DateTime)
+    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=lambda: datetime.now().astimezone())
+    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=lambda: datetime.now().astimezone(), onupdate=lambda: datetime.now().astimezone())
 
     # Relationships
     project = relationship("Project", back_populates="entities")
@@ -103,6 +104,16 @@ class Entity(Base):
     def is_markdown(self):
         """Check if the entity is a markdown file."""
         return self.content_type == "text/markdown"
+
+    def __getattribute__(self, name):
+        """Override attribute access to ensure datetime fields are timezone-aware."""
+        value = super().__getattribute__(name)
+
+        # Ensure datetime fields are timezone-aware
+        if name in ('created_at', 'updated_at') and isinstance(value, datetime):
+            return ensure_timezone_aware(value)
+
+        return value
 
     def __repr__(self) -> str:
         return f"Entity(id={self.id}, name='{self.title}', type='{self.entity_type}'"
basic_memory/models/project.py
CHANGED
@@ -52,9 +52,9 @@ class Project(Base):
     is_default: Mapped[Optional[bool]] = mapped_column(Boolean, default=None, nullable=True)
 
     # Timestamps
-    created_at: Mapped[datetime] = mapped_column(DateTime, default=lambda: datetime.now(UTC))
+    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=lambda: datetime.now(UTC))
     updated_at: Mapped[datetime] = mapped_column(
-        DateTime, default=lambda: datetime.now(UTC), onupdate=lambda: datetime.now(UTC)
+        DateTime(timezone=True), default=lambda: datetime.now(UTC), onupdate=lambda: datetime.now(UTC)
     )
 
     # Define relationships to entities, observations, and relations
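Note that the two models pick different aware defaults: Project pins UTC (UTC here is presumably datetime.timezone.utc), while Entity above uses the local system timezone. Both styles produce aware values that compare safely with each other:

from datetime import datetime, timedelta, timezone

now_utc = datetime.now(timezone.utc)     # aware, +00:00 (matches the Project defaults)
now_local = datetime.now().astimezone()  # aware, local offset (matches the Entity defaults)

# Aware datetimes compare on the underlying instant regardless of offset,
# so mixing the two styles is safe; mixing naive and aware is not.
assert abs(now_utc - now_local) < timedelta(seconds=1)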
basic_memory/repository/entity_repository.py
CHANGED
@@ -57,7 +57,7 @@ class EntityRepository(Repository[Entity]):
         """
         query = (
             self.select()
-            .where(Entity.file_path == str(file_path))
+            .where(Entity.file_path == Path(file_path).as_posix())
             .options(*self.get_load_options())
         )
         return await self.find_one(query)
@@ -68,7 +68,7 @@ class EntityRepository(Repository[Entity]):
         Args:
             file_path: Path to the entity file (will be converted to string internally)
         """
-        return await self.delete_by_fields(file_path=str(file_path))
+        return await self.delete_by_fields(file_path=Path(file_path).as_posix())
 
     def get_load_options(self) -> List[LoaderOption]:
         """Get SQLAlchemy loader options for eager loading relationships."""
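Path(file_path).as_posix() gives one canonical lookup key no matter which separator style produced the stored value. The repository code uses the platform-dependent Path; the Pure* variants make the per-platform behavior explicit:

from pathlib import PurePosixPath, PureWindowsPath

# On Windows, backslash and forward-slash spellings collapse to one form:
PureWindowsPath(r"notes\ideas\todo.md").as_posix()  # 'notes/ideas/todo.md'
PureWindowsPath("notes/ideas/todo.md").as_posix()   # 'notes/ideas/todo.md'

# On POSIX systems forward-slash paths are already canonical:
PurePosixPath("notes/ideas/todo.md").as_posix()     # 'notes/ideas/todo.md'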
basic_memory/repository/project_repository.py
CHANGED
@@ -46,7 +46,7 @@ class ProjectRepository(Repository[Project]):
         Args:
             path: Path to the project directory (will be converted to string internally)
         """
-        query = self.select().where(Project.path == str(path))
+        query = self.select().where(Project.path == Path(path).as_posix())
         return await self.find_one(query)
 
     async def get_default_project(self) -> Optional[Project]:
basic_memory/repository/search_repository.py
CHANGED
@@ -6,6 +6,7 @@ import time
 from dataclasses import dataclass
 from datetime import datetime
 from typing import Any, Dict, List, Optional
+from pathlib import Path
 
 from loguru import logger
 from sqlalchemy import Executable, Result, text
@@ -59,8 +60,11 @@ class SearchIndexRow:
         if not self.type == SearchItemType.ENTITY.value and not self.file_path:
             return ""
 
+        # Normalize path separators to handle both Windows (\) and Unix (/) paths
+        normalized_path = Path(self.file_path).as_posix()
+
         # Split the path by slashes
-        parts = self.file_path.split("/")
+        parts = normalized_path.split("/")
 
         # If there's only one part (e.g., "README.md"), it's at the root
         if len(parts) <= 1:
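Pulled together, the directory computation behaves like this sketch; the hunk ends at the len(parts) check, so the exact return values past that point are inferred:

from pathlib import Path

def directory_of(file_path: str) -> str:
    # Sketch of SearchIndexRow.directory; edge-case returns are inferred,
    # since the hunk above only shows the normalization and the split.
    parts = Path(file_path).as_posix().split("/")
    if len(parts) <= 1:
        return ""  # a file like "README.md" lives at the root
    return "/".join(parts[:-1])

directory_of("docs/guides/setup.md")  # 'docs/guides'
# On Windows, Path() parses backslashes too, so "docs\\guides\\setup.md"
# normalizes to the same 'docs/guides'.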
@@ -523,8 +527,8 @@ class SearchRepository:
         async with db.scoped_session(self.session_maker) as session:
             # Delete existing record if any
             await session.execute(
-                text("DELETE FROM search_index WHERE permalink = :permalink"),
-                {"permalink": search_index_row.permalink},
+                text("DELETE FROM search_index WHERE permalink = :permalink AND project_id = :project_id"),
+                {"permalink": search_index_row.permalink, "project_id": self.project_id},
             )
 
             # Prepare data for insert with project_id
basic_memory/schemas/base.py
CHANGED
@@ -22,6 +22,8 @@ from dateparser import parse
 
 from pydantic import BaseModel, BeforeValidator, Field, model_validator
 
+from basic_memory.config import ConfigManager
+from basic_memory.file_utils import sanitize_for_filename
 from basic_memory.utils import generate_permalink
 
 
@@ -53,22 +55,28 @@ def parse_timeframe(timeframe: str) -> datetime:
         timeframe: Natural language timeframe like 'today', '1d', '1 week ago', etc.
 
     Returns:
-        datetime: The parsed datetime for the start of the timeframe
+        datetime: The parsed datetime for the start of the timeframe, timezone-aware in local system timezone
 
     Examples:
-        parse_timeframe('today') -> 2025-06-05 00:00:00 (start of today)
-        parse_timeframe('1d') -> 2025-06-04 14:50:00 (24 hours ago)
-        parse_timeframe('1 week ago') -> 2025-05-29 14:50:00 (1 week ago)
+        parse_timeframe('today') -> 2025-06-05 00:00:00-07:00 (start of today with local timezone)
+        parse_timeframe('1d') -> 2025-06-04 14:50:00-07:00 (24 hours ago with local timezone)
+        parse_timeframe('1 week ago') -> 2025-05-29 14:50:00-07:00 (1 week ago with local timezone)
     """
     if timeframe.lower() == "today":
-        # Return start of today (00:00:00)
-        return datetime.combine(datetime.now().date(), time.min)
+        # Return start of today (00:00:00) in local timezone
+        naive_dt = datetime.combine(datetime.now().date(), time.min)
+        return naive_dt.astimezone()
    else:
         # Use dateparser for other formats
         parsed = parse(timeframe)
         if not parsed:
             raise ValueError(f"Could not parse timeframe: {timeframe}")
-        return parsed
+
+        # If the parsed datetime is naive, make it timezone-aware in local system timezone
+        if parsed.tzinfo is None:
+            return parsed.astimezone()
+        else:
+            return parsed
 
 
 def validate_timeframe(timeframe: str) -> str:
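With this change every return path yields an aware datetime, so callers can compare the result against other aware values (such as the new Entity timestamps) without hitting the classic "can't compare offset-naive and offset-aware datetimes" TypeError. Illustrative calls, with offsets depending on the host timezone:

parse_timeframe("today")       # 2025-06-05 00:00:00-07:00  (local midnight)
parse_timeframe("1 week ago")  # 2025-05-29 14:50:00-07:00
parse_timeframe("tomorrow")    # parses, but validate_timeframe() below rejects it
parse_timeframe("gibberish")   # raises ValueError("Could not parse timeframe: ...")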
@@ -85,7 +93,7 @@ def validate_timeframe(timeframe: str) -> str:
     parsed = parse_timeframe(timeframe)
 
     # Convert to duration
-    now = datetime.now()
+    now = datetime.now().astimezone()
     if parsed > now:
         raise ValueError("Timeframe cannot be in the future")
 
@@ -184,13 +192,35 @@ class Entity(BaseModel):
         default="text/markdown",
     )
 
+    @property
+    def safe_title(self) -> str:
+        """
+        A sanitized version of the title, which is safe for use on the filesystem. For example,
+        a title of "Coupon Enable/Disable Feature" should create the file as "Coupon Enable-Disable Feature.md"
+        instead of creating a file named "Disable Feature.md" beneath the "Coupon Enable" directory.
+
+        Replaces POSIX and/or Windows style slashes as well as a few other characters that are not safe for filenames.
+        If kebab_filenames is True, then behavior is consistent with the transformation used when generating permalink
+        strings (e.g. "Coupon Enable/Disable Feature" -> "coupon-enable-disable-feature").
+        """
+        fixed_title = sanitize_for_filename(self.title)
+
+        app_config = ConfigManager().config
+        use_kebab_case = app_config.kebab_filenames
+
+        if use_kebab_case:
+            fixed_title = generate_permalink(file_path=fixed_title, split_extension=False)
+
+        return fixed_title
+
     @property
     def file_path(self):
         """Get the file path for this entity based on its permalink."""
+        safe_title = self.safe_title
         if self.content_type == "text/markdown":
-            return f"{self.folder}/{self.title}.md" if self.folder else f"{self.title}.md"
+            return f"{self.folder}/{safe_title}.md" if self.folder else f"{safe_title}.md"
         else:
-            return f"{self.folder}/{self.title}" if self.folder else self.title
+            return f"{self.folder}/{safe_title}" if self.folder else safe_title
 
     @property
     def permalink(self) -> Permalink:
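The docstring's own example, traced through both configuration modes. The exact substitution characters come from sanitize_for_filename in the new basic_memory/file_utils.py, which this diff does not show, so the non-kebab output is the documented expectation rather than a verified trace, and the sketch assumes title and folder are the only required fields:

e = Entity(title="Coupon Enable/Disable Feature", folder="features")

e.file_path  # kebab_filenames=False -> 'features/Coupon Enable-Disable Feature.md'
e.file_path  # kebab_filenames=True  -> 'features/coupon-enable-disable-feature.md'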
basic_memory/schemas/memory.py
CHANGED
@@ -1,10 +1,10 @@
 """Schemas for memory context."""
 
 from datetime import datetime
-from typing import List, Optional, Annotated, Sequence
+from typing import List, Optional, Annotated, Sequence, Literal, Union
 
 from annotated_types import MinLen, MaxLen
-from pydantic import BaseModel, Field, BeforeValidator, TypeAdapter
+from pydantic import BaseModel, Field, BeforeValidator, TypeAdapter, ConfigDict
 
 from basic_memory.schemas.search import SearchItemType
 
@@ -117,8 +117,10 @@ def memory_url_path(url: memory_url) -> str:  # pyright: ignore
 
 class EntitySummary(BaseModel):
     """Simplified entity representation."""
+
+    model_config = ConfigDict(json_encoders={datetime: lambda dt: dt.isoformat()})
 
-    type: str = "entity"
+    type: Literal["entity"] = "entity"
     permalink: Optional[str]
     title: str
     content: Optional[str] = None
@@ -128,8 +130,10 @@ class EntitySummary(BaseModel):
 
 class RelationSummary(BaseModel):
     """Simplified relation representation."""
+
+    model_config = ConfigDict(json_encoders={datetime: lambda dt: dt.isoformat()})
 
-    type: str = "relation"
+    type: Literal["relation"] = "relation"
     title: str
     file_path: str
     permalink: str
@@ -141,8 +145,10 @@ class RelationSummary(BaseModel):
 
 class ObservationSummary(BaseModel):
     """Simplified observation representation."""
+
+    model_config = ConfigDict(json_encoders={datetime: lambda dt: dt.isoformat()})
 
-    type: str = "observation"
+    type: Literal["observation"] = "observation"
     title: str
     file_path: str
     permalink: str
@@ -153,6 +159,8 @@ class ObservationSummary(BaseModel):
 
 class MemoryMetadata(BaseModel):
     """Simplified response metadata."""
+
+    model_config = ConfigDict(json_encoders={datetime: lambda dt: dt.isoformat()})
 
     uri: Optional[str] = None
     types: Optional[List[SearchItemType]] = None
@@ -169,17 +177,21 @@ class MemoryMetadata(BaseModel):
 class ContextResult(BaseModel):
     """Context result containing a primary item with its observations and related items."""
 
-    primary_result: EntitySummary | RelationSummary | ObservationSummary = Field(
-        description="Primary item"
-    )
+    primary_result: Annotated[
+        Union[EntitySummary, RelationSummary, ObservationSummary],
+        Field(discriminator="type", description="Primary item")
+    ]
 
     observations: Sequence[ObservationSummary] = Field(
         description="Observations belonging to this entity", default_factory=list
     )
 
-    related_results: Sequence[
-        EntitySummary | RelationSummary | ObservationSummary
-    ] = Field(description="Related items", default_factory=list)
+    related_results: Sequence[
+        Annotated[
+            Union[EntitySummary, RelationSummary, ObservationSummary],
+            Field(discriminator="type")
+        ]
+    ] = Field(description="Related items", default_factory=list)
 
 
 class GraphContext(BaseModel):
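The Field(discriminator="type") annotation makes pydantic dispatch on the literal "type" value instead of trying each Union member in declaration order, which speeds up validation and removes ambiguity when two summary shapes overlap. A standalone illustration of the mechanism; the payload uses placeholder values and only the fields visible in the hunks above, so additional required fields may exist:

from typing import Annotated, Union
from pydantic import Field, TypeAdapter

Summary = Annotated[
    Union[EntitySummary, RelationSummary, ObservationSummary],
    Field(discriminator="type"),
]

payload = {"type": "observation", "title": "uses sqlite", "file_path": "notes/db.md",
           "permalink": "notes/db"}  # placeholder values
obj = TypeAdapter(Summary).validate_python(payload)
assert isinstance(obj, ObservationSummary)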
basic_memory/services/context_service.py
CHANGED
@@ -245,8 +245,8 @@ class ContextService:
         # For compatibility with the old query, we still need this for filtering
         values = ", ".join([f"('{t}', {i})" for t, i in type_id_pairs])
 
-        # Parameters for bindings
-        params = {"max_depth": max_depth, "max_results": max_results}
+        # Parameters for bindings - include project_id for security filtering
+        params = {"max_depth": max_depth, "max_results": max_results, "project_id": self.search_repository.project_id}
 
         # Build date and timeframe filters conditionally based on since parameter
         if since:
@@ -258,6 +258,10 @@ class ContextService:
         date_filter = ""
         relation_date_filter = ""
         timeframe_condition = ""
+
+        # Add project filtering for security - ensure all entities and relations belong to the same project
+        project_filter = "AND e.project_id = :project_id"
+        relation_project_filter = "AND e_from.project_id = :project_id"
 
         # Use a CTE that operates directly on entity and relation tables
         # This avoids the overhead of the search_index virtual table
@@ -284,6 +288,7 @@ class ContextService:
                 FROM entity e
                 WHERE e.id IN ({entity_id_values})
                 {date_filter}
+                {project_filter}
 
                 UNION ALL
 
@@ -314,8 +319,12 @@ class ContextService:
                 JOIN entity e_from ON (
                     r.from_id = e_from.id
                     {relation_date_filter}
+                    {relation_project_filter}
                 )
+                LEFT JOIN entity e_to ON (r.to_id = e_to.id)
                 WHERE eg.depth < :max_depth
+                -- Ensure to_entity (if exists) also belongs to same project
+                AND (r.to_id IS NULL OR e_to.project_id = :project_id)
 
                 UNION ALL
 
@@ -348,6 +357,7 @@ class ContextService:
                         ELSE eg.from_id
                     END
                     {date_filter}
+                    {project_filter}
                 )
                 WHERE eg.depth < :max_depth
                 -- Only include entities connected by relations within timeframe if specified
basic_memory/services/directory_service.py
CHANGED
@@ -106,8 +106,15 @@ class DirectoryService:
             List of DirectoryNode objects matching the criteria
         """
         # Normalize directory path
+        # Strip ./ prefix if present (handles relative path notation)
+        if dir_name.startswith("./"):
+            dir_name = dir_name[2:]  # Remove "./" prefix
+
+        # Ensure path starts with "/"
         if not dir_name.startswith("/"):
             dir_name = f"/{dir_name}"
+
+        # Remove trailing slashes except for root
         if dir_name != "/" and dir_name.endswith("/"):
             dir_name = dir_name.rstrip("/")
 
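The net effect of the three normalization steps, as a pure function:

def normalize_dir_name(dir_name: str) -> str:
    # Mirrors the steps above: strip "./", force a leading "/", trim trailing "/".
    if dir_name.startswith("./"):
        dir_name = dir_name[2:]
    if not dir_name.startswith("/"):
        dir_name = f"/{dir_name}"
    if dir_name != "/" and dir_name.endswith("/"):
        dir_name = dir_name.rstrip("/")
    return dir_name

normalize_dir_name("./notes/")  # '/notes'
normalize_dir_name("notes")     # '/notes'
normalize_dir_name("/")         # '/'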
basic_memory/services/entity_service.py
CHANGED
@@ -9,7 +9,7 @@ from loguru import logger
 from sqlalchemy.exc import IntegrityError
 
 from basic_memory.config import ProjectConfig, BasicMemoryConfig
-from basic_memory.file_utils import has_frontmatter, parse_frontmatter, remove_frontmatter
+from basic_memory.file_utils import has_frontmatter, parse_frontmatter, remove_frontmatter, dump_frontmatter
 from basic_memory.markdown import EntityMarkdown
 from basic_memory.markdown.entity_parser import EntityParser
 from basic_memory.markdown.utils import entity_model_from_markdown, schema_to_markdown
@@ -91,7 +91,7 @@ class EntityService(BaseService[EntityModel]):
 
         Enhanced to detect and handle character-related conflicts.
         """
-        file_path_str = str(file_path)
+        file_path_str = Path(file_path).as_posix()
 
         # Check for potential file path conflicts before resolving permalink
         conflicts = await self.detect_file_path_conflicts(file_path_str)
@@ -119,7 +119,7 @@ class EntityService(BaseService[EntityModel]):
         if markdown and markdown.frontmatter.permalink:
             desired_permalink = markdown.frontmatter.permalink
         else:
-            desired_permalink = generate_permalink(file_path)
+            desired_permalink = generate_permalink(file_path_str)
 
         # Make unique if needed - enhanced to handle character conflicts
         permalink = desired_permalink
@@ -196,7 +196,7 @@ class EntityService(BaseService[EntityModel]):
         post = await schema_to_markdown(schema)
 
         # write file
-        final_content = frontmatter.dumps(post)
+        final_content = dump_frontmatter(post)
         checksum = await self.file_service.write_file(file_path, final_content)
 
         # parse entity from file
@@ -273,7 +273,7 @@ class EntityService(BaseService[EntityModel]):
         merged_post = frontmatter.Post(post.content, **existing_markdown.frontmatter.metadata)
 
         # write file
-        final_content = frontmatter.dumps(merged_post)
+        final_content = dump_frontmatter(merged_post)
         checksum = await self.file_service.write_file(file_path, final_content)
 
         # parse entity from file
@@ -283,7 +283,7 @@ class EntityService(BaseService[EntityModel]):
         entity = await self.update_entity_and_observations(file_path, entity_markdown)
 
         # add relations
-        await self.update_entity_relations(str(file_path), entity_markdown)
+        await self.update_entity_relations(file_path.as_posix(), entity_markdown)
 
         # Set final checksum to match file
         entity = await self.repository.update(entity.id, {"checksum": checksum})
@@ -374,7 +374,7 @@ class EntityService(BaseService[EntityModel]):
         """
         logger.debug(f"Updating entity and observations: {file_path}")
 
-        db_entity = await self.repository.get_by_file_path(str(file_path))
+        db_entity = await self.repository.get_by_file_path(file_path.as_posix())
 
         # Clear observations for entity
         await self.observation_repository.delete_by_fields(entity_id=db_entity.id)
@@ -498,7 +498,7 @@ class EntityService(BaseService[EntityModel]):
 
         # Update entity and its relationships
         entity = await self.update_entity_and_observations(file_path, entity_markdown)
-        await self.update_entity_relations(str(file_path), entity_markdown)
+        await self.update_entity_relations(file_path.as_posix(), entity_markdown)
 
         # Set final checksum to match file
         entity = await self.repository.update(entity.id, {"checksum": checksum})
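dump_frontmatter is new in basic_memory/file_utils.py (+65 lines, not shown in this diff). It presumably replaces direct python-frontmatter serialization; a minimal stand-in consistent with how it is called here would be:

import frontmatter  # python-frontmatter

def dump_frontmatter(post: frontmatter.Post) -> str:
    # Hypothetical stand-in: the real helper likely customizes YAML output
    # (key ordering, unicode handling); only its call signature is visible here.
    return frontmatter.dumps(post)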
basic_memory/services/project_service.py
CHANGED
@@ -100,7 +100,7 @@ class ProjectService:
             raise ValueError("Repository is required for add_project")
 
         # Resolve to absolute path
-        resolved_path = os.path.abspath(os.path.expanduser(path))
+        resolved_path = Path(os.path.abspath(os.path.expanduser(path))).as_posix()
 
         # First add to config file (this will validate the project doesn't exist)
         project_config = self.config_manager.add_project(name, resolved_path)
@@ -139,8 +139,8 @@ class ProjectService:
         # First remove from config (this will validate the project exists and is not default)
         self.config_manager.remove_project(name)
 
-        # Then remove from database
-        project = await self.repository.get_by_name(name)
+        # Then remove from database using robust lookup
+        project = await self.get_project(name)
         if project:
             await self.repository.delete(project.id)
 
@@ -161,8 +161,8 @@ class ProjectService:
         # First update config file (this will validate the project exists)
         self.config_manager.set_default_project(name)
 
-        # Then update database
-        project = await self.repository.get_by_name(name)
+        # Then update database using the same lookup logic as get_project
+        project = await self.get_project(name)
         if project:
             await self.repository.set_as_default(project.id)
         else:
@@ -323,7 +323,7 @@ class ProjectService:
             raise ValueError("Repository is required for move_project")
 
         # Resolve to absolute path
-        resolved_path = os.path.abspath(os.path.expanduser(new_path))
+        resolved_path = Path(os.path.abspath(os.path.expanduser(new_path))).as_posix()
 
         # Validate project exists in config
         if name not in self.config_manager.projects:
@@ -338,8 +338,8 @@ class ProjectService:
         config.projects[name] = resolved_path
         self.config_manager.save_config(config)
 
-        # Update in database
-        project = await self.repository.get_by_name(name)
+        # Update in database using robust lookup
+        project = await self.get_project(name)
         if project:
             await self.repository.update_path(project.id, resolved_path)
         logger.info(f"Moved project '{name}' from {old_path} to {resolved_path}")
@@ -370,15 +370,15 @@ class ProjectService:
         if name not in self.config_manager.projects:
             raise ValueError(f"Project '{name}' not found in configuration")
 
-        # Get project from database
-        project = await self.repository.get_by_name(name)
+        # Get project from database using robust lookup
+        project = await self.get_project(name)
         if not project:
             logger.error(f"Project '{name}' exists in config but not in database")
             return
 
         # Update path if provided
         if updated_path:
-            resolved_path = os.path.abspath(os.path.expanduser(updated_path))
+            resolved_path = Path(os.path.abspath(os.path.expanduser(updated_path))).as_posix()
 
             # Update in config
             config = self.config_manager.load_config()
basic_memory/sync/sync_service.py
CHANGED
@@ -357,8 +357,8 @@ class SyncService:
 
         # get file timestamps
         file_stats = self.file_service.file_stats(path)
-        created = datetime.fromtimestamp(file_stats.st_ctime)
-        modified = datetime.fromtimestamp(file_stats.st_mtime)
+        created = datetime.fromtimestamp(file_stats.st_ctime).astimezone()
+        modified = datetime.fromtimestamp(file_stats.st_mtime).astimezone()
 
         # get mime type
         content_type = self.file_service.content_type(path)
@@ -619,7 +619,7 @@ class SyncService:
                 continue
 
             path = Path(root) / filename
-            rel_path = str(path.relative_to(directory))
+            rel_path = path.relative_to(directory).as_posix()
             checksum = await self.file_service.compute_checksum(rel_path)
             result.files[rel_path] = checksum
             result.checksums[checksum] = rel_path
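datetime.fromtimestamp() returns a naive local wall time; chaining .astimezone() attaches the local zone so file timestamps can be compared with the aware values now stored on entities:

from datetime import datetime

mtime = 1_717_625_400.0  # illustrative st_mtime from os.stat()

naive = datetime.fromtimestamp(mtime)               # local wall time, tzinfo=None
aware = datetime.fromtimestamp(mtime).astimezone()  # same instant, local tzinfo set

# Ordering comparisons between naive and aware datetimes raise TypeError;
# keeping everything aware avoids that class of bug.
aware < datetime.now().astimezone()  # fine; naive < aware would raise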
basic_memory/sync/watch_service.py
CHANGED
@@ -197,7 +197,7 @@ class WatchService:
 
         for change, path in changes:
             # convert to relative path
-            relative_path = str(Path(path).relative_to(directory))
+            relative_path = Path(path).relative_to(directory).as_posix()
 
             # Skip .tmp files - they're temporary and shouldn't be synced
             if relative_path.endswith(".tmp"):
@@ -284,13 +284,36 @@ class WatchService:
         # Process deletes
         for path in deletes:
             if path not in processed:
-                logger.debug("Processing deleted file", path=path)
-                await sync_service.handle_delete(path)
-                self.state.add_event(path=path, action="deleted", status="success")
-                self.console.print(f"[red]✕[/red] {path}")
-                logger.info(f"deleted: {path}")
-                processed.add(path)
-                delete_count += 1
+                # Check if file still exists on disk (vim atomic write edge case)
+                full_path = directory / path
+                if full_path.exists() and full_path.is_file():
+                    # File still exists despite DELETE event - treat as modification
+                    logger.debug("File exists despite DELETE event, treating as modification", path=path)
+                    entity, checksum = await sync_service.sync_file(path, new=False)
+                    self.state.add_event(path=path, action="modified", status="success", checksum=checksum)
+                    self.console.print(f"[yellow]✎[/yellow] {path} (atomic write)")
+                    logger.info(f"atomic write detected: {path}")
+                    processed.add(path)
+                    modify_count += 1
+                else:
+                    # Check if this was a directory - skip if so
+                    # (we can't tell if the deleted path was a directory since it no longer exists,
+                    # so we check if there's an entity in the database for it)
+                    entity = await sync_service.entity_repository.get_by_file_path(path)
+                    if entity is None:
+                        # No entity means this was likely a directory - skip it
+                        logger.debug(f"Skipping deleted path with no entity (likely directory), path={path}")
+                        processed.add(path)
+                        continue
+
+                    # File truly deleted
+                    logger.debug("Processing deleted file", path=path)
+                    await sync_service.handle_delete(path)
+                    self.state.add_event(path=path, action="deleted", status="success")
+                    self.console.print(f"[red]✕[/red] {path}")
+                    logger.info(f"deleted: {path}")
+                    processed.add(path)
+                    delete_count += 1
 
         # Process adds
         for path in adds: