basic-memory 0.14.3__py3-none-any.whl → 0.15.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of basic-memory might be problematic.
Files changed (90)
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/versions/a1b2c3d4e5f6_fix_project_foreign_keys.py +49 -0
  3. basic_memory/api/app.py +10 -4
  4. basic_memory/api/routers/knowledge_router.py +25 -8
  5. basic_memory/api/routers/project_router.py +99 -4
  6. basic_memory/api/routers/resource_router.py +3 -3
  7. basic_memory/cli/app.py +9 -28
  8. basic_memory/cli/auth.py +277 -0
  9. basic_memory/cli/commands/cloud/__init__.py +5 -0
  10. basic_memory/cli/commands/cloud/api_client.py +112 -0
  11. basic_memory/cli/commands/cloud/bisync_commands.py +818 -0
  12. basic_memory/cli/commands/cloud/core_commands.py +288 -0
  13. basic_memory/cli/commands/cloud/mount_commands.py +295 -0
  14. basic_memory/cli/commands/cloud/rclone_config.py +288 -0
  15. basic_memory/cli/commands/cloud/rclone_installer.py +198 -0
  16. basic_memory/cli/commands/command_utils.py +60 -0
  17. basic_memory/cli/commands/import_memory_json.py +0 -4
  18. basic_memory/cli/commands/mcp.py +16 -4
  19. basic_memory/cli/commands/project.py +141 -145
  20. basic_memory/cli/commands/status.py +34 -22
  21. basic_memory/cli/commands/sync.py +45 -228
  22. basic_memory/cli/commands/tool.py +87 -16
  23. basic_memory/cli/main.py +1 -0
  24. basic_memory/config.py +96 -20
  25. basic_memory/db.py +104 -3
  26. basic_memory/deps.py +20 -3
  27. basic_memory/file_utils.py +89 -0
  28. basic_memory/ignore_utils.py +295 -0
  29. basic_memory/importers/chatgpt_importer.py +1 -1
  30. basic_memory/importers/utils.py +2 -2
  31. basic_memory/markdown/entity_parser.py +2 -2
  32. basic_memory/markdown/markdown_processor.py +2 -2
  33. basic_memory/markdown/plugins.py +39 -21
  34. basic_memory/markdown/utils.py +1 -1
  35. basic_memory/mcp/async_client.py +22 -10
  36. basic_memory/mcp/project_context.py +141 -0
  37. basic_memory/mcp/prompts/ai_assistant_guide.py +49 -4
  38. basic_memory/mcp/prompts/continue_conversation.py +1 -1
  39. basic_memory/mcp/prompts/recent_activity.py +116 -32
  40. basic_memory/mcp/prompts/search.py +1 -1
  41. basic_memory/mcp/prompts/utils.py +11 -4
  42. basic_memory/mcp/resources/ai_assistant_guide.md +179 -41
  43. basic_memory/mcp/resources/project_info.py +20 -6
  44. basic_memory/mcp/server.py +0 -37
  45. basic_memory/mcp/tools/__init__.py +5 -6
  46. basic_memory/mcp/tools/build_context.py +39 -19
  47. basic_memory/mcp/tools/canvas.py +19 -8
  48. basic_memory/mcp/tools/chatgpt_tools.py +178 -0
  49. basic_memory/mcp/tools/delete_note.py +67 -34
  50. basic_memory/mcp/tools/edit_note.py +55 -39
  51. basic_memory/mcp/tools/headers.py +44 -0
  52. basic_memory/mcp/tools/list_directory.py +18 -8
  53. basic_memory/mcp/tools/move_note.py +119 -41
  54. basic_memory/mcp/tools/project_management.py +77 -229
  55. basic_memory/mcp/tools/read_content.py +28 -12
  56. basic_memory/mcp/tools/read_note.py +97 -57
  57. basic_memory/mcp/tools/recent_activity.py +441 -42
  58. basic_memory/mcp/tools/search.py +82 -70
  59. basic_memory/mcp/tools/sync_status.py +5 -4
  60. basic_memory/mcp/tools/utils.py +19 -0
  61. basic_memory/mcp/tools/view_note.py +31 -6
  62. basic_memory/mcp/tools/write_note.py +65 -14
  63. basic_memory/models/knowledge.py +19 -2
  64. basic_memory/models/project.py +6 -2
  65. basic_memory/repository/entity_repository.py +31 -84
  66. basic_memory/repository/project_repository.py +1 -1
  67. basic_memory/repository/relation_repository.py +13 -0
  68. basic_memory/repository/repository.py +2 -2
  69. basic_memory/repository/search_repository.py +9 -3
  70. basic_memory/schemas/__init__.py +6 -0
  71. basic_memory/schemas/base.py +70 -12
  72. basic_memory/schemas/cloud.py +46 -0
  73. basic_memory/schemas/memory.py +99 -18
  74. basic_memory/schemas/project_info.py +9 -10
  75. basic_memory/schemas/sync_report.py +48 -0
  76. basic_memory/services/context_service.py +35 -11
  77. basic_memory/services/directory_service.py +7 -0
  78. basic_memory/services/entity_service.py +82 -52
  79. basic_memory/services/initialization.py +30 -11
  80. basic_memory/services/project_service.py +23 -33
  81. basic_memory/sync/sync_service.py +148 -24
  82. basic_memory/sync/watch_service.py +128 -44
  83. basic_memory/utils.py +181 -109
  84. {basic_memory-0.14.3.dist-info → basic_memory-0.15.0.dist-info}/METADATA +26 -96
  85. basic_memory-0.15.0.dist-info/RECORD +147 -0
  86. basic_memory/mcp/project_session.py +0 -120
  87. basic_memory-0.14.3.dist-info/RECORD +0 -132
  88. {basic_memory-0.14.3.dist-info → basic_memory-0.15.0.dist-info}/WHEEL +0 -0
  89. {basic_memory-0.14.3.dist-info → basic_memory-0.15.0.dist-info}/entry_points.txt +0 -0
  90. {basic_memory-0.14.3.dist-info → basic_memory-0.15.0.dist-info}/licenses/LICENSE +0 -0
basic_memory/models/knowledge.py

@@ -1,6 +1,7 @@
 """Knowledge graph models."""
 
 from datetime import datetime
+from basic_memory.utils import ensure_timezone_aware
 from typing import Optional
 
 from sqlalchemy import (
@@ -73,8 +74,14 @@ class Entity(Base):
     checksum: Mapped[Optional[str]] = mapped_column(String, nullable=True)
 
     # Metadata and tracking
-    created_at: Mapped[datetime] = mapped_column(DateTime)
-    updated_at: Mapped[datetime] = mapped_column(DateTime)
+    created_at: Mapped[datetime] = mapped_column(
+        DateTime(timezone=True), default=lambda: datetime.now().astimezone()
+    )
+    updated_at: Mapped[datetime] = mapped_column(
+        DateTime(timezone=True),
+        default=lambda: datetime.now().astimezone(),
+        onupdate=lambda: datetime.now().astimezone(),
+    )
 
     # Relationships
     project = relationship("Project", back_populates="entities")
@@ -104,6 +111,16 @@ class Entity(Base):
         """Check if the entity is a markdown file."""
         return self.content_type == "text/markdown"
 
+    def __getattribute__(self, name):
+        """Override attribute access to ensure datetime fields are timezone-aware."""
+        value = super().__getattribute__(name)
+
+        # Ensure datetime fields are timezone-aware
+        if name in ("created_at", "updated_at") and isinstance(value, datetime):
+            return ensure_timezone_aware(value)
+
+        return value
+
     def __repr__(self) -> str:
         return f"Entity(id={self.id}, name='{self.title}', type='{self.entity_type}'"
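The ensure_timezone_aware helper referenced above lives in basic_memory/utils.py, which changed in this release but is not shown in this excerpt. A minimal sketch of what such a helper plausibly does, assuming naive values should be interpreted as local time; this is an illustration, not the package's actual implementation:

    from datetime import datetime

    def ensure_timezone_aware(value: datetime) -> datetime:
        """Assumed behavior: attach the local timezone to naive datetimes."""
        # SQLite round-trips drop tzinfo, so values read back from the database
        # can be naive even though they were stored as timezone-aware.
        if value.tzinfo is None:
            return value.astimezone()  # naive -> aware, interpreted as local time
        return value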
 
basic_memory/models/project.py

@@ -52,9 +52,13 @@ class Project(Base):
     is_default: Mapped[Optional[bool]] = mapped_column(Boolean, default=None, nullable=True)
 
     # Timestamps
-    created_at: Mapped[datetime] = mapped_column(DateTime, default=lambda: datetime.now(UTC))
+    created_at: Mapped[datetime] = mapped_column(
+        DateTime(timezone=True), default=lambda: datetime.now(UTC)
+    )
     updated_at: Mapped[datetime] = mapped_column(
-        DateTime, default=lambda: datetime.now(UTC), onupdate=lambda: datetime.now(UTC)
+        DateTime(timezone=True),
+        default=lambda: datetime.now(UTC),
+        onupdate=lambda: datetime.now(UTC),
     )
 
     # Define relationships to entities, observations, and relations
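Both models now store timezone-aware timestamps. A short, generic illustration of why mixing naive and aware datetimes is worth avoiding (plain Python, independent of this package):

    from datetime import datetime, timezone

    aware = datetime.now(timezone.utc)
    naive = datetime.now()
    try:
        aware > naive
    except TypeError as exc:
        # Comparing aware and naive datetimes raises:
        # "can't compare offset-naive and offset-aware datetimes"
        print(exc)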
basic_memory/repository/entity_repository.py

@@ -57,7 +57,7 @@ class EntityRepository(Repository[Entity]):
         """
         query = (
             self.select()
-            .where(Entity.file_path == str(file_path))
+            .where(Entity.file_path == Path(file_path).as_posix())
            .options(*self.get_load_options())
         )
         return await self.find_one(query)
@@ -68,7 +68,7 @@ class EntityRepository(Repository[Entity]):
         Args:
             file_path: Path to the entity file (will be converted to string internally)
         """
-        return await self.delete_by_fields(file_path=str(file_path))
+        return await self.delete_by_fields(file_path=Path(file_path).as_posix())
 
     def get_load_options(self) -> List[LoaderOption]:
         """Get SQLAlchemy loader options for eager loading relationships."""
basic_memory/repository/entity_repository.py (continued)

@@ -101,11 +101,10 @@ class EntityRepository(Repository[Entity]):
         return list(result.scalars().all())
 
     async def upsert_entity(self, entity: Entity) -> Entity:
-        """Insert or update entity using a hybrid approach.
+        """Insert or update entity using simple try/catch with database-level conflict resolution.
 
-        This method provides a cleaner alternative to the try/catch approach
-        for handling permalink and file_path conflicts. It first tries direct
-        insertion, then handles conflicts intelligently.
+        Handles file_path race conditions by checking for existing entity on IntegrityError.
+        For permalink conflicts, generates a unique permalink with numeric suffix.
 
         Args:
             entity: The entity to insert or update
@@ -113,50 +112,12 @@ class EntityRepository(Repository[Entity]):
         Returns:
             The inserted or updated entity
         """
-
         async with db.scoped_session(self.session_maker) as session:
             # Set project_id if applicable and not already set
             self._set_project_id_if_needed(entity)
 
-            # Check for existing entity with same file_path first
-            existing_by_path = await session.execute(
-                select(Entity).where(
-                    Entity.file_path == entity.file_path, Entity.project_id == entity.project_id
-                )
-            )
-            existing_path_entity = existing_by_path.scalar_one_or_none()
-
-            if existing_path_entity:
-                # Update existing entity with same file path
-                for key, value in {
-                    "title": entity.title,
-                    "entity_type": entity.entity_type,
-                    "entity_metadata": entity.entity_metadata,
-                    "content_type": entity.content_type,
-                    "permalink": entity.permalink,
-                    "checksum": entity.checksum,
-                    "updated_at": entity.updated_at,
-                }.items():
-                    setattr(existing_path_entity, key, value)
-
-                await session.flush()
-                # Return with relationships loaded
-                query = (
-                    self.select()
-                    .where(Entity.file_path == entity.file_path)
-                    .options(*self.get_load_options())
-                )
-                result = await session.execute(query)
-                found = result.scalar_one_or_none()
-                if not found:  # pragma: no cover
-                    raise RuntimeError(
-                        f"Failed to retrieve entity after update: {entity.file_path}"
-                    )
-                return found
-
-            # No existing entity with same file_path, try insert
+            # Try simple insert first
             try:
-                # Simple insert for new entity
                 session.add(entity)
                 await session.flush()
 
@@ -175,20 +136,20 @@ class EntityRepository(Repository[Entity]):
                 return found
 
             except IntegrityError:
-                # Could be either file_path or permalink conflict
                 await session.rollback()
 
-                # Check if it's a file_path conflict (race condition)
-                existing_by_path_check = await session.execute(
-                    select(Entity).where(
+                # Re-query after rollback to get a fresh, attached entity
+                existing_result = await session.execute(
+                    select(Entity)
+                    .where(
                         Entity.file_path == entity.file_path, Entity.project_id == entity.project_id
                     )
+                    .options(*self.get_load_options())
                 )
-                race_condition_entity = existing_by_path_check.scalar_one_or_none()
+                existing_entity = existing_result.scalar_one_or_none()
 
-                if race_condition_entity:
-                    # Race condition: file_path conflict detected after our initial check
-                    # Update the existing entity instead
+                if existing_entity:
+                    # File path conflict - update the existing entity
                     for key, value in {
                         "title": entity.title,
                         "entity_type": entity.entity_type,
@@ -198,25 +159,22 @@ class EntityRepository(Repository[Entity]):
                         "checksum": entity.checksum,
                         "updated_at": entity.updated_at,
                     }.items():
-                        setattr(race_condition_entity, key, value)
-
-                    await session.flush()
-                    # Return the updated entity with relationships loaded
-                    query = (
-                        self.select()
-                        .where(Entity.file_path == entity.file_path)
-                        .options(*self.get_load_options())
-                    )
-                    result = await session.execute(query)
-                    found = result.scalar_one_or_none()
-                    if not found:  # pragma: no cover
-                        raise RuntimeError(
-                            f"Failed to retrieve entity after race condition update: {entity.file_path}"
-                        )
-                    return found
+                        setattr(existing_entity, key, value)
+
+                    # Clear and re-add observations
+                    existing_entity.observations.clear()
+                    for obs in entity.observations:
+                        obs.entity_id = existing_entity.id
+                        existing_entity.observations.append(obs)
+
+                    await session.commit()
+                    return existing_entity
+
                 else:
-                    # Must be permalink conflict - generate unique permalink
-                    return await self._handle_permalink_conflict(entity, session)
+                    # No file_path conflict - must be permalink conflict
+                    # Generate unique permalink and retry
+                    entity = await self._handle_permalink_conflict(entity, session)
+                    return entity
 
     async def _handle_permalink_conflict(self, entity: Entity, session: AsyncSession) -> Entity:
         """Handle permalink conflicts by generating a unique permalink."""
@@ -237,18 +195,7 @@ class EntityRepository(Repository[Entity]):
                 break
            suffix += 1
 
-        # Insert with unique permalink (no conflict possible now)
+        # Insert with unique permalink
        session.add(entity)
        await session.flush()
-
-        # Return the inserted entity with relationships loaded
-        query = (
-            self.select()
-            .where(Entity.file_path == entity.file_path)
-            .options(*self.get_load_options())
-        )
-        result = await session.execute(query)
-        found = result.scalar_one_or_none()
-        if not found:  # pragma: no cover
-            raise RuntimeError(f"Failed to retrieve entity after insert: {entity.file_path}")
-        return found
+        return entity
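The control flow above reduces to a generic "insert first, reconcile on IntegrityError" upsert. A condensed, hypothetical sketch of that pattern with an async SQLAlchemy session; the names and field list are illustrative, not the package's API:

    from sqlalchemy import select
    from sqlalchemy.exc import IntegrityError

    async def upsert(session, model, candidate, *unique_clauses):
        session.add(candidate)
        try:
            await session.flush()          # UNIQUE violations surface here
            return candidate
        except IntegrityError:
            await session.rollback()       # candidate is no longer usable as-is
            # Re-query to get the row that won the race, then copy fields onto it
            existing = (await session.execute(select(model).where(*unique_clauses))).scalar_one()
            for field in ("title", "checksum", "updated_at"):
                setattr(existing, field, getattr(candidate, field))
            await session.commit()
            return existing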
basic_memory/repository/project_repository.py

@@ -46,7 +46,7 @@ class ProjectRepository(Repository[Project]):
         Args:
             path: Path to the project directory (will be converted to string internally)
         """
-        query = self.select().where(Project.path == str(path))
+        query = self.select().where(Project.path == Path(path).as_posix())
         return await self.find_one(query)
 
     async def get_default_project(self) -> Optional[Project]:
basic_memory/repository/relation_repository.py

@@ -73,5 +73,18 @@ class RelationRepository(Repository[Relation]):
         result = await self.execute_query(query)
         return result.scalars().all()
 
+    async def find_unresolved_relations_for_entity(self, entity_id: int) -> Sequence[Relation]:
+        """Find unresolved relations for a specific entity.
+
+        Args:
+            entity_id: The entity whose unresolved outgoing relations to find.
+
+        Returns:
+            List of unresolved relations where this entity is the source.
+        """
+        query = select(Relation).filter(Relation.from_id == entity_id, Relation.to_id.is_(None))
+        result = await self.execute_query(query)
+        return result.scalars().all()
+
     def get_load_options(self) -> List[LoaderOption]:
         return [selectinload(Relation.from_entity), selectinload(Relation.to_entity)]
basic_memory/repository/repository.py

@@ -10,13 +10,13 @@ from sqlalchemy import (
     Executable,
     inspect,
     Result,
-    Column,
     and_,
     delete,
 )
 from sqlalchemy.exc import NoResultFound
 from sqlalchemy.ext.asyncio import async_sessionmaker, AsyncSession
 from sqlalchemy.orm.interfaces import LoaderOption
+from sqlalchemy.sql.elements import ColumnElement
 
 from basic_memory import db
 from basic_memory.models import Base
@@ -38,7 +38,7 @@ class Repository[T: Base]:
         if Model:
             self.Model = Model
             self.mapper = inspect(self.Model).mapper
-            self.primary_key: Column[Any] = self.mapper.primary_key[0]
+            self.primary_key: ColumnElement[Any] = self.mapper.primary_key[0]
             self.valid_columns = [column.key for column in self.mapper.columns]
             # Check if this model has a project_id column
             self.has_project_id = "project_id" in self.valid_columns
basic_memory/repository/search_repository.py

@@ -6,6 +6,7 @@ import time
 from dataclasses import dataclass
 from datetime import datetime
 from typing import Any, Dict, List, Optional
+from pathlib import Path
 
 from loguru import logger
 from sqlalchemy import Executable, Result, text
@@ -59,8 +60,11 @@ class SearchIndexRow:
         if not self.type == SearchItemType.ENTITY.value and not self.file_path:
             return ""
 
+        # Normalize path separators to handle both Windows (\) and Unix (/) paths
+        normalized_path = Path(self.file_path).as_posix()
+
         # Split the path by slashes
-        parts = self.file_path.split("/")
+        parts = normalized_path.split("/")
 
         # If there's only one part (e.g., "README.md"), it's at the root
         if len(parts) <= 1:
@@ -523,8 +527,10 @@
         async with db.scoped_session(self.session_maker) as session:
             # Delete existing record if any
             await session.execute(
-                text("DELETE FROM search_index WHERE permalink = :permalink"),
-                {"permalink": search_index_row.permalink},
+                text(
+                    "DELETE FROM search_index WHERE permalink = :permalink AND project_id = :project_id"
+                ),
+                {"permalink": search_index_row.permalink, "project_id": self.project_id},
             )
 
             # Prepare data for insert with project_id
basic_memory/schemas/__init__.py

@@ -48,6 +48,10 @@ from basic_memory.schemas.directory import (
     DirectoryNode,
 )
 
+from basic_memory.schemas.sync_report import (
+    SyncReportResponse,
+)
+
 # For convenient imports, export all models
 __all__ = [
     # Base
@@ -77,4 +81,6 @@
     "ProjectInfoResponse",
     # Directory
     "DirectoryNode",
+    # Sync
+    "SyncReportResponse",
 ]
basic_memory/schemas/base.py

@@ -11,9 +11,10 @@ Key Concepts:
     4. Everything is stored in both SQLite and markdown files
 """
 
+import os
 import mimetypes
 import re
-from datetime import datetime, time
+from datetime import datetime, timedelta
 from pathlib import Path
 from typing import List, Optional, Annotated, Dict
 
@@ -22,6 +23,8 @@ from dateparser import parse
 
 from pydantic import BaseModel, BeforeValidator, Field, model_validator
 
+from basic_memory.config import ConfigManager
+from basic_memory.file_utils import sanitize_for_filename, sanitize_for_folder
 from basic_memory.utils import generate_permalink
 
 
@@ -49,26 +52,49 @@ def to_snake_case(name: str) -> str:
 def parse_timeframe(timeframe: str) -> datetime:
     """Parse timeframe with special handling for 'today' and other natural language expressions.
 
+    Enforces a minimum 1-day lookback to handle timezone differences in distributed deployments.
+
     Args:
         timeframe: Natural language timeframe like 'today', '1d', '1 week ago', etc.
 
     Returns:
-        datetime: The parsed datetime for the start of the timeframe
+        datetime: The parsed datetime for the start of the timeframe, timezone-aware in local system timezone
+        Always returns at least 1 day ago to handle timezone differences.
 
     Examples:
-        parse_timeframe('today') -> 2025-06-05 00:00:00 (start of today)
-        parse_timeframe('1d') -> 2025-06-04 14:50:00 (24 hours ago)
-        parse_timeframe('1 week ago') -> 2025-05-29 14:50:00 (1 week ago)
+        parse_timeframe('today') -> 2025-06-04 14:50:00-07:00 (1 day ago, not start of today)
+        parse_timeframe('1h') -> 2025-06-04 14:50:00-07:00 (1 day ago, not 1 hour ago)
+        parse_timeframe('1d') -> 2025-06-04 14:50:00-07:00 (24 hours ago with local timezone)
+        parse_timeframe('1 week ago') -> 2025-05-29 14:50:00-07:00 (1 week ago with local timezone)
     """
     if timeframe.lower() == "today":
-        # Return start of today (00:00:00)
-        return datetime.combine(datetime.now().date(), time.min)
+        # For "today", return 1 day ago to ensure we capture recent activity across timezones
+        # This handles the case where client and server are in different timezones
+        now = datetime.now()
+        one_day_ago = now - timedelta(days=1)
+        return one_day_ago.astimezone()
     else:
         # Use dateparser for other formats
         parsed = parse(timeframe)
         if not parsed:
             raise ValueError(f"Could not parse timeframe: {timeframe}")
-        return parsed
+
+        # If the parsed datetime is naive, make it timezone-aware in local system timezone
+        if parsed.tzinfo is None:
+            parsed = parsed.astimezone()
+        else:
+            parsed = parsed
+
+        # Enforce minimum 1-day lookback to handle timezone differences
+        # This ensures we don't miss recent activity due to client/server timezone mismatches
+        now = datetime.now().astimezone()
+        one_day_ago = now - timedelta(days=1)
+
+        # If the parsed time is more recent than 1 day ago, use 1 day ago instead
+        if parsed > one_day_ago:
+            return one_day_ago
+        else:
+            return parsed
 
 
 def validate_timeframe(timeframe: str) -> str:
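A brief usage sketch of the clamping behaviour described above (illustrative only; exact values depend on when it runs and on the local timezone):

    from datetime import datetime, timedelta

    from basic_memory.schemas.base import parse_timeframe

    since = parse_timeframe("1 hour ago")
    assert since.tzinfo is not None                                     # always timezone-aware
    assert datetime.now().astimezone() - since >= timedelta(hours=23)   # clamped to at least ~1 day back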
basic_memory/schemas/base.py (continued)

@@ -85,7 +111,7 @@ def validate_timeframe(timeframe: str) -> str:
     parsed = parse_timeframe(timeframe)
 
     # Convert to duration
-    now = datetime.now()
+    now = datetime.now().astimezone()
     if parsed > now:
         raise ValueError("Timeframe cannot be in the future")
 
@@ -171,6 +197,7 @@ class Entity(BaseModel):
     """
 
     # private field to override permalink
+    # Use empty string "" as sentinel to indicate permalinks are explicitly disabled
     _permalink: Optional[str] = None
 
     title: str
@@ -184,17 +211,48 @@
         default="text/markdown",
     )
 
+    def __init__(self, **data):
+        data["folder"] = sanitize_for_folder(data.get("folder", ""))
+        super().__init__(**data)
+
+    @property
+    def safe_title(self) -> str:
+        """
+        A sanitized version of the title, which is safe for use on the filesystem. For example,
+        a title of "Coupon Enable/Disable Feature" should create the file as "Coupon Enable-Disable Feature.md"
+        instead of creating a file named "Disable Feature.md" beneath the "Coupon Enable" directory.
+
+        Replaces POSIX and/or Windows style slashes as well as a few other characters that are not safe for filenames.
+        If kebab_filenames is True, then behavior is consistent with transformation used when generating permalink
+        strings (e.g. "Coupon Enable/Disable Feature" -> "coupon-enable-disable-feature").
+        """
+        fixed_title = sanitize_for_filename(self.title)
+
+        app_config = ConfigManager().config
+        use_kebab_case = app_config.kebab_filenames
+
+        if use_kebab_case:
+            fixed_title = generate_permalink(file_path=fixed_title, split_extension=False)
+
+        return fixed_title
+
     @property
     def file_path(self):
         """Get the file path for this entity based on its permalink."""
+        safe_title = self.safe_title
         if self.content_type == "text/markdown":
-            return f"{self.folder}/{self.title}.md" if self.folder else f"{self.title}.md"
+            return (
+                os.path.join(self.folder, f"{safe_title}.md") if self.folder else f"{safe_title}.md"
+            )
         else:
-            return f"{self.folder}/{self.title}" if self.folder else self.title
 
+            return os.path.join(self.folder, safe_title) if self.folder else safe_title
 
     @property
-    def permalink(self) -> Permalink:
+    def permalink(self) -> Optional[Permalink]:
         """Get a url friendly path}."""
+        # Empty string is a sentinel value indicating permalinks are disabled
+        if self._permalink == "":
+            return None
         return self._permalink or generate_permalink(self.file_path)
 
     @model_validator(mode="after")
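sanitize_for_filename and sanitize_for_folder come from basic_memory/file_utils.py, which is listed in the files above but not included in this excerpt. A rough sketch of the kind of transformation the safe_title docstring describes, offered as an assumption rather than the package's actual implementation:

    import re

    def sanitize_for_filename(title: str) -> str:
        # Hypothetical: replace path separators and other filename-unsafe
        # characters with "-", so "Coupon Enable/Disable Feature" becomes
        # "Coupon Enable-Disable Feature" rather than a nested path.
        return re.sub(r'[\\/:*?"<>|]', "-", title).strip()

    print(sanitize_for_filename("Coupon Enable/Disable Feature"))
    # Coupon Enable-Disable Feature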
basic_memory/schemas/cloud.py (new file)

@@ -0,0 +1,46 @@
+"""Schemas for cloud-related API responses."""
+
+from pydantic import BaseModel, Field
+
+
+class TenantMountInfo(BaseModel):
+    """Response from /tenant/mount/info endpoint."""
+
+    tenant_id: str = Field(..., description="Unique identifier for the tenant")
+    bucket_name: str = Field(..., description="S3 bucket name for the tenant")
+
+
+class MountCredentials(BaseModel):
+    """Response from /tenant/mount/credentials endpoint."""
+
+    access_key: str = Field(..., description="S3 access key for mount")
+    secret_key: str = Field(..., description="S3 secret key for mount")
+
+
+class CloudProject(BaseModel):
+    """Representation of a cloud project."""
+
+    name: str = Field(..., description="Project name")
+    path: str = Field(..., description="Project path on cloud")
+
+
+class CloudProjectList(BaseModel):
+    """Response from /proxy/projects/projects endpoint."""
+
+    projects: list[CloudProject] = Field(default_factory=list, description="List of cloud projects")
+
+
+class CloudProjectCreateRequest(BaseModel):
+    """Request to create a new cloud project."""
+
+    name: str = Field(..., description="Project name")
+    path: str = Field(..., description="Project path (permalink)")
+    set_default: bool = Field(default=False, description="Set as default project")
+
+
+class CloudProjectCreateResponse(BaseModel):
+    """Response from creating a cloud project."""
+
+    name: str = Field(..., description="Created project name")
+    path: str = Field(..., description="Created project path")
+    message: str = Field(default="", description="Success message")
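A small usage sketch for the request model above; the field values are illustrative:

    from basic_memory.schemas.cloud import CloudProjectCreateRequest

    req = CloudProjectCreateRequest(name="research", path="research")
    print(req.model_dump())
    # {'name': 'research', 'path': 'research', 'set_default': False}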