basic-memory 0.14.3__py3-none-any.whl → 0.15.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (90)
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/versions/a1b2c3d4e5f6_fix_project_foreign_keys.py +49 -0
  3. basic_memory/api/app.py +10 -4
  4. basic_memory/api/routers/knowledge_router.py +25 -8
  5. basic_memory/api/routers/project_router.py +99 -4
  6. basic_memory/api/routers/resource_router.py +3 -3
  7. basic_memory/cli/app.py +9 -28
  8. basic_memory/cli/auth.py +277 -0
  9. basic_memory/cli/commands/cloud/__init__.py +5 -0
  10. basic_memory/cli/commands/cloud/api_client.py +112 -0
  11. basic_memory/cli/commands/cloud/bisync_commands.py +818 -0
  12. basic_memory/cli/commands/cloud/core_commands.py +288 -0
  13. basic_memory/cli/commands/cloud/mount_commands.py +295 -0
  14. basic_memory/cli/commands/cloud/rclone_config.py +288 -0
  15. basic_memory/cli/commands/cloud/rclone_installer.py +198 -0
  16. basic_memory/cli/commands/command_utils.py +60 -0
  17. basic_memory/cli/commands/import_memory_json.py +0 -4
  18. basic_memory/cli/commands/mcp.py +16 -4
  19. basic_memory/cli/commands/project.py +141 -145
  20. basic_memory/cli/commands/status.py +34 -22
  21. basic_memory/cli/commands/sync.py +45 -228
  22. basic_memory/cli/commands/tool.py +87 -16
  23. basic_memory/cli/main.py +1 -0
  24. basic_memory/config.py +96 -20
  25. basic_memory/db.py +104 -3
  26. basic_memory/deps.py +20 -3
  27. basic_memory/file_utils.py +89 -0
  28. basic_memory/ignore_utils.py +295 -0
  29. basic_memory/importers/chatgpt_importer.py +1 -1
  30. basic_memory/importers/utils.py +2 -2
  31. basic_memory/markdown/entity_parser.py +2 -2
  32. basic_memory/markdown/markdown_processor.py +2 -2
  33. basic_memory/markdown/plugins.py +39 -21
  34. basic_memory/markdown/utils.py +1 -1
  35. basic_memory/mcp/async_client.py +22 -10
  36. basic_memory/mcp/project_context.py +141 -0
  37. basic_memory/mcp/prompts/ai_assistant_guide.py +49 -4
  38. basic_memory/mcp/prompts/continue_conversation.py +1 -1
  39. basic_memory/mcp/prompts/recent_activity.py +116 -32
  40. basic_memory/mcp/prompts/search.py +1 -1
  41. basic_memory/mcp/prompts/utils.py +11 -4
  42. basic_memory/mcp/resources/ai_assistant_guide.md +179 -41
  43. basic_memory/mcp/resources/project_info.py +20 -6
  44. basic_memory/mcp/server.py +0 -37
  45. basic_memory/mcp/tools/__init__.py +5 -6
  46. basic_memory/mcp/tools/build_context.py +39 -19
  47. basic_memory/mcp/tools/canvas.py +19 -8
  48. basic_memory/mcp/tools/chatgpt_tools.py +178 -0
  49. basic_memory/mcp/tools/delete_note.py +67 -34
  50. basic_memory/mcp/tools/edit_note.py +55 -39
  51. basic_memory/mcp/tools/headers.py +44 -0
  52. basic_memory/mcp/tools/list_directory.py +18 -8
  53. basic_memory/mcp/tools/move_note.py +119 -41
  54. basic_memory/mcp/tools/project_management.py +77 -229
  55. basic_memory/mcp/tools/read_content.py +28 -12
  56. basic_memory/mcp/tools/read_note.py +97 -57
  57. basic_memory/mcp/tools/recent_activity.py +441 -42
  58. basic_memory/mcp/tools/search.py +82 -70
  59. basic_memory/mcp/tools/sync_status.py +5 -4
  60. basic_memory/mcp/tools/utils.py +19 -0
  61. basic_memory/mcp/tools/view_note.py +31 -6
  62. basic_memory/mcp/tools/write_note.py +65 -14
  63. basic_memory/models/knowledge.py +19 -2
  64. basic_memory/models/project.py +6 -2
  65. basic_memory/repository/entity_repository.py +31 -84
  66. basic_memory/repository/project_repository.py +1 -1
  67. basic_memory/repository/relation_repository.py +13 -0
  68. basic_memory/repository/repository.py +2 -2
  69. basic_memory/repository/search_repository.py +9 -3
  70. basic_memory/schemas/__init__.py +6 -0
  71. basic_memory/schemas/base.py +70 -12
  72. basic_memory/schemas/cloud.py +46 -0
  73. basic_memory/schemas/memory.py +99 -18
  74. basic_memory/schemas/project_info.py +9 -10
  75. basic_memory/schemas/sync_report.py +48 -0
  76. basic_memory/services/context_service.py +35 -11
  77. basic_memory/services/directory_service.py +7 -0
  78. basic_memory/services/entity_service.py +82 -52
  79. basic_memory/services/initialization.py +30 -11
  80. basic_memory/services/project_service.py +23 -33
  81. basic_memory/sync/sync_service.py +148 -24
  82. basic_memory/sync/watch_service.py +128 -44
  83. basic_memory/utils.py +181 -109
  84. {basic_memory-0.14.3.dist-info → basic_memory-0.15.0.dist-info}/METADATA +26 -96
  85. basic_memory-0.15.0.dist-info/RECORD +147 -0
  86. basic_memory/mcp/project_session.py +0 -120
  87. basic_memory-0.14.3.dist-info/RECORD +0 -132
  88. {basic_memory-0.14.3.dist-info → basic_memory-0.15.0.dist-info}/WHEEL +0 -0
  89. {basic_memory-0.14.3.dist-info → basic_memory-0.15.0.dist-info}/entry_points.txt +0 -0
  90. {basic_memory-0.14.3.dist-info → basic_memory-0.15.0.dist-info}/licenses/LICENSE +0 -0
basic_memory/db.py CHANGED
@@ -1,4 +1,5 @@
 import asyncio
+import os
 from contextlib import asynccontextmanager
 from enum import Enum, auto
 from pathlib import Path
@@ -9,7 +10,7 @@ from alembic import command
 from alembic.config import Config
 
 from loguru import logger
-from sqlalchemy import text
+from sqlalchemy import text, event
 from sqlalchemy.ext.asyncio import (
     create_async_engine,
     async_sessionmaker,
@@ -17,6 +18,7 @@ from sqlalchemy.ext.asyncio import (
     AsyncEngine,
     async_scoped_session,
 )
+from sqlalchemy.pool import NullPool
 
 from basic_memory.repository.search_repository import SearchRepository
 
@@ -73,13 +75,77 @@ async def scoped_session(
         await factory.remove()
 
 
+def _configure_sqlite_connection(dbapi_conn, enable_wal: bool = True) -> None:
+    """Configure SQLite connection with WAL mode and optimizations.
+
+    Args:
+        dbapi_conn: Database API connection object
+        enable_wal: Whether to enable WAL mode (should be False for in-memory databases)
+    """
+    cursor = dbapi_conn.cursor()
+    try:
+        # Enable WAL mode for better concurrency (not supported for in-memory databases)
+        if enable_wal:
+            cursor.execute("PRAGMA journal_mode=WAL")
+        # Set busy timeout to handle locked databases
+        cursor.execute("PRAGMA busy_timeout=10000")  # 10 seconds
+        # Optimize for performance
+        cursor.execute("PRAGMA synchronous=NORMAL")
+        cursor.execute("PRAGMA cache_size=-64000")  # 64MB cache
+        cursor.execute("PRAGMA temp_store=MEMORY")
+        # Windows-specific optimizations
+        if os.name == "nt":
+            cursor.execute("PRAGMA locking_mode=NORMAL")  # Ensure normal locking on Windows
+    except Exception as e:
+        # Log but don't fail - some PRAGMAs may not be supported
+        logger.warning(f"Failed to configure SQLite connection: {e}")
+    finally:
+        cursor.close()
+
+
 def _create_engine_and_session(
     db_path: Path, db_type: DatabaseType = DatabaseType.FILESYSTEM
 ) -> tuple[AsyncEngine, async_sessionmaker[AsyncSession]]:
     """Internal helper to create engine and session maker."""
     db_url = DatabaseType.get_db_url(db_path, db_type)
     logger.debug(f"Creating engine for db_url: {db_url}")
-    engine = create_async_engine(db_url, connect_args={"check_same_thread": False})
+
+    # Configure connection args with Windows-specific settings
+    connect_args: dict[str, bool | float | None] = {"check_same_thread": False}
+
+    # Add Windows-specific parameters to improve reliability
+    if os.name == "nt":  # Windows
+        connect_args.update(
+            {
+                "timeout": 30.0,  # Increase timeout to 30 seconds for Windows
+                "isolation_level": None,  # Use autocommit mode
+            }
+        )
+        # Use NullPool for Windows filesystem databases to avoid connection pooling issues
+        # Important: Do NOT use NullPool for in-memory databases as it will destroy the database
+        # between connections
+        if db_type == DatabaseType.FILESYSTEM:
+            engine = create_async_engine(
+                db_url,
+                connect_args=connect_args,
+                poolclass=NullPool,  # Disable connection pooling on Windows
+                echo=False,
+            )
+        else:
+            # In-memory databases need connection pooling to maintain state
+            engine = create_async_engine(db_url, connect_args=connect_args)
+    else:
+        engine = create_async_engine(db_url, connect_args=connect_args)
+
+    # Enable WAL mode for better concurrency and reliability
+    # Note: WAL mode is not supported for in-memory databases
+    enable_wal = db_type != DatabaseType.MEMORY
+
+    @event.listens_for(engine.sync_engine, "connect")
+    def enable_wal_mode(dbapi_conn, connection_record):
+        """Enable WAL mode on each connection."""
+        _configure_sqlite_connection(dbapi_conn, enable_wal=enable_wal)
+
     session_maker = async_sessionmaker(engine, expire_on_commit=False)
     return engine, session_maker
 
@@ -140,7 +206,42 @@ async def engine_session_factory(
     db_url = DatabaseType.get_db_url(db_path, db_type)
     logger.debug(f"Creating engine for db_url: {db_url}")
 
-    _engine = create_async_engine(db_url, connect_args={"check_same_thread": False})
+    # Configure connection args with Windows-specific settings
+    connect_args: dict[str, bool | float | None] = {"check_same_thread": False}
+
+    # Add Windows-specific parameters to improve reliability
+    if os.name == "nt":  # Windows
+        connect_args.update(
+            {
+                "timeout": 30.0,  # Increase timeout to 30 seconds for Windows
+                "isolation_level": None,  # Use autocommit mode
+            }
+        )
+        # Use NullPool for Windows filesystem databases to avoid connection pooling issues
+        # Important: Do NOT use NullPool for in-memory databases as it will destroy the database
+        # between connections
+        if db_type == DatabaseType.FILESYSTEM:
+            _engine = create_async_engine(
+                db_url,
+                connect_args=connect_args,
+                poolclass=NullPool,  # Disable connection pooling on Windows
+                echo=False,
+            )
+        else:
+            # In-memory databases need connection pooling to maintain state
+            _engine = create_async_engine(db_url, connect_args=connect_args)
+    else:
+        _engine = create_async_engine(db_url, connect_args=connect_args)
+
+    # Enable WAL mode for better concurrency and reliability
+    # Note: WAL mode is not supported for in-memory databases
+    enable_wal = db_type != DatabaseType.MEMORY
+
+    @event.listens_for(_engine.sync_engine, "connect")
+    def enable_wal_mode(dbapi_conn, connection_record):
+        """Enable WAL mode on each connection."""
+        _configure_sqlite_connection(dbapi_conn, enable_wal=enable_wal)
+
     try:
         _session_maker = async_sessionmaker(_engine, expire_on_commit=False)
 
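The PRAGMAs applied above can be sanity-checked in isolation. The following is a minimal sketch, not part of the diff, using the stdlib sqlite3 module against a throwaway file; the file name "demo.db" is illustrative.

import sqlite3

# Throwaway database; name is illustrative only.
conn = sqlite3.connect("demo.db", check_same_thread=False)
cur = conn.cursor()
cur.execute("PRAGMA journal_mode=WAL")      # returns the journal mode now in effect
print(cur.fetchone())                       # ('wal',) for file-backed databases
cur.execute("PRAGMA busy_timeout=10000")    # wait up to 10s instead of failing on a locked db
cur.execute("PRAGMA synchronous=NORMAL")    # fewer fsyncs; safe in combination with WAL
cur.execute("PRAGMA cache_size=-64000")     # negative value means size in KiB (~64 MB)
cur.execute("PRAGMA temp_store=MEMORY")     # keep temp tables and indices in memory
cur.close()
conn.close()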
basic_memory/deps.py CHANGED
@@ -3,7 +3,7 @@
 from typing import Annotated
 from loguru import logger
 
-from fastapi import Depends, HTTPException, Path, status
+from fastapi import Depends, HTTPException, Path, status, Request
 from sqlalchemy.ext.asyncio import (
     AsyncSession,
     AsyncEngine,
@@ -78,9 +78,24 @@ ProjectConfigDep = Annotated[ProjectConfig, Depends(get_project_config)]  # prag
 
 
 async def get_engine_factory(
-    app_config: AppConfigDep,
+    request: Request,
 ) -> tuple[AsyncEngine, async_sessionmaker[AsyncSession]]:  # pragma: no cover
-    """Get engine and session maker."""
+    """Get cached engine and session maker from app state.
+
+    For API requests, returns cached connections from app.state for optimal performance.
+    For non-API contexts (CLI), falls back to direct database connection.
+    """
+    # Try to get cached connections from app state (API context)
+    if (
+        hasattr(request, "app")
+        and hasattr(request.app.state, "engine")
+        and hasattr(request.app.state, "session_maker")
+    ):
+        return request.app.state.engine, request.app.state.session_maker
+
+    # Fallback for non-API contexts (CLI)
+    logger.debug("Using fallback database connection for non-API context")
+    app_config = get_app_config()
     engine, session_maker = await db.get_or_create_db(app_config.database_path)
     return engine, session_maker
 
@@ -245,6 +260,7 @@ async def get_entity_service(
     entity_parser: EntityParserDep,
     file_service: FileServiceDep,
     link_resolver: "LinkResolverDep",
+    app_config: AppConfigDep,
 ) -> EntityService:
     """Create EntityService with repository."""
     return EntityService(
@@ -254,6 +270,7 @@
         entity_parser=entity_parser,
         file_service=file_service,
         link_resolver=link_resolver,
+        app_config=app_config,
     )
 
 
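get_engine_factory now expects the API process to have stashed an engine and session maker on app.state. The app.py changes that do this are not shown here, so the following is only a hypothetical sketch of that side of the contract, reusing the db.get_or_create_db and get_app_config calls that appear in the diff above; the import path for get_app_config is assumed.

from contextlib import asynccontextmanager

from fastapi import FastAPI

from basic_memory import db
from basic_memory.config import get_app_config  # import path assumed


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Cache one engine/session maker for the life of the API process so
    # get_engine_factory can return it per request instead of reconnecting.
    app_config = get_app_config()
    engine, session_maker = await db.get_or_create_db(app_config.database_path)
    app.state.engine = engine
    app.state.session_maker = session_maker
    yield
    # Shutdown/dispose handling omitted in this sketch.


app = FastAPI(lifespan=lifespan)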
basic_memory/file_utils.py CHANGED
@@ -2,9 +2,11 @@
 
 import hashlib
 from pathlib import Path
+import re
 from typing import Any, Dict, Union
 
 import yaml
+import frontmatter
 from loguru import logger
 
 from basic_memory.utils import FilePath
@@ -233,3 +235,90 @@ async def update_frontmatter(path: FilePath, updates: Dict[str, Any]) -> str:
             error=str(e),
         )
         raise FileError(f"Failed to update frontmatter: {e}")
+
+
+def dump_frontmatter(post: frontmatter.Post) -> str:
+    """
+    Serialize frontmatter.Post to markdown with Obsidian-compatible YAML format.
+
+    This function ensures that tags are formatted as YAML lists instead of JSON arrays:
+
+    Good (Obsidian compatible):
+    ---
+    tags:
+    - system
+    - overview
+    - reference
+    ---
+
+    Bad (current behavior):
+    ---
+    tags: ["system", "overview", "reference"]
+    ---
+
+    Args:
+        post: frontmatter.Post object to serialize
+
+    Returns:
+        String containing markdown with properly formatted YAML frontmatter
+    """
+    if not post.metadata:
+        # No frontmatter, just return content
+        return post.content
+
+    # Serialize YAML with block style for lists
+    yaml_str = yaml.dump(
+        post.metadata, sort_keys=False, allow_unicode=True, default_flow_style=False
+    )
+
+    # Construct the final markdown with frontmatter
+    if post.content:
+        return f"---\n{yaml_str}---\n\n{post.content}"
+    else:
+        return f"---\n{yaml_str}---\n"
+
+
+def sanitize_for_filename(text: str, replacement: str = "-") -> str:
+    """
+    Sanitize string to be safe for use as a note title.
+    Replaces path separators and other problematic characters with hyphens.
+    """
+    # replace both POSIX and Windows path separators
+    text = re.sub(r"[/\\]", replacement, text)
+
+    # replace some other problematic chars
+    text = re.sub(r'[<>:"|?*]', replacement, text)
+
+    # compress multiple, repeated replacements
+    text = re.sub(f"{re.escape(replacement)}+", replacement, text)
+
+    return text.strip(replacement)
+
+
+def sanitize_for_folder(folder: str) -> str:
+    """
+    Sanitize folder path to be safe for use in file system paths.
+    Removes leading/trailing whitespace, compresses multiple slashes,
+    and removes special characters except for /, -, and _.
+    """
+    if not folder:
+        return ""
+
+    sanitized = folder.strip()
+
+    if sanitized.startswith("./"):
+        sanitized = sanitized[2:]
+
+    # ensure no special characters (except for a few that are allowed)
+    sanitized = "".join(
+        c for c in sanitized if c.isalnum() or c in (".", " ", "-", "_", "\\", "/")
+    ).rstrip()
+
+    # compress multiple, repeated instances of path separators
+    sanitized = re.sub(r"[\\/]+", "/", sanitized)
+
+    # trim any leading/trailing path separators
+    sanitized = sanitized.strip("\\/")
+
+    return sanitized
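The formatting knob dump_frontmatter relies on is default_flow_style=False, which makes PyYAML emit block-style lists instead of inline JSON-like arrays. A quick illustrative check (the metadata values are made up):

import yaml

metadata = {"title": "Search Design", "tags": ["system", "overview", "reference"]}

print(yaml.dump(metadata, sort_keys=False, allow_unicode=True, default_flow_style=False))
# title: Search Design
# tags:
# - system
# - overview
# - reference

print(yaml.dump(metadata, sort_keys=False, default_flow_style=True))
# {title: Search Design, tags: [system, overview, reference]}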
basic_memory/ignore_utils.py ADDED
@@ -0,0 +1,295 @@
+"""Utilities for handling .gitignore patterns and file filtering."""
+
+import fnmatch
+from pathlib import Path
+from typing import Set
+
+
+# Common directories and patterns to ignore by default
+# These are used as fallback if .bmignore doesn't exist
+DEFAULT_IGNORE_PATTERNS = {
+    # Hidden files (files starting with dot)
+    ".*",
+    # Basic Memory internal files
+    "memory.db",
+    "memory.db-shm",
+    "memory.db-wal",
+    "config.json",
+    # Version control
+    ".git",
+    ".svn",
+    # Python
+    "__pycache__",
+    "*.pyc",
+    "*.pyo",
+    "*.pyd",
+    ".pytest_cache",
+    ".coverage",
+    "*.egg-info",
+    ".tox",
+    ".mypy_cache",
+    ".ruff_cache",
+    # Virtual environments
+    ".venv",
+    "venv",
+    "env",
+    ".env",
+    # Node.js
+    "node_modules",
+    # Build artifacts
+    "build",
+    "dist",
+    ".cache",
+    # IDE
+    ".idea",
+    ".vscode",
+    # OS files
+    ".DS_Store",
+    "Thumbs.db",
+    "desktop.ini",
+    # Obsidian
+    ".obsidian",
+    # Temporary files
+    "*.tmp",
+    "*.swp",
+    "*.swo",
+    "*~",
+}
+
+
+def get_bmignore_path() -> Path:
+    """Get path to .bmignore file.
+
+    Returns:
+        Path to ~/.basic-memory/.bmignore
+    """
+    return Path.home() / ".basic-memory" / ".bmignore"
+
+
+def create_default_bmignore() -> None:
+    """Create default .bmignore file if it doesn't exist.
+
+    This ensures users have a file they can customize for all Basic Memory operations.
+    """
+    bmignore_path = get_bmignore_path()
+
+    if bmignore_path.exists():
+        return
+
+    bmignore_path.parent.mkdir(parents=True, exist_ok=True)
+    bmignore_path.write_text("""# Basic Memory Ignore Patterns
+# This file is used by both 'bm cloud upload', 'bm cloud bisync', and file sync
+# Patterns use standard gitignore-style syntax
+
+# Hidden files (files starting with dot)
+.*
+
+# Basic Memory internal files
+memory.db
+memory.db-shm
+memory.db-wal
+config.json
+
+# Version control
+.git
+.svn
+
+# Python
+__pycache__
+*.pyc
+*.pyo
+*.pyd
+.pytest_cache
+.coverage
+*.egg-info
+.tox
+.mypy_cache
+.ruff_cache
+
+# Virtual environments
+.venv
+venv
+env
+.env
+
+# Node.js
+node_modules
+
+# Build artifacts
+build
+dist
+.cache
+
+# IDE
+.idea
+.vscode
+
+# OS files
+.DS_Store
+Thumbs.db
+desktop.ini
+
+# Obsidian
+.obsidian
+
+# Temporary files
+*.tmp
+*.swp
+*.swo
+*~
+""")
+
+
+def load_bmignore_patterns() -> Set[str]:
+    """Load patterns from .bmignore file.
+
+    Returns:
+        Set of patterns from .bmignore, or DEFAULT_IGNORE_PATTERNS if file doesn't exist
+    """
+    bmignore_path = get_bmignore_path()
+
+    # Create default file if it doesn't exist
+    if not bmignore_path.exists():
+        create_default_bmignore()
+
+    patterns = set()
+
+    try:
+        with bmignore_path.open("r", encoding="utf-8") as f:
+            for line in f:
+                line = line.strip()
+                # Skip empty lines and comments
+                if line and not line.startswith("#"):
+                    patterns.add(line)
+    except Exception:
+        # If we can't read .bmignore, fall back to defaults
+        return set(DEFAULT_IGNORE_PATTERNS)
+
+    # If no patterns were loaded, use defaults
+    if not patterns:
+        return set(DEFAULT_IGNORE_PATTERNS)
+
+    return patterns
+
+
+def load_gitignore_patterns(base_path: Path) -> Set[str]:
+    """Load gitignore patterns from .gitignore file and .bmignore.
+
+    Combines patterns from:
+    1. ~/.basic-memory/.bmignore (user's global ignore patterns)
+    2. {base_path}/.gitignore (project-specific patterns)
+
+    Args:
+        base_path: The base directory to search for .gitignore file
+
+    Returns:
+        Set of patterns to ignore
+    """
+    # Start with patterns from .bmignore
+    patterns = load_bmignore_patterns()
+
+    gitignore_file = base_path / ".gitignore"
+    if gitignore_file.exists():
+        try:
+            with gitignore_file.open("r", encoding="utf-8") as f:
+                for line in f:
+                    line = line.strip()
+                    # Skip empty lines and comments
+                    if line and not line.startswith("#"):
+                        patterns.add(line)
+        except Exception:
+            # If we can't read .gitignore, just use default patterns
+            pass
+
+    return patterns
+
+
+def should_ignore_path(file_path: Path, base_path: Path, ignore_patterns: Set[str]) -> bool:
+    """Check if a file path should be ignored based on gitignore patterns.
+
+    Args:
+        file_path: The file path to check
+        base_path: The base directory for relative path calculation
+        ignore_patterns: Set of patterns to match against
+
+    Returns:
+        True if the path should be ignored, False otherwise
+    """
+    # Get the relative path from base
+    try:
+        relative_path = file_path.relative_to(base_path)
+        relative_str = str(relative_path)
+        relative_posix = relative_path.as_posix()  # Use forward slashes for matching
+
+        # Check each pattern
+        for pattern in ignore_patterns:
+            # Handle patterns starting with / (root relative)
+            if pattern.startswith("/"):
+                root_pattern = pattern[1:]  # Remove leading /
+
+                # For directory patterns ending with /
+                if root_pattern.endswith("/"):
+                    dir_name = root_pattern[:-1]  # Remove trailing /
+                    # Check if the first part of the path matches the directory name
+                    if len(relative_path.parts) > 0 and relative_path.parts[0] == dir_name:
+                        return True
+                else:
+                    # Regular root-relative pattern
+                    if fnmatch.fnmatch(relative_posix, root_pattern):
+                        return True
+                continue
+
+            # Handle directory patterns (ending with /)
+            if pattern.endswith("/"):
+                dir_name = pattern[:-1]  # Remove trailing /
+                # Check if any path part matches the directory name
+                if dir_name in relative_path.parts:
+                    return True
+                continue
+
+            # Direct name match (e.g., ".git", "node_modules")
+            if pattern in relative_path.parts:
+                return True
+
+            # Check if any individual path part matches the glob pattern
+            # This handles cases like ".*" matching ".hidden.md" in "concept/.hidden.md"
+            for part in relative_path.parts:
+                if fnmatch.fnmatch(part, pattern):
+                    return True
+
+            # Glob pattern match on full path
+            if fnmatch.fnmatch(relative_posix, pattern) or fnmatch.fnmatch(relative_str, pattern):
+                return True
+
+        return False
+    except ValueError:
+        # If we can't get relative path, don't ignore
+        return False
+
+
+def filter_files(
+    files: list[Path], base_path: Path, ignore_patterns: Set[str] | None = None
+) -> tuple[list[Path], int]:
+    """Filter a list of files based on gitignore patterns.
+
+    Args:
+        files: List of file paths to filter
+        base_path: The base directory for relative path calculation
+        ignore_patterns: Set of patterns to ignore. If None, loads from .gitignore
+
+    Returns:
+        Tuple of (filtered_files, ignored_count)
+    """
+    if ignore_patterns is None:
+        ignore_patterns = load_gitignore_patterns(base_path)
+
+    filtered_files = []
+    ignored_count = 0
+
+    for file_path in files:
+        if should_ignore_path(file_path, base_path, ignore_patterns):
+            ignored_count += 1
+        else:
+            filtered_files.append(file_path)
+
+    return filtered_files, ignored_count
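A short usage sketch for the new module (assumes basic-memory 0.15.0 is installed; the notes directory is illustrative):

from pathlib import Path

from basic_memory.ignore_utils import filter_files, load_gitignore_patterns

base = Path.home() / "notes"  # illustrative project root
patterns = load_gitignore_patterns(base)  # ~/.basic-memory/.bmignore + <base>/.gitignore
candidates = [p for p in base.rglob("*") if p.is_file()]
kept, ignored_count = filter_files(candidates, base, patterns)
print(f"keeping {len(kept)} files, ignoring {ignored_count}")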
basic_memory/importers/chatgpt_importer.py CHANGED
@@ -93,7 +93,7 @@ class ChatGPTImporter(Importer[ChatImportResult]):
                 break
 
         # Generate permalink
-        date_prefix = datetime.fromtimestamp(created_at).strftime("%Y%m%d")
+        date_prefix = datetime.fromtimestamp(created_at).astimezone().strftime("%Y%m%d")
         clean_title = clean_filename(conversation["title"])
 
         # Format content
basic_memory/importers/utils.py CHANGED
@@ -43,13 +43,13 @@ def format_timestamp(timestamp: Any) -> str:  # pragma: no cover
         except ValueError:
             try:
                 # Try unix timestamp as string
-                timestamp = datetime.fromtimestamp(float(timestamp))
+                timestamp = datetime.fromtimestamp(float(timestamp)).astimezone()
             except ValueError:
                 # Return as is if we can't parse it
                 return timestamp
     elif isinstance(timestamp, (int, float)):
         # Unix timestamp
-        timestamp = datetime.fromtimestamp(timestamp)
+        timestamp = datetime.fromtimestamp(timestamp).astimezone()
 
     if isinstance(timestamp, datetime):
         return timestamp.strftime("%Y-%m-%d %H:%M:%S")
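All of the fromtimestamp() call sites touched in this release gain .astimezone(), turning a naive local datetime into a timezone-aware one. Illustrative check:

from datetime import datetime

ts = 1700000000  # arbitrary unix timestamp
naive = datetime.fromtimestamp(ts)
aware = datetime.fromtimestamp(ts).astimezone()

print(naive.tzinfo)  # None - naive local time
print(aware.tzinfo)  # local timezone with its UTC offset attached
print(aware.strftime("%Y-%m-%d %H:%M:%S"))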
basic_memory/markdown/entity_parser.py CHANGED
@@ -130,6 +130,6 @@ class EntityParser:
             content=post.content,
             observations=entity_content.observations,
             relations=entity_content.relations,
-            created=datetime.fromtimestamp(file_stats.st_ctime),
-            modified=datetime.fromtimestamp(file_stats.st_mtime),
+            created=datetime.fromtimestamp(file_stats.st_ctime).astimezone(),
+            modified=datetime.fromtimestamp(file_stats.st_mtime).astimezone(),
         )
basic_memory/markdown/markdown_processor.py CHANGED
@@ -2,11 +2,11 @@ from pathlib import Path
 from typing import Optional
 from collections import OrderedDict
 
-import frontmatter
 from frontmatter import Post
 from loguru import logger
 
 from basic_memory import file_utils
+from basic_memory.file_utils import dump_frontmatter
 from basic_memory.markdown.entity_parser import EntityParser
 from basic_memory.markdown.schemas import EntityMarkdown, Observation, Relation
 
@@ -115,7 +115,7 @@ class MarkdownProcessor:
 
         # Create Post object for frontmatter
         post = Post(content, **frontmatter_dict)
-        final_content = frontmatter.dumps(post, sort_keys=False)
+        final_content = dump_frontmatter(post)
 
         logger.debug(f"writing file {path} with content:\n{final_content}")