basic-memory 0.14.4__py3-none-any.whl → 0.15.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory has been flagged as potentially problematic; see the registry's advisory page for details.

Files changed (82)
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/versions/a1b2c3d4e5f6_fix_project_foreign_keys.py +5 -9
  3. basic_memory/api/app.py +10 -4
  4. basic_memory/api/routers/knowledge_router.py +25 -8
  5. basic_memory/api/routers/project_router.py +99 -4
  6. basic_memory/cli/app.py +9 -28
  7. basic_memory/cli/auth.py +277 -0
  8. basic_memory/cli/commands/cloud/__init__.py +5 -0
  9. basic_memory/cli/commands/cloud/api_client.py +112 -0
  10. basic_memory/cli/commands/cloud/bisync_commands.py +818 -0
  11. basic_memory/cli/commands/cloud/core_commands.py +288 -0
  12. basic_memory/cli/commands/cloud/mount_commands.py +295 -0
  13. basic_memory/cli/commands/cloud/rclone_config.py +288 -0
  14. basic_memory/cli/commands/cloud/rclone_installer.py +198 -0
  15. basic_memory/cli/commands/command_utils.py +60 -0
  16. basic_memory/cli/commands/import_memory_json.py +0 -4
  17. basic_memory/cli/commands/mcp.py +16 -4
  18. basic_memory/cli/commands/project.py +139 -142
  19. basic_memory/cli/commands/status.py +34 -22
  20. basic_memory/cli/commands/sync.py +45 -228
  21. basic_memory/cli/commands/tool.py +87 -16
  22. basic_memory/cli/main.py +1 -0
  23. basic_memory/config.py +76 -12
  24. basic_memory/db.py +104 -3
  25. basic_memory/deps.py +20 -3
  26. basic_memory/file_utils.py +37 -13
  27. basic_memory/ignore_utils.py +295 -0
  28. basic_memory/markdown/plugins.py +9 -7
  29. basic_memory/mcp/async_client.py +22 -10
  30. basic_memory/mcp/project_context.py +141 -0
  31. basic_memory/mcp/prompts/ai_assistant_guide.py +49 -4
  32. basic_memory/mcp/prompts/continue_conversation.py +1 -1
  33. basic_memory/mcp/prompts/recent_activity.py +116 -32
  34. basic_memory/mcp/prompts/search.py +1 -1
  35. basic_memory/mcp/prompts/utils.py +11 -4
  36. basic_memory/mcp/resources/ai_assistant_guide.md +179 -41
  37. basic_memory/mcp/resources/project_info.py +20 -6
  38. basic_memory/mcp/server.py +0 -37
  39. basic_memory/mcp/tools/__init__.py +5 -6
  40. basic_memory/mcp/tools/build_context.py +29 -19
  41. basic_memory/mcp/tools/canvas.py +19 -8
  42. basic_memory/mcp/tools/chatgpt_tools.py +178 -0
  43. basic_memory/mcp/tools/delete_note.py +67 -34
  44. basic_memory/mcp/tools/edit_note.py +55 -39
  45. basic_memory/mcp/tools/headers.py +44 -0
  46. basic_memory/mcp/tools/list_directory.py +18 -8
  47. basic_memory/mcp/tools/move_note.py +119 -41
  48. basic_memory/mcp/tools/project_management.py +61 -228
  49. basic_memory/mcp/tools/read_content.py +28 -12
  50. basic_memory/mcp/tools/read_note.py +83 -46
  51. basic_memory/mcp/tools/recent_activity.py +441 -42
  52. basic_memory/mcp/tools/search.py +82 -70
  53. basic_memory/mcp/tools/sync_status.py +5 -4
  54. basic_memory/mcp/tools/utils.py +19 -0
  55. basic_memory/mcp/tools/view_note.py +31 -6
  56. basic_memory/mcp/tools/write_note.py +65 -14
  57. basic_memory/models/knowledge.py +12 -6
  58. basic_memory/models/project.py +6 -2
  59. basic_memory/repository/entity_repository.py +29 -82
  60. basic_memory/repository/relation_repository.py +13 -0
  61. basic_memory/repository/repository.py +2 -2
  62. basic_memory/repository/search_repository.py +4 -2
  63. basic_memory/schemas/__init__.py +6 -0
  64. basic_memory/schemas/base.py +39 -11
  65. basic_memory/schemas/cloud.py +46 -0
  66. basic_memory/schemas/memory.py +90 -21
  67. basic_memory/schemas/project_info.py +9 -10
  68. basic_memory/schemas/sync_report.py +48 -0
  69. basic_memory/services/context_service.py +25 -11
  70. basic_memory/services/entity_service.py +75 -45
  71. basic_memory/services/initialization.py +30 -11
  72. basic_memory/services/project_service.py +13 -23
  73. basic_memory/sync/sync_service.py +145 -21
  74. basic_memory/sync/watch_service.py +101 -40
  75. basic_memory/utils.py +14 -4
  76. {basic_memory-0.14.4.dist-info → basic_memory-0.15.0.dist-info}/METADATA +7 -6
  77. basic_memory-0.15.0.dist-info/RECORD +147 -0
  78. basic_memory/mcp/project_session.py +0 -120
  79. basic_memory-0.14.4.dist-info/RECORD +0 -133
  80. {basic_memory-0.14.4.dist-info → basic_memory-0.15.0.dist-info}/WHEEL +0 -0
  81. {basic_memory-0.14.4.dist-info → basic_memory-0.15.0.dist-info}/entry_points.txt +0 -0
  82. {basic_memory-0.14.4.dist-info → basic_memory-0.15.0.dist-info}/licenses/LICENSE +0 -0
basic_memory/db.py CHANGED
@@ -1,4 +1,5 @@
1
1
  import asyncio
2
+ import os
2
3
  from contextlib import asynccontextmanager
3
4
  from enum import Enum, auto
4
5
  from pathlib import Path
@@ -9,7 +10,7 @@ from alembic import command
9
10
  from alembic.config import Config
10
11
 
11
12
  from loguru import logger
12
- from sqlalchemy import text
13
+ from sqlalchemy import text, event
13
14
  from sqlalchemy.ext.asyncio import (
14
15
  create_async_engine,
15
16
  async_sessionmaker,
@@ -17,6 +18,7 @@ from sqlalchemy.ext.asyncio import (
17
18
  AsyncEngine,
18
19
  async_scoped_session,
19
20
  )
21
+ from sqlalchemy.pool import NullPool
20
22
 
21
23
  from basic_memory.repository.search_repository import SearchRepository
22
24
 
@@ -73,13 +75,77 @@ async def scoped_session(
73
75
  await factory.remove()
74
76
 
75
77
 
78
+ def _configure_sqlite_connection(dbapi_conn, enable_wal: bool = True) -> None:
79
+ """Configure SQLite connection with WAL mode and optimizations.
80
+
81
+ Args:
82
+ dbapi_conn: Database API connection object
83
+ enable_wal: Whether to enable WAL mode (should be False for in-memory databases)
84
+ """
85
+ cursor = dbapi_conn.cursor()
86
+ try:
87
+ # Enable WAL mode for better concurrency (not supported for in-memory databases)
88
+ if enable_wal:
89
+ cursor.execute("PRAGMA journal_mode=WAL")
90
+ # Set busy timeout to handle locked databases
91
+ cursor.execute("PRAGMA busy_timeout=10000") # 10 seconds
92
+ # Optimize for performance
93
+ cursor.execute("PRAGMA synchronous=NORMAL")
94
+ cursor.execute("PRAGMA cache_size=-64000") # 64MB cache
95
+ cursor.execute("PRAGMA temp_store=MEMORY")
96
+ # Windows-specific optimizations
97
+ if os.name == "nt":
98
+ cursor.execute("PRAGMA locking_mode=NORMAL") # Ensure normal locking on Windows
99
+ except Exception as e:
100
+ # Log but don't fail - some PRAGMAs may not be supported
101
+ logger.warning(f"Failed to configure SQLite connection: {e}")
102
+ finally:
103
+ cursor.close()
104
+
105
+
76
106
  def _create_engine_and_session(
77
107
  db_path: Path, db_type: DatabaseType = DatabaseType.FILESYSTEM
78
108
  ) -> tuple[AsyncEngine, async_sessionmaker[AsyncSession]]:
79
109
  """Internal helper to create engine and session maker."""
80
110
  db_url = DatabaseType.get_db_url(db_path, db_type)
81
111
  logger.debug(f"Creating engine for db_url: {db_url}")
82
- engine = create_async_engine(db_url, connect_args={"check_same_thread": False})
112
+
113
+ # Configure connection args with Windows-specific settings
114
+ connect_args: dict[str, bool | float | None] = {"check_same_thread": False}
115
+
116
+ # Add Windows-specific parameters to improve reliability
117
+ if os.name == "nt": # Windows
118
+ connect_args.update(
119
+ {
120
+ "timeout": 30.0, # Increase timeout to 30 seconds for Windows
121
+ "isolation_level": None, # Use autocommit mode
122
+ }
123
+ )
124
+ # Use NullPool for Windows filesystem databases to avoid connection pooling issues
125
+ # Important: Do NOT use NullPool for in-memory databases as it will destroy the database
126
+ # between connections
127
+ if db_type == DatabaseType.FILESYSTEM:
128
+ engine = create_async_engine(
129
+ db_url,
130
+ connect_args=connect_args,
131
+ poolclass=NullPool, # Disable connection pooling on Windows
132
+ echo=False,
133
+ )
134
+ else:
135
+ # In-memory databases need connection pooling to maintain state
136
+ engine = create_async_engine(db_url, connect_args=connect_args)
137
+ else:
138
+ engine = create_async_engine(db_url, connect_args=connect_args)
139
+
140
+ # Enable WAL mode for better concurrency and reliability
141
+ # Note: WAL mode is not supported for in-memory databases
142
+ enable_wal = db_type != DatabaseType.MEMORY
143
+
144
+ @event.listens_for(engine.sync_engine, "connect")
145
+ def enable_wal_mode(dbapi_conn, connection_record):
146
+ """Enable WAL mode on each connection."""
147
+ _configure_sqlite_connection(dbapi_conn, enable_wal=enable_wal)
148
+
83
149
  session_maker = async_sessionmaker(engine, expire_on_commit=False)
84
150
  return engine, session_maker
85
151
 
@@ -140,7 +206,42 @@ async def engine_session_factory(
140
206
  db_url = DatabaseType.get_db_url(db_path, db_type)
141
207
  logger.debug(f"Creating engine for db_url: {db_url}")
142
208
 
143
- _engine = create_async_engine(db_url, connect_args={"check_same_thread": False})
209
+ # Configure connection args with Windows-specific settings
210
+ connect_args: dict[str, bool | float | None] = {"check_same_thread": False}
211
+
212
+ # Add Windows-specific parameters to improve reliability
213
+ if os.name == "nt": # Windows
214
+ connect_args.update(
215
+ {
216
+ "timeout": 30.0, # Increase timeout to 30 seconds for Windows
217
+ "isolation_level": None, # Use autocommit mode
218
+ }
219
+ )
220
+ # Use NullPool for Windows filesystem databases to avoid connection pooling issues
221
+ # Important: Do NOT use NullPool for in-memory databases as it will destroy the database
222
+ # between connections
223
+ if db_type == DatabaseType.FILESYSTEM:
224
+ _engine = create_async_engine(
225
+ db_url,
226
+ connect_args=connect_args,
227
+ poolclass=NullPool, # Disable connection pooling on Windows
228
+ echo=False,
229
+ )
230
+ else:
231
+ # In-memory databases need connection pooling to maintain state
232
+ _engine = create_async_engine(db_url, connect_args=connect_args)
233
+ else:
234
+ _engine = create_async_engine(db_url, connect_args=connect_args)
235
+
236
+ # Enable WAL mode for better concurrency and reliability
237
+ # Note: WAL mode is not supported for in-memory databases
238
+ enable_wal = db_type != DatabaseType.MEMORY
239
+
240
+ @event.listens_for(_engine.sync_engine, "connect")
241
+ def enable_wal_mode(dbapi_conn, connection_record):
242
+ """Enable WAL mode on each connection."""
243
+ _configure_sqlite_connection(dbapi_conn, enable_wal=enable_wal)
244
+
144
245
  try:
145
246
  _session_maker = async_sessionmaker(_engine, expire_on_commit=False)
146
247
 
basic_memory/deps.py CHANGED
@@ -3,7 +3,7 @@
3
3
  from typing import Annotated
4
4
  from loguru import logger
5
5
 
6
- from fastapi import Depends, HTTPException, Path, status
6
+ from fastapi import Depends, HTTPException, Path, status, Request
7
7
  from sqlalchemy.ext.asyncio import (
8
8
  AsyncSession,
9
9
  AsyncEngine,
@@ -78,9 +78,24 @@ ProjectConfigDep = Annotated[ProjectConfig, Depends(get_project_config)] # prag
78
78
 
79
79
 
80
80
  async def get_engine_factory(
81
- app_config: AppConfigDep,
81
+ request: Request,
82
82
  ) -> tuple[AsyncEngine, async_sessionmaker[AsyncSession]]: # pragma: no cover
83
- """Get engine and session maker."""
83
+ """Get cached engine and session maker from app state.
84
+
85
+ For API requests, returns cached connections from app.state for optimal performance.
86
+ For non-API contexts (CLI), falls back to direct database connection.
87
+ """
88
+ # Try to get cached connections from app state (API context)
89
+ if (
90
+ hasattr(request, "app")
91
+ and hasattr(request.app.state, "engine")
92
+ and hasattr(request.app.state, "session_maker")
93
+ ):
94
+ return request.app.state.engine, request.app.state.session_maker
95
+
96
+ # Fallback for non-API contexts (CLI)
97
+ logger.debug("Using fallback database connection for non-API context")
98
+ app_config = get_app_config()
84
99
  engine, session_maker = await db.get_or_create_db(app_config.database_path)
85
100
  return engine, session_maker
86
101
 
@@ -245,6 +260,7 @@ async def get_entity_service(
245
260
  entity_parser: EntityParserDep,
246
261
  file_service: FileServiceDep,
247
262
  link_resolver: "LinkResolverDep",
263
+ app_config: AppConfigDep,
248
264
  ) -> EntityService:
249
265
  """Create EntityService with repository."""
250
266
  return EntityService(
@@ -254,6 +270,7 @@ async def get_entity_service(
254
270
  entity_parser=entity_parser,
255
271
  file_service=file_service,
256
272
  link_resolver=link_resolver,
273
+ app_config=app_config,
257
274
  )
258
275
 
259
276
 
@@ -240,40 +240,37 @@ async def update_frontmatter(path: FilePath, updates: Dict[str, Any]) -> str:
240
240
  def dump_frontmatter(post: frontmatter.Post) -> str:
241
241
  """
242
242
  Serialize frontmatter.Post to markdown with Obsidian-compatible YAML format.
243
-
243
+
244
244
  This function ensures that tags are formatted as YAML lists instead of JSON arrays:
245
-
245
+
246
246
  Good (Obsidian compatible):
247
247
  ---
248
248
  tags:
249
249
  - system
250
- - overview
250
+ - overview
251
251
  - reference
252
252
  ---
253
-
253
+
254
254
  Bad (current behavior):
255
255
  ---
256
256
  tags: ["system", "overview", "reference"]
257
257
  ---
258
-
258
+
259
259
  Args:
260
260
  post: frontmatter.Post object to serialize
261
-
261
+
262
262
  Returns:
263
263
  String containing markdown with properly formatted YAML frontmatter
264
- """
264
+ """
265
265
  if not post.metadata:
266
266
  # No frontmatter, just return content
267
267
  return post.content
268
-
268
+
269
269
  # Serialize YAML with block style for lists
270
270
  yaml_str = yaml.dump(
271
- post.metadata,
272
- sort_keys=False,
273
- allow_unicode=True,
274
- default_flow_style=False
271
+ post.metadata, sort_keys=False, allow_unicode=True, default_flow_style=False
275
272
  )
276
-
273
+
277
274
  # Construct the final markdown with frontmatter
278
275
  if post.content:
279
276
  return f"---\n{yaml_str}---\n\n{post.content}"
@@ -298,3 +295,30 @@ def sanitize_for_filename(text: str, replacement: str = "-") -> str:
298
295
 
299
296
  return text.strip(replacement)
300
297
 
298
+
299
+ def sanitize_for_folder(folder: str) -> str:
300
+ """
301
+ Sanitize folder path to be safe for use in file system paths.
302
+ Removes leading/trailing whitespace, compresses multiple slashes,
303
+ and removes special characters except for /, -, and _.
304
+ """
305
+ if not folder:
306
+ return ""
307
+
308
+ sanitized = folder.strip()
309
+
310
+ if sanitized.startswith("./"):
311
+ sanitized = sanitized[2:]
312
+
313
+ # ensure no special characters (except for a few that are allowed)
314
+ sanitized = "".join(
315
+ c for c in sanitized if c.isalnum() or c in (".", " ", "-", "_", "\\", "/")
316
+ ).rstrip()
317
+
318
+ # compress multiple, repeated instances of path separators
319
+ sanitized = re.sub(r"[\\/]+", "/", sanitized)
320
+
321
+ # trim any leading/trailing path separators
322
+ sanitized = sanitized.strip("\\/")
323
+
324
+ return sanitized
@@ -0,0 +1,295 @@
1
+ """Utilities for handling .gitignore patterns and file filtering."""
2
+
3
+ import fnmatch
4
+ from pathlib import Path
5
+ from typing import Set
6
+
7
+
8
+ # Common directories and patterns to ignore by default
9
+ # These are used as fallback if .bmignore doesn't exist
10
+ DEFAULT_IGNORE_PATTERNS = {
11
+ # Hidden files (files starting with dot)
12
+ ".*",
13
+ # Basic Memory internal files
14
+ "memory.db",
15
+ "memory.db-shm",
16
+ "memory.db-wal",
17
+ "config.json",
18
+ # Version control
19
+ ".git",
20
+ ".svn",
21
+ # Python
22
+ "__pycache__",
23
+ "*.pyc",
24
+ "*.pyo",
25
+ "*.pyd",
26
+ ".pytest_cache",
27
+ ".coverage",
28
+ "*.egg-info",
29
+ ".tox",
30
+ ".mypy_cache",
31
+ ".ruff_cache",
32
+ # Virtual environments
33
+ ".venv",
34
+ "venv",
35
+ "env",
36
+ ".env",
37
+ # Node.js
38
+ "node_modules",
39
+ # Build artifacts
40
+ "build",
41
+ "dist",
42
+ ".cache",
43
+ # IDE
44
+ ".idea",
45
+ ".vscode",
46
+ # OS files
47
+ ".DS_Store",
48
+ "Thumbs.db",
49
+ "desktop.ini",
50
+ # Obsidian
51
+ ".obsidian",
52
+ # Temporary files
53
+ "*.tmp",
54
+ "*.swp",
55
+ "*.swo",
56
+ "*~",
57
+ }
58
+
59
+
60
+ def get_bmignore_path() -> Path:
61
+ """Get path to .bmignore file.
62
+
63
+ Returns:
64
+ Path to ~/.basic-memory/.bmignore
65
+ """
66
+ return Path.home() / ".basic-memory" / ".bmignore"
67
+
68
+
69
+ def create_default_bmignore() -> None:
70
+ """Create default .bmignore file if it doesn't exist.
71
+
72
+ This ensures users have a file they can customize for all Basic Memory operations.
73
+ """
74
+ bmignore_path = get_bmignore_path()
75
+
76
+ if bmignore_path.exists():
77
+ return
78
+
79
+ bmignore_path.parent.mkdir(parents=True, exist_ok=True)
80
+ bmignore_path.write_text("""# Basic Memory Ignore Patterns
81
+ # This file is used by both 'bm cloud upload', 'bm cloud bisync', and file sync
82
+ # Patterns use standard gitignore-style syntax
83
+
84
+ # Hidden files (files starting with dot)
85
+ .*
86
+
87
+ # Basic Memory internal files
88
+ memory.db
89
+ memory.db-shm
90
+ memory.db-wal
91
+ config.json
92
+
93
+ # Version control
94
+ .git
95
+ .svn
96
+
97
+ # Python
98
+ __pycache__
99
+ *.pyc
100
+ *.pyo
101
+ *.pyd
102
+ .pytest_cache
103
+ .coverage
104
+ *.egg-info
105
+ .tox
106
+ .mypy_cache
107
+ .ruff_cache
108
+
109
+ # Virtual environments
110
+ .venv
111
+ venv
112
+ env
113
+ .env
114
+
115
+ # Node.js
116
+ node_modules
117
+
118
+ # Build artifacts
119
+ build
120
+ dist
121
+ .cache
122
+
123
+ # IDE
124
+ .idea
125
+ .vscode
126
+
127
+ # OS files
128
+ .DS_Store
129
+ Thumbs.db
130
+ desktop.ini
131
+
132
+ # Obsidian
133
+ .obsidian
134
+
135
+ # Temporary files
136
+ *.tmp
137
+ *.swp
138
+ *.swo
139
+ *~
140
+ """)
141
+
142
+
143
+ def load_bmignore_patterns() -> Set[str]:
144
+ """Load patterns from .bmignore file.
145
+
146
+ Returns:
147
+ Set of patterns from .bmignore, or DEFAULT_IGNORE_PATTERNS if file doesn't exist
148
+ """
149
+ bmignore_path = get_bmignore_path()
150
+
151
+ # Create default file if it doesn't exist
152
+ if not bmignore_path.exists():
153
+ create_default_bmignore()
154
+
155
+ patterns = set()
156
+
157
+ try:
158
+ with bmignore_path.open("r", encoding="utf-8") as f:
159
+ for line in f:
160
+ line = line.strip()
161
+ # Skip empty lines and comments
162
+ if line and not line.startswith("#"):
163
+ patterns.add(line)
164
+ except Exception:
165
+ # If we can't read .bmignore, fall back to defaults
166
+ return set(DEFAULT_IGNORE_PATTERNS)
167
+
168
+ # If no patterns were loaded, use defaults
169
+ if not patterns:
170
+ return set(DEFAULT_IGNORE_PATTERNS)
171
+
172
+ return patterns
173
+
174
+
175
+ def load_gitignore_patterns(base_path: Path) -> Set[str]:
176
+ """Load gitignore patterns from .gitignore file and .bmignore.
177
+
178
+ Combines patterns from:
179
+ 1. ~/.basic-memory/.bmignore (user's global ignore patterns)
180
+ 2. {base_path}/.gitignore (project-specific patterns)
181
+
182
+ Args:
183
+ base_path: The base directory to search for .gitignore file
184
+
185
+ Returns:
186
+ Set of patterns to ignore
187
+ """
188
+ # Start with patterns from .bmignore
189
+ patterns = load_bmignore_patterns()
190
+
191
+ gitignore_file = base_path / ".gitignore"
192
+ if gitignore_file.exists():
193
+ try:
194
+ with gitignore_file.open("r", encoding="utf-8") as f:
195
+ for line in f:
196
+ line = line.strip()
197
+ # Skip empty lines and comments
198
+ if line and not line.startswith("#"):
199
+ patterns.add(line)
200
+ except Exception:
201
+ # If we can't read .gitignore, just use default patterns
202
+ pass
203
+
204
+ return patterns
205
+
206
+
207
+ def should_ignore_path(file_path: Path, base_path: Path, ignore_patterns: Set[str]) -> bool:
208
+ """Check if a file path should be ignored based on gitignore patterns.
209
+
210
+ Args:
211
+ file_path: The file path to check
212
+ base_path: The base directory for relative path calculation
213
+ ignore_patterns: Set of patterns to match against
214
+
215
+ Returns:
216
+ True if the path should be ignored, False otherwise
217
+ """
218
+ # Get the relative path from base
219
+ try:
220
+ relative_path = file_path.relative_to(base_path)
221
+ relative_str = str(relative_path)
222
+ relative_posix = relative_path.as_posix() # Use forward slashes for matching
223
+
224
+ # Check each pattern
225
+ for pattern in ignore_patterns:
226
+ # Handle patterns starting with / (root relative)
227
+ if pattern.startswith("/"):
228
+ root_pattern = pattern[1:] # Remove leading /
229
+
230
+ # For directory patterns ending with /
231
+ if root_pattern.endswith("/"):
232
+ dir_name = root_pattern[:-1] # Remove trailing /
233
+ # Check if the first part of the path matches the directory name
234
+ if len(relative_path.parts) > 0 and relative_path.parts[0] == dir_name:
235
+ return True
236
+ else:
237
+ # Regular root-relative pattern
238
+ if fnmatch.fnmatch(relative_posix, root_pattern):
239
+ return True
240
+ continue
241
+
242
+ # Handle directory patterns (ending with /)
243
+ if pattern.endswith("/"):
244
+ dir_name = pattern[:-1] # Remove trailing /
245
+ # Check if any path part matches the directory name
246
+ if dir_name in relative_path.parts:
247
+ return True
248
+ continue
249
+
250
+ # Direct name match (e.g., ".git", "node_modules")
251
+ if pattern in relative_path.parts:
252
+ return True
253
+
254
+ # Check if any individual path part matches the glob pattern
255
+ # This handles cases like ".*" matching ".hidden.md" in "concept/.hidden.md"
256
+ for part in relative_path.parts:
257
+ if fnmatch.fnmatch(part, pattern):
258
+ return True
259
+
260
+ # Glob pattern match on full path
261
+ if fnmatch.fnmatch(relative_posix, pattern) or fnmatch.fnmatch(relative_str, pattern):
262
+ return True
263
+
264
+ return False
265
+ except ValueError:
266
+ # If we can't get relative path, don't ignore
267
+ return False
268
+
269
+
270
+ def filter_files(
271
+ files: list[Path], base_path: Path, ignore_patterns: Set[str] | None = None
272
+ ) -> tuple[list[Path], int]:
273
+ """Filter a list of files based on gitignore patterns.
274
+
275
+ Args:
276
+ files: List of file paths to filter
277
+ base_path: The base directory for relative path calculation
278
+ ignore_patterns: Set of patterns to ignore. If None, loads from .gitignore
279
+
280
+ Returns:
281
+ Tuple of (filtered_files, ignored_count)
282
+ """
283
+ if ignore_patterns is None:
284
+ ignore_patterns = load_gitignore_patterns(base_path)
285
+
286
+ filtered_files = []
287
+ ignored_count = 0
288
+
289
+ for file_path in files:
290
+ if should_ignore_path(file_path, base_path, ignore_patterns):
291
+ ignored_count += 1
292
+ else:
293
+ filtered_files.append(file_path)
294
+
295
+ return filtered_files, ignored_count
@@ -9,6 +9,7 @@ from markdown_it.token import Token
9
9
  def is_observation(token: Token) -> bool:
10
10
  """Check if token looks like our observation format."""
11
11
  import re
12
+
12
13
  if token.type != "inline": # pragma: no cover
13
14
  return False
14
15
  # Use token.tag which contains the actual content for test tokens, fallback to content
@@ -18,15 +19,15 @@ def is_observation(token: Token) -> bool:
18
19
  # if it's a markdown_task, return false
19
20
  if content.startswith("[ ]") or content.startswith("[x]") or content.startswith("[-]"):
20
21
  return False
21
-
22
+
22
23
  # Exclude markdown links: [text](url)
23
24
  if re.match(r"^\[.*?\]\(.*?\)$", content):
24
25
  return False
25
-
26
+
26
27
  # Exclude wiki links: [[text]]
27
28
  if re.match(r"^\[\[.*?\]\]$", content):
28
29
  return False
29
-
30
+
30
31
  # Check for proper observation format: [category] content
31
32
  match = re.match(r"^\[([^\[\]()]+)\]\s+(.+)", content)
32
33
  has_tags = "#" in content
@@ -36,9 +37,10 @@ def is_observation(token: Token) -> bool:
36
37
  def parse_observation(token: Token) -> Dict[str, Any]:
37
38
  """Extract observation parts from token."""
38
39
  import re
40
+
39
41
  # Use token.tag which contains the actual content for test tokens, fallback to content
40
42
  content = (token.tag or token.content).strip()
41
-
43
+
42
44
  # Parse [category] with regex
43
45
  match = re.match(r"^\[([^\[\]()]+)\]\s+(.+)", content)
44
46
  category = None
@@ -50,7 +52,7 @@ def parse_observation(token: Token) -> Dict[str, Any]:
50
52
  empty_match = re.match(r"^\[\]\s+(.+)", content)
51
53
  if empty_match:
52
54
  content = empty_match.group(1).strip()
53
-
55
+
54
56
  # Parse (context)
55
57
  context = None
56
58
  if content.endswith(")"):
@@ -58,7 +60,7 @@ def parse_observation(token: Token) -> Dict[str, Any]:
58
60
  if start != -1:
59
61
  context = content[start + 1 : -1].strip()
60
62
  content = content[:start].strip()
61
-
63
+
62
64
  # Extract tags and keep original content
63
65
  tags = []
64
66
  parts = content.split()
@@ -69,7 +71,7 @@ def parse_observation(token: Token) -> Dict[str, Any]:
69
71
  tags.extend(subtags)
70
72
  else:
71
73
  tags.append(part[1:])
72
-
74
+
73
75
  return {
74
76
  "category": category,
75
77
  "content": content,
@@ -1,4 +1,4 @@
1
- from httpx import ASGITransport, AsyncClient
1
+ from httpx import ASGITransport, AsyncClient, Timeout
2
2
  from loguru import logger
3
3
 
4
4
  from basic_memory.api.app import app as fastapi_app
@@ -9,19 +9,31 @@ def create_client() -> AsyncClient:
9
9
  """Create an HTTP client based on configuration.
10
10
 
11
11
  Returns:
12
- AsyncClient configured for either local ASGI or remote HTTP transport
12
+ AsyncClient configured for either local ASGI or remote proxy
13
13
  """
14
14
  config_manager = ConfigManager()
15
- config = config_manager.load_config()
15
+ config = config_manager.config
16
16
 
17
- if config.api_url:
18
- # Use HTTP transport for remote API
19
- logger.info(f"Creating HTTP client for remote Basic Memory API: {config.api_url}")
20
- return AsyncClient(base_url=config.api_url)
17
+ # Configure timeout for longer operations like write_note
18
+ # Default httpx timeout is 5 seconds which is too short for file operations
19
+ timeout = Timeout(
20
+ connect=10.0, # 10 seconds for connection
21
+ read=30.0, # 30 seconds for reading response
22
+ write=30.0, # 30 seconds for writing request
23
+ pool=30.0, # 30 seconds for connection pool
24
+ )
25
+
26
+ if config.cloud_mode_enabled:
27
+ # Use HTTP transport to proxy endpoint
28
+ proxy_base_url = f"{config.cloud_host}/proxy"
29
+ logger.info(f"Creating HTTP client for proxy at: {proxy_base_url}")
30
+ return AsyncClient(base_url=proxy_base_url, timeout=timeout)
21
31
  else:
22
- # Use ASGI transport for local API
23
- logger.debug("Creating ASGI client for local Basic Memory API")
24
- return AsyncClient(transport=ASGITransport(app=fastapi_app), base_url="http://test")
32
+ # Default: use ASGI transport for local API (development mode)
33
+ logger.info("Creating ASGI client for local Basic Memory API")
34
+ return AsyncClient(
35
+ transport=ASGITransport(app=fastapi_app), base_url="http://test", timeout=timeout
36
+ )
25
37
 
26
38
 
27
39
  # Create shared async client