basic-memory 0.1.1__py3-none-any.whl → 0.1.2__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.

Potentially problematic release: this version of basic-memory might be problematic.

Files changed (77)
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/README +1 -0
  3. basic_memory/alembic/env.py +75 -0
  4. basic_memory/alembic/migrations.py +29 -0
  5. basic_memory/alembic/script.py.mako +26 -0
  6. basic_memory/alembic/versions/3dae7c7b1564_initial_schema.py +93 -0
  7. basic_memory/api/__init__.py +2 -1
  8. basic_memory/api/app.py +26 -24
  9. basic_memory/api/routers/knowledge_router.py +28 -26
  10. basic_memory/api/routers/memory_router.py +17 -11
  11. basic_memory/api/routers/search_router.py +6 -12
  12. basic_memory/cli/__init__.py +1 -1
  13. basic_memory/cli/app.py +0 -1
  14. basic_memory/cli/commands/__init__.py +3 -3
  15. basic_memory/cli/commands/db.py +25 -0
  16. basic_memory/cli/commands/import_memory_json.py +35 -31
  17. basic_memory/cli/commands/mcp.py +20 -0
  18. basic_memory/cli/commands/status.py +10 -6
  19. basic_memory/cli/commands/sync.py +5 -56
  20. basic_memory/cli/main.py +5 -38
  21. basic_memory/config.py +3 -3
  22. basic_memory/db.py +15 -22
  23. basic_memory/deps.py +3 -4
  24. basic_memory/file_utils.py +36 -35
  25. basic_memory/markdown/entity_parser.py +13 -30
  26. basic_memory/markdown/markdown_processor.py +7 -7
  27. basic_memory/markdown/plugins.py +109 -123
  28. basic_memory/markdown/schemas.py +7 -8
  29. basic_memory/markdown/utils.py +70 -121
  30. basic_memory/mcp/__init__.py +1 -1
  31. basic_memory/mcp/async_client.py +0 -2
  32. basic_memory/mcp/server.py +3 -27
  33. basic_memory/mcp/tools/__init__.py +5 -3
  34. basic_memory/mcp/tools/knowledge.py +2 -2
  35. basic_memory/mcp/tools/memory.py +8 -4
  36. basic_memory/mcp/tools/search.py +2 -1
  37. basic_memory/mcp/tools/utils.py +1 -1
  38. basic_memory/models/__init__.py +1 -2
  39. basic_memory/models/base.py +3 -3
  40. basic_memory/models/knowledge.py +23 -60
  41. basic_memory/models/search.py +1 -1
  42. basic_memory/repository/__init__.py +5 -3
  43. basic_memory/repository/entity_repository.py +34 -98
  44. basic_memory/repository/relation_repository.py +0 -7
  45. basic_memory/repository/repository.py +2 -39
  46. basic_memory/repository/search_repository.py +20 -25
  47. basic_memory/schemas/__init__.py +4 -4
  48. basic_memory/schemas/base.py +21 -62
  49. basic_memory/schemas/delete.py +2 -3
  50. basic_memory/schemas/discovery.py +4 -1
  51. basic_memory/schemas/memory.py +12 -13
  52. basic_memory/schemas/request.py +4 -23
  53. basic_memory/schemas/response.py +10 -9
  54. basic_memory/schemas/search.py +4 -7
  55. basic_memory/services/__init__.py +2 -7
  56. basic_memory/services/context_service.py +116 -110
  57. basic_memory/services/entity_service.py +25 -62
  58. basic_memory/services/exceptions.py +1 -0
  59. basic_memory/services/file_service.py +73 -109
  60. basic_memory/services/link_resolver.py +9 -9
  61. basic_memory/services/search_service.py +22 -15
  62. basic_memory/services/service.py +3 -24
  63. basic_memory/sync/__init__.py +2 -2
  64. basic_memory/sync/file_change_scanner.py +3 -7
  65. basic_memory/sync/sync_service.py +35 -40
  66. basic_memory/sync/utils.py +6 -38
  67. basic_memory/sync/watch_service.py +26 -5
  68. basic_memory/utils.py +42 -33
  69. {basic_memory-0.1.1.dist-info → basic_memory-0.1.2.dist-info}/METADATA +2 -7
  70. basic_memory-0.1.2.dist-info/RECORD +78 -0
  71. basic_memory/mcp/main.py +0 -21
  72. basic_memory/mcp/tools/ai_edit.py +0 -84
  73. basic_memory/services/database_service.py +0 -159
  74. basic_memory-0.1.1.dist-info/RECORD +0 -74
  75. {basic_memory-0.1.1.dist-info → basic_memory-0.1.2.dist-info}/WHEEL +0 -0
  76. {basic_memory-0.1.1.dist-info → basic_memory-0.1.2.dist-info}/entry_points.txt +0 -0
  77. {basic_memory-0.1.1.dist-info → basic_memory-0.1.2.dist-info}/licenses/LICENSE +0 -0
basic_memory/cli/commands/import_memory_json.py CHANGED
@@ -19,13 +19,16 @@ from basic_memory.markdown.schemas import EntityMarkdown, EntityFrontmatter, Obs
 
 console = Console()
 
-async def process_memory_json(json_path: Path, base_path: Path,markdown_processor: MarkdownProcessor):
+
+async def process_memory_json(
+    json_path: Path, base_path: Path, markdown_processor: MarkdownProcessor
+):
     """Import entities from memory.json using markdown processor."""
-
+
     # First pass - collect all relations by source entity
     entity_relations: Dict[str, List[Relation]] = {}
     entities: Dict[str, Dict[str, Any]] = {}
-
+
     with Progress(
         SpinnerColumn(),
         TextColumn("[progress.description]{task.description}"),
@@ -34,12 +37,12 @@ async def process_memory_json(json_path: Path, base_path: Path,markdown_processo
         console=console,
     ) as progress:
         read_task = progress.add_task("Reading memory.json...", total=None)
-
+
         # First pass - collect entities and relations
         with open(json_path) as f:
             lines = f.readlines()
             progress.update(read_task, total=len(lines))
-
+
             for line in lines:
                 data = json.loads(line)
                 if data["type"] == "entity":
@@ -52,14 +55,14 @@ async def process_memory_json(json_path: Path, base_path: Path,markdown_processo
                     entity_relations[source].append(
                         Relation(
                             type=data.get("relationType") or data.get("relation_type"),
-                            target=data.get("to") or data.get("to_id")
+                            target=data.get("to") or data.get("to_id"),
                         )
                     )
                 progress.update(read_task, advance=1)
 
         # Second pass - create and write entities
         write_task = progress.add_task("Creating entities...", total=len(entities))
-
+
         entities_created = 0
         for name, entity_data in entities.items():
             entity = EntityMarkdown(
@@ -67,26 +70,25 @@ async def process_memory_json(json_path: Path, base_path: Path,markdown_processo
                     metadata={
                         "type": entity_data["entityType"],
                         "title": name,
-                        "permalink": f"{entity_data['entityType']}/{name}"
+                        "permalink": f"{entity_data['entityType']}/{name}",
                     }
                 ),
                 content=f"# {name}\n",
-                observations=[
-                    Observation(content=obs)
-                    for obs in entity_data["observations"]
-                ],
-                relations=entity_relations.get(name, [])  # Add any relations where this entity is the source
+                observations=[Observation(content=obs) for obs in entity_data["observations"]],
+                relations=entity_relations.get(
+                    name, []
+                ),  # Add any relations where this entity is the source
             )
-
+
             # Let markdown processor handle writing
             file_path = base_path / f"{entity_data['entityType']}/{name}.md"
             await markdown_processor.write_file(file_path, entity)
            entities_created += 1
             progress.update(write_task, advance=1)
-
+
     return {
        "entities": entities_created,
-        "relations": sum(len(rels) for rels in entity_relations.values())
+        "relations": sum(len(rels) for rels in entity_relations.values()),
     }
 
 
@@ -101,39 +103,41 @@ def import_json(
     json_path: Path = typer.Argument(..., help="Path to memory.json file to import"),
 ):
     """Import entities and relations from a memory.json file.
-
+
     This command will:
     1. Read entities and relations from the JSON file
     2. Create markdown files for each entity
     3. Include outgoing relations in each entity's markdown
-
+
     After importing, run 'basic-memory sync' to index the new files.
     """
-
+
     if not json_path.exists():
         typer.echo(f"Error: File not found: {json_path}", err=True)
         raise typer.Exit(1)
-
+
     try:
         # Get markdown processor
         markdown_processor = asyncio.run(get_markdown_processor())
-
+
         # Process the file
         base_path = config.home
         console.print(f"\nImporting from {json_path}...writing to {base_path}")
         results = asyncio.run(process_memory_json(json_path, base_path, markdown_processor))
-
+
         # Show results
-        console.print(Panel(
-            f"[green]Import complete![/green]\n\n"
-            f"Created {results['entities']} entities\n"
-            f"Added {results['relations']} relations",
-            expand=False
-        ))
-
+        console.print(
+            Panel(
+                f"[green]Import complete![/green]\n\n"
+                f"Created {results['entities']} entities\n"
+                f"Added {results['relations']} relations",
+                expand=False,
+            )
+        )
+
         console.print("\nRun 'basic-memory sync' to index the new files.")
-
+
     except Exception as e:
         logger.exception("Import failed")
         typer.echo(f"Error during import: {e}", err=True)
-        raise typer.Exit(1)
+        raise typer.Exit(1)
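
For orientation: process_memory_json above reads memory.json as JSON Lines, one object per line, and branches on data["type"]. A minimal sketch of an input file it should accept, using only the keys visible in these hunks; the entity name key and the relation source key do not appear in the hunks above and are assumptions here:

    import json
    from pathlib import Path

    # Hypothetical sample input for process_memory_json(); keys marked "assumed"
    # are not visible in the diff hunks above.
    records = [
        {
            "type": "entity",
            "name": "basic-memory",            # assumed key for the entity name
            "entityType": "project",           # becomes the folder and the frontmatter "type"
            "observations": ["Stores notes as markdown files"],
        },
        {
            "type": "relation",
            "from": "basic-memory",            # assumed key for the relation source
            "relationType": "depends_on",      # the code also accepts "relation_type"
            "to": "sqlite",                    # the code also accepts "to_id"
        },
    ]
    Path("memory.json").write_text("\n".join(json.dumps(r) for r in records) + "\n")

Given such a file, the import command would write project/basic-memory.md containing the observation and the outgoing relation, after which 'basic-memory sync' indexes the new file.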
basic_memory/cli/commands/mcp.py ADDED
@@ -0,0 +1,20 @@
+"""MCP server command."""
+
+from loguru import logger
+from basic_memory.cli.app import app
+from basic_memory.config import config
+
+# Import mcp instance
+from basic_memory.mcp.server import mcp as mcp_server  # pragma: no cover
+
+# Import mcp tools to register them
+import basic_memory.mcp.tools  # noqa: F401  # pragma: no cover
+
+
+@app.command()
+def mcp():
+    """Run the MCP server for Claude Desktop integration."""
+    home_dir = config.home
+    logger.info("Starting Basic Memory MCP server")
+    logger.info(f"Home directory: {home_dir}")
+    mcp_server.run()
basic_memory/cli/commands/status.py CHANGED
@@ -21,7 +21,9 @@ from basic_memory.sync.utils import SyncReport
 console = Console()
 
 
-async def get_file_change_scanner(db_type=DatabaseType.FILESYSTEM) -> FileChangeScanner:
+async def get_file_change_scanner(
+    db_type=DatabaseType.FILESYSTEM,
+) -> FileChangeScanner:  # pragma: no cover
     """Get sync service instance."""
     async with db.engine_session_factory(db_path=config.database_path, db_type=db_type) as (
         engine,
@@ -32,7 +34,9 @@ async def get_file_change_scanner(db_type=DatabaseType.FILESYSTEM) -> FileChange
     return file_change_scanner
 
 
-def add_files_to_tree(tree: Tree, paths: Set[str], style: str, checksums: Dict[str, str] = None):
+def add_files_to_tree(
+    tree: Tree, paths: Set[str], style: str, checksums: Dict[str, str] | None = None
+):
     """Add files to tree, grouped by directory."""
     # Group by directory
     by_dir = {}
@@ -126,7 +130,8 @@ def display_changes(title: str, changes: SyncReport, verbose: bool = False):
     by_dir = group_changes_by_directory(changes)
     for dir_name, counts in sorted(by_dir.items()):
         summary = build_directory_summary(counts)
-        tree.add(f"[bold]{dir_name}/[/bold] {summary}")
+        if summary:  # Only show directories with changes
+            tree.add(f"[bold]{dir_name}/[/bold] {summary}")
 
     console.print(Panel(tree, expand=False))
 
@@ -145,8 +150,7 @@ def status(
     """Show sync status between files and database."""
     try:
         sync_service = asyncio.run(get_file_change_scanner())
-        asyncio.run(run_status(sync_service, verbose))
+        asyncio.run(run_status(sync_service, verbose))  # pragma: no cover
     except Exception as e:
         logger.exception(f"Error checking status: {e}")
-        typer.echo(f"Error checking status: {e}", err=True)
-        raise typer.Exit(1)
+        raise typer.Exit(code=1)  # pragma: no cover
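
The add_files_to_tree change above makes the optional checksums parameter an explicit Dict[str, str] | None. A small usage sketch with invented paths and checksums; the real call sites in status.py are not shown in this diff:

    from rich.console import Console
    from rich.tree import Tree

    from basic_memory.cli.commands.status import add_files_to_tree

    console = Console()
    tree = Tree("New files")

    # Hypothetical arguments; the actual callers build these from a SyncReport.
    paths = {"notes/meeting.md", "notes/ideas.md"}
    checksums = {"notes/meeting.md": "a1b2c3d4", "notes/ideas.md": "e5f6a7b8"}

    add_files_to_tree(tree, paths, style="green", checksums=checksums)
    console.print(tree)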
basic_memory/cli/commands/sync.py CHANGED
@@ -9,9 +9,6 @@ from typing import List, Dict
 import typer
 from loguru import logger
 from rich.console import Console
-from rich.padding import Padding
-from rich.panel import Panel
-from rich.text import Text
 from rich.tree import Tree
 
 from basic_memory import db
@@ -42,7 +39,7 @@ class ValidationIssue:
     error: str
 
 
-async def get_sync_service(db_type=DatabaseType.FILESYSTEM):
+async def get_sync_service(db_type=DatabaseType.FILESYSTEM):  # pragma: no cover
     """Get sync service instance with all dependencies."""
     async with db.engine_session_factory(db_path=config.database_path, db_type=db_type) as (
         engine,
@@ -97,53 +94,6 @@ def group_issues_by_directory(issues: List[ValidationIssue]) -> Dict[str, List[V
     return dict(grouped)
 
 
-def display_validation_errors(issues: List[ValidationIssue]):
-    """Display validation errors in a rich, organized format."""
-    # Create header
-    console.print()
-    console.print(
-        Panel("[red bold]Error:[/red bold] Invalid frontmatter in knowledge files", expand=False)
-    )
-    console.print()
-
-    # Group issues by directory
-    grouped_issues = group_issues_by_directory(issues)
-
-    # Create tree structure
-    tree = Tree("Knowledge Files")
-    for dir_name, dir_issues in sorted(grouped_issues.items()):
-        # Create branch for directory
-        branch = tree.add(
-            f"[bold blue]{dir_name}/[/bold blue] ([yellow]{len(dir_issues)} files[/yellow])"
-        )
-
-        # Add each file issue
-        for issue in sorted(dir_issues, key=lambda x: x.file_path):
-            file_name = Path(issue.file_path).name
-            branch.add(
-                Text.assemble(("└─ ", "dim"), (file_name, "yellow"), ": ", (issue.error, "red"))
-            )
-
-    # Display tree
-    console.print(Padding(tree, (1, 2)))
-
-    # Add help text
-    console.print()
-    console.print(
-        Panel(
-            Text.assemble(
-                ("To fix:", "bold"),
-                "\n1. Add required frontmatter fields to each file",
-                "\n2. Run ",
-                ("basic-memory sync", "bold cyan"),
-                " again",
-            ),
-            expand=False,
-        )
-    )
-    console.print()
-
-
 def display_sync_summary(knowledge: SyncReport):
     """Display a one-line summary of sync changes."""
     total_changes = knowledge.total_changes
@@ -204,7 +154,6 @@ def display_detailed_sync_results(knowledge: SyncReport):
 
 async def run_sync(verbose: bool = False, watch: bool = False):
     """Run sync operation."""
-
     sync_service = await get_sync_service()
 
     # Start watching if requested
@@ -212,10 +161,10 @@
         watch_service = WatchService(
             sync_service=sync_service,
             file_service=sync_service.entity_service.file_service,
-            config=config
+            config=config,
         )
         await watch_service.handle_changes(config.home)
-        await watch_service.run()
+        await watch_service.run()  # pragma: no cover
     else:
         # one time sync
         knowledge_changes = await sync_service.sync(config.home)
@@ -223,7 +172,7 @@
         if verbose:
             display_detailed_sync_results(knowledge_changes)
         else:
-            display_sync_summary(knowledge_changes)
+            display_sync_summary(knowledge_changes)  # pragma: no cover
 
 
 @app.command()
@@ -246,7 +195,7 @@ def sync(
         # Run sync
         asyncio.run(run_sync(verbose=verbose, watch=watch))
 
-    except Exception as e:
+    except Exception as e:  # pragma: no cover
         if not isinstance(e, typer.Exit):
             logger.exception("Sync failed")
             typer.echo(f"Error during sync: {e}", err=True)
basic_memory/cli/main.py CHANGED
@@ -1,47 +1,14 @@
-"""Main CLI entry point for basic-memory."""
-import sys
+"""Main CLI entry point for basic-memory."""  # pragma: no cover
 
-import typer
-from loguru import logger
-
-from basic_memory.cli.app import app
+from basic_memory.cli.app import app  # pragma: no cover
+from basic_memory.utils import setup_logging  # pragma: no cover
 
 # Register commands
-from basic_memory.cli.commands import status, sync
-__all__ = ["status", "sync"]
-
-from basic_memory.config import config
-
-
-def setup_logging(home_dir: str = config.home, log_file: str = ".basic-memory/basic-memory-tools.log"):
-    """Configure logging for the application."""
-
-    # Remove default handler and any existing handlers
-    logger.remove()
-
-    # Add file handler for debug level logs
-    log = f"{home_dir}/{log_file}"
-    logger.add(
-        log,
-        level="DEBUG",
-        rotation="100 MB",
-        retention="10 days",
-        backtrace=True,
-        diagnose=True,
-        enqueue=True,
-        colorize=False,
-    )
+from basic_memory.cli.commands import status, sync, db, import_memory_json, mcp  # noqa: F401  # pragma: no cover
 
-    # Add stderr handler for warnings and errors only
-    logger.add(
-        sys.stderr,
-        level="WARNING",
-        backtrace=True,
-        diagnose=True
-    )
 
 # Set up logging when module is imported
-setup_logging()
+setup_logging(log_file=".basic-memory/basic-memory-cli.log")  # pragma: no cover
 
 if __name__ == "__main__":  # pragma: no cover
     app()
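
The inline logging setup deleted above now lives in basic_memory.utils (utils.py changes by +42 -33 in the file list) and is called here with only a log_file argument. A rough sketch of what the relocated helper plausibly looks like, reconstructed from the handlers removed from this file; the real signature in utils.py is not shown in this diff:

    import sys

    from loguru import logger

    from basic_memory.config import config


    def setup_logging(home_dir=config.home, log_file=".basic-memory/basic-memory-cli.log"):
        """Sketch only: mirrors the handler setup removed from main.py above.

        The actual helper in basic_memory.utils may differ, e.g. by honoring the
        new config.log_level field added to ProjectConfig in this release.
        """
        logger.remove()  # drop loguru's default stderr handler
        logger.add(
            f"{home_dir}/{log_file}",
            level="DEBUG",
            rotation="100 MB",
            retention="10 days",
            backtrace=True,
            diagnose=True,
            enqueue=True,
            colorize=False,
        )
        # Keep warnings and errors visible on the console
        logger.add(sys.stderr, level="WARNING", backtrace=True, diagnose=True)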
basic_memory/config.py CHANGED
@@ -2,7 +2,6 @@
 
 from pathlib import Path
 
-from loguru import logger
 from pydantic import Field, field_validator
 from pydantic_settings import BaseSettings, SettingsConfigDict
 
@@ -27,6 +26,8 @@ class ProjectConfig(BaseSettings):
         default=500, description="Milliseconds to wait after changes before syncing", gt=0
     )
 
+    log_level: str = "INFO"
+
     model_config = SettingsConfigDict(
         env_prefix="BASIC_MEMORY_",
         extra="ignore",
@@ -45,7 +46,7 @@ class ProjectConfig(BaseSettings):
 
     @field_validator("home")
     @classmethod
-    def ensure_path_exists(cls, v: Path) -> Path:
+    def ensure_path_exists(cls, v: Path) -> Path:  # pragma: no cover
         """Ensure project path exists."""
         if not v.exists():
             v.mkdir(parents=True)
@@ -54,4 +55,3 @@ class ProjectConfig(BaseSettings):
 
 # Load project config
 config = ProjectConfig()
-logger.info(f"project config home: {config.home}")
basic_memory/db.py CHANGED
@@ -14,9 +14,8 @@ from sqlalchemy.ext.asyncio import (
     async_scoped_session,
 )
 
-from basic_memory.models import Base, SCHEMA_VERSION
+from basic_memory.models import Base
 from basic_memory.models.search import CREATE_SEARCH_INDEX
-from basic_memory.repository.search_repository import SearchRepository
 
 # Module level state
 _engine: Optional[AsyncEngine] = None
@@ -72,6 +71,8 @@ async def scoped_session(
 
 async def init_db() -> None:
     """Initialize database with required tables."""
+    if _session_maker is None:  # pragma: no cover
+        raise RuntimeError("Database session maker not initialized")
 
     logger.info("Initializing database...")
 
@@ -85,25 +86,11 @@
 
         await session.commit()
 
-async def drop_db():
-    """Drop all database tables."""
-    global _engine, _session_maker
-
-    logger.info("Dropping tables...")
-    async with scoped_session(_session_maker) as session:
-        conn = await session.connection()
-        await conn.run_sync(Base.metadata.drop_all)
-        await session.commit()
-
-    # reset global engine and session_maker
-    _engine = None
-    _session_maker = None
-
 
 async def get_or_create_db(
     db_path: Path,
     db_type: DatabaseType = DatabaseType.FILESYSTEM,
-) -> tuple[AsyncEngine, async_sessionmaker[AsyncSession]]:
+) -> tuple[AsyncEngine, async_sessionmaker[AsyncSession]]:  # pragma: no cover
     """Get or create database engine and session maker."""
     global _engine, _session_maker
 
@@ -116,10 +103,12 @@
     # Initialize database
     await init_db()
 
+    assert _engine is not None  # for type checker
+    assert _session_maker is not None  # for type checker
     return _engine, _session_maker
 
 
-async def shutdown_db():
+async def shutdown_db() -> None:  # pragma: no cover
     """Clean up database connections."""
     global _engine, _session_maker
 
@@ -129,7 +118,6 @@
     _session_maker = None
 
 
-
 @asynccontextmanager
 async def engine_session_factory(
     db_path: Path,
@@ -143,10 +131,10 @@
     """
 
     global _engine, _session_maker
-
+
     db_url = DatabaseType.get_db_url(db_path, db_type)
     logger.debug(f"Creating engine for db_url: {db_url}")
-
+
     _engine = create_async_engine(db_url, connect_args={"check_same_thread": False})
     try:
         _session_maker = async_sessionmaker(_engine, expire_on_commit=False)
@@ -154,6 +142,11 @@
         if init:
             await init_db()
 
+        assert _engine is not None  # for type checker
+        assert _session_maker is not None  # for type checker
         yield _engine, _session_maker
     finally:
-        await _engine.dispose()
+        if _engine:
+            await _engine.dispose()
+            _engine = None
+            _session_maker = None
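
Note that engine_session_factory's finally block now also resets the module-level _engine and _session_maker, so each use of the context manager leaves a clean slate. The calling pattern, as it already appears in the status and sync commands earlier in this diff:

    from basic_memory import db
    from basic_memory.config import config


    async def open_db(db_type):  # illustrative wrapper, not part of the package
        async with db.engine_session_factory(db_path=config.database_path, db_type=db_type) as (
            engine,
            session_maker,
        ):
            # engine and session_maker are valid only inside this block; on exit the
            # engine is disposed and the module globals are reset to None.
            ...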
basic_memory/deps.py CHANGED
@@ -29,11 +29,11 @@ from basic_memory.services.search_service import SearchService
 ## project
 
 
-def get_project_config() -> ProjectConfig:
+def get_project_config() -> ProjectConfig:  # pragma: no cover
     return config
 
 
-ProjectConfigDep = Annotated[ProjectConfig, Depends(get_project_config)]
+ProjectConfigDep = Annotated[ProjectConfig, Depends(get_project_config)]  # pragma: no cover
 
 
 ## sqlalchemy
@@ -41,7 +41,7 @@ ProjectConfigDep = Annotated[ProjectConfig, Depends(get_project_config)]
 
 async def get_engine_factory(
     project_config: ProjectConfigDep,
-) -> tuple[AsyncEngine, async_sessionmaker[AsyncSession]]:
+) -> tuple[AsyncEngine, async_sessionmaker[AsyncSession]]:  # pragma: no cover
     """Get engine and session maker."""
     return await db.get_or_create_db(project_config.database_path)
 
@@ -129,7 +129,6 @@ async def get_file_service(
 FileServiceDep = Annotated[FileService, Depends(get_file_service)]
 
 
-
 async def get_entity_service(
     entity_repository: EntityRepositoryDep,
     observation_repository: ObservationRepositoryDep,
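
deps.py exposes its FastAPI wiring as Annotated dependency aliases such as ProjectConfigDep and FileServiceDep. A hypothetical route showing how such an alias is consumed; the path and handler below are invented for illustration and are not part of the package's routers:

    from fastapi import APIRouter

    from basic_memory.deps import ProjectConfigDep

    router = APIRouter()


    @router.get("/project/home")
    async def project_home(project_config: ProjectConfigDep) -> str:
        # FastAPI resolves ProjectConfigDep through Depends(get_project_config),
        # so the handler receives the loaded ProjectConfig instance.
        return str(project_config.home)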