basic_memory-0.7.0-py3-none-any.whl → basic_memory-0.8.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic; consult the registry's advisory page for more details.

Files changed (58)
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/alembic.ini +119 -0
  3. basic_memory/alembic/env.py +23 -1
  4. basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink.py +51 -0
  5. basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py +44 -0
  6. basic_memory/api/app.py +0 -4
  7. basic_memory/api/routers/knowledge_router.py +1 -1
  8. basic_memory/api/routers/memory_router.py +16 -16
  9. basic_memory/api/routers/resource_router.py +105 -4
  10. basic_memory/cli/app.py +0 -2
  11. basic_memory/cli/commands/status.py +9 -21
  12. basic_memory/cli/commands/sync.py +12 -16
  13. basic_memory/cli/commands/tools.py +36 -13
  14. basic_memory/cli/main.py +0 -1
  15. basic_memory/config.py +15 -1
  16. basic_memory/file_utils.py +6 -4
  17. basic_memory/markdown/entity_parser.py +3 -3
  18. basic_memory/mcp/async_client.py +1 -1
  19. basic_memory/mcp/main.py +25 -0
  20. basic_memory/mcp/prompts/__init__.py +15 -0
  21. basic_memory/mcp/prompts/ai_assistant_guide.py +28 -0
  22. basic_memory/mcp/prompts/continue_conversation.py +172 -0
  23. basic_memory/mcp/prompts/json_canvas_spec.py +25 -0
  24. basic_memory/mcp/prompts/recent_activity.py +46 -0
  25. basic_memory/mcp/prompts/search.py +127 -0
  26. basic_memory/mcp/prompts/utils.py +98 -0
  27. basic_memory/mcp/server.py +3 -7
  28. basic_memory/mcp/tools/__init__.py +6 -4
  29. basic_memory/mcp/tools/canvas.py +99 -0
  30. basic_memory/mcp/tools/memory.py +12 -5
  31. basic_memory/mcp/tools/notes.py +1 -2
  32. basic_memory/mcp/tools/resource.py +192 -0
  33. basic_memory/mcp/tools/utils.py +2 -1
  34. basic_memory/models/knowledge.py +27 -11
  35. basic_memory/repository/repository.py +1 -1
  36. basic_memory/repository/search_repository.py +14 -4
  37. basic_memory/schemas/__init__.py +0 -11
  38. basic_memory/schemas/base.py +4 -1
  39. basic_memory/schemas/memory.py +11 -2
  40. basic_memory/schemas/search.py +2 -1
  41. basic_memory/services/entity_service.py +19 -12
  42. basic_memory/services/file_service.py +69 -2
  43. basic_memory/services/link_resolver.py +12 -9
  44. basic_memory/services/search_service.py +56 -12
  45. basic_memory/sync/__init__.py +3 -2
  46. basic_memory/sync/sync_service.py +294 -123
  47. basic_memory/sync/watch_service.py +125 -129
  48. basic_memory/utils.py +24 -9
  49. {basic_memory-0.7.0.dist-info → basic_memory-0.8.0.dist-info}/METADATA +2 -1
  50. basic_memory-0.8.0.dist-info/RECORD +91 -0
  51. basic_memory/alembic/README +0 -1
  52. basic_memory/schemas/discovery.py +0 -28
  53. basic_memory/sync/file_change_scanner.py +0 -158
  54. basic_memory/sync/utils.py +0 -31
  55. basic_memory-0.7.0.dist-info/RECORD +0 -82
  56. {basic_memory-0.7.0.dist-info → basic_memory-0.8.0.dist-info}/WHEEL +0 -0
  57. {basic_memory-0.7.0.dist-info → basic_memory-0.8.0.dist-info}/entry_points.txt +0 -0
  58. {basic_memory-0.7.0.dist-info → basic_memory-0.8.0.dist-info}/licenses/LICENSE +0 -0
basic_memory/__init__.py CHANGED
@@ -1,3 +1,3 @@
1
1
  """basic-memory - Local-first knowledge management combining Zettelkasten with knowledge graphs"""
2
2
 
3
- __version__ = "0.7.0"
3
+ __version__ = "0.8.0"
@@ -0,0 +1,119 @@
1
+ # A generic, single database configuration.
2
+
3
+ [alembic]
4
+ # path to migration scripts
5
+ # Use forward slashes (/) also on windows to provide an os agnostic path
6
+ script_location = .
7
+
8
+ # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
9
+ # Uncomment the line below if you want the files to be prepended with date and time
10
+ # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
11
+ # for all available tokens
12
+ # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
13
+
14
+ # sys.path path, will be prepended to sys.path if present.
15
+ # defaults to the current working directory.
16
+ prepend_sys_path = .
17
+
18
+ # timezone to use when rendering the date within the migration file
19
+ # as well as the filename.
20
+ # If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
21
+ # Any required deps can installed by adding `alembic[tz]` to the pip requirements
22
+ # string value is passed to ZoneInfo()
23
+ # leave blank for localtime
24
+ # timezone =
25
+
26
+ # max length of characters to apply to the "slug" field
27
+ # truncate_slug_length = 40
28
+
29
+ # set to 'true' to run the environment during
30
+ # the 'revision' command, regardless of autogenerate
31
+ # revision_environment = false
32
+
33
+ # set to 'true' to allow .pyc and .pyo files without
34
+ # a source .py file to be detected as revisions in the
35
+ # versions/ directory
36
+ # sourceless = false
37
+
38
+ # version location specification; This defaults
39
+ # to migrations/versions. When using multiple version
40
+ # directories, initial revisions must be specified with --version-path.
41
+ # The path separator used here should be the separator specified by "version_path_separator" below.
42
+ # version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
43
+
44
+ # version path separator; As mentioned above, this is the character used to split
45
+ # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
46
+ # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
47
+ # Valid values for version_path_separator are:
48
+ #
49
+ # version_path_separator = :
50
+ # version_path_separator = ;
51
+ # version_path_separator = space
52
+ # version_path_separator = newline
53
+ #
54
+ # Use os.pathsep. Default configuration used for new projects.
55
+ version_path_separator = os
56
+
57
+ # set to 'true' to search source files recursively
58
+ # in each "version_locations" directory
59
+ # new in Alembic version 1.10
60
+ # recursive_version_locations = false
61
+
62
+ # the output encoding used when revision files
63
+ # are written from script.py.mako
64
+ # output_encoding = utf-8
65
+
66
+ sqlalchemy.url = driver://user:pass@localhost/dbname
67
+
68
+
69
+ [post_write_hooks]
70
+ # post_write_hooks defines scripts or Python functions that are run
71
+ # on newly generated revision scripts. See the documentation for further
72
+ # detail and examples
73
+
74
+ # format using "black" - use the console_scripts runner, against the "black" entrypoint
75
+ # hooks = black
76
+ # black.type = console_scripts
77
+ # black.entrypoint = black
78
+ # black.options = -l 79 REVISION_SCRIPT_FILENAME
79
+
80
+ # lint with attempts to fix using "ruff" - use the exec runner, execute a binary
81
+ # hooks = ruff
82
+ # ruff.type = exec
83
+ # ruff.executable = %(here)s/.venv/bin/ruff
84
+ # ruff.options = --fix REVISION_SCRIPT_FILENAME
85
+
86
+ # Logging configuration
87
+ [loggers]
88
+ keys = root,sqlalchemy,alembic
89
+
90
+ [handlers]
91
+ keys = console
92
+
93
+ [formatters]
94
+ keys = generic
95
+
96
+ [logger_root]
97
+ level = WARNING
98
+ handlers = console
99
+ qualname =
100
+
101
+ [logger_sqlalchemy]
102
+ level = WARNING
103
+ handlers =
104
+ qualname = sqlalchemy.engine
105
+
106
+ [logger_alembic]
107
+ level = INFO
108
+ handlers =
109
+ qualname = alembic
110
+
111
+ [handler_console]
112
+ class = StreamHandler
113
+ args = (sys.stderr,)
114
+ level = NOTSET
115
+ formatter = generic
116
+
117
+ [formatter_generic]
118
+ format = %(levelname)-5.5s [%(name)s] %(message)s
119
+ datefmt = %H:%M:%S
@@ -1,5 +1,6 @@
1
1
  """Alembic environment configuration."""
2
2
 
3
+ import os
3
4
  from logging.config import fileConfig
4
5
 
5
6
  from sqlalchemy import engine_from_config
@@ -8,6 +9,10 @@ from sqlalchemy import pool
8
9
  from alembic import context
9
10
 
10
11
  from basic_memory.models import Base
12
+
13
+ # set config.env to "test" for pytest to prevent logging to file in utils.setup_logging()
14
+ os.environ["BASIC_MEMORY_ENV"] = "test"
15
+
11
16
  from basic_memory.config import config as app_config
12
17
 
13
18
  # this is the Alembic Config object, which provides
@@ -18,6 +23,8 @@ config = context.config
18
23
  sqlalchemy_url = f"sqlite:///{app_config.database_path}"
19
24
  config.set_main_option("sqlalchemy.url", sqlalchemy_url)
20
25
 
26
+ # print(f"Using SQLAlchemy URL: {sqlalchemy_url}")
27
+
21
28
  # Interpret the config file for Python logging.
22
29
  if config.config_file_name is not None:
23
30
  fileConfig(config.config_file_name)
@@ -27,6 +34,14 @@ if config.config_file_name is not None:
27
34
  target_metadata = Base.metadata
28
35
 
29
36
 
37
+ # Add this function to tell Alembic what to include/exclude
38
+ def include_object(object, name, type_, reflected, compare_to):
39
+ # Ignore SQLite FTS tables
40
+ if type_ == "table" and name.startswith("search_index"):
41
+ return False
42
+ return True
43
+
44
+
30
45
  def run_migrations_offline() -> None:
31
46
  """Run migrations in 'offline' mode.
32
47
 
@@ -44,6 +59,8 @@ def run_migrations_offline() -> None:
44
59
  target_metadata=target_metadata,
45
60
  literal_binds=True,
46
61
  dialect_opts={"paramstyle": "named"},
62
+ include_object=include_object,
63
+ render_as_batch=True,
47
64
  )
48
65
 
49
66
  with context.begin_transaction():
@@ -63,7 +80,12 @@ def run_migrations_online() -> None:
63
80
  )
64
81
 
65
82
  with connectable.connect() as connection:
66
- context.configure(connection=connection, target_metadata=target_metadata)
83
+ context.configure(
84
+ connection=connection,
85
+ target_metadata=target_metadata,
86
+ include_object=include_object,
87
+ render_as_batch=True,
88
+ )
67
89
 
68
90
  with context.begin_transaction():
69
91
  context.run_migrations()
@@ -0,0 +1,51 @@
1
+ """remove required from entity.permalink
2
+
3
+ Revision ID: 502b60eaa905
4
+ Revises: b3c3938bacdb
5
+ Create Date: 2025-02-24 13:33:09.790951
6
+
7
+ """
8
+
9
+ from typing import Sequence, Union
10
+
11
+ from alembic import op
12
+ import sqlalchemy as sa
13
+
14
+
15
+ # revision identifiers, used by Alembic.
16
+ revision: str = "502b60eaa905"
17
+ down_revision: Union[str, None] = "b3c3938bacdb"
18
+ branch_labels: Union[str, Sequence[str], None] = None
19
+ depends_on: Union[str, Sequence[str], None] = None
20
+
21
+
22
+ def upgrade() -> None:
23
+ # ### commands auto generated by Alembic - please adjust! ###
24
+ with op.batch_alter_table("entity", schema=None) as batch_op:
25
+ batch_op.alter_column("permalink", existing_type=sa.VARCHAR(), nullable=True)
26
+ batch_op.drop_index("ix_entity_permalink")
27
+ batch_op.create_index(batch_op.f("ix_entity_permalink"), ["permalink"], unique=False)
28
+ batch_op.drop_constraint("uix_entity_permalink", type_="unique")
29
+ batch_op.create_index(
30
+ "uix_entity_permalink",
31
+ ["permalink"],
32
+ unique=True,
33
+ sqlite_where=sa.text("content_type = 'text/markdown' AND permalink IS NOT NULL"),
34
+ )
35
+
36
+ # ### end Alembic commands ###
37
+
38
+
39
+ def downgrade() -> None:
40
+ # ### commands auto generated by Alembic - please adjust! ###
41
+ with op.batch_alter_table("entity", schema=None) as batch_op:
42
+ batch_op.drop_index(
43
+ "uix_entity_permalink",
44
+ sqlite_where=sa.text("content_type = 'text/markdown' AND permalink IS NOT NULL"),
45
+ )
46
+ batch_op.create_unique_constraint("uix_entity_permalink", ["permalink"])
47
+ batch_op.drop_index(batch_op.f("ix_entity_permalink"))
48
+ batch_op.create_index("ix_entity_permalink", ["permalink"], unique=1)
49
+ batch_op.alter_column("permalink", existing_type=sa.VARCHAR(), nullable=False)
50
+
51
+ # ### end Alembic commands ###
@@ -0,0 +1,44 @@
1
+ """relation to_name unique index
2
+
3
+ Revision ID: b3c3938bacdb
4
+ Revises: 3dae7c7b1564
5
+ Create Date: 2025-02-22 14:59:30.668466
6
+
7
+ """
8
+
9
+ from typing import Sequence, Union
10
+
11
+ from alembic import op
12
+
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = "b3c3938bacdb"
16
+ down_revision: Union[str, None] = "3dae7c7b1564"
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ # SQLite doesn't support constraint changes through ALTER
23
+ # Need to recreate table with desired constraints
24
+ with op.batch_alter_table("relation") as batch_op:
25
+ # Drop existing unique constraint
26
+ batch_op.drop_constraint("uix_relation", type_="unique")
27
+
28
+ # Add new constraints
29
+ batch_op.create_unique_constraint(
30
+ "uix_relation_from_id_to_id", ["from_id", "to_id", "relation_type"]
31
+ )
32
+ batch_op.create_unique_constraint(
33
+ "uix_relation_from_id_to_name", ["from_id", "to_name", "relation_type"]
34
+ )
35
+
36
+
37
+ def downgrade() -> None:
38
+ with op.batch_alter_table("relation") as batch_op:
39
+ # Drop new constraints
40
+ batch_op.drop_constraint("uix_relation_from_id_to_name", type_="unique")
41
+ batch_op.drop_constraint("uix_relation_from_id_to_id", type_="unique")
42
+
43
+ # Restore original constraint
44
+ batch_op.create_unique_constraint("uix_relation", ["from_id", "to_id", "relation_type"])
basic_memory/api/app.py CHANGED
@@ -7,18 +7,14 @@ from fastapi import FastAPI, HTTPException
7
7
  from fastapi.exception_handlers import http_exception_handler
8
8
  from loguru import logger
9
9
 
10
- import basic_memory
11
10
  from basic_memory import db
12
11
  from basic_memory.config import config as app_config
13
12
  from basic_memory.api.routers import knowledge, search, memory, resource
14
- from basic_memory.utils import setup_logging
15
13
 
16
14
 
17
15
  @asynccontextmanager
18
16
  async def lifespan(app: FastAPI): # pragma: no cover
19
17
  """Lifecycle manager for the FastAPI app."""
20
- setup_logging(log_file=".basic-memory/basic-memory.log")
21
- logger.info(f"Starting Basic Memory API {basic_memory.__version__}")
22
18
  await db.run_migrations(app_config)
23
19
  yield
24
20
  logger.info("Shutting down Basic Memory API")
@@ -133,7 +133,7 @@ async def delete_entity(
133
133
  return DeleteEntitiesResponse(deleted=False)
134
134
 
135
135
  # Delete the entity
136
- deleted = await entity_service.delete_entity(entity.permalink)
136
+ deleted = await entity_service.delete_entity(entity.permalink or entity.id)
137
137
 
138
138
  # Remove from search index
139
139
  background_tasks.add_task(search_service.delete_by_permalink, entity.permalink)
@@ -29,34 +29,32 @@ async def to_graph_context(context, entity_repository: EntityRepository, page: i
29
29
  async def to_summary(item: SearchIndexRow | ContextResultRow):
30
30
  match item.type:
31
31
  case SearchItemType.ENTITY:
32
- assert item.title is not None
33
- assert item.created_at is not None
34
-
35
32
  return EntitySummary(
36
- title=item.title,
33
+ title=item.title, # pyright: ignore
37
34
  permalink=item.permalink,
38
35
  file_path=item.file_path,
39
36
  created_at=item.created_at,
40
37
  )
41
38
  case SearchItemType.OBSERVATION:
42
- assert item.category is not None
43
- assert item.content is not None
44
-
45
39
  return ObservationSummary(
46
- category=item.category, content=item.content, permalink=item.permalink
40
+ title=item.title, # pyright: ignore
41
+ file_path=item.file_path,
42
+ category=item.category, # pyright: ignore
43
+ content=item.content, # pyright: ignore
44
+ permalink=item.permalink, # pyright: ignore
45
+ created_at=item.created_at,
47
46
  )
48
47
  case SearchItemType.RELATION:
49
- assert item.from_id is not None
50
- from_entity = await entity_repository.find_by_id(item.from_id)
51
- assert from_entity is not None
52
-
48
+ from_entity = await entity_repository.find_by_id(item.from_id) # pyright: ignore
53
49
  to_entity = await entity_repository.find_by_id(item.to_id) if item.to_id else None
54
-
55
50
  return RelationSummary(
56
- permalink=item.permalink,
51
+ title=item.title, # pyright: ignore
52
+ file_path=item.file_path,
53
+ permalink=item.permalink, # pyright: ignore
57
54
  relation_type=item.type,
58
- from_id=from_entity.permalink,
55
+ from_id=from_entity.permalink, # pyright: ignore
59
56
  to_id=to_entity.permalink if to_entity else None,
57
+ created_at=item.created_at,
60
58
  )
61
59
  case _: # pragma: no cover
62
60
  raise ValueError(f"Unexpected type: {item.type}")
@@ -104,9 +102,11 @@ async def recent(
104
102
  context = await context_service.build_context(
105
103
  types=types, depth=depth, since=since, limit=limit, offset=offset, max_related=max_related
106
104
  )
107
- return await to_graph_context(
105
+ recent_context = await to_graph_context(
108
106
  context, entity_repository=entity_repository, page=page, page_size=page_size
109
107
  )
108
+ logger.debug(f"Recent context: {recent_context.model_dump_json()}")
109
+ return recent_context
110
110
 
111
111
 
112
112
  # get_memory_context needs to be declared last so other paths can match
@@ -2,9 +2,10 @@
2
2
 
3
3
  import tempfile
4
4
  from pathlib import Path
5
+ from typing import Annotated
5
6
 
6
- from fastapi import APIRouter, HTTPException, BackgroundTasks
7
- from fastapi.responses import FileResponse
7
+ from fastapi import APIRouter, HTTPException, BackgroundTasks, Body
8
+ from fastapi.responses import FileResponse, JSONResponse
8
9
  from loguru import logger
9
10
 
10
11
  from basic_memory.deps import (
@@ -13,10 +14,13 @@ from basic_memory.deps import (
13
14
  SearchServiceDep,
14
15
  EntityServiceDep,
15
16
  FileServiceDep,
17
+ EntityRepositoryDep,
16
18
  )
17
19
  from basic_memory.repository.search_repository import SearchIndexRow
18
20
  from basic_memory.schemas.memory import normalize_memory_url
19
21
  from basic_memory.schemas.search import SearchQuery, SearchItemType
22
+ from basic_memory.models.knowledge import Entity as EntityModel
23
+ from datetime import datetime
20
24
 
21
25
  router = APIRouter(prefix="/resource", tags=["resources"])
22
26
 
@@ -94,8 +98,7 @@ async def get_resource_content(
94
98
  content = await file_service.read_entity_content(result)
95
99
  memory_url = normalize_memory_url(result.permalink)
96
100
  modified_date = result.updated_at.isoformat()
97
- assert result.checksum
98
- checksum = result.checksum[:8]
101
+ checksum = result.checksum[:8] if result.checksum else ""
99
102
 
100
103
  # Prepare the delimited content
101
104
  response_content = f"--- {memory_url} {modified_date} {checksum}\n"
@@ -122,3 +125,101 @@ def cleanup_temp_file(file_path: str):
122
125
  logger.debug(f"Temporary file deleted: {file_path}")
123
126
  except Exception as e: # pragma: no cover
124
127
  logger.error(f"Error deleting temporary file {file_path}: {e}")
128
+
129
+
130
+ @router.put("/{file_path:path}")
131
+ async def write_resource(
132
+ config: ProjectConfigDep,
133
+ file_service: FileServiceDep,
134
+ entity_repository: EntityRepositoryDep,
135
+ search_service: SearchServiceDep,
136
+ file_path: str,
137
+ content: Annotated[str, Body()],
138
+ ) -> JSONResponse:
139
+ """Write content to a file in the project.
140
+
141
+ This endpoint allows writing content directly to a file in the project.
142
+ Also creates an entity record and indexes the file for search.
143
+
144
+ Args:
145
+ file_path: Path to write to, relative to project root
146
+ request: Contains the content to write
147
+
148
+ Returns:
149
+ JSON response with file information
150
+ """
151
+ try:
152
+ # Get content from request body
153
+
154
+ # Ensure it's UTF-8 string content
155
+ if isinstance(content, bytes): # pragma: no cover
156
+ content_str = content.decode("utf-8")
157
+ else:
158
+ content_str = str(content)
159
+
160
+ # Get full file path
161
+ full_path = Path(f"{config.home}/{file_path}")
162
+
163
+ # Ensure parent directory exists
164
+ full_path.parent.mkdir(parents=True, exist_ok=True)
165
+
166
+ # Write content to file
167
+ checksum = await file_service.write_file(full_path, content_str)
168
+
169
+ # Get file info
170
+ file_stats = file_service.file_stats(full_path)
171
+
172
+ # Determine file details
173
+ file_name = Path(file_path).name
174
+ content_type = file_service.content_type(full_path)
175
+
176
+ entity_type = "canvas" if file_path.endswith(".canvas") else "file"
177
+
178
+ # Check if entity already exists
179
+ existing_entity = await entity_repository.get_by_file_path(file_path)
180
+
181
+ if existing_entity:
182
+ # Update existing entity
183
+ entity = await entity_repository.update(
184
+ existing_entity.id,
185
+ {
186
+ "title": file_name,
187
+ "entity_type": entity_type,
188
+ "content_type": content_type,
189
+ "file_path": file_path,
190
+ "checksum": checksum,
191
+ "updated_at": datetime.fromtimestamp(file_stats.st_mtime),
192
+ },
193
+ )
194
+ status_code = 200
195
+ else:
196
+ # Create a new entity model
197
+ entity = EntityModel(
198
+ title=file_name,
199
+ entity_type=entity_type,
200
+ content_type=content_type,
201
+ file_path=file_path,
202
+ checksum=checksum,
203
+ created_at=datetime.fromtimestamp(file_stats.st_ctime),
204
+ updated_at=datetime.fromtimestamp(file_stats.st_mtime),
205
+ )
206
+ entity = await entity_repository.add(entity)
207
+ status_code = 201
208
+
209
+ # Index the file for search
210
+ await search_service.index_entity(entity) # pyright: ignore
211
+
212
+ # Return success response
213
+ return JSONResponse(
214
+ status_code=status_code,
215
+ content={
216
+ "file_path": file_path,
217
+ "checksum": checksum,
218
+ "size": file_stats.st_size,
219
+ "created_at": file_stats.st_ctime,
220
+ "modified_at": file_stats.st_mtime,
221
+ },
222
+ )
223
+ except Exception as e: # pragma: no cover
224
+ logger.error(f"Error writing resource {file_path}: {e}")
225
+ raise HTTPException(status_code=500, detail=f"Failed to write resource: {str(e)}")
basic_memory/cli/app.py CHANGED
@@ -4,9 +4,7 @@ import typer
4
4
 
5
5
  from basic_memory import db
6
6
  from basic_memory.config import config
7
- from basic_memory.utils import setup_logging
8
7
 
9
- setup_logging(log_file=".basic-memory/basic-memory-cli.log", console=False) # pragma: no cover
10
8
 
11
9
  asyncio.run(db.run_migrations(config))
12
10
 
@@ -10,29 +10,16 @@ from rich.console import Console
10
10
  from rich.panel import Panel
11
11
  from rich.tree import Tree
12
12
 
13
- from basic_memory import db
14
13
  from basic_memory.cli.app import app
14
+ from basic_memory.cli.commands.sync import get_sync_service
15
15
  from basic_memory.config import config
16
- from basic_memory.db import DatabaseType
17
- from basic_memory.repository import EntityRepository
18
- from basic_memory.sync import FileChangeScanner
19
- from basic_memory.sync.utils import SyncReport
16
+ from basic_memory.sync import SyncService
17
+ from basic_memory.sync.sync_service import SyncReport
20
18
 
21
19
  # Create rich console
22
20
  console = Console()
23
21
 
24
22
 
25
- async def get_file_change_scanner(
26
- db_type=DatabaseType.FILESYSTEM,
27
- ) -> FileChangeScanner: # pragma: no cover
28
- """Get sync service instance."""
29
- _, session_maker = await db.get_or_create_db(db_path=config.database_path, db_type=db_type)
30
-
31
- entity_repository = EntityRepository(session_maker)
32
- file_change_scanner = FileChangeScanner(entity_repository)
33
- return file_change_scanner
34
-
35
-
36
23
  def add_files_to_tree(
37
24
  tree: Tree, paths: Set[str], style: str, checksums: Dict[str, str] | None = None
38
25
  ):
@@ -104,7 +91,7 @@ def display_changes(title: str, changes: SyncReport, verbose: bool = False):
104
91
  """Display changes using Rich for better visualization."""
105
92
  tree = Tree(title)
106
93
 
107
- if changes.total_changes == 0:
94
+ if changes.total == 0:
108
95
  tree.add("No changes")
109
96
  console.print(Panel(tree, expand=False))
110
97
  return
@@ -135,11 +122,11 @@ def display_changes(title: str, changes: SyncReport, verbose: bool = False):
135
122
  console.print(Panel(tree, expand=False))
136
123
 
137
124
 
138
- async def run_status(sync_service: FileChangeScanner, verbose: bool = False):
125
+ async def run_status(sync_service: SyncService, verbose: bool = False):
139
126
  """Check sync status of files vs database."""
140
127
  # Check knowledge/ directory
141
- knowledge_changes = await sync_service.find_knowledge_changes(config.home)
142
- display_changes("Knowledge Files", knowledge_changes, verbose)
128
+ knowledge_changes = await sync_service.scan(config.home)
129
+ display_changes("Status", knowledge_changes, verbose)
143
130
 
144
131
 
145
132
  @app.command()
@@ -149,8 +136,9 @@ def status(
149
136
  """Show sync status between files and database."""
150
137
  with logfire.span("status"): # pyright: ignore [reportGeneralTypeIssues]
151
138
  try:
152
- sync_service = asyncio.run(get_file_change_scanner())
139
+ sync_service = asyncio.run(get_sync_service())
153
140
  asyncio.run(run_status(sync_service, verbose)) # pragma: no cover
154
141
  except Exception as e:
155
142
  logger.exception(f"Error checking status: {e}")
143
+ typer.echo(f"Error checking status: {e}", err=True)
156
144
  raise typer.Exit(code=1) # pragma: no cover
@@ -25,8 +25,8 @@ from basic_memory.repository.search_repository import SearchRepository
25
25
  from basic_memory.services import EntityService, FileService
26
26
  from basic_memory.services.link_resolver import LinkResolver
27
27
  from basic_memory.services.search_service import SearchService
28
- from basic_memory.sync import SyncService, FileChangeScanner
29
- from basic_memory.sync.utils import SyncReport
28
+ from basic_memory.sync import SyncService
29
+ from basic_memory.sync.sync_service import SyncReport
30
30
  from basic_memory.sync.watch_service import WatchService
31
31
 
32
32
  console = Console()
@@ -58,9 +58,6 @@ async def get_sync_service(): # pragma: no cover
58
58
  search_service = SearchService(search_repository, entity_repository, file_service)
59
59
  link_resolver = LinkResolver(entity_repository, search_service)
60
60
 
61
- # Initialize scanner
62
- file_change_scanner = FileChangeScanner(entity_repository)
63
-
64
61
  # Initialize services
65
62
  entity_service = EntityService(
66
63
  entity_parser,
@@ -73,12 +70,12 @@ async def get_sync_service(): # pragma: no cover
73
70
 
74
71
  # Create sync service
75
72
  sync_service = SyncService(
76
- scanner=file_change_scanner,
77
73
  entity_service=entity_service,
78
74
  entity_parser=entity_parser,
79
75
  entity_repository=entity_repository,
80
76
  relation_repository=relation_repository,
81
77
  search_service=search_service,
78
+ file_service=file_service,
82
79
  )
83
80
 
84
81
  return sync_service
@@ -95,7 +92,7 @@ def group_issues_by_directory(issues: List[ValidationIssue]) -> Dict[str, List[V
95
92
 
96
93
  def display_sync_summary(knowledge: SyncReport):
97
94
  """Display a one-line summary of sync changes."""
98
- total_changes = knowledge.total_changes
95
+ total_changes = knowledge.total
99
96
  if total_changes == 0:
100
97
  console.print("[green]Everything up to date[/green]")
101
98
  return
@@ -121,13 +118,13 @@ def display_sync_summary(knowledge: SyncReport):
121
118
 
122
119
  def display_detailed_sync_results(knowledge: SyncReport):
123
120
  """Display detailed sync results with trees."""
124
- if knowledge.total_changes == 0:
121
+ if knowledge.total == 0:
125
122
  console.print("\n[green]Everything up to date[/green]")
126
123
  return
127
124
 
128
125
  console.print("\n[bold]Sync Results[/bold]")
129
126
 
130
- if knowledge.total_changes > 0:
127
+ if knowledge.total > 0:
131
128
  knowledge_tree = Tree("[bold]Knowledge Files[/bold]")
132
129
  if knowledge.new:
133
130
  created = knowledge_tree.add("[green]Created[/green]")
@@ -163,8 +160,10 @@ async def run_sync(verbose: bool = False, watch: bool = False, console_status: b
163
160
  file_service=sync_service.entity_service.file_service,
164
161
  config=config,
165
162
  )
166
- await watch_service.handle_changes(config.home)
167
- await watch_service.run(console_status=console_status) # pragma: no cover
163
+ # full sync
164
+ await sync_service.sync(config.home)
165
+ # watch changes
166
+ await watch_service.run() # pragma: no cover
168
167
  else:
169
168
  # one time sync
170
169
  knowledge_changes = await sync_service.sync(config.home)
@@ -189,18 +188,15 @@ def sync(
189
188
  "-w",
190
189
  help="Start watching for changes after sync.",
191
190
  ),
192
- console_status: bool = typer.Option(
193
- False, "--console-status", "-c", help="Show live console status"
194
- ),
195
191
  ) -> None:
196
192
  """Sync knowledge files with the database."""
197
193
  try:
198
194
  # Run sync
199
- asyncio.run(run_sync(verbose=verbose, watch=watch, console_status=console_status))
195
+ asyncio.run(run_sync(verbose=verbose, watch=watch))
200
196
 
201
197
  except Exception as e: # pragma: no cover
202
198
  if not isinstance(e, typer.Exit):
203
- logger.exception("Sync failed")
199
+ logger.exception("Sync failed", e)
204
200
  typer.echo(f"Error during sync: {e}", err=True)
205
201
  raise typer.Exit(1)
206
202
  raise