mongrator 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mongrator/__init__.py +3 -0
- mongrator/_templates/migration.py.tmpl +40 -0
- mongrator/cli.py +256 -0
- mongrator/config.py +61 -0
- mongrator/exceptions.py +65 -0
- mongrator/loader.py +74 -0
- mongrator/migration.py +57 -0
- mongrator/ops.py +128 -0
- mongrator/planner.py +76 -0
- mongrator/py.typed +0 -0
- mongrator/runner.py +218 -0
- mongrator/state.py +101 -0
- mongrator-0.1.0.dist-info/METADATA +158 -0
- mongrator-0.1.0.dist-info/RECORD +16 -0
- mongrator-0.1.0.dist-info/WHEEL +4 -0
- mongrator-0.1.0.dist-info/entry_points.txt +3 -0
mongrator/_templates/migration.py.tmpl
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
"""Migration: {slug}
|
|
2
|
+
|
|
3
|
+
Generated by mongrator on {timestamp}.
|
|
4
|
+
|
|
5
|
+
Use the ops helpers for common reversible operations, or write plain PyMongo
|
|
6
|
+
calls directly against the `db` argument for anything more complex.
|
|
7
|
+
|
|
8
|
+
Example using ops helpers (auto-rollback supported):
|
|
9
|
+
|
|
10
|
+
from mongrator import ops
|
|
11
|
+
|
|
12
|
+
def up(db):
|
|
13
|
+
return [
|
|
14
|
+
ops.create_index("my_collection", {{"field": 1}}, unique=True),
|
|
15
|
+
]
|
|
16
|
+
|
|
17
|
+
Example using plain PyMongo (define down() for rollback):
|
|
18
|
+
|
|
19
|
+
def up(db):
|
|
20
|
+
db["my_collection"].update_many({{}}, {{"$set": {{"new_field": None}}}})
|
|
21
|
+
|
|
22
|
+
def down(db):
|
|
23
|
+
db["my_collection"].update_many({{}}, {{"$unset": {{"new_field": ""}}}})
|
|
24
|
+
"""
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
# Uncomment to use the ops helpers:
|
|
28
|
+
# from mongrator import ops
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def up(db):
|
|
32
|
+
pass
|
|
33
|
+
# return [
|
|
34
|
+
# ops.create_index("my_collection", {{"field": 1}}),
|
|
35
|
+
# ]
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
# def down(db):
|
|
39
|
+
# """Optional: define rollback logic here if not using ops helpers."""
|
|
40
|
+
# pass
|
mongrator/cli.py
ADDED
|
@@ -0,0 +1,256 @@
|
|
|
1
|
+
"""Command-line interface for mongrator.
|
|
2
|
+
|
|
3
|
+
Subcommands:
|
|
4
|
+
init — create the migrations directory and a mongrator.toml stub
|
|
5
|
+
create — generate a new timestamped migration file
|
|
6
|
+
status — show applied/pending migration table
|
|
7
|
+
up — apply pending migrations
|
|
8
|
+
down — roll back applied migrations
|
|
9
|
+
validate — verify checksums of applied migrations
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
import argparse
|
|
13
|
+
import asyncio
|
|
14
|
+
import sys
|
|
15
|
+
from datetime import UTC, datetime
|
|
16
|
+
from importlib.resources import files
|
|
17
|
+
from pathlib import Path
|
|
18
|
+
|
|
19
|
+
from .config import MigratorConfig
|
|
20
|
+
from .exceptions import MigratorError
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def _build_parser() -> argparse.ArgumentParser:
|
|
24
|
+
parser = argparse.ArgumentParser(
|
|
25
|
+
prog="mongrator",
|
|
26
|
+
description="Lightweight MongoDB schema migration tool",
|
|
27
|
+
)
|
|
28
|
+
parser.add_argument(
|
|
29
|
+
"--config",
|
|
30
|
+
metavar="PATH",
|
|
31
|
+
default="mongrator.toml",
|
|
32
|
+
help="path to config file (default: mongrator.toml)",
|
|
33
|
+
)
|
|
34
|
+
|
|
35
|
+
sub = parser.add_subparsers(dest="command", metavar="COMMAND")
|
|
36
|
+
sub.required = True
|
|
37
|
+
|
|
38
|
+
# init
|
|
39
|
+
sub.add_parser("init", help="create migrations directory and config stub")
|
|
40
|
+
|
|
41
|
+
# create
|
|
42
|
+
p_create = sub.add_parser("create", help="generate a new migration file")
|
|
43
|
+
p_create.add_argument("name", help="short description, e.g. add_users_email_index")
|
|
44
|
+
|
|
45
|
+
# status
|
|
46
|
+
sub.add_parser("status", help="show applied/pending migration table")
|
|
47
|
+
|
|
48
|
+
# up
|
|
49
|
+
p_up = sub.add_parser("up", help="apply pending migrations")
|
|
50
|
+
p_up.add_argument("--target", metavar="ID", help="apply only up to this migration ID")
|
|
51
|
+
p_up.add_argument("--async", dest="use_async", action="store_true", help="use async runner")
|
|
52
|
+
|
|
53
|
+
# down
|
|
54
|
+
p_down = sub.add_parser("down", help="roll back applied migrations")
|
|
55
|
+
p_down.add_argument(
|
|
56
|
+
"--steps", type=int, default=1, metavar="N", help="number of migrations to roll back (default: 1)"
|
|
57
|
+
)
|
|
58
|
+
p_down.add_argument("--async", dest="use_async", action="store_true", help="use async runner")
|
|
59
|
+
|
|
60
|
+
# validate
|
|
61
|
+
sub.add_parser("validate", help="verify checksums of applied migration files")
|
|
62
|
+
|
|
63
|
+
return parser
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def _load_config(args: argparse.Namespace) -> MigratorConfig:
    """Load configuration, preferring the config file over environment variables."""
    candidate = Path(args.config)
    if not candidate.exists():
        return MigratorConfig.from_env()
    return MigratorConfig.from_toml(candidate)
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
# ---------------------------------------------------------------------------
|
|
74
|
+
# Subcommand handlers
|
|
75
|
+
# ---------------------------------------------------------------------------
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def _cmd_init(args: argparse.Namespace) -> int:
    """Bootstrap a project: write a config stub (if absent) and the migrations dir."""
    stub = (
        '[mongrator]\nuri = "mongodb://localhost:27017"\ndatabase = "mydb"\n'
        'migrations_dir = "migrations"\ncollection = "mongrator_migrations"\n'
    )
    config_path = Path(args.config)
    if not config_path.exists():
        config_path.write_text(stub)
        print(f"Created {config_path}")

    migrations_dir = Path("migrations")
    migrations_dir.mkdir(exist_ok=True)
    print(f"Created {migrations_dir}/")
    return 0
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def _cmd_create(args: argparse.Namespace) -> int:
    """Render the bundled template into a new timestamped migration file."""
    config = _load_config(args)
    config.migrations_dir.mkdir(parents=True, exist_ok=True)

    stamp = datetime.now(tz=UTC).strftime("%Y%m%d_%H%M%S")
    slug = args.name.strip().replace(" ", "_").lower()
    destination = config.migrations_dir / f"{stamp}_{slug}.py"

    template_text = files("mongrator._templates").joinpath("migration.py.tmpl").read_text(encoding="utf-8")
    destination.write_text(template_text.format(slug=slug, timestamp=stamp), encoding="utf-8")
    print(f"Created {destination}")
    return 0
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def _cmd_status(args: argparse.Namespace) -> int:
    """Print a table of all migrations with applied/pending/MODIFIED state."""
    from .runner import SyncRunner

    config = _load_config(args)
    try:
        import pymongo
    except ImportError:
        print("error: pymongo is required. Install with: pip install pymongo", file=sys.stderr)
        return 1

    runner = SyncRunner(pymongo.MongoClient(config.uri), config)
    statuses = runner.status()

    if not statuses:
        print("No migrations found.")
        return 0

    col_width = max(len(s.id) for s in statuses) + 2
    print(f"{'Migration':<{col_width}} {'Status':<10} {'Applied At'}")
    print("-" * (col_width + 30))
    for entry in statuses:
        # An applied migration whose file no longer matches its recorded
        # checksum is flagged loudly as MODIFIED.
        if entry.applied and not entry.checksum_ok:
            label = "MODIFIED"
        elif entry.applied:
            label = "applied"
        else:
            label = "pending"
        when = entry.applied_at.isoformat() if entry.applied_at else "-"
        print(f"{entry.id:<{col_width}} {label:<10} {when}")
    return 0
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
def _cmd_up(args: argparse.Namespace) -> int:
    """Apply pending migrations synchronously, or via the async runner with --async."""
    config = _load_config(args)
    if args.use_async:
        return asyncio.run(_async_up(config, args.target))
    import pymongo

    from .runner import SyncRunner

    runner = SyncRunner(pymongo.MongoClient(config.uri), config)
    applied = runner.up(target=args.target)
    if not applied:
        print("Nothing to apply.")
        return 0
    for migration_id in applied:
        print(f" applied {migration_id}")
    return 0
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
async def _async_up(config: MigratorConfig, target: str | None) -> int:
    """Async counterpart of the _cmd_up apply path."""
    from pymongo import AsyncMongoClient

    from .runner import AsyncRunner

    runner = AsyncRunner(AsyncMongoClient(config.uri), config)
    applied = await runner.up(target=target)
    if not applied:
        print("Nothing to apply.")
        return 0
    for migration_id in applied:
        print(f" applied {migration_id}")
    return 0
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
def _cmd_down(args: argparse.Namespace) -> int:
    """Roll back applied migrations synchronously, or async with --async."""
    config = _load_config(args)
    if args.use_async:
        return asyncio.run(_async_down(config, args.steps))
    import pymongo

    from .runner import SyncRunner

    runner = SyncRunner(pymongo.MongoClient(config.uri), config)
    rolled_back = runner.down(steps=args.steps)
    if not rolled_back:
        print("Nothing to roll back.")
        return 0
    for migration_id in rolled_back:
        print(f" rolled back {migration_id}")
    return 0
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
async def _async_down(config: MigratorConfig, steps: int) -> int:
    """Async counterpart of the _cmd_down rollback path."""
    from pymongo import AsyncMongoClient

    from .runner import AsyncRunner

    runner = AsyncRunner(AsyncMongoClient(config.uri), config)
    rolled_back = await runner.down(steps=steps)
    if not rolled_back:
        print("Nothing to roll back.")
        return 0
    for migration_id in rolled_back:
        print(f" rolled back {migration_id}")
    return 0
|
|
207
|
+
|
|
208
|
+
|
|
209
|
+
def _cmd_validate(args: argparse.Namespace) -> int:
    """Verify that applied migration files still match their recorded checksums.

    Returns:
        0 when every checksum matches; 1 (with details on stderr) otherwise,
        or when the pymongo driver is not installed.
    """
    from .runner import SyncRunner

    config = _load_config(args)
    # Guard the pymongo import like _cmd_status does, so a missing driver
    # produces a friendly message instead of an ImportError traceback.
    try:
        import pymongo
    except ImportError:
        print("error: pymongo is required. Install with: pip install pymongo", file=sys.stderr)
        return 1

    client = pymongo.MongoClient(config.uri)
    runner = SyncRunner(client, config)
    errors = runner.validate()

    if not errors:
        print("All applied migrations have valid checksums.")
        return 0

    # Group the mismatches for a one-line summary, then list each one.
    eg = ExceptionGroup("Checksum mismatches detected", errors)
    print(f"error: {eg}", file=sys.stderr)
    for e in errors:
        print(f" {e}", file=sys.stderr)
    return 1
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
# ---------------------------------------------------------------------------
|
|
231
|
+
# Entry point
|
|
232
|
+
# ---------------------------------------------------------------------------
|
|
233
|
+
|
|
234
|
+
|
|
235
|
+
def main() -> None:
    """CLI entry point: parse argv, dispatch to a handler, exit with its code."""
    args = _build_parser().parse_args()

    handlers = {
        "init": _cmd_init,
        "create": _cmd_create,
        "status": _cmd_status,
        "up": _cmd_up,
        "down": _cmd_down,
        "validate": _cmd_validate,
    }

    try:
        exit_code = handlers[args.command](args)
    except MigratorError as exc:
        # Known, user-facing failures get a short message rather than a traceback.
        print(f"error: {exc}", file=sys.stderr)
        sys.exit(1)
    except KeyboardInterrupt:
        # Conventional exit status for SIGINT (128 + 2).
        sys.exit(130)

    sys.exit(exit_code)
|
mongrator/config.py
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import tomllib
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Self
|
|
6
|
+
|
|
7
|
+
from .exceptions import ConfigurationError
|
|
8
|
+
|
|
9
|
+
_DEFAULT_COLLECTION = "mongrator_migrations"
|
|
10
|
+
_DEFAULT_MIGRATIONS_DIR = Path("migrations")
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@dataclass(frozen=True)
|
|
14
|
+
class MigratorConfig:
|
|
15
|
+
"""Immutable configuration for a migrator instance."""
|
|
16
|
+
|
|
17
|
+
uri: str
|
|
18
|
+
database: str
|
|
19
|
+
migrations_dir: Path
|
|
20
|
+
collection: str = _DEFAULT_COLLECTION
|
|
21
|
+
|
|
22
|
+
@classmethod
|
|
23
|
+
def from_toml(cls, path: Path) -> Self:
|
|
24
|
+
"""Load configuration from a TOML file (e.g. mongrator.toml)."""
|
|
25
|
+
try:
|
|
26
|
+
with open(path, "rb") as f:
|
|
27
|
+
data = tomllib.load(f)
|
|
28
|
+
except FileNotFoundError:
|
|
29
|
+
raise ConfigurationError(f"Config file not found: {path}")
|
|
30
|
+
except tomllib.TOMLDecodeError as e:
|
|
31
|
+
raise ConfigurationError(f"Invalid TOML in {path}: {e}")
|
|
32
|
+
|
|
33
|
+
try:
|
|
34
|
+
uri: str = data["uri"]
|
|
35
|
+
database: str = data["database"]
|
|
36
|
+
except KeyError as e:
|
|
37
|
+
raise ConfigurationError(f"Missing required config key: {e}")
|
|
38
|
+
|
|
39
|
+
migrations_dir = Path(data.get("migrations_dir", str(_DEFAULT_MIGRATIONS_DIR)))
|
|
40
|
+
collection: str = data.get("collection", _DEFAULT_COLLECTION)
|
|
41
|
+
return cls(uri=uri, database=database, migrations_dir=migrations_dir, collection=collection)
|
|
42
|
+
|
|
43
|
+
@classmethod
|
|
44
|
+
def from_env(cls) -> Self:
|
|
45
|
+
"""Load configuration from environment variables.
|
|
46
|
+
|
|
47
|
+
Variables:
|
|
48
|
+
MONGRATOR_URI — MongoDB connection URI (required)
|
|
49
|
+
MONGRATOR_DB — database name (required)
|
|
50
|
+
MONGRATOR_MIGRATIONS_DIR — path to migrations directory (default: migrations)
|
|
51
|
+
MONGRATOR_COLLECTION — tracking collection name (default: mongrator_migrations)
|
|
52
|
+
"""
|
|
53
|
+
uri = os.environ.get("MONGRATOR_URI")
|
|
54
|
+
database = os.environ.get("MONGRATOR_DB")
|
|
55
|
+
if not uri:
|
|
56
|
+
raise ConfigurationError("MONGRATOR_URI environment variable is not set")
|
|
57
|
+
if not database:
|
|
58
|
+
raise ConfigurationError("MONGRATOR_DB environment variable is not set")
|
|
59
|
+
migrations_dir = Path(os.environ.get("MONGRATOR_MIGRATIONS_DIR", str(_DEFAULT_MIGRATIONS_DIR)))
|
|
60
|
+
collection = os.environ.get("MONGRATOR_COLLECTION", _DEFAULT_COLLECTION)
|
|
61
|
+
return cls(uri=uri, database=database, migrations_dir=migrations_dir, collection=collection)
|
mongrator/exceptions.py
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
class MigratorError(Exception):
    """Base class for all mongrator errors."""


class ConfigurationError(MigratorError):
    """Invalid or missing configuration."""


class ChecksumMismatchError(MigratorError):
    """Applied migration file has been modified since it was run."""

    def __init__(self, migration_id: str, expected: str, actual: str) -> None:
        self.migration_id = migration_id
        self.expected = expected
        self.actual = actual
        message = (
            f"Checksum mismatch for '{migration_id}': "
            f"expected {expected!r}, got {actual!r}. "
            "The migration file has been modified after being applied."
        )
        super().__init__(message)


class DuplicateMigrationIdError(MigratorError):
    """Two migration files share the same ID."""

    def __init__(self, migration_id: str) -> None:
        self.migration_id = migration_id
        message = f"Duplicate migration ID: '{migration_id}'"
        super().__init__(message)


class MigrationImportError(MigratorError):
    """A migration file could not be imported."""

    def __init__(self, path: str, cause: Exception) -> None:
        self.path = path
        self.cause = cause
        message = f"Failed to import migration '{path}': {cause}"
        super().__init__(message)


class InvalidMigrationFileError(MigratorError):
    """A migration file is missing required callables or has an invalid structure."""

    def __init__(self, path: str, reason: str) -> None:
        self.path = path
        self.reason = reason
        message = f"Invalid migration file '{path}': {reason}"
        super().__init__(message)


class NoDownMethodError(MigratorError):
    """A migration has no rollback path."""

    def __init__(self, migration_id: str) -> None:
        self.migration_id = migration_id
        message = (
            f"Migration '{migration_id}' has no rollback path. "
            "Define a down() function or use ops.* helpers that support auto-rollback."
        )
        super().__init__(message)


class MigrationNotFoundError(MigratorError):
    """A referenced migration ID does not exist."""

    def __init__(self, migration_id: str) -> None:
        self.migration_id = migration_id
        message = f"Migration not found: '{migration_id}'"
        super().__init__(message)
|
mongrator/loader.py
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
import hashlib
|
|
2
|
+
import importlib.util
|
|
3
|
+
import sys
|
|
4
|
+
import types
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
from .config import MigratorConfig
|
|
8
|
+
from .exceptions import (
|
|
9
|
+
DuplicateMigrationIdError,
|
|
10
|
+
InvalidMigrationFileError,
|
|
11
|
+
MigrationImportError,
|
|
12
|
+
)
|
|
13
|
+
from .migration import Checksum, MigrationFile, MigrationId
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def _checksum(path: Path) -> Checksum:
    """Return the SHA-256 hex digest of the migration file's raw bytes."""
    hasher = hashlib.sha256()
    hasher.update(path.read_bytes())
    return hasher.hexdigest()
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def _migration_id(path: Path) -> MigrationId:
    """Derive the migration ID from the filename stem (name minus the .py suffix)."""
    return path.stem
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def load(config: MigratorConfig) -> list[MigrationFile]:
    """Scan migrations_dir, import each .py file, and return an ordered list.

    Files are sorted lexicographically by filename, which is chronological
    when the recommended {timestamp}_{slug}.py naming convention is used.

    Raises:
        DuplicateMigrationIdError: if two files share the same stem.
        MigrationImportError: if a file cannot be imported.
        InvalidMigrationFileError: if a file does not define an up() callable.
    """
    if not config.migrations_dir.exists():
        return []

    seen_ids: set[MigrationId] = set()
    discovered: list[MigrationFile] = []

    for path in sorted(config.migrations_dir.glob("*.py")):
        migration_id = _migration_id(path)
        if migration_id in seen_ids:
            raise DuplicateMigrationIdError(migration_id)
        seen_ids.add(migration_id)

        # Checksum is taken before importing, mirroring the state recorded
        # when the migration is applied.
        checksum = _checksum(path)
        module = _import_file(path, migration_id)

        if not callable(getattr(module, "up", None)):
            raise InvalidMigrationFileError(str(path), "missing a callable up() function")

        discovered.append(MigrationFile(id=migration_id, path=path, checksum=checksum, module=module))

    return discovered
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def _import_file(path: Path, module_name: str) -> types.ModuleType:
    """Import a migration file under a private, collision-free module name."""
    # Namespacing under mongrator._migrations keeps migration modules from
    # shadowing installed packages in sys.modules.
    qualified = f"mongrator._migrations.{module_name}"
    spec = importlib.util.spec_from_file_location(qualified, path)
    if spec is None or spec.loader is None:
        raise MigrationImportError(str(path), RuntimeError("could not create module spec"))

    module = importlib.util.module_from_spec(spec)
    sys.modules[qualified] = module
    try:
        spec.loader.exec_module(module)  # type: ignore[union-attr]
    except Exception as exc:
        # Leave no half-initialized module behind when execution fails.
        del sys.modules[qualified]
        raise MigrationImportError(str(path), exc) from exc
    return module
|
mongrator/migration.py
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
import types
|
|
2
|
+
from collections.abc import Callable
|
|
3
|
+
from dataclasses import dataclass, field
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Any, Literal, TypedDict
|
|
7
|
+
|
|
8
|
+
type MigrationId = str
|
|
9
|
+
type Checksum = str
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class MigrationRecord(TypedDict):
|
|
13
|
+
"""Document stored in the tracking collection for each applied migration."""
|
|
14
|
+
|
|
15
|
+
_id: MigrationId
|
|
16
|
+
applied_at: datetime
|
|
17
|
+
checksum: Checksum
|
|
18
|
+
direction: Literal["up", "down"]
|
|
19
|
+
duration_ms: int
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
@dataclass
|
|
23
|
+
class MigrationFile:
|
|
24
|
+
"""Represents a discovered migration file on disk."""
|
|
25
|
+
|
|
26
|
+
id: MigrationId
|
|
27
|
+
path: Path
|
|
28
|
+
checksum: Checksum
|
|
29
|
+
module: types.ModuleType | None = field(default=None, repr=False)
|
|
30
|
+
|
|
31
|
+
@property
|
|
32
|
+
def up(self) -> Callable[..., Any] | None:
|
|
33
|
+
if self.module is None:
|
|
34
|
+
return None
|
|
35
|
+
return getattr(self.module, "up", None)
|
|
36
|
+
|
|
37
|
+
@property
|
|
38
|
+
def down(self) -> Callable[..., Any] | None:
|
|
39
|
+
if self.module is None:
|
|
40
|
+
return None
|
|
41
|
+
return getattr(self.module, "down", None)
|
|
42
|
+
|
|
43
|
+
def has_up(self) -> bool:
|
|
44
|
+
return self.up is not None
|
|
45
|
+
|
|
46
|
+
def has_down(self) -> bool:
|
|
47
|
+
return self.down is not None
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
@dataclass
|
|
51
|
+
class MigrationStatus:
|
|
52
|
+
"""Status of a single migration as reported by runner.status()."""
|
|
53
|
+
|
|
54
|
+
id: MigrationId
|
|
55
|
+
applied: bool
|
|
56
|
+
applied_at: datetime | None = None
|
|
57
|
+
checksum_ok: bool = True
|
mongrator/ops.py
ADDED
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
"""Declarative operation helpers for MongoDB migrations.
|
|
2
|
+
|
|
3
|
+
Each helper returns an Operation whose apply() and revert() methods perform the
|
|
4
|
+
forward and reverse changes respectively. When a migration's up() function returns
|
|
5
|
+
a list[Operation] and no down() is defined, the runner auto-generates rollback by
|
|
6
|
+
calling revert() on each operation in reverse order.
|
|
7
|
+
|
|
8
|
+
Usage in a migration file::
|
|
9
|
+
|
|
10
|
+
from mongrator import ops
|
|
11
|
+
|
|
12
|
+
def up(db):
|
|
13
|
+
return [
|
|
14
|
+
ops.create_index("users", {"email": 1}, unique=True),
|
|
15
|
+
ops.rename_field("users", "name", "full_name"),
|
|
16
|
+
]
|
|
17
|
+
"""
|
|
18
|
+
|
|
19
|
+
from dataclasses import dataclass, field
|
|
20
|
+
from typing import Any
|
|
21
|
+
|
|
22
|
+
from pymongo.database import Database
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
@dataclass
class Operation:
    """An atomic, reversible database operation.

    Built by the helper factories in this module (create_index, drop_index,
    rename_field, add_field). The runner calls apply() when migrating forward
    and revert() when auto-rolling back an ops-based migration.
    """

    # Human-readable summary of the operation (e.g. "create_index('users', {email: 1})").
    description: str
    # Forward/reverse callables, each taking the target Database. Excluded
    # from repr so an Operation prints as just its description.
    _apply: Any = field(repr=False)
    _revert: Any = field(repr=False)

    def apply(self, db: Database) -> None:  # type: ignore[type-arg]
        """Run the forward change against *db*."""
        self._apply(db)

    def revert(self, db: Database) -> None:  # type: ignore[type-arg]
        """Run the reverse change against *db*; may raise NotImplementedError
        for operations that cannot be auto-reverted (see drop_index)."""
        self._revert(db)
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def create_index(
    collection: str,
    keys: dict[str, int],
    **kwargs: Any,
) -> Operation:
    """Create an index. Reverts by dropping the index."""
    explicit_name: str | None = kwargs.get("name")

    def apply(db: Database) -> None:  # type: ignore[type-arg]
        db[collection].create_index(list(keys.items()), **kwargs)

    def revert(db: Database) -> None:  # type: ignore[type-arg]
        # Without an explicit name, mirror pymongo's default index naming
        # ("field1_1_field2_-1" style) so the right index is dropped.
        target = explicit_name if explicit_name else "_".join(f"{k}_{v}" for k, v in keys.items())
        db[collection].drop_index(target)

    rendered_keys = ", ".join(f"{k}: {v}" for k, v in keys.items())
    return Operation(
        description=f"create_index({collection!r}, {{{rendered_keys}}})",
        _apply=apply,
        _revert=revert,
    )
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def drop_index(collection: str, index_name: str) -> Operation:
    """Drop an index by name. Not auto-reversible (index spec is unknown)."""
    label = f"drop_index({collection!r}, {index_name!r})"

    def apply(db: Database) -> None:  # type: ignore[type-arg]
        db[collection].drop_index(index_name)

    def revert(db: Database) -> None:  # type: ignore[type-arg]
        # The original key spec is not recorded, so the index cannot be rebuilt here.
        raise NotImplementedError(
            f"{label} cannot be auto-reverted. Define a down() function to recreate the index."
        )

    return Operation(description=label, _apply=apply, _revert=revert)
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def rename_field(
    collection: str,
    old_name: str,
    new_name: str,
    filter: dict[str, Any] | None = None,
) -> Operation:
    """Rename a field across all (or filtered) documents. Reverts by renaming back."""
    query = {} if filter is None else filter

    def apply(db: Database) -> None:  # type: ignore[type-arg]
        db[collection].update_many(query, {"$rename": {old_name: new_name}})

    def revert(db: Database) -> None:  # type: ignore[type-arg]
        # Symmetric inverse: swap the rename direction.
        db[collection].update_many(query, {"$rename": {new_name: old_name}})

    return Operation(
        description=f"rename_field({collection!r}, {old_name!r} → {new_name!r})",
        _apply=apply,
        _revert=revert,
    )
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def add_field(
    collection: str,
    field_name: str,
    default_value: Any,
    filter: dict[str, Any] | None = None,
) -> Operation:
    """Add a field with a default value to all (or filtered) documents.
    Reverts by unsetting the field.

    Args:
        collection: Target collection name.
        field_name: Name of the field to add.
        default_value: Value set on documents that lack the field.
        filter: Optional extra query; only matching documents are touched.
    """
    query = filter or {}

    def apply(db: Database) -> None:  # type: ignore[type-arg]
        # Only touch documents that do not already have the field, so
        # pre-existing values are preserved.
        db[collection].update_many(
            {**query, field_name: {"$exists": False}},
            {"$set": {field_name: default_value}},
        )

    def revert(db: Database) -> None:  # type: ignore[type-arg]
        # NOTE(review): revert unsets the field on EVERY document matching
        # `query`, including documents that already had the field before
        # apply() ran (apply deliberately skipped those via $exists: False) —
        # rollback can therefore drop pre-existing data. Confirm whether this
        # asymmetry is intended.
        db[collection].update_many(query, {"$unset": {field_name: ""}})

    return Operation(
        description=f"add_field({collection!r}, {field_name!r}={default_value!r})",
        _apply=apply,
        _revert=revert,
    )
|
mongrator/planner.py
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
from dataclasses import dataclass
|
|
2
|
+
|
|
3
|
+
from .exceptions import MigrationNotFoundError
|
|
4
|
+
from .migration import MigrationFile, MigrationId
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
@dataclass
class MigrationPlan:
    """Result of plan_up()/plan_down(): which migrations to act on vs. leave alone."""

    # Migrations to execute, in execution order (chronological for plan_up,
    # newest-first for plan_down).
    to_apply: list[MigrationFile]
    # Migrations intentionally left untouched by this plan.
    to_skip: list[MigrationFile]
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def plan_up(
    files: list[MigrationFile],
    applied: set[MigrationId],
    target: MigrationId | None = None,
) -> MigrationPlan:
    """Compute which migrations need to be applied in the forward direction.

    Args:
        files: All known migration files, in chronological order.
        applied: Set of migration IDs already recorded in the tracking collection.
        target: If given, apply only up to and including this migration ID.
            Raises MigrationNotFoundError if target is not in files.

    Returns:
        A MigrationPlan with to_apply (pending) and to_skip (already applied).
    """
    if target is not None:
        known_ids = {f.id for f in files}
        if target not in known_ids:
            raise MigrationNotFoundError(target)

    to_apply: list[MigrationFile] = []
    to_skip: list[MigrationFile] = []

    for f in files:
        if f.id in applied:
            to_skip.append(f)
        else:
            to_apply.append(f)
        # Stop once the target is reached, whether it was pending or already
        # applied — "up to and including this migration ID" holds either way.
        if target is not None and f.id == target:
            break

    return MigrationPlan(to_apply=to_apply, to_skip=to_skip)
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def plan_down(
    files: list[MigrationFile],
    applied: set[MigrationId],
    steps: int = 1,
) -> MigrationPlan:
    """Compute which migrations to roll back.

    Rolls back the most recently applied migrations, up to `steps` of them.
    File order is canonical: the applied set is intersected with the ordered
    file list and the last `steps` entries are selected, newest first.

    Args:
        files: All known migration files, in chronological order.
        applied: Set of migration IDs already recorded in the tracking collection.
        steps: Number of most-recent applied migrations to roll back.

    Returns:
        A MigrationPlan where to_apply contains the migrations to roll back
        (in reverse order) and to_skip contains those left untouched.
    """
    if steps < 1:
        raise ValueError(f"steps must be >= 1, got {steps}")

    chronological = [f for f in files if f.id in applied]
    targets = chronological[-steps:]
    targets.reverse()
    target_ids = {f.id for f in targets}
    untouched = [f for f in chronological if f.id not in target_ids]

    return MigrationPlan(to_apply=targets, to_skip=untouched)
|
mongrator/py.typed
ADDED
|
File without changes
|
mongrator/runner.py
ADDED
|
@@ -0,0 +1,218 @@
|
|
|
1
|
+
"""Migration runners: Protocol definitions and sync/async implementations.
|
|
2
|
+
|
|
3
|
+
SyncRunner wraps a pymongo MongoClient.
|
|
4
|
+
AsyncRunner wraps a pymongo AsyncMongoClient.
|
|
5
|
+
|
|
6
|
+
Both share the same non-IO logic via loader and planner.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import time
|
|
10
|
+
from typing import Any, Protocol, cast, runtime_checkable
|
|
11
|
+
|
|
12
|
+
from pymongo import AsyncMongoClient, MongoClient
|
|
13
|
+
|
|
14
|
+
from . import loader, planner
|
|
15
|
+
from .config import MigratorConfig
|
|
16
|
+
from .exceptions import ChecksumMismatchError, NoDownMethodError
|
|
17
|
+
from .migration import MigrationFile, MigrationId, MigrationStatus
|
|
18
|
+
from .ops import Operation
|
|
19
|
+
from .state import AsyncMongoStateStore, SyncStateStore, make_record
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
@runtime_checkable
class MigrationRunner(Protocol):
    """Structural interface for synchronous runners (implemented by SyncRunner)."""

    def up(self, target: MigrationId | None = None) -> list[MigrationId]: ...
    def down(self, steps: int = 1) -> list[MigrationId]: ...
    def status(self) -> list[MigrationStatus]: ...
    def validate(self) -> list[ChecksumMismatchError]: ...


@runtime_checkable
class AsyncMigrationRunner(Protocol):
    """Structural interface for asynchronous runners (implemented by AsyncRunner)."""

    async def up(self, target: MigrationId | None = None) -> list[MigrationId]: ...
    async def down(self, steps: int = 1) -> list[MigrationId]: ...
    async def status(self) -> list[MigrationStatus]: ...
    async def validate(self) -> list[ChecksumMismatchError]: ...
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _resolve_rollback(migration: MigrationFile) -> None:
    """Ensure *migration* can be rolled back, raising NoDownMethodError otherwise.

    Ops-based auto-rollback (up() returning revertible Operations) cannot be
    detected without executing up(), so that path is verified at rollback time
    instead; here only an explicit down() qualifies.
    """
    if not migration.has_down():
        raise NoDownMethodError(migration.id)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def _run_up_migration(migration: MigrationFile, db: Any) -> None:
    """Run a migration's up() callable against *db*.

    Ops-based migrations return a list of Operation objects, which the runner
    applies in order. Raw pymongo migrations do their work inside up() itself
    and return None, so nothing further happens here.
    """
    if migration.up is None:
        return
    returned = migration.up(db)
    if not isinstance(returned, list):
        return
    if all(isinstance(item, Operation) for item in returned):
        for operation in cast(list[Operation], returned):
            operation.apply(db)  # type: ignore[arg-type]
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def _run_down_migration(migration: MigrationFile, db: Any) -> None:
    """Execute the down() callable, or auto-revert ops returned by up().

    Rollback strategy, in order of preference:
      1. An explicit down(db) function, if the migration defines one.
      2. Auto-rollback: call up(db) to obtain its Operation list and revert
         each op in reverse order.
    Raises NoDownMethodError when neither strategy is available.
    """
    down_fn = migration.down
    if down_fn is not None:
        down_fn(db)
        return
    # Try auto-rollback via ops returned from up()
    up_fn = migration.up
    if up_fn is None:
        raise NoDownMethodError(migration.id)
    # NOTE(review): for a raw-pymongo migration without down(), this call
    # re-executes up()'s side effects against the live db before we discover
    # the result is not an ops list and raise below — confirm such migrations
    # can never reach this path, or find a way to guard before calling.
    result = up_fn(db)
    if isinstance(result, list) and all(isinstance(op, Operation) for op in result):
        # Revert in reverse application order so dependent ops unwind cleanly.
        for op in reversed(cast(list[Operation], result)):
            op.revert(db)  # type: ignore[arg-type]
        return
    raise NoDownMethodError(migration.id)
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
class SyncRunner:
    """Synchronous migration runner backed by pymongo.

    Discovery and planning are delegated to the pure loader/planner modules;
    this class performs only the IO: executing migrations against the database
    and recording state in the tracking collection.
    """

    def __init__(self, client: "MongoClient", config: MigratorConfig) -> None:  # type: ignore[type-arg]
        self._db = client[config.database]
        self._store = SyncStateStore(self._db[config.collection])
        self._config = config

    def up(self, target: MigrationId | None = None) -> list[MigrationId]:
        """Apply pending migrations, optionally up to `target`."""
        discovered = loader.load(self._config)
        already_applied = self._store.get_applied()
        plan = planner.plan_up(discovered, already_applied, target)
        newly_applied: list[MigrationId] = []
        for mig in plan.to_apply:
            began = time.monotonic()
            _run_up_migration(mig, self._db)
            elapsed_ms = int((time.monotonic() - began) * 1000)
            self._store.record_applied(make_record(mig.id, mig.checksum, "up", elapsed_ms))
            newly_applied.append(mig.id)
        return newly_applied

    def down(self, steps: int = 1) -> list[MigrationId]:
        """Roll back the most recently applied migrations."""
        discovered = loader.load(self._config)
        already_applied = self._store.get_applied()
        plan = planner.plan_down(discovered, already_applied, steps)
        reverted: list[MigrationId] = []
        for mig in plan.to_apply:
            began = time.monotonic()
            _run_down_migration(mig, self._db)
            elapsed_ms = int((time.monotonic() - began) * 1000)
            self._store.record_applied(make_record(mig.id, mig.checksum, "down", elapsed_ms))
            reverted.append(mig.id)
        return reverted

    def status(self) -> list[MigrationStatus]:
        """Return the status of every known migration."""
        discovered = loader.load(self._config)
        already_applied = self._store.get_applied()
        report: list[MigrationStatus] = []
        for mig in discovered:
            record = self._store.get_record(mig.id)
            # A missing record is not a mismatch; only compare when one exists.
            checksum_ok = record is None or record["checksum"] == mig.checksum
            report.append(
                MigrationStatus(
                    id=mig.id,
                    applied=mig.id in already_applied,
                    applied_at=record["applied_at"] if record else None,
                    checksum_ok=checksum_ok,
                )
            )
        return report

    def validate(self) -> list[ChecksumMismatchError]:
        """Check that applied migration files match their recorded checksums."""
        discovered = loader.load(self._config)
        already_applied = self._store.get_applied()
        return [
            ChecksumMismatchError(mig.id, rec["checksum"], mig.checksum)
            for mig in discovered
            if mig.id in already_applied
            and (rec := self._store.get_record(mig.id))
            and rec["checksum"] != mig.checksum
        ]
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
class AsyncRunner:
    """Asynchronous migration runner backed by pymongo AsyncMongoClient.

    Migration up()/down() functions always receive a synchronous pymongo
    Database because the ops helpers are synchronous; only the state-tracking
    reads and writes go through the async client.
    """

    def __init__(self, client: "AsyncMongoClient", config: MigratorConfig) -> None:  # type: ignore[type-arg]
        # Migration functions get a sync Database — ops helpers are sync pymongo.
        self._db = MongoClient(config.uri)[config.database]
        # State tracking goes through the caller's async client.
        self._store = AsyncMongoStateStore(client[config.database][config.collection])
        self._config = config

    async def up(self, target: MigrationId | None = None) -> list[MigrationId]:
        """Apply pending migrations, optionally up to `target`."""
        discovered = loader.load(self._config)
        already_applied = await self._store.get_applied()
        plan = planner.plan_up(discovered, already_applied, target)
        newly_applied: list[MigrationId] = []
        for mig in plan.to_apply:
            began = time.monotonic()
            _run_up_migration(mig, self._db)
            elapsed_ms = int((time.monotonic() - began) * 1000)
            await self._store.record_applied(make_record(mig.id, mig.checksum, "up", elapsed_ms))
            newly_applied.append(mig.id)
        return newly_applied

    async def down(self, steps: int = 1) -> list[MigrationId]:
        """Roll back the most recently applied migrations."""
        discovered = loader.load(self._config)
        already_applied = await self._store.get_applied()
        plan = planner.plan_down(discovered, already_applied, steps)
        reverted: list[MigrationId] = []
        for mig in plan.to_apply:
            began = time.monotonic()
            _run_down_migration(mig, self._db)
            elapsed_ms = int((time.monotonic() - began) * 1000)
            await self._store.record_applied(make_record(mig.id, mig.checksum, "down", elapsed_ms))
            reverted.append(mig.id)
        return reverted

    async def status(self) -> list[MigrationStatus]:
        """Return the status of every known migration."""
        discovered = loader.load(self._config)
        already_applied = await self._store.get_applied()
        report: list[MigrationStatus] = []
        for mig in discovered:
            record = await self._store.get_record(mig.id)
            # A missing record is not a mismatch; only compare when one exists.
            checksum_ok = record is None or record["checksum"] == mig.checksum
            report.append(
                MigrationStatus(
                    id=mig.id,
                    applied=mig.id in already_applied,
                    applied_at=record["applied_at"] if record else None,
                    checksum_ok=checksum_ok,
                )
            )
        return report

    async def validate(self) -> list[ChecksumMismatchError]:
        """Check that applied migration files match their recorded checksums."""
        discovered = loader.load(self._config)
        already_applied = await self._store.get_applied()
        return [
            ChecksumMismatchError(mig.id, rec["checksum"], mig.checksum)
            for mig in discovered
            if mig.id in already_applied
            and (rec := await self._store.get_record(mig.id))
            and rec["checksum"] != mig.checksum
        ]
|
mongrator/state.py
ADDED
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
"""Migration state storage: Protocol definitions and sync/async implementations.
|
|
2
|
+
|
|
3
|
+
The tracking collection stores one document per applied migration::
|
|
4
|
+
|
|
5
|
+
{
|
|
6
|
+
"_id": "20260408_143022_add_users_email_index",
|
|
7
|
+
"applied_at": ISODate("2026-04-08T14:30:22Z"),
|
|
8
|
+
"checksum": "e3b0c44298fc1c149afb...",
|
|
9
|
+
"direction": "up",
|
|
10
|
+
"duration_ms": 42
|
|
11
|
+
}
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
from datetime import UTC, datetime
|
|
15
|
+
from typing import Literal, Protocol, runtime_checkable
|
|
16
|
+
|
|
17
|
+
from pymongo.asynchronous.collection import AsyncCollection
|
|
18
|
+
from pymongo.collection import Collection
|
|
19
|
+
|
|
20
|
+
from .migration import MigrationId, MigrationRecord
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@runtime_checkable
class StateStore(Protocol):
    """Synchronous migration state store."""

    # Return the ids of migrations whose recorded direction is "up" (i.e. applied).
    def get_applied(self) -> set[MigrationId]: ...

    # Upsert the record for one migration run (either direction).
    def record_applied(self, record: MigrationRecord) -> None: ...

    # Delete the record for `migration_id`, if one exists.
    def remove_record(self, migration_id: MigrationId) -> None: ...

    # Fetch the full record for `migration_id`, or None when absent.
    def get_record(self, migration_id: MigrationId) -> MigrationRecord | None: ...
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
@runtime_checkable
class AsyncStateStore(Protocol):
    """Asynchronous migration state store."""

    # Return the ids of migrations whose recorded direction is "up" (i.e. applied).
    async def get_applied(self) -> set[MigrationId]: ...

    # Upsert the record for one migration run (either direction).
    async def record_applied(self, record: MigrationRecord) -> None: ...

    # Delete the record for `migration_id`, if one exists.
    async def remove_record(self, migration_id: MigrationId) -> None: ...

    # Fetch the full record for `migration_id`, or None when absent.
    async def get_record(self, migration_id: MigrationId) -> MigrationRecord | None: ...
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
class SyncStateStore:
    """StateStore backed by a synchronous pymongo collection."""

    def __init__(self, collection: "Collection") -> None:  # type: ignore[type-arg]
        # Tracking collection; one document per recorded migration run.
        self._col = collection

    def get_applied(self) -> set[MigrationId]:
        """Ids of migrations whose recorded direction is "up"."""
        ids_only = self._col.find({"direction": "up"}, {"_id": 1})
        return {entry["_id"] for entry in ids_only}

    def record_applied(self, record: MigrationRecord) -> None:
        """Upsert *record*, keyed by its migration id."""
        selector = {"_id": record["_id"]}
        self._col.replace_one(selector, record, upsert=True)

    def remove_record(self, migration_id: MigrationId) -> None:
        """Delete the record for *migration_id*, if present."""
        self._col.delete_one({"_id": migration_id})

    def get_record(self, migration_id: MigrationId) -> MigrationRecord | None:
        """Fetch the record for *migration_id*, or None when absent."""
        return self._col.find_one({"_id": migration_id})  # type: ignore[return-value]
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
class AsyncMongoStateStore:
    """AsyncStateStore backed by a pymongo AsyncCollection."""

    def __init__(self, collection: "AsyncCollection") -> None:  # type: ignore[type-arg]
        # Tracking collection; one document per recorded migration run.
        self._col = collection

    async def get_applied(self) -> set[MigrationId]:
        """Ids of migrations whose recorded direction is "up"."""
        applied: set[MigrationId] = set()
        async for entry in self._col.find({"direction": "up"}, {"_id": 1}):
            applied.add(entry["_id"])
        return applied

    async def record_applied(self, record: MigrationRecord) -> None:
        """Upsert *record*, keyed by its migration id."""
        selector = {"_id": record["_id"]}
        await self._col.replace_one(selector, record, upsert=True)

    async def remove_record(self, migration_id: MigrationId) -> None:
        """Delete the record for *migration_id*, if present."""
        await self._col.delete_one({"_id": migration_id})

    async def get_record(self, migration_id: MigrationId) -> MigrationRecord | None:
        """Fetch the record for *migration_id*, or None when absent."""
        return await self._col.find_one({"_id": migration_id})  # type: ignore[return-value]
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def make_record(
    migration_id: MigrationId,
    checksum: str,
    direction: Literal["up", "down"],
    duration_ms: int,
) -> MigrationRecord:
    """Construct a MigrationRecord stamped with the current UTC time.

    The record's ``_id`` is the migration id, so upserting it replaces any
    earlier record for the same migration.
    """
    stamped_at = datetime.now(tz=UTC)
    return MigrationRecord(
        _id=migration_id,
        applied_at=stamped_at,
        checksum=checksum,
        direction=direction,
        duration_ms=duration_ms,
    )
|
|
@@ -0,0 +1,158 @@
|
|
|
1
|
+
Metadata-Version: 2.3
|
|
2
|
+
Name: mongrator
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Lightweight MongoDB schema migration tool
|
|
5
|
+
Author: Sasha Gerrand
|
|
6
|
+
Author-email: Sasha Gerrand <mongrator@sgerrand.dev>
|
|
7
|
+
Requires-Dist: pymongo>=4.10
|
|
8
|
+
Requires-Python: >=3.13
|
|
9
|
+
Description-Content-Type: text/markdown
|
|
10
|
+
|
|
11
|
+
# mongrator
|
|
12
|
+
|
|
13
|
+
[](https://github.com/sgerrand/pymongrator/actions/workflows/ci.yml)
|
|
14
|
+
|
|
15
|
+
Lightweight MongoDB schema migration tool with synchronous and asynchronous PyMongo support.
|
|
16
|
+
|
|
17
|
+
## Installation
|
|
18
|
+
|
|
19
|
+
```sh
|
|
20
|
+
pip install mongrator
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
## Quick start
|
|
24
|
+
|
|
25
|
+
```sh
|
|
26
|
+
# Create config and migrations directory
|
|
27
|
+
mongrator init
|
|
28
|
+
|
|
29
|
+
# Generate a new migration file
|
|
30
|
+
mongrator create add_users_email_index
|
|
31
|
+
|
|
32
|
+
# Check migration status
|
|
33
|
+
mongrator status
|
|
34
|
+
|
|
35
|
+
# Apply pending migrations
|
|
36
|
+
mongrator up
|
|
37
|
+
|
|
38
|
+
# Roll back the last migration
|
|
39
|
+
mongrator down
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
## Configuration
|
|
43
|
+
|
|
44
|
+
`mongrator init` creates a `mongrator.toml` stub:
|
|
45
|
+
|
|
46
|
+
```toml
|
|
47
|
+
uri = "mongodb://localhost:27017"
|
|
48
|
+
database = "mydb"
|
|
49
|
+
migrations_dir = "migrations"
|
|
50
|
+
collection = "mongrator_migrations" # optional
|
|
51
|
+
```
|
|
52
|
+
|
|
53
|
+
Alternatively, configure via environment variables:
|
|
54
|
+
|
|
55
|
+
| Variable | Description | Required |
|
|
56
|
+
|----------|-------------|----------|
|
|
57
|
+
| `MONGRATOR_URI` | MongoDB connection URI | yes |
|
|
58
|
+
| `MONGRATOR_DB` | Database name | yes |
|
|
59
|
+
| `MONGRATOR_MIGRATIONS_DIR` | Path to migrations directory | no (default: `migrations`) |
|
|
60
|
+
| `MONGRATOR_COLLECTION` | Tracking collection name | no (default: `mongrator_migrations`) |
|
|
61
|
+
|
|
62
|
+
## Writing migrations
|
|
63
|
+
|
|
64
|
+
Migration files are plain Python named `{timestamp}_{slug}.py` (e.g. `20260408_143022_add_users_email_index.py`). Each file must define an `up(db)` function. A `down(db)` function is optional but enables rollback.
|
|
65
|
+
|
|
66
|
+
### Using the ops helpers (recommended)
|
|
67
|
+
|
|
68
|
+
The `ops` helpers record their own inverses, so `down()` is generated automatically:
|
|
69
|
+
|
|
70
|
+
```python
|
|
71
|
+
from mongrator import ops
|
|
72
|
+
|
|
73
|
+
def up(db):
|
|
74
|
+
return [
|
|
75
|
+
ops.create_index("users", {"email": 1}, unique=True),
|
|
76
|
+
ops.rename_field("users", "username", "handle"),
|
|
77
|
+
ops.add_field("users", "verified", default_value=False),
|
|
78
|
+
]
|
|
79
|
+
```
|
|
80
|
+
|
|
81
|
+
### Using plain PyMongo
|
|
82
|
+
|
|
83
|
+
For complex logic, write directly against the `db` argument and define `down()` manually:
|
|
84
|
+
|
|
85
|
+
```python
|
|
86
|
+
def up(db):
|
|
87
|
+
db["orders"].update_many(
|
|
88
|
+
{"status": {"$exists": False}},
|
|
89
|
+
{"$set": {"status": "pending"}},
|
|
90
|
+
)
|
|
91
|
+
|
|
92
|
+
def down(db):
|
|
93
|
+
db["orders"].update_many({}, {"$unset": {"status": ""}})
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
### Available ops helpers
|
|
97
|
+
|
|
98
|
+
| Helper | Reversible | Description |
|
|
99
|
+
|--------|-----------|-------------|
|
|
100
|
+
| `ops.create_index(collection, keys, **kwargs)` | yes | Create an index |
|
|
101
|
+
| `ops.drop_index(collection, index_name)` | no | Drop an index by name |
|
|
102
|
+
| `ops.rename_field(collection, old, new, filter=None)` | yes | Rename a field across documents |
|
|
103
|
+
| `ops.add_field(collection, field, default_value, filter=None)` | yes | Add a field with a default value |
|
|
104
|
+
|
|
105
|
+
## CLI reference
|
|
106
|
+
|
|
107
|
+
```
|
|
108
|
+
mongrator init create migrations dir and mongrator.toml
|
|
109
|
+
mongrator create <name> generate a new migration file
|
|
110
|
+
mongrator status show applied/pending migrations
|
|
111
|
+
mongrator up [--target ID] apply pending migrations
|
|
112
|
+
mongrator up --async [--target ID] apply using async runner
|
|
113
|
+
mongrator down [--steps N] roll back N migrations (default: 1)
|
|
114
|
+
mongrator down --async [--steps N] roll back using async runner
|
|
115
|
+
mongrator validate verify checksums of applied migrations
|
|
116
|
+
mongrator --config PATH <command> use an alternate config file
|
|
117
|
+
```
|
|
118
|
+
|
|
119
|
+
## Async usage
|
|
120
|
+
|
|
121
|
+
Pass `--async` to `up` or `down` to use the async runner (backed by `pymongo.AsyncMongoClient`):
|
|
122
|
+
|
|
123
|
+
```sh
|
|
124
|
+
mongrator up --async
|
|
125
|
+
```
|
|
126
|
+
|
|
127
|
+
To use the runners programmatically:
|
|
128
|
+
|
|
129
|
+
```python
|
|
130
|
+
# Synchronous
|
|
131
|
+
from pathlib import Path
|
|
132
|
+
import pymongo
|
|
133
|
+
from mongrator.config import MigratorConfig
|
|
134
|
+
from mongrator.runner import SyncRunner
|
|
135
|
+
|
|
136
|
+
config = MigratorConfig(uri="mongodb://localhost:27017", database="mydb", migrations_dir=Path("migrations"))
|
|
137
|
+
runner = SyncRunner(pymongo.MongoClient(config.uri), config)
|
|
138
|
+
runner.up()
|
|
139
|
+
|
|
140
|
+
# Asynchronous
|
|
141
|
+
from pymongo import AsyncMongoClient
|
|
142
|
+
from mongrator.runner import AsyncRunner
|
|
143
|
+
|
|
144
|
+
runner = AsyncRunner(AsyncMongoClient(config.uri), config)
|
|
145
|
+
await runner.up()
|
|
146
|
+
```
|
|
147
|
+
|
|
148
|
+
## Migration tracking
|
|
149
|
+
|
|
150
|
+
Applied migrations are recorded in the `mongrator_migrations` collection (configurable) within the target database. Each document stores:
|
|
151
|
+
|
|
152
|
+
- `_id` — migration file stem
|
|
153
|
+
- `applied_at` — UTC timestamp
|
|
154
|
+
- `checksum` — SHA-256 of the migration file at time of application
|
|
155
|
+
- `direction` — `"up"` or `"down"`
|
|
156
|
+
- `duration_ms` — execution time in milliseconds
|
|
157
|
+
|
|
158
|
+
Running `mongrator validate` compares current file checksums against recorded values and reports any modifications.
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
mongrator/__init__.py,sha256=oKsCpoiQSSXx57DGs2VfYzeO1ukJQxdGnV-2hC3BI0M,42
|
|
2
|
+
mongrator/_templates/migration.py.tmpl,sha256=GMVZV8hOPEMkQiATScQgDcvswRpWJ3JJCQgSFB6PUiM,950
|
|
3
|
+
mongrator/cli.py,sha256=LhQNfR5hFPe1sJDIZz_6VxgugpEUXmZLlxaITJGw1UI,7637
|
|
4
|
+
mongrator/config.py,sha256=sna2uNl_5GjGqbGfybfL8UpPbVDugT0LYfGf89YqBfg,2412
|
|
5
|
+
mongrator/exceptions.py,sha256=5SzfkbThOIDBN3FaUg5mTzB7thaDmiK1SVkkGxDkDMI,2173
|
|
6
|
+
mongrator/loader.py,sha256=zKBDBiMNfwffgJMs2lSPijsUiT3nQJZlxbIv6858iZY,2478
|
|
7
|
+
mongrator/migration.py,sha256=W1axR4h8-RTwMfiwHejYdc8i-DJQqAE4ICizeXwBlX0,1385
|
|
8
|
+
mongrator/ops.py,sha256=a3PsKoRlayBUwgESxLF5aYHsL_ea5k14qyCH1rSTXj8,3922
|
|
9
|
+
mongrator/planner.py,sha256=t6xmIEWgGybpVqoeQ70S-Domsr0_ntyPruzc2YBI4pA,2521
|
|
10
|
+
mongrator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
11
|
+
mongrator/runner.py,sha256=_XCkks-KCxWIgxGVbtRVyLWIswjITSA3aqNGN9KPm00,9271
|
|
12
|
+
mongrator/state.py,sha256=qw7VBvNe8dhePm3k20g6jCaaZzy39iKht5S9m_T_ick,3408
|
|
13
|
+
mongrator-0.1.0.dist-info/WHEEL,sha256=lh7MMMfiuFQLQaR9J7pNBODdWf-aa5UOeuuDAol3xps,79
|
|
14
|
+
mongrator-0.1.0.dist-info/entry_points.txt,sha256=zu1IQipcRKmYmNXcxqY6_xSfWYBpjFlsXeBHa2yFsQo,46
|
|
15
|
+
mongrator-0.1.0.dist-info/METADATA,sha256=LX_NsgZ91syMTeN6e5vISEDJ-2fQtYRGCdYNMgMuwFk,4762
|
|
16
|
+
mongrator-0.1.0.dist-info/RECORD,,
|