crochet-migration 0.1.0__tar.gz → 0.1.1__tar.gz

This diff shows the changes between two publicly released versions of the package, as published to their public registry, and is provided for informational purposes only.
Files changed (38)
  1. crochet_migration-0.1.1/.github/workflows/publish.yml +34 -0
  2. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/PKG-INFO +2 -2
  3. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/README.md +1 -1
  4. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/pyproject.toml +1 -1
  5. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/migrations/engine.py +5 -3
  6. crochet_migration-0.1.1/src/crochet/migrations/template.py +304 -0
  7. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/tests/test_migrations.py +36 -0
  8. crochet_migration-0.1.0/src/crochet/migrations/template.py +0 -105
  9. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/.gitignore +0 -0
  10. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/LICENSE +0 -0
  11. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/__init__.py +0 -0
  12. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/cli.py +0 -0
  13. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/config.py +0 -0
  14. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/errors.py +0 -0
  15. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/ingest/__init__.py +0 -0
  16. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/ingest/batch.py +0 -0
  17. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/ir/__init__.py +0 -0
  18. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/ir/diff.py +0 -0
  19. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/ir/hash.py +0 -0
  20. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/ir/parser.py +0 -0
  21. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/ir/schema.py +0 -0
  22. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/ledger/__init__.py +0 -0
  23. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/ledger/sqlite.py +0 -0
  24. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/migrations/__init__.py +0 -0
  25. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/migrations/operations.py +0 -0
  26. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/scaffold/__init__.py +0 -0
  27. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/scaffold/node.py +0 -0
  28. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/scaffold/relationship.py +0 -0
  29. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/src/crochet/verify.py +0 -0
  30. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/tests/__init__.py +0 -0
  31. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/tests/conftest.py +0 -0
  32. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/tests/test_cli.py +0 -0
  33. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/tests/test_config.py +0 -0
  34. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/tests/test_ingest.py +0 -0
  35. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/tests/test_ir.py +0 -0
  36. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/tests/test_ledger.py +0 -0
  37. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/tests/test_scaffold.py +0 -0
  38. {crochet_migration-0.1.0 → crochet_migration-0.1.1}/tests/test_verify.py +0 -0
@@ -0,0 +1,34 @@
+ name: Publish to PyPI
+
+ on:
+   push:
+     tags:
+       - 'v*'
+
+ jobs:
+   build-n-publish:
+     name: Build and publish Python 🐍 distributions 📦 to PyPI
+     runs-on: ubuntu-latest
+     environment:
+       name: pypi
+       url: https://pypi.org/p/crochet-migration
+     permissions:
+       id-token: write  # IMPORTANT: mandatory for trusted publishing
+
+     steps:
+       - name: Checkout source
+         uses: actions/checkout@v4
+
+       - name: Set up Python
+         uses: actions/setup-python@v5
+         with:
+           python-version: "3.10"
+
+       - name: Install hatch
+         run: pip install hatch
+
+       - name: Build packages
+         run: hatch build
+
+       - name: Publish package distributions to PyPI
+         uses: pypa/gh-action-pypi-publish@release/v1
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: crochet-migration
- Version: 0.1.0
+ Version: 0.1.1
  Summary: Versioned schema & data migrations for neomodel Neo4j graphs
  Project-URL: Homepage, https://github.com/keshavd/crochet
  Project-URL: Repository, https://github.com/keshavd/crochet
@@ -52,7 +52,7 @@ graph.
  ## Installation

  ```bash
- pip install crochet
+ pip install crochet-migration
  ```

  For development:
@@ -20,7 +20,7 @@ graph.
  ## Installation

  ```bash
- pip install crochet
+ pip install crochet-migration
  ```

  For development:
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"

  [project]
  name = "crochet-migration"
- version = "0.1.0"
+ version = "0.1.1"
  description = "Versioned schema & data migrations for neomodel Neo4j graphs"
  readme = "README.md"
  requires-python = ">=3.10"
@@ -153,6 +153,7 @@ class MigrationEngine:
          # Compute schema hash and diff
          schema_hash = ""
          diff_summary = ""
+         diff_obj = None
          if current_snapshot is not None:
              current_snapshot = hash_snapshot(current_snapshot)
              schema_hash = current_snapshot.schema_hash
@@ -166,9 +167,9 @@ class MigrationEngine:
              prev_json = self._ledger.get_snapshot(prev_hash)
              if prev_json:
                  prev_snapshot = SchemaSnapshot.from_json(prev_json)
-                 diff = diff_snapshots(prev_snapshot, current_snapshot)
-                 if diff.has_changes:
-                     diff_summary = diff.summary()
+                 diff_obj = diff_snapshots(prev_snapshot, current_snapshot)
+                 if diff_obj.has_changes:
+                     diff_summary = diff_obj.summary()

          content = render_migration(
              revision_id=revision_id,
@@ -177,6 +178,7 @@ class MigrationEngine:
              schema_hash=schema_hash,
              rollback_safe=rollback_safe,
              diff_summary=diff_summary,
+             diff=diff_obj,
          )

          return write_migration_file(
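For orientation, here is a minimal sketch of the updated `render_migration` call that this engine change feeds into (the revision id, description, and hash below are invented for illustration): with `diff=None` the scaffold still falls back to `pass` bodies, while passing the computed `SchemaDiff` lets the template emit concrete `ctx.*` operations, as the new `template.py` below shows.

```python
from crochet.migrations.template import render_migration

# Without a SchemaDiff, the generated upgrade()/downgrade() bodies stay "pass".
stub = render_migration(
    revision_id="0002_add_email",   # hypothetical revision id
    parent_id="0001_init",
    description="Add email to Person",
    schema_hash="abc123",
    diff_summary="",
    diff=None,
)
assert "def upgrade" in stub
assert "pass" in stub
```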
@@ -0,0 +1,304 @@
+ """Migration file scaffolding and template generation."""
+
+ from __future__ import annotations
+
+ import re
+ from datetime import datetime, timezone
+ from pathlib import Path
+ from typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+     from crochet.ir.diff import SchemaDiff
+
+ _MIGRATION_TEMPLATE = '''\
+ """
+ {description}
+
+ Revision: {revision_id}
+ Parent: {parent_id}
+ Created: {created_at}
+ Schema: {schema_hash}
+ """
+
+ from crochet.migrations.operations import MigrationContext
+
+ # -- Migration metadata --------------------------------------------------
+
+ revision_id = "{revision_id}"
+ parent_id = {parent_id_repr}
+ schema_hash = "{schema_hash}"
+ rollback_safe = {rollback_safe}
+
+
+ def upgrade(ctx: MigrationContext) -> None:
+     """Apply this migration."""
+ {upgrade_body}
+
+
+ def downgrade(ctx: MigrationContext) -> None:
+     """Revert this migration."""
+ {downgrade_body}
+ '''
+
+ _DIFF_COMMENT_HEADER = "    # Detected schema changes:\n"
+
+
+ def slugify(text: str) -> str:
+     """Convert a description into a filesystem-safe slug."""
+     text = text.lower().strip()
+     text = re.sub(r"[^a-z0-9]+", "_", text)
+     return text.strip("_")[:60]
+
+
+ def generate_revision_id(seq: int, description: str) -> str:
+     """Generate a revision id like ``0001_initial``."""
+     slug = slugify(description)
+     return f"{seq:04d}_{slug}"
+
+
+ def generate_operations_from_diff(diff: "SchemaDiff") -> tuple[str, str]:
+     """Generate upgrade and downgrade operation code from a SchemaDiff.
+
+     Returns a tuple of (upgrade_code, downgrade_code) as strings.
+     """
+     upgrade_lines: list[str] = []
+     downgrade_lines: list[str] = []
+
+     # Process node changes
+     for nc in diff.node_changes:
+         if nc.kind == "added":
+             # Node added - no automatic operations (user should handle data)
+             upgrade_lines.append(f"# TODO: Handle new node '{nc.new.label}' (kgid={nc.kgid})")
+             downgrade_lines.append(f"# TODO: Clean up node '{nc.new.label}' (kgid={nc.kgid})")
+         elif nc.kind == "removed":
+             # Node removed - no automatic operations (user should handle data)
+             upgrade_lines.append(f"# TODO: Handle removed node '{nc.old.label}' (kgid={nc.kgid})")
+             downgrade_lines.append(f"# TODO: Restore node '{nc.old.label}' (kgid={nc.kgid})")
+         elif nc.kind == "modified":
+             label = nc.new.label if nc.new else nc.old.label
+
+             # Handle label rename
+             if nc.label_renamed and nc.old and nc.new:
+                 upgrade_lines.append(
+                     f'ctx.rename_label("{nc.old.label}", "{nc.new.label}")'
+                 )
+                 downgrade_lines.append(
+                     f'ctx.rename_label("{nc.new.label}", "{nc.old.label}")'
+                 )
+
+             # Handle property changes
+             for pc in nc.property_changes:
+                 if pc.kind == "added":
+                     # Property added
+                     upgrade_lines.append(
+                         f'ctx.add_node_property("{label}", "{pc.property_name}")'
+                     )
+                     downgrade_lines.append(
+                         f'ctx.remove_node_property("{label}", "{pc.property_name}")'
+                     )
+
+                     # Handle constraints/indexes for new property
+                     if pc.new:
+                         if pc.new.unique_index:
+                             upgrade_lines.append(
+                                 f'ctx.add_unique_constraint("{label}", "{pc.property_name}")'
+                             )
+                             downgrade_lines.append(
+                                 f'ctx.drop_unique_constraint("{label}", "{pc.property_name}")'
+                             )
+                         elif pc.new.index:
+                             upgrade_lines.append(
+                                 f'ctx.add_index("{label}", "{pc.property_name}")'
+                             )
+                             downgrade_lines.append(
+                                 f'ctx.drop_index("{label}", "{pc.property_name}")'
+                             )
+                         if pc.new.required:
+                             upgrade_lines.append(
+                                 f'ctx.add_node_property_existence_constraint("{label}", "{pc.property_name}")'
+                             )
+                             downgrade_lines.append(
+                                 f'ctx.drop_node_property_existence_constraint("{label}", "{pc.property_name}")'
+                             )
+
+                 elif pc.kind == "removed":
+                     # Property removed
+                     upgrade_lines.append(
+                         f'ctx.remove_node_property("{label}", "{pc.property_name}")'
+                     )
+                     downgrade_lines.append(
+                         f'ctx.add_node_property("{label}", "{pc.property_name}")'
+                     )
+
+                     # Handle constraints/indexes for removed property
+                     if pc.old:
+                         if pc.old.unique_index:
+                             upgrade_lines.append(
+                                 f'ctx.drop_unique_constraint("{label}", "{pc.property_name}")'
+                             )
+                             downgrade_lines.append(
+                                 f'ctx.add_unique_constraint("{label}", "{pc.property_name}")'
+                             )
+                         elif pc.old.index:
+                             upgrade_lines.append(
+                                 f'ctx.drop_index("{label}", "{pc.property_name}")'
+                             )
+                             downgrade_lines.append(
+                                 f'ctx.add_index("{label}", "{pc.property_name}")'
+                             )
+                         if pc.old.required:
+                             upgrade_lines.append(
+                                 f'ctx.drop_node_property_existence_constraint("{label}", "{pc.property_name}")'
+                             )
+                             downgrade_lines.append(
+                                 f'ctx.add_node_property_existence_constraint("{label}", "{pc.property_name}")'
+                             )
+
+                 elif pc.kind == "modified" and pc.old and pc.new:
+                     # Property modified - handle constraint/index changes
+                     if pc.old.unique_index != pc.new.unique_index:
+                         if pc.new.unique_index:
+                             upgrade_lines.append(
+                                 f'ctx.add_unique_constraint("{label}", "{pc.property_name}")'
+                             )
+                             downgrade_lines.append(
+                                 f'ctx.drop_unique_constraint("{label}", "{pc.property_name}")'
+                             )
+                         else:
+                             upgrade_lines.append(
+                                 f'ctx.drop_unique_constraint("{label}", "{pc.property_name}")'
+                             )
+                             downgrade_lines.append(
+                                 f'ctx.add_unique_constraint("{label}", "{pc.property_name}")'
+                             )
+
+                     if pc.old.index != pc.new.index:
+                         if pc.new.index:
+                             upgrade_lines.append(
+                                 f'ctx.add_index("{label}", "{pc.property_name}")'
+                             )
+                             downgrade_lines.append(
+                                 f'ctx.drop_index("{label}", "{pc.property_name}")'
+                             )
+                         else:
+                             upgrade_lines.append(
+                                 f'ctx.drop_index("{label}", "{pc.property_name}")'
+                             )
+                             downgrade_lines.append(
+                                 f'ctx.add_index("{label}", "{pc.property_name}")'
+                             )
+
+                     if pc.old.required != pc.new.required:
+                         if pc.new.required:
+                             upgrade_lines.append(
+                                 f'ctx.add_node_property_existence_constraint("{label}", "{pc.property_name}")'
+                             )
+                             downgrade_lines.append(
+                                 f'ctx.drop_node_property_existence_constraint("{label}", "{pc.property_name}")'
+                             )
+                         else:
+                             upgrade_lines.append(
+                                 f'ctx.drop_node_property_existence_constraint("{label}", "{pc.property_name}")'
+                             )
+                             downgrade_lines.append(
+                                 f'ctx.add_node_property_existence_constraint("{label}", "{pc.property_name}")'
+                             )
+
+     # Process relationship changes
+     for rc in diff.relationship_changes:
+         if rc.kind == "added":
+             upgrade_lines.append(f"# TODO: Handle new relationship '{rc.new.rel_type}' (kgid={rc.kgid})")
+             downgrade_lines.append(f"# TODO: Clean up relationship '{rc.new.rel_type}' (kgid={rc.kgid})")
+         elif rc.kind == "removed":
+             upgrade_lines.append(f"# TODO: Handle removed relationship '{rc.old.rel_type}' (kgid={rc.kgid})")
+             downgrade_lines.append(f"# TODO: Restore relationship '{rc.old.rel_type}' (kgid={rc.kgid})")
+         elif rc.kind == "modified":
+             # Handle relationship property changes similarly to nodes
+             rel_type = rc.new.rel_type if rc.new else rc.old.rel_type
+             for pc in rc.property_changes:
+                 if pc.kind == "added":
+                     upgrade_lines.append(
+                         f'# TODO: Add property "{pc.property_name}" to relationship {rel_type}'
+                     )
+                 elif pc.kind == "removed":
+                     upgrade_lines.append(
+                         f'# TODO: Remove property "{pc.property_name}" from relationship {rel_type}'
+                     )
+
+     # Format the code
+     if upgrade_lines:
+         upgrade_code = "    " + "\n    ".join(upgrade_lines)
+     else:
+         upgrade_code = "    pass"
+
+     if downgrade_lines:
+         downgrade_code = "    " + "\n    ".join(downgrade_lines)
+     else:
+         downgrade_code = "    pass"
+
+     return upgrade_code, downgrade_code
+
+
+ def render_migration(
+     revision_id: str,
+     parent_id: str | None,
+     description: str,
+     schema_hash: str,
+     rollback_safe: bool = True,
+     diff_summary: str = "",
+     diff: "SchemaDiff | None" = None,
+ ) -> str:
+     """Render a migration file from template."""
+     now = datetime.now(timezone.utc).isoformat()
+
+     # Generate operations from diff if available
+     if diff is not None and diff.has_changes:
+         upgrade_lines, downgrade_lines = generate_operations_from_diff(diff)
+         # Prepend comment header showing what was detected
+         if diff_summary:
+             comment_header = _DIFF_COMMENT_HEADER
+             for line in diff_summary.splitlines():
+                 comment_header += f"    # {line}\n"
+             upgrade_lines = comment_header + "\n" + upgrade_lines
+             downgrade_lines = comment_header + "\n" + downgrade_lines
+     elif diff_summary:
+         # Fallback to old behavior if only summary is provided
+         upgrade_lines = _DIFF_COMMENT_HEADER
+         for line in diff_summary.splitlines():
+             upgrade_lines += f"    # {line}\n"
+         upgrade_lines += "    pass"
+         downgrade_lines = upgrade_lines
+     else:
+         upgrade_lines = "    pass"
+         downgrade_lines = "    pass"
+
+     return _MIGRATION_TEMPLATE.format(
+         description=description,
+         revision_id=revision_id,
+         parent_id=parent_id or "None",
+         parent_id_repr=repr(parent_id),
+         created_at=now,
+         schema_hash=schema_hash,
+         rollback_safe=rollback_safe,
+         upgrade_body=upgrade_lines,
+         downgrade_body=downgrade_lines,
+     )


+ def write_migration_file(
+     migrations_dir: Path,
+     revision_id: str,
+     content: str,
+ ) -> Path:
+     """Write a migration file to disk and return the path."""
+     migrations_dir.mkdir(parents=True, exist_ok=True)
+
+     # Ensure __init__.py exists
+     init_path = migrations_dir / "__init__.py"
+     if not init_path.exists():
+         init_path.write_text("")
+
+     filename = f"{revision_id}.py"
+     file_path = migrations_dir / filename
+     file_path.write_text(content)
+     return file_path
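A rough usage sketch of the three public helpers defined above; the temporary directory, sequence number, and description are made up for illustration:

```python
from pathlib import Path
from tempfile import TemporaryDirectory

from crochet.migrations.template import (
    generate_revision_id,
    render_migration,
    write_migration_file,
)

with TemporaryDirectory() as tmp:
    # slugify() lowercases and strips the description -> "0001_initial_schema"
    rev = generate_revision_id(1, "Initial schema!")
    content = render_migration(
        revision_id=rev,
        parent_id=None,
        description="Initial schema!",
        schema_hash="",  # the engine normally supplies the hashed snapshot value
    )
    path = write_migration_file(Path(tmp) / "migrations", rev, content)
    print(path)  # .../migrations/0001_initial_schema.py
```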
@@ -60,6 +60,42 @@ class TestTemplate:
          assert "Detected schema changes" in content
          assert "Person" in content

+     def test_render_with_operations(self):
+         from crochet.ir.diff import SchemaDiff, NodeChange, PropertyChange
+         from crochet.ir.schema import NodeIR, PropertyIR
+
+         # Mock a diff where a property is added and another is removed
+         prop_added = PropertyIR(name="email", property_type="StringProperty", unique_index=True)
+         prop_removed = PropertyIR(name="age", property_type="IntegerProperty")
+
+         pc_added = PropertyChange(kind="added", property_name="email", new=prop_added)
+         pc_removed = PropertyChange(kind="removed", property_name="age", old=prop_removed)
+
+         node_old = NodeIR(kgid="p1", label="Person", class_name="Person", module_path="m", properties=(prop_removed,))
+         node_new = NodeIR(kgid="p1", label="Person", class_name="Person", module_path="m", properties=(prop_added,))
+
+         nc = NodeChange(kind="modified", kgid="p1", old=node_old, new=node_new, property_changes=[pc_added, pc_removed])
+         diff = SchemaDiff(node_changes=[nc])
+
+         content = render_migration(
+             revision_id="0002_update_person",
+             parent_id="0001_init",
+             description="Update Person",
+             schema_hash="def456",
+             diff_summary=diff.summary(),
+             diff=diff
+         )
+
+         # Verify upgrade contains operations
+         assert 'ctx.add_node_property("Person", "email")' in content
+         assert 'ctx.add_unique_constraint("Person", "email")' in content
+         assert 'ctx.remove_node_property("Person", "age")' in content
+
+         # Verify downgrade contains inverse operations
+         assert 'ctx.remove_node_property("Person", "email")' in content
+         assert 'ctx.drop_unique_constraint("Person", "email")' in content
+         assert 'ctx.add_node_property("Person", "age")' in content
+
      def test_write_migration_file(self, tmp_path):
          mig_dir = tmp_path / "migrations"
          path = write_migration_file(mig_dir, "0001_init", "# test\n")
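Given the diff constructed in that test, the rendered migration bodies should come out roughly as below. This is an illustrative reconstruction, not copied from the package; the `# Detected schema changes:` comment lines come from `SchemaDiff.summary()`, whose exact wording is not shown in this diff.

```python
# Reconstructed excerpt of the migration file the test above generates
# (metadata header and exact summary comment lines elided).
from crochet.migrations.operations import MigrationContext


def upgrade(ctx: MigrationContext) -> None:
    """Apply this migration."""
    # Detected schema changes:
    # ... (output of diff.summary(), elided)
    ctx.add_node_property("Person", "email")
    ctx.add_unique_constraint("Person", "email")
    ctx.remove_node_property("Person", "age")


def downgrade(ctx: MigrationContext) -> None:
    """Revert this migration."""
    # Detected schema changes:
    # ... (output of diff.summary(), elided)
    ctx.remove_node_property("Person", "email")
    ctx.drop_unique_constraint("Person", "email")
    ctx.add_node_property("Person", "age")
```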
@@ -1,105 +0,0 @@
- """Migration file scaffolding and template generation."""
-
- from __future__ import annotations
-
- import re
- from datetime import datetime, timezone
- from pathlib import Path
-
- _MIGRATION_TEMPLATE = '''\
- """
- {description}
-
- Revision: {revision_id}
- Parent: {parent_id}
- Created: {created_at}
- Schema: {schema_hash}
- """
-
- from crochet.migrations.operations import MigrationContext
-
- # -- Migration metadata --------------------------------------------------
-
- revision_id = "{revision_id}"
- parent_id = {parent_id_repr}
- schema_hash = "{schema_hash}"
- rollback_safe = {rollback_safe}
-
-
- def upgrade(ctx: MigrationContext) -> None:
-     """Apply this migration."""
- {upgrade_body}
-
-
- def downgrade(ctx: MigrationContext) -> None:
-     """Revert this migration."""
- {downgrade_body}
- '''
-
- _DIFF_COMMENT_HEADER = "    # Detected schema changes:\n"
-
-
- def slugify(text: str) -> str:
-     """Convert a description into a filesystem-safe slug."""
-     text = text.lower().strip()
-     text = re.sub(r"[^a-z0-9]+", "_", text)
-     return text.strip("_")[:60]
-
-
- def generate_revision_id(seq: int, description: str) -> str:
-     """Generate a revision id like ``0001_initial``."""
-     slug = slugify(description)
-     return f"{seq:04d}_{slug}"
-
-
- def render_migration(
-     revision_id: str,
-     parent_id: str | None,
-     description: str,
-     schema_hash: str,
-     rollback_safe: bool = True,
-     diff_summary: str = "",
- ) -> str:
-     """Render a migration file from template."""
-     now = datetime.now(timezone.utc).isoformat()
-
-     if diff_summary:
-         upgrade_lines = _DIFF_COMMENT_HEADER
-         for line in diff_summary.splitlines():
-             upgrade_lines += f"    # {line}\n"
-         upgrade_lines += "    pass"
-         downgrade_lines = upgrade_lines
-     else:
-         upgrade_lines = "    pass"
-         downgrade_lines = "    pass"
-
-     return _MIGRATION_TEMPLATE.format(
-         description=description,
-         revision_id=revision_id,
-         parent_id=parent_id or "None",
-         parent_id_repr=repr(parent_id),
-         created_at=now,
-         schema_hash=schema_hash,
-         rollback_safe=rollback_safe,
-         upgrade_body=upgrade_lines,
-         downgrade_body=downgrade_lines,
-     )
-
-
- def write_migration_file(
-     migrations_dir: Path,
-     revision_id: str,
-     content: str,
- ) -> Path:
-     """Write a migration file to disk and return the path."""
-     migrations_dir.mkdir(parents=True, exist_ok=True)
-
-     # Ensure __init__.py exists
-     init_path = migrations_dir / "__init__.py"
-     if not init_path.exists():
-         init_path.write_text("")
-
-     filename = f"{revision_id}.py"
-     file_path = migrations_dir / filename
-     file_path.write_text(content)
-     return file_path