grai-build 0.3.0__py3-none-any.whl → 0.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- grai/__init__.py +1 -2
- grai/cli/main.py +287 -0
- grai/core/migrations/__init__.py +30 -0
- grai/core/migrations/differ.py +285 -0
- grai/core/migrations/executor.py +346 -0
- grai/core/migrations/generator.py +431 -0
- grai/core/migrations/models.py +160 -0
- grai/core/models.py +20 -0
- {grai_build-0.3.0.dist-info → grai_build-0.4.1.dist-info}/METADATA +6 -6
- {grai_build-0.3.0.dist-info → grai_build-0.4.1.dist-info}/RECORD +14 -9
- {grai_build-0.3.0.dist-info → grai_build-0.4.1.dist-info}/WHEEL +1 -1
- {grai_build-0.3.0.dist-info → grai_build-0.4.1.dist-info}/licenses/LICENSE +1 -1
- {grai_build-0.3.0.dist-info → grai_build-0.4.1.dist-info}/entry_points.txt +0 -0
- {grai_build-0.3.0.dist-info → grai_build-0.4.1.dist-info}/top_level.txt +0 -0

grai/core/migrations/generator.py
@@ -0,0 +1,431 @@
"""
Migration generator for creating migration files from schema changes.

This module generates migration files by comparing the current schema
with the last known state and creating appropriate up/down Cypher scripts.
"""

import hashlib
from datetime import datetime
from pathlib import Path
from typing import List, Optional

import yaml

from grai.core.migrations.differ import diff_schemas
from grai.core.migrations.models import ChangeType, Migration, SchemaChanges
from grai.core.models import Entity, Relation


class MigrationGenerator:
    """
    Generates migration files from schema changes.

    This class compares the current schema (entities and relations) with
    the last known state and creates a migration file with up/down scripts.
    """

    def __init__(self, project_root: Path):
        """
        Initialize the migration generator.

        Args:
            project_root: Path to the project root directory.
        """
        self.project_root = project_root
        self.migrations_dir = project_root / "migrations"
        self.migrations_dir.mkdir(exist_ok=True)

    def generate(
        self,
        current_entities: List[Entity],
        current_relations: List[Relation],
        description: Optional[str] = None,
    ) -> Migration:
        """
        Generate a new migration from current schema state.

        Args:
            current_entities: Current entity definitions.
            current_relations: Current relation definitions.
            description: Optional description for the migration.

        Returns:
            Migration object representing the changes.
        """
        # Get the last known schema state
        previous_entities, previous_relations = self._get_last_schema_state()

        # Compute differences
        changes = diff_schemas(
            old_entities=previous_entities,
            old_relations=previous_relations,
            new_entities=current_entities,
            new_relations=current_relations,
        )

        # Generate migration version (timestamp)
        version = self._generate_version()

        # Generate description if not provided
        if not description:
            description = changes.summary()

        # Generate Cypher scripts
        up_cypher = self._generate_up_cypher(changes)
        down_cypher = self._generate_down_cypher(changes)

        # Create migration object
        migration = Migration(
            version=version,
            description=description,
            changes=changes,
            up_cypher=up_cypher,
            down_cypher=down_cypher,
        )

        # Calculate checksum
        migration.checksum = self._calculate_checksum(migration)

        return migration

    def save_migration(self, migration: Migration) -> Path:
        """
        Save migration to a YAML file.

        Args:
            migration: Migration to save.

        Returns:
            Path to the saved migration file.
        """
        filename = f"{migration.version}_{self._slugify(migration.description)}.yml"
        filepath = self.migrations_dir / filename

        # Convert to dict for YAML serialization
        migration_dict = {
            "version": migration.version,
            "description": migration.description,
            "author": migration.author,
            "timestamp": migration.timestamp.isoformat(),
            "checksum": migration.checksum,
            "changes": {
                "entities": [
                    {
                        "name": e.name,
                        "change_type": e.change_type.value,
                        "properties_added": e.properties_added,
                        "properties_modified": [
                            {
                                "name": p.name,
                                "old_type": p.old_type,
                                "new_type": p.new_type,
                                "old_required": p.old_required,
                                "new_required": p.new_required,
                                "change_type": p.change_type.value,
                            }
                            for p in e.properties_modified
                        ],
                        "properties_removed": e.properties_removed,
                        "keys_changed": e.keys_changed,
                        "old_keys": e.old_keys,
                        "new_keys": e.new_keys,
                    }
                    for e in migration.changes.entities
                ],
                "relations": [
                    {
                        "name": r.name,
                        "change_type": r.change_type.value,
                        "from_entity_changed": r.from_entity_changed,
                        "to_entity_changed": r.to_entity_changed,
                        "old_from": r.old_from,
                        "new_from": r.new_from,
                        "old_to": r.old_to,
                        "new_to": r.new_to,
                        "properties_added": r.properties_added,
                        "properties_modified": [
                            {
                                "name": p.name,
                                "old_type": p.old_type,
                                "new_type": p.new_type,
                                "old_required": p.old_required,
                                "new_required": p.new_required,
                                "change_type": p.change_type.value,
                            }
                            for p in r.properties_modified
                        ],
                        "properties_removed": r.properties_removed,
                    }
                    for r in migration.changes.relations
                ],
            },
            "up": migration.up_cypher,
            "down": migration.down_cypher,
        }

        with open(filepath, "w") as f:
            yaml.safe_dump(migration_dict, f, default_flow_style=False, sort_keys=False)

        return filepath

    def _get_last_schema_state(self) -> tuple[List[Entity], List[Relation]]:
        """
        Get the schema state from the last migration.

        Returns:
            Tuple of (entities, relations) from last migration, or empty lists if none.
        """
        # List all migration files
        migration_files = sorted(self.migrations_dir.glob("*.yml"))

        if not migration_files:
            # No previous migrations, return empty state
            return [], []

        # Load the last migration
        last_migration_file = migration_files[-1]
        with open(last_migration_file) as f:
            _ = yaml.safe_load(f)

        # Reconstruct entities and relations from the migration
        # For now, return empty lists - we'll implement state reconstruction later
        # TODO: Implement proper state reconstruction from migration history
        return [], []

    def _generate_version(self) -> str:
        """
        Generate a migration version string (timestamp-based).

        Returns:
            Version string in the format YYYYMMDD_HHMMSS.
        """
        now = datetime.now()
        return now.strftime("%Y%m%d_%H%M%S")

    def _calculate_checksum(self, migration: Migration) -> str:
        """
        Calculate checksum for migration integrity.

        Args:
            migration: Migration to calculate checksum for.

        Returns:
            SHA256 checksum hex string.
        """
        # Create a deterministic string representation
        content = f"{migration.version}|{migration.description}|{len(migration.up_cypher)}"
        return hashlib.sha256(content.encode()).hexdigest()

    def _slugify(self, text: str, max_length: int = 50) -> str:
        """
        Convert text to a URL-friendly slug.

        Args:
            text: Text to slugify.
            max_length: Maximum length of slug.

        Returns:
            Slugified text.
        """
        # Replace spaces and special chars with underscores
        slug = "".join(c if c.isalnum() or c == "_" else "_" for c in text.lower())
        # Remove consecutive underscores
        slug = "_".join(filter(None, slug.split("_")))
        # Truncate to max length
        return slug[:max_length]

    def _generate_up_cypher(self, changes: SchemaChanges) -> List[str]:
        """
        Generate Cypher statements to apply the migration (up).

        Args:
            changes: Schema changes to apply.

        Returns:
            List of Cypher statements.
        """
        cypher_statements = []

        # Process entity changes
        for entity_change in changes.entities:
            if entity_change.change_type == ChangeType.ADDED:
                cypher_statements.extend(self._generate_add_entity_cypher(entity_change))
            elif entity_change.change_type == ChangeType.REMOVED:
                cypher_statements.extend(self._generate_remove_entity_cypher(entity_change))
            elif entity_change.change_type == ChangeType.MODIFIED:
                cypher_statements.extend(self._generate_modify_entity_cypher(entity_change))

        # Process relation changes
        for relation_change in changes.relations:
            if relation_change.change_type == ChangeType.ADDED:
                cypher_statements.extend(self._generate_add_relation_cypher(relation_change))
            elif relation_change.change_type == ChangeType.REMOVED:
                cypher_statements.extend(self._generate_remove_relation_cypher(relation_change))
            elif relation_change.change_type == ChangeType.MODIFIED:
                cypher_statements.extend(self._generate_modify_relation_cypher(relation_change))

        return cypher_statements

    def _generate_down_cypher(self, changes: SchemaChanges) -> List[str]:
        """
        Generate Cypher statements to rollback the migration (down).

        Args:
            changes: Schema changes to rollback.

        Returns:
            List of Cypher statements.
        """
        cypher_statements = []

        # Reverse the changes - added becomes removed, removed becomes added
        for entity_change in changes.entities:
            if entity_change.change_type == ChangeType.ADDED:
                cypher_statements.extend(self._generate_remove_entity_cypher(entity_change))
            elif entity_change.change_type == ChangeType.REMOVED:
                cypher_statements.extend(self._generate_add_entity_cypher(entity_change))
            elif entity_change.change_type == ChangeType.MODIFIED:
                # For modifications, reverse the property changes
                cypher_statements.extend(self._generate_reverse_modify_entity_cypher(entity_change))

        for relation_change in changes.relations:
            if relation_change.change_type == ChangeType.ADDED:
                cypher_statements.extend(self._generate_remove_relation_cypher(relation_change))
            elif relation_change.change_type == ChangeType.REMOVED:
                cypher_statements.extend(self._generate_add_relation_cypher(relation_change))
            elif relation_change.change_type == ChangeType.MODIFIED:
                cypher_statements.extend(
                    self._generate_reverse_modify_relation_cypher(relation_change)
                )

        return cypher_statements

    def _generate_add_entity_cypher(self, entity_change) -> List[str]:
        """Generate Cypher for adding an entity (creating constraint)."""
        statements = []
        entity_name = entity_change.name

        # Add constraint for keys
        if entity_change.new_keys:
            key_props = ", ".join(f"n.{key}" for key in entity_change.new_keys)
            statements.append(
                f"CREATE CONSTRAINT {entity_name}_unique IF NOT EXISTS "
                f"FOR (n:{entity_name}) REQUIRE ({key_props}) IS UNIQUE"
            )

        return statements

    def _generate_remove_entity_cypher(self, entity_change) -> List[str]:
        """Generate Cypher for removing an entity."""
        statements = []
        entity_name = entity_change.name

        # Remove all nodes of this type
        statements.append(f"MATCH (n:{entity_name}) DETACH DELETE n")

        # Drop constraint
        statements.append(f"DROP CONSTRAINT {entity_name}_unique IF EXISTS")

        return statements

    def _generate_modify_entity_cypher(self, entity_change) -> List[str]:
        """Generate Cypher for modifying an entity."""
        statements = []
        entity_name = entity_change.name

        # Add new properties (set to null initially)
        for prop in entity_change.properties_added:
            prop_name = prop["name"]
            statements.append(f"MATCH (n:{entity_name}) SET n.{prop_name} = null")

        # Remove properties
        for prop_name in entity_change.properties_removed:
            statements.append(f"MATCH (n:{entity_name}) REMOVE n.{prop_name}")

        # Handle key changes
        if entity_change.keys_changed:
            # Drop old constraint
            statements.append(f"DROP CONSTRAINT {entity_name}_unique IF EXISTS")
            # Create new constraint
            if entity_change.new_keys:
                key_props = ", ".join(f"n.{key}" for key in entity_change.new_keys)
                statements.append(
                    f"CREATE CONSTRAINT {entity_name}_unique IF NOT EXISTS "
                    f"FOR (n:{entity_name}) REQUIRE ({key_props}) IS UNIQUE"
                )

        return statements

    def _generate_reverse_modify_entity_cypher(self, entity_change) -> List[str]:
        """Generate Cypher to reverse entity modifications."""
        statements = []
        entity_name = entity_change.name

        # Reverse: remove added properties
        for prop in entity_change.properties_added:
            prop_name = prop["name"]
            statements.append(f"MATCH (n:{entity_name}) REMOVE n.{prop_name}")

        # Reverse: add back removed properties (set to null)
        for prop_name in entity_change.properties_removed:
            statements.append(f"MATCH (n:{entity_name}) SET n.{prop_name} = null")

        # Reverse key changes
        if entity_change.keys_changed:
            statements.append(f"DROP CONSTRAINT {entity_name}_unique IF EXISTS")
            if entity_change.old_keys:
                key_props = ", ".join(f"n.{key}" for key in entity_change.old_keys)
                statements.append(
                    f"CREATE CONSTRAINT {entity_name}_unique IF NOT EXISTS "
                    f"FOR (n:{entity_name}) REQUIRE ({key_props}) IS UNIQUE"
                )

        return statements

    def _generate_add_relation_cypher(self, relation_change) -> List[str]:
        """Generate Cypher for adding a relation."""
        # For now, just return a comment - actual relation creation happens via data loading
        return [f"// Relation {relation_change.name} added - schema only"]

    def _generate_remove_relation_cypher(self, relation_change) -> List[str]:
        """Generate Cypher for removing a relation."""
        statements = []
        relation_name = relation_change.name

        # Delete all relationships of this type
        statements.append(f"MATCH ()-[r:{relation_name}]->() DELETE r")

        return statements

    def _generate_modify_relation_cypher(self, relation_change) -> List[str]:
        """Generate Cypher for modifying a relation."""
        statements = []
        relation_name = relation_change.name

        # Add new properties to existing relationships
        for prop in relation_change.properties_added:
            prop_name = prop["name"]
            statements.append(f"MATCH ()-[r:{relation_name}]->() SET r.{prop_name} = null")

        # Remove properties from relationships
        for prop_name in relation_change.properties_removed:
            statements.append(f"MATCH ()-[r:{relation_name}]->() REMOVE r.{prop_name}")

        return statements

    def _generate_reverse_modify_relation_cypher(self, relation_change) -> List[str]:
        """Generate Cypher to reverse relation modifications."""
        statements = []
        relation_name = relation_change.name

        # Reverse: remove added properties
        for prop in relation_change.properties_added:
            prop_name = prop["name"]
            statements.append(f"MATCH ()-[r:{relation_name}]->() REMOVE r.{prop_name}")

        # Reverse: add back removed properties
        for prop_name in relation_change.properties_removed:
            statements.append(f"MATCH ()-[r:{relation_name}]->() SET r.{prop_name} = null")

        return statements
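
For orientation, here is a minimal usage sketch (not part of the package diff above) of how the generator might be driven once entity and relation definitions have been parsed. The `create_migration` helper and its arguments are hypothetical — the real entry point appears to be the CLI in `grai/cli/main.py`, which is not shown in this section — but `MigrationGenerator.generate` and `save_migration` are called as defined above.

from pathlib import Path

from grai.core.migrations.generator import MigrationGenerator


def create_migration(project_root: Path, entities, relations) -> Path:
    # The constructor creates <project_root>/migrations/ if it does not exist yet.
    generator = MigrationGenerator(project_root)

    # Omitting description= makes generate() fall back to changes.summary().
    migration = generator.generate(
        current_entities=entities,
        current_relations=relations,
    )

    if not migration.changes.has_changes():
        raise SystemExit("No schema changes detected")

    # Writes migrations/<YYYYMMDD_HHMMSS>_<slug>.yml and returns its path.
    return generator.save_migration(migration)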

grai/core/migrations/models.py
@@ -0,0 +1,160 @@
"""
Migration models for schema versioning and change management.

This module defines the Pydantic models used to represent migrations,
schema changes, and migration history.
"""

from datetime import datetime
from enum import Enum
from typing import Any, Dict, List, Optional

from pydantic import BaseModel, Field


class MigrationStatus(str, Enum):
    """Status of a migration."""

    PENDING = "pending"
    APPLIED = "applied"
    FAILED = "failed"
    ROLLED_BACK = "rolled_back"


class ChangeType(str, Enum):
    """Type of schema change."""

    ADDED = "added"
    MODIFIED = "modified"
    REMOVED = "removed"


class PropertyChange(BaseModel):
    """Represents a change to a property."""

    name: str = Field(..., description="Property name")
    old_type: Optional[str] = Field(None, description="Previous type (for modifications)")
    new_type: Optional[str] = Field(None, description="New type (for modifications)")
    old_required: Optional[bool] = Field(None, description="Previous required status")
    new_required: Optional[bool] = Field(None, description="New required status")
    change_type: ChangeType = Field(..., description="Type of change")


class EntityChange(BaseModel):
    """Represents changes to an entity."""

    name: str = Field(..., description="Entity name")
    change_type: ChangeType = Field(..., description="Type of change")
    properties_added: List[Dict[str, Any]] = Field(
        default_factory=list, description="Properties added"
    )
    properties_modified: List[PropertyChange] = Field(
        default_factory=list, description="Properties modified"
    )
    properties_removed: List[str] = Field(default_factory=list, description="Properties removed")
    keys_changed: bool = Field(default=False, description="Whether keys were changed")
    old_keys: Optional[List[str]] = Field(None, description="Previous keys")
    new_keys: Optional[List[str]] = Field(None, description="New keys")


class RelationChange(BaseModel):
    """Represents changes to a relation."""

    name: str = Field(..., description="Relation name")
    change_type: ChangeType = Field(..., description="Type of change")
    from_entity_changed: bool = Field(default=False, description="Whether from entity changed")
    to_entity_changed: bool = Field(default=False, description="Whether to entity changed")
    old_from: Optional[str] = Field(None, description="Previous from entity")
    new_from: Optional[str] = Field(None, description="New from entity")
    old_to: Optional[str] = Field(None, description="Previous to entity")
    new_to: Optional[str] = Field(None, description="New to entity")
    properties_added: List[Dict[str, Any]] = Field(
        default_factory=list, description="Properties added"
    )
    properties_modified: List[PropertyChange] = Field(
        default_factory=list, description="Properties modified"
    )
    properties_removed: List[str] = Field(default_factory=list, description="Properties removed")


class SchemaChanges(BaseModel):
    """Collection of all schema changes in a migration."""

    entities: List[EntityChange] = Field(default_factory=list, description="Entity changes")
    relations: List[RelationChange] = Field(default_factory=list, description="Relation changes")

    def has_changes(self) -> bool:
        """Check if there are any changes."""
        return len(self.entities) > 0 or len(self.relations) > 0

    def summary(self) -> str:
        """Generate a human-readable summary of changes."""
        parts = []

        entities_added = sum(1 for e in self.entities if e.change_type == ChangeType.ADDED)
        entities_modified = sum(1 for e in self.entities if e.change_type == ChangeType.MODIFIED)
        entities_removed = sum(1 for e in self.entities if e.change_type == ChangeType.REMOVED)

        relations_added = sum(1 for r in self.relations if r.change_type == ChangeType.ADDED)
        relations_modified = sum(1 for r in self.relations if r.change_type == ChangeType.MODIFIED)
        relations_removed = sum(1 for r in self.relations if r.change_type == ChangeType.REMOVED)

        if entities_added:
            parts.append(f"{entities_added} entities added")
        if entities_modified:
            parts.append(f"{entities_modified} entities modified")
        if entities_removed:
            parts.append(f"{entities_removed} entities removed")
        if relations_added:
            parts.append(f"{relations_added} relations added")
        if relations_modified:
            parts.append(f"{relations_modified} relations modified")
        if relations_removed:
            parts.append(f"{relations_removed} relations removed")

        return ", ".join(parts) if parts else "No changes"


class Migration(BaseModel):
    """
    Represents a schema migration.

    Attributes:
        version: Unique version identifier (timestamp-based).
        description: Human-readable description of the migration.
        author: Who created the migration (user or 'auto-generated').
        timestamp: When the migration was created.
        changes: Structured representation of schema changes.
        up_cypher: Cypher statements to apply the migration.
        down_cypher: Cypher statements to rollback the migration.
        checksum: Hash of the migration content for integrity checking.
    """

    version: str = Field(..., pattern=r"^\d{8}_\d{6}$", description="Migration version")
    description: str = Field(..., min_length=1, description="Migration description")
    author: str = Field(default="auto-generated", description="Migration author")
    timestamp: datetime = Field(default_factory=datetime.utcnow, description="Creation timestamp")
    changes: SchemaChanges = Field(..., description="Schema changes")
    up_cypher: List[str] = Field(default_factory=list, description="Cypher to apply migration")
    down_cypher: List[str] = Field(default_factory=list, description="Cypher to rollback migration")
    checksum: Optional[str] = Field(None, description="Migration content checksum")

    model_config = {"json_schema_extra": {"example": {"version": "20251112_120000"}}}


class MigrationHistory(BaseModel):
    """
    Represents a migration's execution history.

    This is stored in Neo4j as __GraiMigration nodes.
    """

    version: str = Field(..., description="Migration version")
    description: str = Field(..., description="Migration description")
    applied_at: datetime = Field(..., description="When migration was applied")
    status: MigrationStatus = Field(..., description="Migration status")
    checksum: str = Field(..., description="Migration content checksum")
    execution_time_ms: Optional[int] = Field(None, description="Execution time in milliseconds")
    error_message: Optional[str] = Field(None, description="Error message if failed")

    model_config = {"json_schema_extra": {"example": {"version": "20251112_120000"}}}
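
To make these models concrete, here is a small hand-built example (not taken from the package or its tests) of a `SchemaChanges` collection feeding a `Migration`. The entity name and key are invented, and `version` must match the `^\d{8}_\d{6}$` pattern that `MigrationGenerator._generate_version` produces.

from grai.core.migrations.models import (
    ChangeType,
    EntityChange,
    Migration,
    SchemaChanges,
)

# One newly added entity with a single unique key.
changes = SchemaChanges(
    entities=[
        EntityChange(
            name="Customer",
            change_type=ChangeType.ADDED,
            new_keys=["customer_id"],
        )
    ]
)

print(changes.has_changes())  # True
print(changes.summary())      # "1 entities added"

migration = Migration(
    version="20251112_120000",  # must match ^\d{8}_\d{6}$
    description=changes.summary(),
    changes=changes,
    up_cypher=[
        "CREATE CONSTRAINT Customer_unique IF NOT EXISTS "
        "FOR (n:Customer) REQUIRE (n.customer_id) IS UNIQUE"
    ],
    down_cypher=["DROP CONSTRAINT Customer_unique IF EXISTS"],
)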

grai/core/models.py
CHANGED
@@ -206,6 +206,16 @@ class Entity(BaseModel):
             return self.source
         return SourceConfig.from_string(str(self.source))
 
+    @property
+    def name(self) -> str:
+        """
+        Alias for entity name for convenience.
+
+        Returns:
+            Entity name.
+        """
+        return self.entity
+
 
 class RelationMapping(BaseModel):
     """

@@ -300,6 +310,16 @@ class Relation(BaseModel):
             return self.source
         return SourceConfig.from_string(str(self.source))
 
+    @property
+    def name(self) -> str:
+        """
+        Alias for relation name for convenience.
+
+        Returns:
+            Relation name.
+        """
+        return self.relation
+
 
 class Project(BaseModel):
     """
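
The new `.name` aliases give `Entity` and `Relation` a uniform attribute, which presumably lets the new migrations code key both model types the same way when diffing schemas (that reading is an inference from this diff, not something documented here). A quick sketch under that assumption; the objects are taken as already loaded, since their other required fields are not visible in this diff:

def index_by_name(entities, relations):
    # Both model types can now be keyed identically, regardless of whether
    # the underlying field is `entity` or `relation`.
    entity_index = {e.name: e for e in entities}
    relation_index = {r.name: r for r in relations}
    return entity_index, relation_index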

{grai_build-0.3.0.dist-info → grai_build-0.4.1.dist-info}/METADATA
CHANGED
@@ -1,8 +1,8 @@
 Metadata-Version: 2.4
 Name: grai-build
-Version: 0.3.0
-Summary:
-Author-email:
+Version: 0.4.1
+Summary: Declarative knowledge graph modeling tool inspired by dbt
+Author-email: "grai.build" <hello@grai.build>
 License: MIT
 Classifier: Development Status :: 3 - Alpha
 Classifier: Intended Audience :: Developers

@@ -20,7 +20,7 @@ Requires-Dist: neo4j>=5.0
 Provides-Extra: dev
 Requires-Dist: pytest>=7.4; extra == "dev"
 Requires-Dist: pytest-cov>=4.1; extra == "dev"
-Requires-Dist: black
+Requires-Dist: black==25.9.0; extra == "dev"
 Requires-Dist: ruff>=0.1; extra == "dev"
 Requires-Dist: mypy>=1.5; extra == "dev"
 Requires-Dist: pre-commit>=3.0; extra == "dev"

@@ -320,14 +320,14 @@ For development guidance, check out the [instructions](.github/instructions/inst
 - [x] Graph IR export (JSON)
 - [x] Documentation generation (dbt-style)
 - [x] Lineage visualization (Mermaid + D3.js)
+- [x] Schema versioning and migrations
 - [ ] Graph visualization improvements
 - [ ] Gremlin backend support
 - [ ] Incremental sync
-- [ ] Schema versioning and migrations
 
 ## 📊 Current Status
 
-**v0.3.
+**v0.3.2** - Feature-complete MVP with migrations
 
 - ✅ **Core Models** - Pydantic models for Entity, Relation, Property
 - ✅ **YAML Parser** - Parse and load entity/relation definitions