graphitedb 0.1.2__tar.gz → 0.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. {graphitedb-0.1.2/src/graphitedb.egg-info → graphitedb-0.2}/PKG-INFO +2 -4
  2. {graphitedb-0.1.2 → graphitedb-0.2}/README.md +0 -2
  3. {graphitedb-0.1.2 → graphitedb-0.2}/pyproject.toml +2 -2
  4. graphitedb-0.2/src/graphite/__init__.py +26 -0
  5. graphitedb-0.2/src/graphite/engine.py +564 -0
  6. graphitedb-0.2/src/graphite/exceptions.py +89 -0
  7. graphitedb-0.2/src/graphite/instances.py +50 -0
  8. graphitedb-0.2/src/graphite/migration.py +104 -0
  9. graphitedb-0.2/src/graphite/parser.py +228 -0
  10. graphitedb-0.2/src/graphite/query.py +199 -0
  11. graphitedb-0.2/src/graphite/serialization.py +174 -0
  12. graphitedb-0.2/src/graphite/types.py +65 -0
  13. graphitedb-0.2/src/graphite/utils.py +34 -0
  14. {graphitedb-0.1.2 → graphitedb-0.2/src/graphitedb.egg-info}/PKG-INFO +2 -4
  15. graphitedb-0.2/src/graphitedb.egg-info/SOURCES.txt +28 -0
  16. {graphitedb-0.1.2 → graphitedb-0.2}/src/graphitedb.egg-info/top_level.txt +0 -1
  17. graphitedb-0.2/tests/test_integration_basic.py +204 -0
  18. graphitedb-0.2/tests/test_integration_dsl.py +236 -0
  19. graphitedb-0.2/tests/test_integration_persistence.py +232 -0
  20. graphitedb-0.2/tests/test_unit_engine.py +438 -0
  21. graphitedb-0.2/tests/test_unit_instances.py +128 -0
  22. graphitedb-0.2/tests/test_unit_migration.py +156 -0
  23. graphitedb-0.2/tests/test_unit_parser.py +221 -0
  24. graphitedb-0.2/tests/test_unit_query.py +302 -0
  25. graphitedb-0.2/tests/test_unit_serialization.py +198 -0
  26. graphitedb-0.2/tests/test_unit_types.py +170 -0
  27. graphitedb-0.2/tests/test_utils.py +73 -0
  28. graphitedb-0.1.2/src/__init__.py +0 -0
  29. graphitedb-0.1.2/src/graphite/__init__.py +0 -687
  30. graphitedb-0.1.2/src/graphitedb.egg-info/SOURCES.txt +0 -10
  31. graphitedb-0.1.2/tests/test_placeholder.py +0 -3
  32. {graphitedb-0.1.2 → graphitedb-0.2}/LICENSE +0 -0
  33. {graphitedb-0.1.2 → graphitedb-0.2}/setup.cfg +0 -0
  34. {graphitedb-0.1.2 → graphitedb-0.2}/src/graphitedb.egg-info/dependency_links.txt +0 -0
{graphitedb-0.1.2/src/graphitedb.egg-info → graphitedb-0.2}/PKG-INFO
@@ -1,7 +1,7 @@
  Metadata-Version: 2.4
  Name: graphitedb
- Version: 0.1.2
- Summary: A clean graph database engine
+ Version: 0.2
+ Summary: A clean, embedded graph database engine for Python.
  Author-email: Mahan Khalili <khalili1388mahan@gmail.com>
  Maintainer-email: Mahan Khalili <khalili1388mahan@gmail.com>
  License-Expression: MIT
@@ -201,5 +201,3 @@ def example_complete_dsl_loading():
  ```

  More examples are available in `example.py` in the GitHub repository.
- ::contentReference[oaicite:0]{index=0}
- ```
{graphitedb-0.1.2 → graphitedb-0.2}/README.md
@@ -185,5 +185,3 @@ def example_complete_dsl_loading():
  ```

  More examples are available in `example.py` in the GitHub repository.
- ::contentReference[oaicite:0]{index=0}
- ```
{graphitedb-0.1.2 → graphitedb-0.2}/pyproject.toml
@@ -4,14 +4,14 @@ build-backend = "setuptools.build_meta"

  [project]
  name = "graphitedb"
- version = "0.1.2"
+ version = "0.2"
  authors = [
      { name="Mahan Khalili", email="khalili1388mahan@gmail.com" },
  ]
  maintainers = [
      { name="Mahan Khalili", email="khalili1388mahan@gmail.com" },
  ]
- description = "A clean graph database engine"
+ description = "A clean, embedded graph database engine for Python."
  readme = "README.md"
  requires-python = ">=3.9"
  classifiers = [
graphitedb-0.2/src/graphite/__init__.py
@@ -0,0 +1,26 @@
+ """
+ Graphite: A clean, embedded graph database engine for Python.
+
+ This is the graphite module (installation: ``pip install graphitedb``).
+ You can use it with ``import graphite``.
+ """
+ from warnings import simplefilter
+
+ from .types import DataType, Field, NodeType, RelationType
+ from .instances import Node, Relation
+ from .serialization import GraphiteJSONEncoder
+ from .parser import GraphiteParser
+ from .query import QueryResult, QueryBuilder
+ from .engine import GraphiteEngine
+ from .migration import Migration
+ from .utils import node, relation, engine, SecurityWarning
+
+ simplefilter('always', SecurityWarning)
+
+ __all__ = [
+     'DataType', 'Field', 'NodeType', 'RelationType',
+     'Node', 'Relation', 'GraphiteJSONEncoder',
+     'GraphiteParser', 'QueryResult', 'QueryBuilder',
+     'GraphiteEngine', 'Migration', 'SecurityWarning',
+     'node', 'relation', 'engine'
+ ]
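For orientation, here is a minimal usage sketch based only on the exports above (the helper functions `node`, `relation`, and `engine` come from `utils.py`, whose signatures are not shown in this hunk, so they are omitted):

```python
# Minimal sketch; assumes the package is installed via `pip install graphitedb`.
import graphite

db = graphite.GraphiteEngine()   # the engine re-exported from graphite.engine
print(db.stats())                # fresh engine: all counts are zero
print(sorted(graphite.__all__))  # the public names listed in __init__.py
```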
graphitedb-0.2/src/graphite/engine.py
@@ -0,0 +1,564 @@
+ """
+ Main graph database engine of Graphite
+ """
+ import json
+ import warnings
+ import os
+ from collections import defaultdict
+ from typing import Dict, List, Optional, Any, Union
+
+ from .exceptions import (
+     FileSizeError, InvalidJSONError, InvalidPropertiesError, NotFoundError,
+     SafeLoadExtensionError, TooNestedJSONError, ValidationError,
+ )
+ from .types import Field, NodeType, RelationType
+ from .instances import Node, Relation
+ from .parser import GraphiteParser
+ from .query import QueryBuilder
+ from .serialization import GraphiteJSONEncoder, graphite_object_hook
+
+ class GraphiteEngine:  # pylint: disable=too-many-instance-attributes
+     """Main graph database engine"""
+
+     def __init__(self):
+         self.node_types: Dict[str, NodeType] = {}
+         self.relation_types: Dict[str, RelationType] = {}
+         self.nodes: Dict[str, Node] = {}
+         self.relations: List[Relation] = []
+         self.node_by_type: Dict[str, List[Node]] = defaultdict(list)
+         self.relations_by_type: Dict[str, List[Relation]] = defaultdict(list)
+         self.relations_by_from: Dict[str, List[Relation]] = defaultdict(list)
+         self.relations_by_to: Dict[str, List[Relation]] = defaultdict(list)
+         self.parser = GraphiteParser()
+         self.query = QueryBuilder(self)
+
+     # =============== SCHEMA DEFINITION ===============
+
+     def define_node(self, definition: str):
+         """Define a node type from DSL"""
+         node_name, fields, parent_name = self.parser.parse_node_definition(definition)
+
+         parent = None
+         if parent_name:
+             if parent_name not in self.node_types:
+                 raise NotFoundError(
+                     "Parent node type",
+                     parent_name,
+                 )
+             parent = self.node_types[parent_name]
+
+         node_type = NodeType(node_name, fields, parent)
+         self.node_types[node_name] = node_type
+
+     def define_relation(self, definition: str):
+         """Define a relation type from DSL"""
+         (rel_name, from_type, to_type, fields,
+          reverse_name, is_bidirectional) = self.parser.parse_relation_definition(definition)
+
+         # Validate node types exist
+         if from_type not in self.node_types:
+             raise NotFoundError(
+                 "Node type",
+                 from_type,
+             )
+         if to_type not in self.node_types:
+             raise NotFoundError(
+                 "Node type",
+                 to_type,
+             )
+
+         rel_type = RelationType(
+             rel_name, from_type, to_type,
+             fields, reverse_name, is_bidirectional
+         )
+         self.relation_types[rel_name] = rel_type
+
+         # Register reverse relation if specified
+         if reverse_name:
+             reverse_rel = RelationType(
+                 reverse_name, to_type, from_type,
+                 fields, rel_name, is_bidirectional
+             )
+             self.relation_types[reverse_name] = reverse_rel
+
+     # =============== DATA MANIPULATION ===============
+
+     def create_node(self, node_type: str, node_id: str, *values) -> Node:
+         """Create a node instance"""
+         if node_type not in self.node_types:
+             raise NotFoundError(
+                 "Node type",
+                 node_type
+             )
+
+         node_type_obj = self.node_types[node_type]
+         all_fields = node_type_obj.get_all_fields()
+
+         if len(values) != len(all_fields):
+             raise InvalidPropertiesError(
+                 all_fields,
+                 len(values)
+             )
+
+         # Create values dictionary
+         node_values = {}
+         for current_field, value in zip(all_fields, values):
+             node_values[current_field.name] = self.parser.parse_field_value(value, current_field)
+
+         new_node = Node(node_type, node_id, node_values, node_type_obj)
+         self.nodes[node_id] = new_node
+         self.node_by_type[node_type].append(new_node)
+         return new_node
+
+     def create_relation(self, from_id: str, to_id: str, rel_type: str, *values) -> Relation:
+         """Create a relation instance"""
+         if rel_type not in self.relation_types:
+             raise NotFoundError(
+                 "Relation type",
+                 rel_type,
+             )
+
+         rel_type_obj = self.relation_types[rel_type]
+
+         # Check if nodes exist
+         if from_id not in self.nodes:
+             raise NotFoundError(
+                 "Node",
+                 from_id,
+             )
+         if to_id not in self.nodes:
+             raise NotFoundError(
+                 "Node",
+                 to_id
+             )
+
+         if len(values) != len(rel_type_obj.fields):
+             raise InvalidPropertiesError(
+                 rel_type_obj.fields,
+                 len(values)
+             )
+
+         # Create values dictionary
+         rel_values = {}
+         for i, rel_field in enumerate(rel_type_obj.fields):
+             rel_values[rel_field.name] = self.parser.parse_value(values[i])
+
+         new_relation = Relation(rel_type, from_id, to_id, rel_values, rel_type_obj)
+         self.relations.append(new_relation)
+         self.relations_by_type[rel_type].append(new_relation)
+         self.relations_by_from[from_id].append(new_relation)
+         self.relations_by_to[to_id].append(new_relation)
+
+         # If relation is bidirectional, create reverse automatically
+         if rel_type_obj.is_bidirectional:
+             reverse_rel = Relation(rel_type, to_id, from_id, rel_values, rel_type_obj)
+             self.relations.append(reverse_rel)
+             self.relations_by_type[rel_type].append(reverse_rel)
+             self.relations_by_from[to_id].append(reverse_rel)
+             self.relations_by_to[from_id].append(reverse_rel)
+
+         return new_relation
+
+     # =============== QUERY METHODS ===============
+
+     def get_node(self, node_id: str) -> Optional[Node]:
+         """Get node by ID"""
+         return self.nodes.get(node_id)
+
+     def get_nodes_of_type(self, node_type: str, with_subtypes: bool = True) -> List[Node]:
+         """Get all nodes of a specific type"""
+         # Copy so that appending subtype nodes does not mutate the node_by_type index
+         nodes: List[Node] = list(self.node_by_type.get(node_type, []))
+         if with_subtypes:
+             for ntype in self.node_types.values():
+                 if ntype.parent and ntype.parent.name == node_type:
+                     for new_node in self.get_nodes_of_type(ntype.name):
+                         if new_node not in nodes:
+                             nodes.append(new_node)
+         return nodes
+
+     def get_relations_from(self, node_id: str, rel_type: Optional[str] = None) -> List[Relation]:
+         """Get relations from a node"""
+         all_rels = self.relations_by_from.get(node_id, [])
+         if rel_type:
+             return [r for r in all_rels if r.type_name == rel_type]
+         return all_rels
+
+     def get_relations_to(self, node_id: str, rel_type: Optional[str] = None) -> List[Relation]:
+         """Get relations to a node"""
+         all_rels = self.relations_by_to.get(node_id, [])
+         if rel_type:
+             return [r for r in all_rels if r.type_name == rel_type]
+         return all_rels
+
+     # =============== BULK LOADING ===============
+
+     def load_dsl(self, dsl: str):
+         """Load Graphite DSL"""
+         lines = dsl.strip().split('\n')
+         i = 0
+
+         while i < len(lines):
+             line = lines[i].strip()
+             if not line or line.startswith('#'):
+                 i += 1
+                 continue
+
+             if line.startswith('node'):
+                 # Collect multiline node definition
+                 node_def = [line]
+                 i += 1
+                 while (
+                     i < len(lines)
+                     and lines[i].strip()
+                     and not lines[i].strip().startswith(('node', 'relation'))
+                 ):
+                     node_def.append(lines[i])
+                     i += 1
+                 self.define_node('\n'.join(node_def))
+
+             elif line.startswith('relation'):
+                 # Collect multiline relation definition
+                 rel_def = [line]
+                 i += 1
+                 while (
+                     i < len(lines)
+                     and lines[i].strip()
+                     and not lines[i].strip().startswith(('node', 'relation'))
+                 ):
+                     rel_def.append(lines[i])
+                     i += 1
+                 self.define_relation('\n'.join(rel_def))
+
+             elif '[' not in line:
+                 # Node instance
+                 node_type, node_id, values = self.parser.parse_node_instance(line)
+                 self.create_node(node_type, node_id, *values)
+                 i += 1
+
+             elif '-[' in line and (']->' in line or ']-' in line):
+                 # Relation instance
+                 from_id, to_id, rel_type, values, _ = self.parser.parse_relation_instance(line)
+                 self.create_relation(from_id, to_id, rel_type, *values)
+                 i += 1
+             else:
+                 i += 1
+
+     # =============== PERSISTENCE ===============
+
+     @staticmethod
+     def _graphite_object_hook(dct: Dict[str, Any]) -> Any:
+         """Object hook for decoding Graphite objects from JSON."""
+         return graphite_object_hook(dct)
+
+     def save(self, filename: str):
+         """Save database to file using JSON"""
+         data = self._build_save_payload()
+         with open(filename, 'w', encoding='utf-8') as f:
+             # noinspection PyTypeChecker
+             json.dump(data, f, cls=GraphiteJSONEncoder, indent=2, ensure_ascii=False)
+
+     def load_safe(
+         self, filename: str, max_size_mb: Union[int, float] = 100, validate_schema: bool = True
+     ) -> None:
+         """
+         Safely load database with security checks
+
+         Args:
+             filename: File to load
+             max_size_mb: Maximum allowed file size in MB
+             validate_schema: Whether to validate schema consistency
+
+         Raises:
+             FileSizeError, SafeLoadExtensionError, InvalidJSONError,
+             TooNestedJSONError, or ValidationError if a security or
+             consistency check fails.
+         """
+         # Check file size
+         file_size = os.path.getsize(filename)
+         if file_size > max_size_mb * 1024 * 1024:
+             raise FileSizeError(
+                 file_size / 1024 / 1024,
+                 max_size_mb
+             )
+
+         # Check file extension
+         if not filename.lower().endswith('.json'):
+             raise SafeLoadExtensionError()
+
+         try:
+             with open(filename, 'r', encoding='utf-8') as f:
+                 data = json.load(f, object_hook=self._graphite_object_hook)
+         except json.JSONDecodeError as e:
+             raise InvalidJSONError() from e
+         except RecursionError as exc:
+             raise TooNestedJSONError() from exc
+
+         # Validate structure
+         if validate_schema:
+             self._validate_loaded_data(data)
+
+         # Load normally
+         self._load_from_dict(data)
+
+     @staticmethod
+     # pylint: disable=too-many-branches
+     def _validate_loaded_data(data: Dict[str, Any]):
+         """Validate loaded data for consistency"""
+         if not isinstance(data, dict):
+             raise ValidationError(
+                 "Loaded data must be a dictionary",
+                 "data",
+                 str(type(data))
+             )
+
+         required_keys = ['version', 'node_types', 'relation_types', 'nodes']
+         for key in required_keys:
+             if key not in data:
+                 raise ValidationError(
+                     f"Missing required key {key}",
+                     key,
+                     "'Missing'"
+                 )
+
+         if not isinstance(data.get('node_types'), list):
+             raise ValidationError(
+                 "node_types must be a list",
+                 "node_types",
+                 str(type(data.get('node_types')))
+             )
+         if not isinstance(data.get('relation_types'), list):
+             raise ValidationError(
+                 "relation_types must be a list",
+                 "relation_types",
+                 str(type(data.get('relation_types')))
+             )
+         if not isinstance(data.get('nodes'), list):
+             raise ValidationError(
+                 "nodes must be a list",
+                 "nodes",
+                 str(type(data.get('nodes')))
+             )
+         if 'relations' in data and not isinstance(data.get('relations'), list):
+             raise ValidationError(
+                 "relations must be a list",
+                 "relations",
+                 str(type(data.get('relations')))
+             )
+
+         # Check for unexpected keys
+         allowed_keys = {
+             'version', 'node_types', 'relation_types', 'nodes', 'relations',
+             'node_by_type', 'relations_by_type', 'relations_by_from', 'relations_by_to'
+         }
+         for key in data.keys():
+             if key not in allowed_keys:
+                 warnings.warn(f"Unexpected key in data: {key}", UserWarning)
+
+         # Validate nodes reference existing types
+         node_type_names = set()
+         for node_type in data.get('node_types', []):
+             if isinstance(node_type, NodeType):
+                 node_type_names.add(node_type.name)
+             elif isinstance(node_type, dict) and 'name' in node_type:
+                 node_type_names.add(node_type['name'])
+
+         for check_node in data.get('nodes', []):
+             type_name = check_node.type_name if isinstance(check_node, Node) else check_node.get('type_name')
+             if type_name not in node_type_names:
+                 raise NotFoundError(
+                     "Node type",
+                     type_name,
+                 )
+
+     # pylint: disable=too-many-branches, too-many-locals
+     def _load_from_dict(self, data: Dict[str, Any]):
+         """Internal method to load from dictionary (used by both load and load_safe)"""
+         # Clear existing data
+         self.clear()
+
+         node_types_data = data.get('node_types', [])
+         relation_types_data = data.get('relation_types', [])
+         nodes_data = data.get('nodes', [])
+         relations_data = data.get('relations', [])
+
+         # Restore node types
+         for nt_dict in node_types_data:
+             if isinstance(nt_dict, NodeType):
+                 nt = nt_dict
+             else:
+                 # Convert from dict if needed
+                 fields: List[Field] = list(map(
+                     lambda fld: Field(fld["name"], fld["dtype"], fld["default"]),
+                     nt_dict.get("fields", [])
+                 ))
+                 nt = NodeType(
+                     name=nt_dict['name'],
+                     fields=fields,
+                     parent=None  # Will be restored later
+                 )
+             self.node_types[nt.name] = nt
+
+         # Restore parent references for node types
+         for nt in node_types_data:
+             if isinstance(nt, dict):
+                 parent_name = nt.get('parent')
+                 name = nt.get('name')
+             else:
+                 parent_name = nt.parent.name if nt.parent else None
+                 name = nt.name
+             if isinstance(parent_name, dict):
+                 parent_name = parent_name["name"]
+             if parent_name and parent_name in self.node_types and name in self.node_types:
+                 self.node_types[name].parent = self.node_types[parent_name]
+
+         # Restore relation types
+         for rt_dict in relation_types_data:
+             if isinstance(rt_dict, RelationType):
+                 rt = rt_dict
+             else:
+                 rt = RelationType(
+                     name=rt_dict['name'],
+                     from_type=rt_dict['from_type'],
+                     to_type=rt_dict['to_type'],
+                     fields=rt_dict.get('fields', []),
+                     reverse_name=rt_dict.get('reverse_name'),
+                     is_bidirectional=rt_dict.get('is_bidirectional', False)
+                 )
+             self.relation_types[rt.name] = rt
+
+         # Restore nodes
+         for node_data in nodes_data:
+             if isinstance(node_data, Node):
+                 loading_node = node_data
+             else:
+                 loading_node = Node(
+                     type_name=node_data['type_name'],
+                     id=node_data['id'],
+                     values=node_data['values'],
+                     type_ref=None
+                 )
+
+             # Restore type reference
+             if loading_node.type_name in self.node_types:
+                 loading_node.type_ref = self.node_types[loading_node.type_name]
+
+             self.nodes[loading_node.id] = loading_node
+
+         # Restore relations
+         for rel_data in relations_data:
+             if isinstance(rel_data, Relation):
+                 rel = rel_data
+             else:
+                 rel = Relation(
+                     type_name=rel_data['type_name'],
+                     from_node=rel_data['from_node'],
+                     to_node=rel_data['to_node'],
+                     values=rel_data['values'],
+                     type_ref=None
+                 )
+
+             # Restore type reference
+             if rel.type_name in self.relation_types:
+                 rel.type_ref = self.relation_types[rel.type_name]
+
+             self.relations.append(rel)
+
+         # Rebuild all indexes
+         self._rebuild_all_indexes()
+
+     def _build_save_payload(self) -> Dict[str, Any]:
+         """Build a JSON-serializable payload for persistence."""
+         return {
+             "version": "1.0",
+             "node_types": list(self.node_types.values()),
+             "relation_types": list(self.relation_types.values()),
+             "nodes": list(self.nodes.values()),
+             "relations": list(self.relations),
+             "node_by_type": dict(self.node_by_type.items()),
+             "relations_by_type": dict(self.relations_by_type.items()),
+             "relations_by_from": dict(self.relations_by_from.items()),
+             "relations_by_to": dict(self.relations_by_to.items()),
+         }
+
+     def _rebuild_all_indexes(self):
+         self._rebuild_node_by_type()
+         self._rebuild_relations_indexes()
+
+     def load(self, filename: str, safe_mode: bool = True) -> None:
+         """
+         Load database from file
+
+         Args:
+             filename: File to load
+             safe_mode: If True, use safe loading with validation (default: True)
+         """
+         if safe_mode:
+             self.load_safe(filename)
+             return
+
+         # Legacy unsafe loading (for backward compatibility)
+         warnings.warn(
+             "Unsafe loading mode will be deprecated in future versions. Use safe_mode=True for security. "
+             "You can use 'graphite.Migration.convert_pickle_to_json()' to update your database.",
+             PendingDeprecationWarning
+         )
+         self._load_unsafe(filename)
+
+     def _load_unsafe(self, filename: str):
+         """Legacy unsafe loading (kept for compatibility)"""
+         with open(filename, 'r', encoding='utf-8') as f:
+             data = json.load(f, object_hook=self._graphite_object_hook)
+         self._load_from_dict(data)
+
+     def _rebuild_node_by_type(self):
+         """Rebuild node_by_type index"""
+         self.node_by_type = defaultdict(list)
+         for node_instance in self.nodes.values():
+             self.node_by_type[node_instance.type_name].append(node_instance)
+
+     def _rebuild_relations_indexes(self):
+         """Rebuild all relation indexes"""
+         self.relations_by_type = defaultdict(list)
+         self.relations_by_from = defaultdict(list)
+         self.relations_by_to = defaultdict(list)
+
+         for rel in self.relations:
+             self.relations_by_type[rel.type_name].append(rel)
+             self.relations_by_from[rel.from_node].append(rel)
+             self.relations_by_to[rel.to_node].append(rel)
+
+     def _rebuild_remaining_indexes(self):
+         """Rebuild indexes that might not be in the saved data"""
+         # Ensure relations_by_from and relations_by_to are built
+         if not self.relations_by_from or not self.relations_by_to:
+             self.relations_by_from = defaultdict(list)
+             self.relations_by_to = defaultdict(list)
+             for rel in self.relations:
+                 self.relations_by_from[rel.from_node].append(rel)
+                 self.relations_by_to[rel.to_node].append(rel)
+
+     # =============== UTILITY METHODS ===============
+
+     def clear(self):
+         """Clear all data"""
+         self.node_types.clear()
+         self.relation_types.clear()
+         self.nodes.clear()
+         self.relations.clear()
+         self.node_by_type.clear()
+         self.relations_by_type.clear()
+         self.relations_by_from.clear()
+         self.relations_by_to.clear()
+
+     def stats(self) -> Dict[str, Any]:
+         """Get database statistics"""
+         return {
+             'node_types': len(self.node_types),
+             'relation_types': len(self.relation_types),
+             'nodes': len(self.nodes),
+             'relations': len(self.relations),
+         }
+
+     # =============== SYNTAX SUGAR ===============
+
+     def parse(self, data: str):
+         """Parse data into nodes and relations (structure or data)"""
+         self.load_dsl(data)
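Taken together, the new engine exposes a small lifecycle: define a schema (via `define_node`/`define_relation` or `load_dsl`), create instances, and persist to JSON with `save`/`load`. The DSL grammar itself lives in `parser.py` (not shown in this hunk), so the sketch below sticks to the schema-free parts of the API that are visible above:

```python
# Persistence round-trip sketch using only methods defined in engine.py above.
# No schema is defined here because the DSL syntax is implemented in parser.py,
# which this hunk does not show.
from graphite import GraphiteEngine

db = GraphiteEngine()
print(db.stats())               # {'node_types': 0, 'relation_types': 0, 'nodes': 0, 'relations': 0}

db.save("graph.json")           # serialized with GraphiteJSONEncoder
db.load("graph.json")           # safe_mode=True: size, .json extension and schema checks
db.load("graph.json", safe_mode=False)  # legacy path; emits PendingDeprecationWarning
```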