graphglot 0.1.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101)
  1. graphglot/__init__.py +5 -0
  2. graphglot/analysis/__init__.py +11 -0
  3. graphglot/analysis/analyzer.py +68 -0
  4. graphglot/analysis/models.py +81 -0
  5. graphglot/analysis/rules/__init__.py +24 -0
  6. graphglot/analysis/rules/_registry.py +36 -0
  7. graphglot/analysis/rules/scope_rules.py +524 -0
  8. graphglot/analysis/rules/scope_validator.py +813 -0
  9. graphglot/analysis/rules/structural_rules.py +729 -0
  10. graphglot/ast/__init__.py +1461 -0
  11. graphglot/ast/base.py +517 -0
  12. graphglot/ast/cypher.py +719 -0
  13. graphglot/ast/expressions.py +6396 -0
  14. graphglot/ast/functions.py +414 -0
  15. graphglot/ast/macros.py +27 -0
  16. graphglot/ast/validation.py +37 -0
  17. graphglot/cli/__init__.py +57 -0
  18. graphglot/cli/_ast.py +58 -0
  19. graphglot/cli/_dialects.py +48 -0
  20. graphglot/cli/_features.py +104 -0
  21. graphglot/cli/_lineage.py +291 -0
  22. graphglot/cli/_parse.py +57 -0
  23. graphglot/cli/_shared.py +171 -0
  24. graphglot/cli/_tokenize.py +110 -0
  25. graphglot/cli/_transpile.py +98 -0
  26. graphglot/cli/_type.py +142 -0
  27. graphglot/cli/_validate.py +83 -0
  28. graphglot/dialect/__init__.py +8 -0
  29. graphglot/dialect/base.py +579 -0
  30. graphglot/dialect/coregql.py +19 -0
  31. graphglot/dialect/cypher.py +3494 -0
  32. graphglot/dialect/cypher_features.py +40 -0
  33. graphglot/dialect/fullgql.py +19 -0
  34. graphglot/dialect/neo4j.py +315 -0
  35. graphglot/error.py +444 -0
  36. graphglot/features.py +434 -0
  37. graphglot/generator/__init__.py +6 -0
  38. graphglot/generator/base.py +257 -0
  39. graphglot/generator/fragment.py +233 -0
  40. graphglot/generator/generators/__init__.py +22 -0
  41. graphglot/generator/generators/clauses.py +166 -0
  42. graphglot/generator/generators/commands.py +436 -0
  43. graphglot/generator/generators/core.py +316 -0
  44. graphglot/generator/generators/expressions.py +827 -0
  45. graphglot/generator/generators/literals.py +101 -0
  46. graphglot/generator/generators/macros.py +25 -0
  47. graphglot/generator/generators/patterns.py +469 -0
  48. graphglot/generator/generators/predicates.py +191 -0
  49. graphglot/generator/generators/statements.py +382 -0
  50. graphglot/generator/generators/types.py +338 -0
  51. graphglot/generator/registry.py +51 -0
  52. graphglot/lexer/__init__.py +10 -0
  53. graphglot/lexer/lexer.py +1318 -0
  54. graphglot/lexer/token.py +545 -0
  55. graphglot/lineage/__init__.py +75 -0
  56. graphglot/lineage/analyzer.py +1294 -0
  57. graphglot/lineage/dependency_extractor.py +133 -0
  58. graphglot/lineage/exporter.py +172 -0
  59. graphglot/lineage/impact.py +898 -0
  60. graphglot/lineage/models.py +420 -0
  61. graphglot/lineage/pattern_analyzer.py +268 -0
  62. graphglot/parser/__init__.py +3 -0
  63. graphglot/parser/base.py +574 -0
  64. graphglot/parser/functions.py +82 -0
  65. graphglot/parser/parsers/__init__.py +16 -0
  66. graphglot/parser/parsers/clauses.py +386 -0
  67. graphglot/parser/parsers/commands.py +82 -0
  68. graphglot/parser/parsers/core.py +6215 -0
  69. graphglot/parser/parsers/expressions.py +1123 -0
  70. graphglot/parser/parsers/literals.py +308 -0
  71. graphglot/parser/parsers/macros.py +40 -0
  72. graphglot/parser/parsers/patterns.py +278 -0
  73. graphglot/parser/parsers/predicates.py +275 -0
  74. graphglot/parser/parsers/statements.py +767 -0
  75. graphglot/parser/parsers/types.py +1239 -0
  76. graphglot/parser/registry.py +100 -0
  77. graphglot/scope.py +188 -0
  78. graphglot/transformations.py +203 -0
  79. graphglot/typing/__init__.py +15 -0
  80. graphglot/typing/annotator.py +107 -0
  81. graphglot/typing/errors.py +29 -0
  82. graphglot/typing/rules/__init__.py +39 -0
  83. graphglot/typing/rules/cast.py +63 -0
  84. graphglot/typing/rules/functions.py +197 -0
  85. graphglot/typing/rules/literals.py +109 -0
  86. graphglot/typing/rules/operators.py +256 -0
  87. graphglot/typing/rules/resolution.py +114 -0
  88. graphglot/typing/rules/variables.py +121 -0
  89. graphglot/typing/scope.py +59 -0
  90. graphglot/typing/types.py +269 -0
  91. graphglot/utils/__init__.py +0 -0
  92. graphglot/utils/deprecation.py +17 -0
  93. graphglot/utils/helper.py +58 -0
  94. graphglot/utils/trie.py +86 -0
  95. graphglot/visualization/__init__.py +5 -0
  96. graphglot/visualization/tree.py +120 -0
  97. graphglot-0.1.4.dist-info/METADATA +429 -0
  98. graphglot-0.1.4.dist-info/RECORD +101 -0
  99. graphglot-0.1.4.dist-info/WHEEL +4 -0
  100. graphglot-0.1.4.dist-info/entry_points.txt +2 -0
  101. graphglot-0.1.4.dist-info/licenses/LICENSE +190 -0
graphglot/__init__.py ADDED
@@ -0,0 +1,5 @@
"""GraphGlot - A Graph Query Language Toolkit."""

from importlib.metadata import PackageNotFoundError, version

try:
    __version__ = version("graphglot")
except PackageNotFoundError:
    # Metadata is missing when running from a source checkout or a vendored
    # copy that was never pip-installed; fall back instead of crashing the
    # whole package import.
    __version__ = "0.0.0"
@@ -0,0 +1,11 @@
"""Semantic analysis module for GQL queries."""

from graphglot.analysis.analyzer import SemanticAnalyzer
from graphglot.analysis.models import AnalysisContext, AnalysisResult, SemanticDiagnostic

# Public API of the analysis package, kept alphabetically sorted.
__all__ = [
    "AnalysisContext",
    "AnalysisResult",
    "SemanticAnalyzer",
    "SemanticDiagnostic",
]
@@ -0,0 +1,68 @@
1
+ """Semantic analyzer — runs registered rules and collects diagnostics."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import typing as t
6
+
7
+ from graphglot.analysis.models import AnalysisContext, AnalysisResult
8
+ from graphglot.analysis.rules import RULE_REGISTRY, STRUCTURAL_RULES
9
+
10
+ if t.TYPE_CHECKING:
11
+ from graphglot.ast.base import Expression
12
+ from graphglot.dialect.base import Dialect
13
+ from graphglot.lineage.models import LineageGraph
14
+ from graphglot.typing.annotator import ExternalContext
15
+
16
+
17
class SemanticAnalyzer:
    """Orchestrates semantic analysis rules against an AST."""

    def analyze(
        self,
        expression: Expression,
        dialect: Dialect,
        lineage: LineageGraph | None = None,
        annotate_types: bool = True,
        external_context: ExternalContext | None = None,
        disabled_rules: set[str] | None = None,
    ) -> AnalysisResult:
        """Run every registered rule over *expression* and collect results.

        With *annotate_types* enabled (the default) a :class:`TypeAnnotator`
        pass runs first, so rules can rely on ``_resolved_type`` being set on
        each AST node. *external_context* is handed to that annotator so
        user-supplied parameter types (graph or binding-table parameters)
        participate in inference.

        *disabled_rules* may name rule IDs to skip — feature IDs for
        feature-gated rules, structural rule names otherwise.
        """

        if annotate_types:
            from graphglot.typing import TypeAnnotator

            annotator = TypeAnnotator(dialect=dialect, external_context=external_context)
            annotator.annotate(expression)

        skip = disabled_rules or set()
        context = AnalysisContext(expression=expression, dialect=dialect, lineage=lineage)
        result = AnalysisResult()

        # Feature-gated rules: a firing rule marks the feature as used; its
        # diagnostics are only reported when the dialect lacks the feature.
        for feature_id, check in RULE_REGISTRY.items():
            if feature_id in skip:
                continue
            found = check(context)
            if found:
                result.features.add(feature_id)
                if not dialect.is_feature_supported(feature_id):
                    result.diagnostics.extend(found)

        # Structural rules are not feature-gated and always report.
        for rule_id, check in STRUCTURAL_RULES.items():
            if rule_id in skip:
                continue
            found = check(context)
            if found:
                result.diagnostics.extend(found)

        return result
@@ -0,0 +1,81 @@
1
+ """Data models for semantic analysis."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import typing as t
6
+
7
+ from dataclasses import dataclass, field
8
+
9
+ from graphglot.error import Diagnostic, Severity, Span
10
+
11
+ if t.TYPE_CHECKING:
12
+ from graphglot.ast.base import Expression
13
+ from graphglot.dialect.base import Dialect
14
+ from graphglot.lineage.models import LineageGraph
15
+
16
+
17
@dataclass
class AnalysisContext:
    """Context passed to each analysis rule."""

    # Root AST node the rules inspect.
    expression: Expression
    # Target dialect; its feature support gates feature-based rules.
    dialect: Dialect
    # Optional precomputed lineage graph for rules that need it.
    lineage: LineageGraph | None = None
24
+
25
+
26
@dataclass
class SemanticDiagnostic:
    """A single diagnostic emitted by a semantic analysis rule."""

    feature_id: str
    message: str
    node: Expression | None = None
    # Position info is derived from *node* in __post_init__, never passed in.
    line: int | None = field(default=None, init=False)
    col: int | None = field(default=None, init=False)
    span: Span | None = field(default=None, init=False)

    def __post_init__(self) -> None:
        # Climb from the attached node through its parents until one carries
        # token position info, then record line/col and the covered span.
        node = self.node
        while node is not None:
            start = getattr(node, "_start_token", None)
            if start is None:
                node = getattr(node, "_parent", None)
                continue
            self.line = start.line
            self.col = start.col
            end = getattr(node, "_end_token", None) or start
            self.span = Span(
                start_line=start.line,
                start_column=start.col,
                end_line=end.line,
                end_column=end.col,
                start_offset=start.start,
                end_offset=end.end + 1,
            )
            break

    def __str__(self) -> str:
        if self.line is None:
            return f"[{self.feature_id}] {self.message}"
        return f"[{self.feature_id}] {self.message} (line {self.line}, col {self.col})"

    def to_diagnostic(self) -> Diagnostic:
        """Convert to the shared :class:`Diagnostic` error model."""
        return Diagnostic(
            code=self.feature_id,
            message=self.message,
            severity=Severity.ERROR,
            phase="analysis",
            span=self.span,
            node=self.node,
        )
69
+
70
+
71
@dataclass
class AnalysisResult:
    """Result of running semantic analysis on an expression."""

    # Diagnostics from every rule that reported, in rule-registration order.
    diagnostics: list[SemanticDiagnostic] = field(default_factory=list)
    # Feature IDs whose rules fired on the expression.
    features: set[str] = field(default_factory=set)

    @property
    def ok(self) -> bool:
        """True when no diagnostics were emitted."""
        return not self.diagnostics
@@ -0,0 +1,24 @@
"""Rule registry for semantic analysis."""

# The rule modules are imported for their side effects: rule functions
# presumably self-register via the decorators in ``_registry`` at import
# time (they are re-exported below for direct access as well).
import graphglot.analysis.rules.scope_rules as scope_rules
import graphglot.analysis.rules.scope_validator as scope_validator
import graphglot.analysis.rules.structural_rules as structural_rules

from graphglot.analysis.rules._registry import (
    RULE_REGISTRY,
    STRUCTURAL_RULES,
    AnalysisRuleFn,
    analysis_rule,
    structural_rule,
)

__all__ = [
    "RULE_REGISTRY",
    "STRUCTURAL_RULES",
    "AnalysisRuleFn",
    "analysis_rule",
    "scope_rules",
    "scope_validator",
    "structural_rule",
    "structural_rules",
]
@@ -0,0 +1,36 @@
1
+ """Core registry data structures and decorators for analysis rules."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import typing as t
6
+
7
+ from graphglot.analysis.models import AnalysisContext, SemanticDiagnostic
8
+
9
# Signature every analysis rule must satisfy: takes the shared context,
# returns the diagnostics it found (empty list when the rule does not fire).
AnalysisRuleFn = t.Callable[[AnalysisContext], list[SemanticDiagnostic]]

# feature_id -> rule; diagnostics are reported only when the target dialect
# lacks the feature (see the ``analysis_rule`` docstring below).
RULE_REGISTRY: dict[str, AnalysisRuleFn] = {}
# rule_id -> rule; structural rules are not feature-gated.
STRUCTURAL_RULES: dict[str, AnalysisRuleFn] = {}
13
+
14
+
15
def analysis_rule(feature_id: str):
    """Decorator factory: register a function as the rule for *feature_id*.

    Rules fire when the feature is **absent** from the dialect — if the dialect
    supports the feature, the restriction doesn't apply.
    """

    def register(fn: AnalysisRuleFn) -> AnalysisRuleFn:
        RULE_REGISTRY[feature_id] = fn
        return fn

    return register
27
+
28
+
29
def structural_rule(rule_id: str):
    """Decorator factory: register a rule that always fires (not feature-gated)."""

    def register(fn: AnalysisRuleFn) -> AnalysisRuleFn:
        STRUCTURAL_RULES[rule_id] = fn
        return fn

    return register