natural-agi-common 0.1.36__py3-none-any.whl → 0.1.38__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- common/model/enums.py +7 -7
- common/traversal/visitors/__init__.py +0 -4
- common/traversal/visitors/angle_visitor.py +12 -3
- {natural_agi_common-0.1.36.dist-info → natural_agi_common-0.1.38.dist-info}/METADATA +7 -1
- {natural_agi_common-0.1.36.dist-info → natural_agi_common-0.1.38.dist-info}/RECORD +7 -11
- {natural_agi_common-0.1.36.dist-info → natural_agi_common-0.1.38.dist-info}/WHEEL +1 -1
- common/minor_check.py +0 -509
- common/models.py +0 -16
- common/traversal/visitors/half_plane_visitor.py +0 -64
- common/traversal/visitors/relative_position_visitor.py +0 -209
- {natural_agi_common-0.1.36.dist-info → natural_agi_common-0.1.38.dist-info}/top_level.txt +0 -0
common/model/enums.py
CHANGED
@@ -1,4 +1,4 @@
-from enum import Enum
+from enum import Enum, auto
 
 
 class ContourType(Enum):
@@ -13,12 +13,12 @@ class ContourDevelopment(Enum):
 
 
 class HorizontalDirection(Enum):
-    LEFT =
-    RIGHT =
-    NONE =
+    LEFT = auto()
+    RIGHT = auto()
+    NONE = auto()
 
 
 class VerticalDirection(Enum):
-    TOP =
-    BOTTOM =
-    NONE =
+    TOP = auto()
+    BOTTOM = auto()
+    NONE = auto()
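(The pre-0.1.38 right-hand sides above are truncated in this rendering.) For reference, `enum.auto()` is standard-library behavior, not package code: it assigns each member the next integer starting from 1, so the direction members get distinct, stable values without hand-written constants. A minimal sketch:

    from enum import Enum, auto

    class HorizontalDirection(Enum):
        LEFT = auto()
        RIGHT = auto()
        NONE = auto()

    # auto() numbers members sequentially starting at 1
    assert HorizontalDirection.LEFT.value == 1
    assert HorizontalDirection.NONE.value == 3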
common/traversal/visitors/__init__.py
CHANGED
@@ -1,19 +1,15 @@
 from .visitor import Visitor
 from .angle_visitor import AngleVisitor
 from .direction_visitor import DirectionVisitor
-from .half_plane_visitor import HalfPlaneVisitor
 from .length_comparison_visitor import LengthComparisonVisitor
 from .quadrant_visitor import QuadrantVisitor
-from .relative_position_visitor import RelativePositionVisitor
 from .visitor_result_persistence_service import VisitorResultPersistenceService
 
 __all__ = [
     "Visitor",
     "AngleVisitor",
     "DirectionVisitor",
-    "HalfPlaneVisitor",
     "LengthComparisonVisitor",
     "QuadrantVisitor",
-    "RelativePositionVisitor",
     "VisitorResultPersistenceService",
 ]
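Dropping these two re-exports (together with their modules, deleted below) is a breaking change for any downstream caller; a hypothetical example of code that breaks:

    # Worked against natural-agi-common 0.1.36; raises ImportError under 0.1.38,
    # since both the re-exports and the underlying modules are gone.
    from common.traversal.visitors import HalfPlaneVisitor, RelativePositionVisitor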
common/traversal/visitors/angle_visitor.py
CHANGED
@@ -11,7 +11,7 @@ import networkx as nx
 class AngleVisitor(Visitor):
     def __init__(self, graph: nx.Graph):
         super().__init__(graph)
-        self.point_angles: Dict[str,
+        self.point_angles: Dict[str, List[float]] = {}
         self.line_angles: Dict[str, float] = {}
 
     def visit_point(self, point: Point) -> Dict[str, Any]:
@@ -20,10 +20,10 @@ class AngleVisitor(Visitor):
         angles = []
         for i in range(len(connected_lines)):
             for j in range(i + 1, len(connected_lines)):
-
+                angle_pair = self._calculate_angle_between_lines(
                     connected_lines[i], connected_lines[j]
                 )
-                angles.extend(
+                angles.extend(angle_pair)
         self.point_angles[point.id] = angles
         self.graph.nodes[point.id]["angles"] = angles
         return {"angles": angles, "point_id": point.id}
@@ -151,6 +151,10 @@ class AngleVisitor(Visitor):
         magnitude1 = math.sqrt(vector1[0] ** 2 + vector1[1] ** 2)
         magnitude2 = math.sqrt(vector2[0] ** 2 + vector2[1] ** 2)
 
+        # Handle zero-length vectors
+        if magnitude1 < 1e-10 or magnitude2 < 1e-10:
+            return [0.0, 0.0]
+
         cos_angle = dot_product / (magnitude1 * magnitude2)
         angle1 = math.degrees(math.acos(max(-1.0, min(1.0, cos_angle))))
         angle2 = 360 - angle1
@@ -169,6 +173,11 @@
         vector = (end_coords[0] - start_coords[0], end_coords[1] - start_coords[1])
         dot_product = vector[0] * 1 + vector[1] * 0
         magnitude = math.sqrt(vector[0] ** 2 + vector[1] ** 2)
+
+        # Handle zero-length vectors
+        if magnitude < 1e-10:
+            return 0.0
+
         cos_angle = dot_product / magnitude
         angle = math.degrees(math.acos(max(-1.0, min(1.0, cos_angle))))
         return round(angle / 10) * 10
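Both added guards bail out before the magnitudes are used as divisors, so zero-length (degenerate) vectors no longer raise ZeroDivisionError. A standalone sketch of the guarded pairwise-angle step (an illustrative re-implementation that mirrors the diff, not the package API):

    import math
    from typing import List, Tuple

    def angle_pair(v1: Tuple[float, float], v2: Tuple[float, float]) -> List[float]:
        dot_product = v1[0] * v2[0] + v1[1] * v2[1]
        magnitude1 = math.sqrt(v1[0] ** 2 + v1[1] ** 2)
        magnitude2 = math.sqrt(v2[0] ** 2 + v2[1] ** 2)

        # The 0.1.38 guard: degenerate vectors yield [0.0, 0.0]
        if magnitude1 < 1e-10 or magnitude2 < 1e-10:
            return [0.0, 0.0]

        cos_angle = dot_product / (magnitude1 * magnitude2)
        # The clamp protects acos from floating-point drift outside [-1, 1]
        angle1 = math.degrees(math.acos(max(-1.0, min(1.0, cos_angle))))
        return [angle1, 360 - angle1]

    print(angle_pair((1.0, 0.0), (0.0, 1.0)))  # [90.0, 270.0]
    print(angle_pair((0.0, 0.0), (1.0, 0.0)))  # [0.0, 0.0] instead of ZeroDivisionError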
{natural_agi_common-0.1.36.dist-info → natural_agi_common-0.1.38.dist-info}/METADATA
CHANGED
@@ -1,7 +1,13 @@
 Metadata-Version: 2.1
 Name: natural-agi-common
-Version: 0.1.36
+Version: 0.1.38
+Summary: UNKNOWN
+Home-page: UNKNOWN
+License: UNKNOWN
+Platform: UNKNOWN
 Requires-Dist: pydantic
 Requires-Dist: networkx
 Requires-Dist: neo4j
 
+UNKNOWN
+
{natural_agi_common-0.1.36.dist-info → natural_agi_common-0.1.38.dist-info}/RECORD
CHANGED
@@ -3,12 +3,10 @@ common/critical_graph_utils.py,sha256=mqTopyoF5HELBZs4zB7RiMBL4wJy8V11zAp_OGgV_B
 common/critical_point.py,sha256=OeiAY7-WLk0o1zwWwSdbr5T1875oyE5REJ0q6et77jA,573
 common/decorator.py,sha256=ql68Rfh-sGvPUw_vVLblKkFPGgqlGbYXWrrwN5E1jh8,1423
 common/graph_utils.py,sha256=Fd9SjGrpYQ7YW8HnoyTojvXfFZmgHu2EIyAVd-AjgHg,4362
-common/minor_check.py,sha256=13lWp1x31IUrlGSii3COrBc_tGL6nsh4XXHWpTza4Zs,17735
-common/models.py,sha256=7zMzl46M1MCEG00dZV9jS2_MpHHeZQO17OfLk7VfGb8,324
 common/params.py,sha256=GjT0ZlF99D0Sn35c_cTZax0wxlKXVKSi3lMz8Y1zW04,562
 common/model/__init__.py,sha256=7wNxQtYv-JuKMP2PhoyxZT46BscxT-j8uVZlxmThSAs,644
 common/model/dlq.py,sha256=kzHcddbeOkj_YqBghldJOe9CjtO43xxC9Df3Tr5pKD4,111
-common/model/enums.py,sha256=
+common/model/enums.py,sha256=rTJLjJ1AhLkUYRL8ZQvrMuCt46bZ24_tdGMoTgUjF5w,395
 common/model/half_plane.py,sha256=AEeY79Uot-mzwe-DgSKn39K-jCJPstXatxT9zN-IUXw,146
 common/model/length_comparison_result.py,sha256=PEU5QyqpAb9sO0K42cLHILhE_VUMEsNczYhwrFB6UeA,140
 common/model/point.py,sha256=GlGTfku0K1Wfs4db7kYe6cHXyx8P9TNx0FoKZqfkspA,703
@@ -16,16 +14,14 @@ common/model/vector.py,sha256=Gnoc-WUU_VCZ-BKO9QLj9a4uzIM81ZW5QZ3yhWpmmSY,651
 common/traversal/__init__.py,sha256=pkj_5-jsIbDoZbj2DyDePsuqICjuAJ2Rzahw-ezWqJk,149
 common/traversal/graph_traversal.py,sha256=F_QBFYPyVckWkxjU-_Jos3WKSSUqaSOg1TbsU27PUPE,2535
 common/traversal/start_point_selector.py,sha256=saW8G3X3bEiXmZ_aTSCtb8hbquzae4qcaicyvsQ_mVw,4608
-common/traversal/visitors/__init__.py,sha256=
-common/traversal/visitors/angle_visitor.py,sha256=
+common/traversal/visitors/__init__.py,sha256=TR7G8-IlF_ytfvqBSKVfOND3cQb1-wiMIMR100duYus,472
+common/traversal/visitors/angle_visitor.py,sha256=ieqQyIHfsej8JMijM9jJrZtEUhF-MDnHHzXcW7nymP0,6920
 common/traversal/visitors/direction_visitor.py,sha256=Dn4oPLmhEsL6EpWNC44X2FZU2sIoWMitEiQ5nBty2Po,5944
-common/traversal/visitors/half_plane_visitor.py,sha256=d1DUVtx4gH-dDhicTjN7lR69xvz2SXKQqaP6kzo3orM,2063
 common/traversal/visitors/length_comparison_visitor.py,sha256=hW56siLZodjsNWEFcyg9k0fclJLzT2ArneZCZZLVx9Y,2819
 common/traversal/visitors/quadrant_visitor.py,sha256=N8Vb91FqtRYoQ2G9cK91KAfDaC7u2KbPeQ3sDNqgvcw,3603
-common/traversal/visitors/relative_position_visitor.py,sha256=3VfJWiuyvC9kc4C9F65LCJ7V4ecmbdz68KMahN2cOxc,7058
 common/traversal/visitors/visitor.py,sha256=30J0GEqzgeQrOng-rjPncXlcLE16WE7sqLlJ7-8SuwU,775
 common/traversal/visitors/visitor_result_persistence_service.py,sha256=lpr6KzZt22tjTpBnepbTMZ2gi9eLLNrojEv9ABTbvhE,760
-natural_agi_common-0.1.
-natural_agi_common-0.1.
-natural_agi_common-0.1.
-natural_agi_common-0.1.
+natural_agi_common-0.1.38.dist-info/METADATA,sha256=nhbnISQhmXALBx6AJ_xtdiPzJAQ4kEqGKXcnb5e98QY,213
+natural_agi_common-0.1.38.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
+natural_agi_common-0.1.38.dist-info/top_level.txt,sha256=LOtYx8KZTmnxM_zLK4rwrcI3PRc40Ihwp5rgaQ-ceaI,7
+natural_agi_common-0.1.38.dist-info/RECORD,,
common/minor_check.py
DELETED
@@ -1,509 +0,0 @@
-import networkx as nx
-from itertools import combinations
-from typing import Optional
-
-
-def is_minor(graph: nx.Graph, concept: nx.Graph) -> bool:
-    """
-    Checks if 'concept' is a minor of 'graph' by enumerating possible node deletions
-    and then recursively contracting edges until the subgraph has the same number
-    of edges as 'concept'.
-
-    - Retains all node data in the subgraph/contracted graph.
-    - Only uses 'labels' field to compare nodes for isomorphism.
-    """
-
-    # Quick size checks
-    if concept.number_of_nodes() > graph.number_of_nodes():
-        return False
-    if concept.number_of_edges() > graph.number_of_edges():
-        return False
-
-    # Prune early if the graph doesn't have enough labels to match concept
-    if not _check_label_feasibility(graph, concept):
-        return False
-
-    # We'll define a node_match function that ONLY checks labels overlap.
-    # We ignore everything else in node data for the match.
-    def node_match(n1_data: dict, n2_data: dict) -> bool:
-        labels1 = n1_data.get("labels", set())
-        labels2 = n2_data.get("labels", set())
-        # Two nodes match if they share at least one label
-        return bool(labels1 & labels2)
-
-    # How many nodes must we remove from 'graph'?
-    to_remove = graph.number_of_nodes() - concept.number_of_nodes()
-
-    # For each subset of nodes to delete
-    for nodes_subset in combinations(list(graph.nodes()), to_remove):
-        # Check if removing these nodes kills feasibility of labels
-        if not _delete_feasibility_check(graph, nodes_subset, concept):
-            continue
-
-        # Build a subgraph with those nodes removed
-        G_sub = graph.copy()
-        G_sub.remove_nodes_from(nodes_subset)
-
-        # We now need to contract edges so that G_sub has the same # of edges as concept
-        needed_contractions = G_sub.number_of_edges() - concept.number_of_edges()
-
-        if needed_contractions < 0:
-            # Not enough edges to match concept's connectivity
-            continue
-        if needed_contractions == 0:
-            # Just check isomorphism right away
-            if nx.is_isomorphic(G_sub, concept, node_match=node_match):
-                return True
-            continue
-
-        # Otherwise, recursively contract the necessary number of edges
-        if _contract_recursive(G_sub, needed_contractions, concept, node_match):
-            return True
-
-    return False
-
-
-def _contract_recursive(
-    G_sub: nx.Graph, needed_contractions: int, concept: nx.Graph, node_match
-) -> bool:
-    """
-    Recursively contract 'needed_contractions' edges in G_sub.
-    Maintains a list of original node UUIDs that were merged into each node.
-    """
-    # If we have contracted enough edges, check isomorphism
-    if needed_contractions == 0:
-        return nx.is_isomorphic(G_sub, concept, node_match=node_match)
-
-    edges = list(G_sub.edges())
-    if len(edges) < needed_contractions:
-        return False
-
-    # Try each edge in turn
-    for u, v in edges:
-        G_copy = G_sub.copy()
-
-        # Merge the original_nodes lists
-        u_nodes = G_copy.nodes[u].get("original_nodes", [u])
-        v_nodes = G_copy.nodes[v].get("original_nodes", [v])
-        merged_nodes = u_nodes + v_nodes
-
-        # Do the contraction
-        G_copy = nx.contracted_nodes(G_copy, u, v, self_loops=False)
-
-        # Store the list of original nodes that were merged into this node
-        G_copy.nodes[u]["original_nodes"] = merged_nodes
-
-        # Keep only labels for isomorphism checking
-        G_copy.nodes[u]["labels"] = set(G_sub.nodes[u].get("labels", set())) | set(
-            G_sub.nodes[v].get("labels", set())
-        )
-
-        # Recurse with one fewer contraction needed
-        if _contract_recursive(G_copy, needed_contractions - 1, concept, node_match):
-            return True
-
-    return False
-
-
-def _merge_node_data(data_u: dict, data_v: dict) -> dict:
-    """
-    Merge the data dictionaries from two nodes u and v.
-    - 'labels' fields are unioned
-    - All other fields are merged according to specific rules
-    """
-    merged = {}
-
-    # Special handling for labels - always union them
-    labels_u = data_u.get("labels", set())
-    labels_v = data_v.get("labels", set())
-    merged["labels"] = labels_u.union(labels_v)
-
-    # Merge all other fields
-    all_keys = set(data_u.keys()) | set(data_v.keys())
-    for key in all_keys:
-        if key == "labels":
-            continue  # already handled
-
-        # For numeric values, take average
-        if key in data_u and key in data_v:
-            val_u = data_u[key]
-            val_v = data_v[key]
-            if isinstance(val_u, (int, float)) and isinstance(val_v, (int, float)):
-                merged[key] = (val_u + val_v) / 2
-            else:
-                # For non-numeric values, prefer data_u's value
-                merged[key] = data_u[key]
-        elif key in data_u:
-            merged[key] = data_u[key]
-        else:
-            merged[key] = data_v[key]
-
-    return merged
-
-
-def _check_label_feasibility(graph: nx.Graph, concept: nx.Graph) -> bool:
-    """
-    Quick check: If concept requires more occurrences of any label
-    than graph can provide, skip immediately.
-    """
-    from collections import Counter
-
-    graph_labels = Counter()
-    concept_labels = Counter()
-
-    # Count labels in the graph
-    for _, data in graph.nodes(data=True):
-        lbls = data.get("labels", set())
-        graph_labels.update(lbls)
-
-    # Count labels in the concept
-    for _, data in concept.nodes(data=True):
-        lbls = data.get("labels", set())
-        concept_labels.update(lbls)
-
-    # If concept needs more of any label than the graph has, return False
-    for label, needed_count in concept_labels.items():
-        if graph_labels[label] < needed_count:
-            return False
-
-    return True
-
-
-def _delete_feasibility_check(graph: nx.Graph, nodes_subset, concept: nx.Graph) -> bool:
-    """
-    If removing 'nodes_subset' from 'graph' eliminates critical labels
-    needed to match 'concept', return False immediately.
-    """
-    from collections import Counter
-
-    to_delete_set = set(nodes_subset)
-    remaining_labels = Counter()
-
-    for node in graph.nodes():
-        if node not in to_delete_set:
-            node_labels = graph.nodes[node].get("labels", set())
-            remaining_labels.update(node_labels)
-
-    concept_labels = Counter()
-    for _, data in concept.nodes(data=True):
-        concept_labels.update(data.get("labels", set()))
-
-    for label, cnt_needed in concept_labels.items():
-        if remaining_labels[label] < cnt_needed:
-            return False
-
-    return True
-
-
-#######################################
-# 2) Enumerate All Minors of a Graph #
-#######################################
-def enumerate_minors(
-    base_graph: nx.Graph, max_nodes: int = 5, max_enumerations: int = 1000
-):
-    """
-    Generates minor graphs by enumerating all sub-node sets (up to max_nodes)
-    and recursively contracting edges to produce unique minors.
-
-    Args:
-        base_graph: The graph from which to enumerate minors
-        max_nodes: Maximum number of nodes in the enumerated minors
-        max_enumerations: How many minors to generate before stopping
-            (just to avoid infinite blow-ups)
-
-    Returns:
-        A set of minors (each minor is returned as a "canonical" form).
-    """
-    all_minors = set()
-    enumerations_count = 0
-
-    graph_nodes = list(base_graph.nodes())
-    n = len(graph_nodes)
-
-    for subset_size in range(1, min(max_nodes, n) + 1):
-        for subset in combinations(graph_nodes, subset_size):
-            induced_sub = base_graph.subgraph(subset).copy()
-
-            # Now consider possible edge contractions
-            for minor_graph in _enumerate_contractions(induced_sub):
-                enumerations_count += 1
-                if enumerations_count > max_enumerations:
-                    return all_minors  # early exit
-                cstring = _canonical_label(minor_graph)
-                all_minors.add(cstring)
-
-    return all_minors
-
-
-def _enumerate_contractions(graph: nx.Graph):
-    """
-    Yields all possible contractions from the given graph.
-    We do a DFS: each edge can be contracted or not.
-    """
-    # Yield the original graph
-    yield graph
-
-    edges = list(graph.edges())
-    for i, (u, v) in enumerate(edges):
-        G_copy = graph.copy()
-        merged_labels = set(G_copy.nodes[u].get("labels", set())) | set(
-            G_copy.nodes[v].get("labels", set())
-        )
-        nx.contracted_nodes(G_copy, u, v, self_loops=False, copy=False)
-        G_copy.nodes[u]["labels"] = merged_labels
-
-        # Recurse
-        yield from _enumerate_contractions(G_copy)
-
-
-def _canonical_label(g: nx.Graph) -> str:
-    """
-    Create a string that uniquely identifies a graph's structure and node labels.
-    (Naive approach—real solutions might use NAUTY/Bliss for a perfect canonical form.)
-    """
-    labels_map = {}
-    for node in sorted(g.nodes()):
-        lbls = sorted(g.nodes[node].get("labels", []))
-        labels_map[node] = ",".join(lbls)
-
-    edges_sorted = []
-    for a, b in g.edges():
-        edges_sorted.append(tuple(sorted([a, b])))
-    edges_sorted.sort()
-
-    node_part = ";".join([f"{node}:{labels_map[node]}" for node in sorted(labels_map)])
-    edge_part = ",".join([f"({u}=>{v})" for (u, v) in edges_sorted])
-    return f"N[{node_part}] E[{edge_part}]"
-
-
-############################################
-# 3) Rebuild Graph from Canonical String #
-############################################
-def parse_cstring_to_graph(cstr: str) -> nx.Graph:
-    """
-    Parse the canonical string back into a networkx.Graph with 'labels' attributes.
-    Format: N[node_entries]E[edge_entries] where:
-    - node_entries are semicolon-separated: node_info:labels
-    - edge_entries are comma-separated: (node1=>node2)
-
-    Example:
-        N[4:Point,EndPoint;5:Point,Vector]E[(4=>5),(5=>6)]
-    """
-    import re
-
-    g = nx.Graph()
-
-    match_nodes = re.search(r"N\[(.*?)\]", cstr)
-    match_edges = re.search(r"E\[(.*?)\]", cstr)
-    if not match_nodes:
-        return g
-
-    # Parse nodes
-    nodes_part = match_nodes.group(1)
-    node_entries = nodes_part.split(";") if nodes_part else []
-
-    for node_entry in node_entries:
-        if not node_entry:
-            continue
-
-        # Split by the last colon to separate labels from node info
-        parts = node_entry.rsplit(":", 1)
-        if len(parts) != 2:
-            continue
-
-        node_info, lbl_str = parts
-        labels = set(lbl_str.split(",")) if lbl_str else set()
-        g.add_node(node_info, labels=labels)
-
-    # Parse edges
-    if match_edges:
-        edges_part = match_edges.group(1)
-        # Split on closing parenthesis and remove empty strings
-        edge_entries = [e.strip() for e in edges_part.split(")") if e.strip()]
-
-        for e in edge_entries:
-            # Clean up the edge entry
-            e = e.replace("(", "").strip()
-            if "=>" not in e:
-                continue
-
-            u, v = e.split("=>")
-            u, v = u.strip(), v.strip()  # Remove any whitespace
-            if u in g.nodes and v in g.nodes:
-                g.add_edge(u, v)
-
-    return g
-
-
-############################################
-# 4) find_common_minors_in_dataset (Revised)
-############################################
-def find_common_minors_in_dataset_return_biggest(
-    graphs: list[nx.Graph], max_nodes: int = 5
-) -> Optional[nx.Graph]:
-    """
-    Modified to restore properties after finding the common minor.
-    """
-    # 1) Pick the smallest graph
-    smallest_graph = min(graphs, key=lambda g: g.number_of_nodes())
-
-    # 2) Enumerate minors (as canonical strings)
-    print("Enumerating minors from the smallest graph...")
-    candidate_minors_cstrings = enumerate_minors(
-        smallest_graph, max_nodes=max_nodes, max_enumerations=2000
-    )
-    print(
-        f"Found {len(candidate_minors_cstrings)} candidate minors (some might be duplicates)."
-    )
-
-    # 3) For each candidate, parse into a graph, and check if it's a minor of all other graphs
-    all_other_graphs = [g for g in graphs if g is not smallest_graph]
-    common_minors_as_graphs = []
-    visited_cstrings = set()
-
-    print("Checking each candidate minor against all other graphs...")
-    for cstr in candidate_minors_cstrings:
-        if cstr in visited_cstrings:
-            continue
-        visited_cstrings.add(cstr)
-
-        minor_graph = parse_cstring_to_graph(cstr)
-
-        # Check if it's a minor of all other graphs
-        is_common = True
-        for g in all_other_graphs:
-            if not is_minor(g, minor_graph):
-                is_common = False
-                break
-
-        if is_common:
-            common_minors_as_graphs.append(minor_graph)
-
-    # 4) Return the 'largest' common minor if any exist
-    if common_minors_as_graphs:
-        # Sort descending by (#nodes, #edges)
-        common_minors_as_graphs.sort(
-            key=lambda mg: (mg.number_of_nodes(), mg.number_of_edges()), reverse=True
-        )
-        biggest_minor = common_minors_as_graphs[0]
-        # Restore properties from original graphs
-        return restore_node_properties(biggest_minor, graphs)
-    else:
-        return None
-
-
-def restore_node_properties(
-    minor_graph: nx.Graph, source_graphs: list[nx.Graph]
-) -> nx.Graph:
-    """
-    Restores original node properties from source graphs based on stored original_nodes.
-    For merged nodes, properties are averaged or combined according to type.
-    """
-    result = minor_graph.copy()
-
-    for node in result.nodes():
-        original_nodes = result.nodes[node].get("original_nodes", [node])
-        merged_data = {}
-
-        # Collect all properties from original nodes across all source graphs
-        for source_graph in source_graphs:
-            for orig_node in original_nodes:
-                if orig_node in source_graph:
-                    node_data = source_graph.nodes[orig_node]
-                    for key, value in node_data.items():
-                        if key == "original_nodes":
-                            continue
-                        if key not in merged_data:
-                            merged_data[key] = []
-                        merged_data[key].append(value)
-
-        # Merge collected properties
-        final_data = {}
-        for key, values in merged_data.items():
-            if not values:
-                continue
-
-            if key == "labels":
-                # Union all label sets
-                final_data[key] = set().union(
-                    *[v if isinstance(v, set) else {v} for v in values]
-                )
-            elif all(isinstance(v, (int, float)) for v in values):
-                # Average numeric values
-                final_data[key] = sum(values) / len(values)
-            else:
-                # For other types, take the most common value
-                final_data[key] = max(set(values), key=values.count)
-
-        # Update node data
-        result.nodes[node].update(final_data)
-
-    return result
-
-
-#######################################
-# 5) Example Usage / Demo
-#######################################
-if __name__ == "__main__":
-    import time
-
-    # Graph 1: L-shape with additional properties
-    G1 = nx.Graph()
-    G1.add_node("p1", labels={"EndPoint", "Point"}, x=0, y=0, weight=1.0)
-    G1.add_node("p2", labels={"CornerPoint", "Point"}, x=0, y=1, weight=2.0)
-    G1.add_node("p3", labels={"EndPoint", "Point"}, x=0, y=2, weight=3.0)
-    G1.add_node("p4", labels={"EndPoint", "Point"}, x=1, y=1, weight=4.0)
-    G1.add_node("v1", labels={"VerticalVector", "Vector"}, length=1.0)
-    G1.add_node("v2", labels={"VerticalVector", "Vector"}, length=1.0)
-    G1.add_node("v3", labels={"HorizontalVector", "Vector"}, length=1.0)
-    G1.add_edge("p1", "v1")
-    G1.add_edge("v1", "p2")
-    G1.add_edge("p2", "v2")
-    G1.add_edge("v2", "p3")
-    G1.add_edge("p2", "v3")
-    G1.add_edge("v3", "p4")
-
-    # Graph 2: a smaller shape with properties
-    G2 = nx.Graph()
-    G2.add_node("p1", labels={"EndPoint", "Point"}, x=5, y=5, weight=10.0)
-    G2.add_node("p2", labels={"EndPoint", "Point"}, x=5, y=6, weight=20.0)
-    G2.add_node("v1", labels={"VerticalVector", "Vector"}, length=1.0)
-    G2.add_edge("p1", "v1")
-    G2.add_edge("v1", "p2")
-
-    # Graph 3: Just a single line segment with a corner and properties
-    G3 = nx.Graph()
-    G3.add_node("p1", labels={"CornerPoint", "Point"}, x=10, y=10, weight=100.0)
-    G3.add_node("p2", labels={"EndPoint", "Point"}, x=10, y=11, weight=200.0)
-    G3.add_node("v1", labels={"VerticalVector", "Vector"}, length=1.0)
-    G3.add_edge("p1", "v1")
-    G3.add_edge("v1", "p2")
-
-    dataset = [G1, G2, G3]
-
-    start = time.time()
-    common_minors_graph = find_common_minors_in_dataset_return_biggest(
-        dataset, max_nodes=3
-    )
-    elapsed = time.time() - start
-
-    print("\n=== Testing Property Retention ===")
-    print("\nCommon Minor Found:")
-    print("Nodes with all properties:")
-    for node, data in common_minors_graph.nodes(data=True):
-        print(f"\nNode {node}:")
-        for key, value in data.items():
-            print(f"  {key}: {value}")
-
-    print("\nEdges:", common_minors_graph.edges())
-    print(f"\nExecution time: {elapsed:.4f} seconds")
-
-    # Verify that properties were merged correctly
-    print("\nVerifying property merging:")
-    for node, data in common_minors_graph.nodes(data=True):
-        if "weight" in data:
-            print(f"Node {node} has weight: {data['weight']}")
-        if "x" in data and "y" in data:
-            print(f"Node {node} has coordinates: ({data['x']}, {data['y']})")
-        if "length" in data:
-            print(f"Node {node} has length: {data['length']}")
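For context on what was removed: is_minor(graph, concept) decided whether the concept graph can be obtained from the larger graph by deleting nodes and contracting edges, matching nodes purely on overlapping 'labels' sets. A minimal driving sketch in the style of the file's own __main__ demo (the import only works against 0.1.36, since the module is gone in 0.1.38):

    import networkx as nx
    # from common.minor_check import is_minor  # 0.1.36 only; deleted in 0.1.38

    big = nx.Graph()
    big.add_node("p1", labels={"EndPoint", "Point"})
    big.add_node("v1", labels={"VerticalVector", "Vector"})
    big.add_node("p2", labels={"EndPoint", "Point"})
    big.add_edges_from([("p1", "v1"), ("v1", "p2")])

    concept = nx.Graph()
    concept.add_node("a", labels={"Point"})
    concept.add_node("b", labels={"Vector"})
    concept.add_edge("a", "b")

    # is_minor(big, concept) -> True: deleting p2 leaves a Point--Vector edge,
    # and each remaining node shares a label with its counterpart in concept.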
common/models.py
DELETED
@@ -1,16 +0,0 @@
-from enum import Enum
-from pydantic import BaseModel
-
-class DLQModel(BaseModel):
-    source: str
-    error: dict
-    value: dict
-
-class ContourType(Enum):
-    CLOSED = "Closed"
-    OPEN = "Open"
-
-class ContourDevelopment(Enum):
-    MONOTONIC = "Monotonic"
-    NON_MONOTONIC = "Non-monotonic"
-    UNKNOWN = "Unknown"
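The deleted module duplicated ContourType and ContourDevelopment (which remain in common/model/enums.py, updated above) and defined a small dead-letter-queue payload, apparently superseded by common/model/dlq.py per the RECORD. A hypothetical round trip with the removed model (field values invented for illustration):

    from pydantic import BaseModel

    class DLQModel(BaseModel):  # as defined in the deleted common/models.py
        source: str
        error: dict
        value: dict

    msg = DLQModel(source="visitor-pipeline", error={"type": "ValueError"}, value={"id": "p1"})
    print(msg.source, msg.error["type"])  # visitor-pipeline ValueError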
common/traversal/visitors/half_plane_visitor.py
DELETED
@@ -1,64 +0,0 @@
-from typing import Any, Dict
-
-from neo4j import ManagedTransaction
-from ...model.half_plane import HalfPlane
-from .visitor import Visitor
-from ...model.point import Point
-from ...model.vector import Vector
-
-
-class HalfPlaneVisitor(Visitor):
-    def __init__(self, graph):
-        super().__init__(graph)
-        self.half_planes: Dict[str, HalfPlane] = {}
-
-    def visit_point(self, _: Point) -> None:
-        # This visitor does not handle points
-        return None
-
-    def visit_line(self, line: Vector) -> Dict[str, Any]:
-        dx = line.x2 - line.x1
-        dy = line.y2 - line.y1
-        half_plane = self.determine_half_plane(dx, dy)
-        self.half_planes[line.id] = half_plane
-        self.graph.nodes[line.id]["half_plane"] = half_plane.value
-
-    def save_result(
-        self,
-        tx: ManagedTransaction,
-        image_id: str,
-        session_id: str,
-        result: Dict[str, Any],
-    ) -> None:
-        query = """
-        MATCH (v:Vector {id: $id})
-        MERGE (hp:HalfPlane:Feature {value: $half_plane, session_id: $session_id})
-        ON CREATE SET hp.samples = [$image_id], v.half_plane = $half_plane
-        ON MATCH SET hp.samples = CASE
-            WHEN NOT $image_id IN hp.samples THEN hp.samples + $image_id
-            ELSE hp.samples
-        END, v.half_plane = $half_plane
-        MERGE (v)-[:IS_IN_HALF_PLANE]->(hp)
-        """
-        tx.run(
-            query,
-            id=result["line_id"],
-            half_plane=result["half_plane"],
-            session_id=session_id,
-            image_id=image_id,
-        )
-
-    def get_results(self) -> Dict[str, str]:
-        return {k: v.value for k, v in self.half_planes.items()}
-
-    def reset(self) -> None:
-        self.half_planes.clear()
-
-    @staticmethod
-    def determine_half_plane(dx: float, dy: float) -> HalfPlane:
-        if dx == 0 and dy == 0:
-            return HalfPlane.ORIGIN
-        elif abs(dy) > abs(dx):
-            return HalfPlane.UPPER if dy > 0 else HalfPlane.LOWER
-        else:
-            return HalfPlane.RIGHT if dx > 0 else HalfPlane.LEFT
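The removed determine_half_plane classified a vector by its dominant axis: the vertical branch wins when |dy| > |dx|, otherwise the horizontal branch, with a degenerate case for the zero vector. Expected classifications (member names as used above; the HalfPlane enum itself lives in common/model/half_plane.py, which this diff does not show):

    # determine_half_plane(dx=0.0, dy=2.0)  -> HalfPlane.UPPER   (|dy| > |dx| and dy > 0)
    # determine_half_plane(dx=-3.0, dy=1.0) -> HalfPlane.LEFT    (|dx| >= |dy| and dx < 0)
    # determine_half_plane(dx=2.0, dy=2.0)  -> HalfPlane.RIGHT   (a tie goes to the horizontal branch)
    # determine_half_plane(dx=0.0, dy=0.0)  -> HalfPlane.ORIGIN  (degenerate vector)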
common/traversal/visitors/relative_position_visitor.py
DELETED
@@ -1,209 +0,0 @@
-from dataclasses import dataclass
-from enum import Enum
-from typing import Dict, Any, List
-import math
-import networkx as nx
-from neo4j import ManagedTransaction
-
-from .visitor import Visitor
-from ...model.point import Point
-from ...model.vector import Vector
-
-
-class RelativeSegment(Enum):
-    TOP = "top"
-    BOTTOM = "bottom"
-    LEFT = "left"
-    RIGHT = "right"
-    CENTER_VERTICAL = "center_vertical"
-    CENTER_HORIZONTAL = "center_horizontal"
-
-
-@dataclass
-class RelativePosition:
-    distance_from_center: float
-    segments: List[RelativeSegment]
-    normalized_x: float  # -1 to 1
-    normalized_y: float  # -1 to 1
-
-
-class RelativePositionVisitor(Visitor):
-    def __init__(self, graph: nx.Graph):
-        super().__init__(graph)
-
-        # Find the bounding box and calculate center
-        self._calculate_bounding_box_center()
-
-        self.segment_threshold = 0.05  # 20% threshold for center segments
-        self.point_positions: Dict[str, RelativePosition] = {}
-        self.vector_positions: Dict[str, RelativePosition] = {}
-
-    def _calculate_bounding_box_center(self) -> None:
-        """Calculate the center based on the bounding box of all points in the graph."""
-        min_x = float("inf")
-        min_y = float("inf")
-        max_x = float("-inf")
-        max_y = float("-inf")
-
-        # Find min and max coordinates to determine the bounding box
-        for node_id, node_data in self.graph.nodes(data=True):
-            if "Point" not in node_data["labels"]:
-                continue
-
-            x = node_data["x"]
-            y = node_data["y"]
-            min_x = min(min_x, x)
-            min_y = min(min_y, y)
-            max_x = max(max_x, x)
-            max_y = max(max_y, y)
-
-        # Calculate center of the bounding box
-        self.center_x = (min_x + max_x) / 2
-        self.center_y = (min_y + max_y) / 2
-
-        # Calculate half-width and half-height of the bounding box
-        half_width = (max_x - min_x) / 2
-        half_height = (max_y - min_y) / 2
-
-        # Maximum distance is from center to the corner of the bounding box
-        self.max_distance = math.sqrt(half_width**2 + half_height**2)
-
-    def visit_point(self, point: Point) -> Dict[str, Any]:
-        position = self._calculate_relative_position(point.x, point.y)
-        self.point_positions[point.id] = position
-        node = self.graph.nodes[point.id]
-        node["relative_distance"] = position.distance_from_center
-        node["normalized_x"] = position.normalized_x
-        node["normalized_y"] = position.normalized_y
-        node["segments"] = [seg.value for seg in position.segments]
-
-        return {
-            "point_id": point.id,
-            "distance": position.distance_from_center,
-            "segments": [seg.value for seg in position.segments],
-            "normalized_x": position.normalized_x,
-            "normalized_y": position.normalized_y,
-        }
-
-    def visit_line(self, line: Vector) -> Dict[str, Any]:
-        # Calculate midpoint of the line
-        mid_x = (line.x1 + line.x2) / 2
-        mid_y = (line.y1 + line.y2) / 2
-
-        position = self._calculate_relative_position(mid_x, mid_y)
-        self.vector_positions[line.id] = position
-        node = self.graph.nodes[line.id]
-        node["relative_distance"] = position.distance_from_center
-        node["normalized_x"] = position.normalized_x
-        node["normalized_y"] = position.normalized_y
-        node["segments"] = [seg.value for seg in position.segments]
-
-        return {
-            "line_id": line.id,
-            "distance": position.distance_from_center,
-            "segments": [seg.value for seg in position.segments],
-            "normalized_x": position.normalized_x,
-            "normalized_y": position.normalized_y,
-        }
-
-    def _calculate_relative_position(self, x: float, y: float) -> RelativePosition:
-        # Calculate normalized coordinates (-1 to 1) relative to bounding box center
-        normalized_x = (x - self.center_x) / (
-            self.max_distance if self.max_distance > 0 else 1
-        )
-        normalized_y = (y - self.center_y) / (
-            self.max_distance if self.max_distance > 0 else 1
-        )
-
-        # Calculate distance from center
-        dx = x - self.center_x
-        dy = y - self.center_y
-        distance = math.sqrt(dx**2 + dy**2) / self.max_distance
-
-        # Determine segments
-        segments = []
-
-        # Vertical segments
-        if abs(normalized_y) < self.segment_threshold:
-            segments.append(RelativeSegment.CENTER_HORIZONTAL)
-        elif normalized_y < 0:
-            segments.append(RelativeSegment.TOP)
-        else:
-            segments.append(RelativeSegment.BOTTOM)
-
-        # Horizontal segments
-        if abs(normalized_x) < self.segment_threshold:
-            segments.append(RelativeSegment.CENTER_VERTICAL)
-        elif normalized_x < 0:
-            segments.append(RelativeSegment.LEFT)
-        else:
-            segments.append(RelativeSegment.RIGHT)
-
-        return RelativePosition(
-            distance_from_center=round(distance, 2),
-            segments=segments,
-            normalized_x=round(normalized_x, 2),
-            normalized_y=round(normalized_y, 2),
-        )
-
-    def save_result(
-        self,
-        tx: ManagedTransaction,
-        image_id: str,
-        session_id: str,
-        result: Dict[str, Any],
-    ) -> None:
-        if "point_id" in result:
-            self._save_point_position(tx, result)
-        elif "line_id" in result:
-            self._save_line_position(tx, result)
-
-    def _save_point_position(
-        self,
-        tx: ManagedTransaction,
-        result: Dict[str, Any],
-    ) -> None:
-        query = """
-        MATCH (p:Point {id: $point_id})
-        SET p.relative_distance = $distance,
-            p.normalized_x = $normalized_x,
-            p.normalized_y = $normalized_y,
-            p.segments = $segments
-        RETURN p
-        """
-        tx.run(
-            query,
-            point_id=result["point_id"],
-            distance=result["distance"],
-            normalized_x=result["normalized_x"],
-            normalized_y=result["normalized_y"],
-            segments=result["segments"],
-        )
-
-    def _save_line_position(
-        self,
-        tx: ManagedTransaction,
-        result: Dict[str, Any],
-    ) -> None:
-        query = """
-        MATCH (v:Vector {id: $line_id})
-        SET v.relative_distance = $distance,
-            v.normalized_x = $normalized_x,
-            v.normalized_y = $normalized_y,
-            v.segments = $segments
-        """
-        tx.run(
-            query,
-            line_id=result["line_id"],
-            distance=result["distance"],
-            normalized_x=result["normalized_x"],
-            normalized_y=result["normalized_y"],
-            segments=result["segments"],
-        )
-
-    def get_results(self) -> Dict[str, Dict[str, RelativePosition]]:
-        return {"points": self.point_positions, "vectors": self.vector_positions}
-
-    def reset(self) -> None:
-        self.point_positions.clear()
-        self.vector_positions.clear()
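The removed visitor normalized every position against the half-diagonal of the point cloud's bounding box, so a bounding-box corner lands at distance 1.0. A worked example of the deleted _calculate_relative_position math (coordinates invented for illustration; note that the code maps positive normalized_y to "bottom", i.e. image-style y-down coordinates):

    import math

    # Points spanning a (0, 0)-(4, 2) bounding box
    center_x, center_y = 2.0, 1.0                 # bounding-box center
    max_distance = math.sqrt(2.0**2 + 1.0**2)     # half-diagonal, about 2.24

    x, y = 4.0, 2.0                               # the bottom-right corner
    normalized_x = (x - center_x) / max_distance  # about 0.89 -> RelativeSegment.RIGHT
    normalized_y = (y - center_y) / max_distance  # about 0.45 -> RelativeSegment.BOTTOM
    distance = math.hypot(x - center_x, y - center_y) / max_distance  # exactly 1.0

    print(round(normalized_x, 2), round(normalized_y, 2), round(distance, 2))  # 0.89 0.45 1.0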
{natural_agi_common-0.1.36.dist-info → natural_agi_common-0.1.38.dist-info}/top_level.txt
File without changes