graflo-1.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of graflo might be problematic.
- graflo/README.md +18 -0
- graflo/__init__.py +39 -0
- graflo/architecture/__init__.py +37 -0
- graflo/architecture/actor.py +974 -0
- graflo/architecture/actor_util.py +425 -0
- graflo/architecture/edge.py +295 -0
- graflo/architecture/onto.py +374 -0
- graflo/architecture/resource.py +161 -0
- graflo/architecture/schema.py +136 -0
- graflo/architecture/transform.py +292 -0
- graflo/architecture/util.py +93 -0
- graflo/architecture/vertex.py +277 -0
- graflo/caster.py +409 -0
- graflo/cli/__init__.py +14 -0
- graflo/cli/ingest.py +144 -0
- graflo/cli/manage_dbs.py +193 -0
- graflo/cli/plot_schema.py +132 -0
- graflo/cli/xml2json.py +93 -0
- graflo/db/__init__.py +32 -0
- graflo/db/arango/__init__.py +16 -0
- graflo/db/arango/conn.py +734 -0
- graflo/db/arango/query.py +180 -0
- graflo/db/arango/util.py +88 -0
- graflo/db/connection.py +304 -0
- graflo/db/manager.py +104 -0
- graflo/db/neo4j/__init__.py +16 -0
- graflo/db/neo4j/conn.py +432 -0
- graflo/db/util.py +49 -0
- graflo/filter/__init__.py +21 -0
- graflo/filter/onto.py +400 -0
- graflo/logging.conf +22 -0
- graflo/onto.py +186 -0
- graflo/plot/__init__.py +17 -0
- graflo/plot/plotter.py +556 -0
- graflo/util/__init__.py +23 -0
- graflo/util/chunker.py +739 -0
- graflo/util/merge.py +148 -0
- graflo/util/misc.py +37 -0
- graflo/util/onto.py +63 -0
- graflo/util/transform.py +406 -0
- graflo-1.1.0.dist-info/METADATA +157 -0
- graflo-1.1.0.dist-info/RECORD +45 -0
- graflo-1.1.0.dist-info/WHEEL +4 -0
- graflo-1.1.0.dist-info/entry_points.txt +5 -0
- graflo-1.1.0.dist-info/licenses/LICENSE +126 -0
@@ -0,0 +1,180 @@
"""ArangoDB query utilities for graph operations.

This module provides utility functions for executing and profiling AQL queries
in ArangoDB. It includes functions for basic query execution, query profiling,
and field fetching operations.

Key Functions:
    - basic_query: Execute a basic AQL query with configurable parameters
    - profile_query: Profile query execution and save results
    - fetch_fields_query: Generate and execute field-fetching queries

Example:
    >>> cursor = basic_query("FOR doc IN users RETURN doc", db_name="mydb")
    >>> profile_query("FOR doc IN users RETURN doc", nq=1, profile_times=3, fpath=".")
"""

import gzip
import json
import logging
from os.path import join

from arango import ArangoClient

from graflo.filter.onto import Expression
from graflo.onto import DBFlavor

logger = logging.getLogger(__name__)


def basic_query(
    query,
    port=8529,
    hostname="127.0.0.1",
    cred_name="root",
    cred_pass="123",
    db_name="_system",
    profile=False,
    batch_size=10000,
    bind_vars=None,
):
    """Execute a basic AQL query in ArangoDB.

    This function provides a simple interface for executing AQL queries with
    configurable connection parameters and query options.

    Args:
        query: AQL query string to execute
        port: ArangoDB server port
        hostname: ArangoDB server hostname
        cred_name: Database username
        cred_pass: Database password
        db_name: Database name
        profile: Whether to enable query profiling
        batch_size: Size of result batches
        bind_vars: Query bind variables

    Returns:
        Cursor: ArangoDB cursor for the query results
    """
    hosts = f"http://{hostname}:{port}"
    client = ArangoClient(hosts=hosts)

    sys_db = client.db(db_name, username=cred_name, password=cred_pass)
    cursor = sys_db.aql.execute(
        query,
        profile=profile,
        stream=True,
        batch_size=batch_size,
        bind_vars=bind_vars,
    )
    return cursor


def profile_query(query, nq, profile_times, fpath, limit=None, **kwargs):
    """Profile AQL query execution and save results.

    This function executes a query multiple times with profiling enabled and
    saves both the profiling results and query results to files.

    Args:
        query: AQL query string to profile
        nq: Query number for file naming
        profile_times: Number of times to profile the query
        fpath: Path to save results
        limit: Optional limit on query results
        **kwargs: Additional query parameters passed to basic_query

    Note:
        Results are saved in two formats:
        - Profiling results: query{nq}_profile{limit}.json
        - Query results: query{nq}_result{limit}_batch_{n}.json.gz
    """
    limit_str = f"_limit_{limit}" if limit else ""
    if profile_times:
        logger.info(f"starting profiling: {limit}")
        profiling = []
        for n in range(profile_times):
            cursor = basic_query(query, profile=True, **kwargs)
            profiling += [cursor.profile()]
            cursor.close()
        with open(join(fpath, f"query{nq}_profile{limit_str}.json"), "w") as fp:
            json.dump(profiling, fp, indent=4)

    logger.info(f"starting actual query at {limit}")

    cnt = 0
    cursor = basic_query(query, **kwargs)
    chunk = list(cursor.batch())
    with gzip.open(
        join(fpath, f"./query{nq}_result{limit_str}_batch_{cnt}.json.gz"),
        "wt",
        encoding="ascii",
    ) as fp:
        json.dump(chunk, fp, indent=4)

    while cursor.has_more():
        cnt += 1
        with gzip.open(
            join(fpath, f"./query{nq}_result{limit_str}_batch_{cnt}.json.gz"),
            "wt",
            encoding="ascii",
        ) as fp:
            chunk = list(cursor.fetch()["batch"])
            json.dump(chunk, fp, indent=4)


def fetch_fields_query(
    collection_name,
    docs,
    match_keys,
    keep_keys,
    filters: list | dict | None = None,
):
    """Generate and execute a field-fetching AQL query.

    This function generates an AQL query to fetch specific fields from documents
    that match the given criteria. It supports filtering and field projection.

    Args:
        collection_name: Collection to query
        docs: List of documents to match against
        match_keys: Keys to use for matching documents
        keep_keys: Keys to return in the result
        filters: Additional query filters

    Returns:
        str: Generated AQL query string

    Example:
        >>> query = fetch_fields_query(
        ...     "users",
        ...     [{"email": "user@example.com"}],
        ...     ["email"],
        ...     ["name", "age"]
        ... )
    """
    docs_ = [{k: doc[k] for k in match_keys if k in doc} for doc in docs]
    for i, doc in enumerate(docs_):
        doc.update({"__i": i})

    docs_str = json.dumps(docs_)

    match_str = " &&".join([f" _cdoc['{key}'] == _doc['{key}']" for key in match_keys])

    keep_clause = f"KEEP(_x, {list(keep_keys)})" if keep_keys is not None else "_x"

    if filters is not None:
        ff = Expression.from_dict(filters)
        extrac_filter_clause = f" && {ff(doc_name='_cdoc', kind=DBFlavor.ARANGO)}"
    else:
        extrac_filter_clause = ""

    q0 = f"""
    FOR _cdoc in {collection_name}
        FOR _doc in {docs_str}
            FILTER {match_str} {extrac_filter_clause}
            COLLECT i = _doc['__i'] into _group = _cdoc
            LET gp = (for _x in _group return {keep_clause})
            RETURN {{'__i' : i, '_group': gp}}"""
    return q0
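For readers evaluating the release, a hedged usage sketch (not part of the package) shows how the string generated by fetch_fields_query can be fed into basic_query; the "users" collection, the lookup documents, and the connection details are placeholder assumptions for a local ArangoDB instance.

from graflo.db.arango.query import basic_query, fetch_fields_query

# Hypothetical lookup documents; only the "email" key is used for matching.
docs = [{"email": "a@example.com"}, {"email": "b@example.com"}]

# Build the AQL string that matches "users" documents on "email"
# and keeps only the "name" and "age" fields in each group.
aql = fetch_fields_query("users", docs, match_keys=["email"], keep_keys=["name", "age"])

# Execute against a local ArangoDB instance (host, port, and credentials are assumptions).
cursor = basic_query(aql, hostname="127.0.0.1", port=8529, db_name="_system")
for row in cursor:
    print(row["__i"], row["_group"])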
graflo/db/arango/util.py
ADDED
@@ -0,0 +1,88 @@
"""ArangoDB utility functions for graph operations.

This module provides utility functions for working with ArangoDB graphs and
queries. It includes functions for edge definition, filter rendering, and
query generation.

Key Functions:
    - define_extra_edges: Generate queries for creating derived edges
    - render_filters: Convert filter expressions to AQL filter clauses

Example:
    >>> query = define_extra_edges(edge_config)
    >>> filter_clause = render_filters({"field": "value"}, doc_name="d")
"""

import logging

from graflo.architecture.edge import Edge
from graflo.filter.onto import Clause, Expression
from graflo.onto import DBFlavor

logger = logging.getLogger(__name__)


def define_extra_edges(g: Edge):
    """Generate AQL query for creating derived edges.

    This function creates a query to generate edges from source to target
    vertices through an intermediate vertex, copying properties from the
    intermediate vertex to the new edge.

    Args:
        g: Edge configuration containing source, target, and intermediate
            vertex information

    Returns:
        str: AQL query string for creating the derived edges

    Example:
        >>> edge = Edge(source="user", target="post", by="comment")
        >>> query = define_extra_edges(edge)
        >>> # Generates query to create user->post edges through comments
    """
    ucol, vcol, wcol = g.source, g.target, g.by
    weight = g.weight_dict
    s = f"""FOR w IN {wcol}
        LET uset = (FOR u IN 1..1 INBOUND w {ucol}_{wcol}_edges RETURN u)
        LET vset = (FOR v IN 1..1 INBOUND w {vcol}_{wcol}_edges RETURN v)
        FOR u in uset
            FOR v in vset
    """
    s_ins_ = ", ".join([f"{v}: w.{k}" for k, v in weight.items()])
    s_ins_ = f"_from: u._id, _to: v._id, {s_ins_}"
    s_ins = f" INSERT {{{s_ins_}}} "
    s_last = f"IN {ucol}_{vcol}_edges"
    query0 = s + s_ins + s_last
    return query0


def render_filters(filters: None | list | dict | Clause = None, doc_name="d") -> str:
    """Convert filter expressions to AQL filter clauses.

    This function converts filter expressions into AQL filter clauses that
    can be used in queries. It supports various filter types and formats.

    Args:
        filters: Filter expression to convert
        doc_name: Name of the document variable in the query

    Returns:
        str: AQL filter clause string

    Example:
        >>> filters = {"field": "value", "age": {"$gt": 18}}
        >>> clause = render_filters(filters, doc_name="user")
        >>> # Returns: "FILTER user.field == 'value' && user.age > 18"
    """
    if filters is not None:
        if not isinstance(filters, Clause):
            ff = Expression.from_dict(filters)
        else:
            ff = filters
        literal_condition = ff(doc_name=doc_name, kind=DBFlavor.ARANGO)
        filter_clause = f"FILTER {literal_condition}"
    else:
        filter_clause = ""

    return filter_clause
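A short composition sketch, mirroring the docstring examples above; the "users" collection and the filter dictionary are illustrative, and the exact filter grammar is whatever graflo.filter.onto.Expression.from_dict accepts.

from graflo.db.arango.util import render_filters

# Filter dictionary taken from the render_filters docstring example.
filters = {"field": "value"}

# Render the clause against the loop variable name used in the query below.
clause = render_filters(filters, doc_name="d")

# Compose a full AQL statement around the rendered FILTER clause
# ("users" is a placeholder collection name).
query = f"FOR d IN users {clause} RETURN d"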
graflo/db/connection.py
ADDED
@@ -0,0 +1,304 @@
"""Abstract database connection interface for graph databases.

This module defines the abstract interface for database connections, providing
a unified API for different graph database implementations. It includes methods
for database management, collection operations, and data manipulation.

Key Components:
    - Connection: Abstract base class for database connections
    - ConnectionType: Type variable for connection implementations

The connection interface supports:
    - Database creation and deletion
    - Collection management
    - Index definition
    - Document operations (insert, update, fetch)
    - Edge operations
    - Aggregation queries

Example:
    >>> class MyConnection(Connection):
    ...     def create_database(self, name: str):
    ...         # Implementation
    ...     def execute(self, query, **kwargs):
    ...         # Implementation
"""

import abc
import logging
from typing import TypeVar

from graflo.architecture.edge import Edge
from graflo.architecture.schema import Schema
from graflo.architecture.vertex import VertexConfig
from graflo.onto import AggregationType

logger = logging.getLogger(__name__)
ConnectionType = TypeVar("ConnectionType", bound="Connection")


class Connection(abc.ABC):
    """Abstract base class for database connections.

    This class defines the interface that all database connection implementations
    must follow. It provides methods for database operations, collection management,
    and data manipulation.

    Note:
        All methods marked with @abc.abstractmethod must be implemented by
        concrete connection classes.
    """

    def __init__(self):
        """Initialize the connection."""
        pass

    @abc.abstractmethod
    def create_database(self, name: str):
        """Create a new database.

        Args:
            name: Name of the database to create
        """
        pass

    @abc.abstractmethod
    def delete_database(self, name: str):
        """Delete a database.

        Args:
            name: Name of the database to delete
        """
        pass

    @abc.abstractmethod
    def execute(self, query, **kwargs):
        """Execute a database query.

        Args:
            query: Query to execute
            **kwargs: Additional query parameters
        """
        pass

    @abc.abstractmethod
    def close(self):
        """Close the database connection."""
        pass

    def define_indexes(self, schema: Schema):
        """Define indexes for vertices and edges in the schema.

        Args:
            schema: Schema containing vertex and edge configurations
        """
        self.define_vertex_indices(schema.vertex_config)
        self.define_edge_indices(schema.edge_config.edges_list(include_aux=True))

    @abc.abstractmethod
    def define_collections(self, schema: Schema):
        """Define collections based on the schema.

        Args:
            schema: Schema containing collection definitions
        """
        pass

    @abc.abstractmethod
    def delete_collections(self, cnames=(), gnames=(), delete_all=False):
        """Delete collections from the database.

        Args:
            cnames: Collection names to delete
            gnames: Graph names to delete
            delete_all: Whether to delete all collections
        """
        pass

    @abc.abstractmethod
    def init_db(self, schema: Schema, clean_start):
        """Initialize the database with the given schema.

        Args:
            schema: Schema to initialize the database with
            clean_start: Whether to clean existing data
        """
        pass

    @abc.abstractmethod
    def upsert_docs_batch(self, docs, class_name, match_keys, **kwargs):
        """Upsert a batch of documents.

        Args:
            docs: Documents to upsert
            class_name: Name of the collection
            match_keys: Keys to match for upsert
            **kwargs: Additional upsert parameters
        """
        pass

    @abc.abstractmethod
    def insert_edges_batch(
        self,
        docs_edges,
        source_class,
        target_class,
        relation_name,
        collection_name,
        match_keys_source,
        match_keys_target,
        filter_uniques=True,
        uniq_weight_fields=None,
        uniq_weight_collections=None,
        upsert_option=False,
        head=None,
        **kwargs,
    ):
        """Insert a batch of edges.

        Args:
            docs_edges: Edge documents to insert
            source_class: Source vertex class
            target_class: Target vertex class
            relation_name: Name of the relation
            collection_name: Name of the edge collection
            match_keys_source: Keys to match source vertices
            match_keys_target: Keys to match target vertices
            filter_uniques: Whether to filter unique edges
            uniq_weight_fields: Fields to consider for uniqueness
            uniq_weight_collections: Collections to consider for uniqueness
            upsert_option: Whether to upsert existing edges
            head: Optional head document
            **kwargs: Additional insertion parameters
        """
        pass

    @abc.abstractmethod
    def insert_return_batch(self, docs, class_name):
        """Insert documents and return the inserted documents.

        Args:
            docs: Documents to insert
            class_name: Name of the collection

        Returns:
            list: Inserted documents
        """
        pass

    @abc.abstractmethod
    def fetch_docs(self, class_name, filters, limit, return_keys, unset_keys):
        """Fetch documents from a collection.

        Args:
            class_name: Name of the collection
            filters: Query filters
            limit: Maximum number of documents to return
            return_keys: Keys to return
            unset_keys: Keys to unset

        Returns:
            list: Fetched documents
        """
        pass

    @abc.abstractmethod
    def fetch_present_documents(
        self,
        batch,
        class_name,
        match_keys,
        keep_keys,
        flatten=False,
        filters: list | dict | None = None,
    ):
        """Fetch documents that exist in the database.

        Args:
            batch: Batch of documents to check
            class_name: Name of the collection
            match_keys: Keys to match
            keep_keys: Keys to keep in result
            flatten: Whether to flatten the result
            filters: Additional query filters

        Returns:
            list: Documents that exist in the database
        """
        pass

    @abc.abstractmethod
    def aggregate(
        self,
        class_name,
        aggregation_function: AggregationType,
        discriminant: str | None = None,
        aggregated_field: str | None = None,
        filters: list | dict | None = None,
    ):
        """Perform aggregation on a collection.

        Args:
            class_name: Name of the collection
            aggregation_function: Type of aggregation to perform
            discriminant: Field to group by
            aggregated_field: Field to aggregate
            filters: Query filters

        Returns:
            dict: Aggregation results
        """
        pass

    @abc.abstractmethod
    def keep_absent_documents(
        self,
        batch,
        class_name,
        match_keys,
        keep_keys,
        filters: list | dict | None = None,
    ):
        """Keep documents that don't exist in the database.

        Args:
            batch: Batch of documents to check
            class_name: Name of the collection
            match_keys: Keys to match
            keep_keys: Keys to keep in result
            filters: Additional query filters

        Returns:
            list: Documents that don't exist in the database
        """
        pass

    @abc.abstractmethod
    def define_vertex_indices(self, vertex_config: VertexConfig):
        """Define indices for vertex collections.

        Args:
            vertex_config: Vertex configuration containing index definitions
        """
        pass

    @abc.abstractmethod
    def define_edge_indices(self, edges: list[Edge]):
        """Define indices for edge collections.

        Args:
            edges: List of edge configurations containing index definitions
        """
        pass

    # @abc.abstractmethod
    # def define_vertex_collections(self, graph_config, vertex_config):
    #     pass
    #
    # @abc.abstractmethod
    # def define_edge_collections(self, graph_config):
    #     pass

    # @abc.abstractmethod
    # def create_collection_if_absent(self, g, vcol, index, unique=True):
    #     pass
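To illustrate the interface, a minimal subclassing sketch follows; it is not part of graflo, it overrides only two of the abstract methods, and every remaining @abc.abstractmethod would need an implementation before the class could be instantiated. The in-memory bookkeeping is an assumption made purely for the example.

from graflo.db.connection import Connection


class InMemoryConnection(Connection):
    """Hypothetical skeleton showing the shape of a concrete Connection."""

    def __init__(self):
        super().__init__()
        self.databases: set[str] = set()  # stands in for a real server

    def create_database(self, name: str):
        # Record the database name instead of contacting a database server.
        self.databases.add(name)

    def execute(self, query, **kwargs):
        # A real backend would run the query; here we only echo it back.
        return {"query": query, "params": kwargs}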
graflo/db/manager.py
ADDED
@@ -0,0 +1,104 @@
"""Database connection manager for graph databases.

This module provides a connection manager for handling database connections
to different graph database implementations (ArangoDB, Neo4j). It manages
connection lifecycle and configuration.

Key Components:
    - ConnectionManager: Main class for managing database connections
    - ConnectionKind: Enum for supported database types

The manager supports:
    - Multiple database types (ArangoDB, Neo4j)
    - Connection configuration
    - Context manager interface
    - Automatic connection cleanup

Example:
    >>> with ConnectionManager(secret_path="config.json") as conn:
    ...     conn.execute("FOR doc IN collection RETURN doc")
"""

from typing import Optional

from suthing import ConfigFactory, ConnectionKind, ProtoConnectionConfig

from graflo.db.arango.conn import ArangoConnection
from graflo.db.neo4j.conn import Neo4jConnection


class ConnectionManager:
    """Manager for database connections.

    This class manages database connections to different graph database
    implementations. It provides a context manager interface for safe
    connection handling and automatic cleanup.

    Attributes:
        conn_class_mapping: Mapping of connection types to connection classes
        config: Connection configuration
        working_db: Current working database name
        conn: Active database connection
    """

    conn_class_mapping = {
        ConnectionKind.ARANGO: ArangoConnection,
        ConnectionKind.NEO4J: Neo4jConnection,
    }

    def __init__(
        self,
        secret_path=None,
        args=None,
        connection_config: Optional[ProtoConnectionConfig] = None,
        **kwargs,
    ):
        """Initialize the connection manager.

        Args:
            secret_path: Path to configuration file
            args: Command line arguments
            connection_config: Optional connection configuration
            **kwargs: Additional configuration parameters
        """
        self.config: ProtoConnectionConfig = (
            ConfigFactory.create_config(secret_path, args)
            if connection_config is None
            else connection_config
        )
        self.working_db = kwargs.pop("working_db", None)
        self.conn = None

    def __enter__(self):
        """Enter the context manager.

        Creates and returns a new database connection.

        Returns:
            Connection: Database connection instance
        """
        cls = self.conn_class_mapping[self.config.connection_type]
        if self.working_db is not None:
            self.config.database = self.working_db
        self.conn = cls(config=self.config)
        return self.conn

    def close(self):
        """Close the database connection.

        Closes the active connection and performs any necessary cleanup.
        """
        if self.conn is not None:
            self.conn.close()

    def __exit__(self, exc_type, exc_value, exc_traceback):
        """Exit the context manager.

        Ensures the connection is properly closed when exiting the context.

        Args:
            exc_type: Exception type if an exception occurred
            exc_value: Exception value if an exception occurred
            exc_traceback: Exception traceback if an exception occurred
        """
        self.close()
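A hedged usage sketch for the manager; "secrets.json" and the "mydb" database are placeholders, and the file is expected to contain whatever suthing.ConfigFactory.create_config understands for an ArangoDB connection.

from graflo.db.manager import ConnectionManager

# working_db is read from **kwargs and overrides the database named in the config.
with ConnectionManager(secret_path="secrets.json", working_db="mydb") as conn:
    cursor = conn.execute("FOR doc IN users LIMIT 5 RETURN doc")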
graflo/db/neo4j/__init__.py
ADDED
@@ -0,0 +1,16 @@
"""Neo4j database implementation.

This package provides Neo4j-specific implementations of the database interface,
including connection management, query execution, and utility functions.

Key Components:
    - Neo4jConnection: Neo4j connection implementation
    - Query: Cypher query execution and profiling
    - Util: Neo4j-specific utility functions

Example:
    >>> from graflo.db.neo4j import Neo4jConnection
    >>> conn = Neo4jConnection(config)
    >>> result = conn.execute("MATCH (n:User) RETURN n")
    >>> nodes = result.data()
"""
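Finally, a hedged Neo4j counterpart to the ArangoDB examples above; it assumes "neo4j.json" holds a configuration that ConfigFactory resolves to ConnectionKind.NEO4J and that the target graph contains User nodes.

from graflo.db.manager import ConnectionManager

# The same manager selects Neo4jConnection when the parsed config is of Neo4j kind.
with ConnectionManager(secret_path="neo4j.json") as conn:
    result = conn.execute("MATCH (n:User) RETURN n LIMIT 10")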