robosystems-client 0.2.1__py3-none-any.whl → 0.2.2__py3-none-any.whl
This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in that registry.
Potentially problematic release.
This version of robosystems-client might be problematic.
- robosystems_client/api/query/execute_cypher_query.py +5 -0
- robosystems_client/extensions/__init__.py +11 -0
- robosystems_client/extensions/extensions.py +3 -0
- robosystems_client/extensions/graph_client.py +321 -0
- robosystems_client/extensions/query_client.py +74 -1
- {robosystems_client-0.2.1.dist-info → robosystems_client-0.2.2.dist-info}/METADATA +1 -1
- {robosystems_client-0.2.1.dist-info → robosystems_client-0.2.2.dist-info}/RECORD +9 -8
- {robosystems_client-0.2.1.dist-info → robosystems_client-0.2.2.dist-info}/WHEEL +0 -0
- {robosystems_client-0.2.1.dist-info → robosystems_client-0.2.2.dist-info}/licenses/LICENSE +0 -0
robosystems_client/api/query/execute_cypher_query.py
@@ -67,6 +67,11 @@ def _parse_response(
   *, client: Union[AuthenticatedClient, Client], response: httpx.Response
 ) -> Optional[Union[Any, HTTPValidationError]]:
   if response.status_code == 200:
+    # Check if this is NDJSON - if so, skip parsing (will be handled by client)
+    content_type = response.headers.get("content-type", "")
+    stream_format = response.headers.get("x-stream-format", "")
+    if "application/x-ndjson" in content_type or stream_format == "ndjson":
+      return None  # Skip parsing, client will handle NDJSON
     response_200 = response.json()
     return response_200

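The added check defers parsing whenever the server signals an NDJSON stream. A minimal sketch of the same detection against a synthetic httpx response; the header values and payload here are illustrative, not taken from this package:

import httpx

def is_ndjson_stream(response: httpx.Response) -> bool:
  # Same check as the added lines: NDJSON is signalled by the Content-Type
  # or by an x-stream-format header, and parsing is deferred to the caller.
  content_type = response.headers.get("content-type", "")
  stream_format = response.headers.get("x-stream-format", "")
  return "application/x-ndjson" in content_type or stream_format == "ndjson"

# Synthetic response for illustration; real ones come from the query endpoint.
resp = httpx.Response(
  200,
  headers={"content-type": "application/x-ndjson", "x-stream-format": "ndjson"},
  content=b'{"columns": ["n"], "rows": [[1]]}\n{"rows": [[2]]}\n',
)
assert is_ndjson_stream(resp)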
robosystems_client/extensions/__init__.py
@@ -27,6 +27,12 @@ from .table_ingest_client import (
   UploadResult,
   TableInfo,
 )
+from .graph_client import (
+  GraphClient,
+  GraphMetadata,
+  InitialEntityData,
+  GraphInfo,
+)
 from .extensions import (
   RoboSystemsExtensions,
   RoboSystemsExtensionConfig,
@@ -122,6 +128,11 @@ __all__ = [
   "IngestOptions",
   "UploadResult",
   "TableInfo",
+  # Graph Client
+  "GraphClient",
+  "GraphMetadata",
+  "InitialEntityData",
+  "GraphInfo",
   # Utilities
   "QueryBuilder",
   "ResultProcessor",

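With the re-exports above, the new graph helpers are importable directly from robosystems_client.extensions. A small sketch; the metadata values are illustrative:

from robosystems_client.extensions import (
  GraphClient,
  GraphMetadata,
  InitialEntityData,
  GraphInfo,
)

# The dataclasses can be constructed without any network access.
metadata = GraphMetadata(graph_name="demo-graph", tags=["example"])
entity = InitialEntityData(name="Acme Corp", uri="https://acme.example.com")
print(GraphClient, GraphInfo, metadata, entity)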
robosystems_client/extensions/extensions.py
@@ -9,6 +9,7 @@ from typing import Dict, Any, Optional, Callable
 from .query_client import QueryClient
 from .operation_client import OperationClient
 from .table_ingest_client import TableIngestClient
+from .graph_client import GraphClient
 from .sse_client import SSEClient


@@ -59,6 +60,7 @@ class RoboSystemsExtensions:
     self.query = QueryClient(self.config)
     self.operations = OperationClient(self.config)
     self.tables = TableIngestClient(self.config)
+    self.graphs = GraphClient(self.config)

   def monitor_operation(
     self, operation_id: str, on_progress: Optional[Callable] = None
@@ -88,6 +90,7 @@ class RoboSystemsExtensions:
     self.query.close()
     self.operations.close_all()
     self.tables.close()
+    self.graphs.close()

   # Convenience methods that delegate to the appropriate clients
   def execute_query(self, graph_id: str, query: str, parameters: Dict[str, Any] = None):

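RoboSystemsExtensions now wires up the new client as extensions.graphs and closes it alongside the other sub-clients. A hypothetical sketch, assuming extensions is an already constructed RoboSystemsExtensions instance (its constructor is unchanged by this release):

from robosystems_client.extensions import GraphMetadata

def create_demo_graph(extensions) -> str:
  # Delegates to the GraphClient instance attached in __init__ above.
  return extensions.graphs.create_graph_and_wait(
    metadata=GraphMetadata(graph_name="demo-graph"),
    on_progress=print,  # e.g. "Creating graph: demo-graph"
  )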
robosystems_client/extensions/graph_client.py (new file)
@@ -0,0 +1,321 @@
+"""Graph Management Client
+
+Provides high-level graph management operations with automatic operation monitoring.
+"""
+
+from dataclasses import dataclass
+from typing import Dict, Any, Optional, Callable
+import time
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class GraphMetadata:
+  """Graph metadata for creation"""
+
+  graph_name: str
+  description: Optional[str] = None
+  schema_extensions: Optional[list] = None
+  tags: Optional[list] = None
+
+
+@dataclass
+class InitialEntityData:
+  """Initial entity data for graph creation"""
+
+  name: str
+  uri: str
+  category: Optional[str] = None
+  sic: Optional[str] = None
+  sic_description: Optional[str] = None
+
+
+@dataclass
+class GraphInfo:
+  """Information about a graph"""
+
+  graph_id: str
+  graph_name: str
+  description: Optional[str] = None
+  schema_extensions: Optional[list] = None
+  tags: Optional[list] = None
+  created_at: Optional[str] = None
+  status: Optional[str] = None
+
+
+class GraphClient:
+  """Client for graph management operations"""
+
+  def __init__(self, config: Dict[str, Any]):
+    self.config = config
+    self.base_url = config["base_url"]
+    self.headers = config.get("headers", {})
+    self.token = config.get("token")
+
+  def create_graph_and_wait(
+    self,
+    metadata: GraphMetadata,
+    initial_entity: Optional[InitialEntityData] = None,
+    timeout: int = 60,
+    poll_interval: int = 2,
+    on_progress: Optional[Callable[[str], None]] = None,
+  ) -> str:
+    """
+    Create a graph and wait for completion.
+
+    Args:
+      metadata: Graph metadata
+      initial_entity: Optional initial entity data
+      timeout: Maximum time to wait in seconds
+      poll_interval: Time between status checks in seconds
+      on_progress: Callback for progress updates
+
+    Returns:
+      graph_id when creation completes
+
+    Raises:
+      Exception: If creation fails or times out
+    """
+    from ..client import AuthenticatedClient
+    from ..api.graphs.create_graph import sync_detailed as create_graph
+    from ..api.operations.get_operation_status import sync_detailed as get_status
+    from ..models.create_graph_request import CreateGraphRequest
+    from ..models.graph_metadata import GraphMetadata as APIGraphMetadata
+
+    if not self.token:
+      raise ValueError("No API key provided. Set X-API-Key in headers.")
+
+    client = AuthenticatedClient(
+      base_url=self.base_url,
+      token=self.token,
+      prefix="",
+      auth_header_name="X-API-Key",
+      headers=self.headers,
+    )
+
+    # Build API metadata
+    api_metadata = APIGraphMetadata(
+      graph_name=metadata.graph_name,
+      description=metadata.description,
+      schema_extensions=metadata.schema_extensions or [],
+      tags=metadata.tags or [],
+    )
+
+    # Build initial entity if provided
+    initial_entity_dict = None
+    if initial_entity:
+      initial_entity_dict = {
+        "name": initial_entity.name,
+        "uri": initial_entity.uri,
+      }
+      if initial_entity.category:
+        initial_entity_dict["category"] = initial_entity.category
+      if initial_entity.sic:
+        initial_entity_dict["sic"] = initial_entity.sic
+      if initial_entity.sic_description:
+        initial_entity_dict["sic_description"] = initial_entity.sic_description
+
+    # Create graph request
+    graph_create = CreateGraphRequest(
+      metadata=api_metadata,
+      initial_entity=initial_entity_dict,
+    )
+
+    if on_progress:
+      on_progress(f"Creating graph: {metadata.graph_name}")
+
+    # Execute create request
+    response = create_graph(client=client, body=graph_create)
+
+    if not response.parsed:
+      raise RuntimeError(f"Failed to create graph: {response.status_code}")
+
+    # Extract graph_id or operation_id
+    if isinstance(response.parsed, dict):
+      graph_id = response.parsed.get("graph_id")
+      operation_id = response.parsed.get("operation_id")
+    else:
+      graph_id = getattr(response.parsed, "graph_id", None)
+      operation_id = getattr(response.parsed, "operation_id", None)
+
+    # If graph_id returned immediately, we're done
+    if graph_id:
+      if on_progress:
+        on_progress(f"Graph created: {graph_id}")
+      return graph_id
+
+    # Otherwise, poll operation until complete
+    if not operation_id:
+      raise RuntimeError("No graph_id or operation_id in response")
+
+    if on_progress:
+      on_progress(f"Graph creation queued (operation: {operation_id})")
+
+    max_attempts = timeout // poll_interval
+    for attempt in range(max_attempts):
+      time.sleep(poll_interval)
+
+      status_response = get_status(operation_id=operation_id, client=client)
+
+      if not status_response.parsed:
+        continue
+
+      # Handle both dict and object responses
+      status_data = status_response.parsed
+      if isinstance(status_data, dict):
+        status = status_data.get("status")
+      else:
+        # Check for additional_properties first (common in generated clients)
+        if hasattr(status_data, "additional_properties"):
+          status = status_data.additional_properties.get("status")
+        else:
+          status = getattr(status_data, "status", None)
+
+      if on_progress:
+        on_progress(f"Status: {status} (attempt {attempt + 1}/{max_attempts})")
+
+      if status == "completed":
+        # Extract graph_id from result
+        if isinstance(status_data, dict):
+          result = status_data.get("result", {})
+        elif hasattr(status_data, "additional_properties"):
+          result = status_data.additional_properties.get("result", {})
+        else:
+          result = getattr(status_data, "result", {})
+
+        if isinstance(result, dict):
+          graph_id = result.get("graph_id")
+        else:
+          graph_id = getattr(result, "graph_id", None)
+
+        if graph_id:
+          if on_progress:
+            on_progress(f"Graph created: {graph_id}")
+          return graph_id
+        else:
+          raise RuntimeError("Operation completed but no graph_id in result")
+
+      elif status == "failed":
+        # Extract error message
+        if isinstance(status_data, dict):
+          error = (
+            status_data.get("error") or status_data.get("message") or "Unknown error"
+          )
+        elif hasattr(status_data, "additional_properties"):
+          props = status_data.additional_properties
+          error = props.get("error") or props.get("message") or "Unknown error"
+        else:
+          error = getattr(status_data, "message", "Unknown error")
+        raise RuntimeError(f"Graph creation failed: {error}")
+
+    raise TimeoutError(f"Graph creation timed out after {timeout}s")
+
+  def get_graph_info(self, graph_id: str) -> GraphInfo:
+    """
+    Get information about a graph.
+
+    Args:
+      graph_id: The graph ID
+
+    Returns:
+      GraphInfo with graph details
+
+    Raises:
+      ValueError: If graph not found
+    """
+    from ..client import AuthenticatedClient
+    from ..api.graphs.get_graphs import sync_detailed as get_graphs
+
+    if not self.token:
+      raise ValueError("No API key provided. Set X-API-Key in headers.")
+
+    client = AuthenticatedClient(
+      base_url=self.base_url,
+      token=self.token,
+      prefix="",
+      auth_header_name="X-API-Key",
+      headers=self.headers,
+    )
+
+    # Use get_graphs and filter for the specific graph
+    response = get_graphs(client=client)
+
+    if not response.parsed:
+      raise RuntimeError(f"Failed to get graphs: {response.status_code}")
+
+    data = response.parsed
+    graphs = None
+
+    # Extract graphs list from response
+    if isinstance(data, dict):
+      graphs = data.get("graphs", [])
+    elif hasattr(data, "additional_properties"):
+      graphs = data.additional_properties.get("graphs", [])
+    elif hasattr(data, "graphs"):
+      graphs = data.graphs
+    else:
+      raise RuntimeError("Unexpected response format from get_graphs")
+
+    # Find the specific graph by ID
+    graph_data = None
+    for graph in graphs:
+      if isinstance(graph, dict):
+        if graph.get("graph_id") == graph_id or graph.get("id") == graph_id:
+          graph_data = graph
+          break
+      elif hasattr(graph, "graph_id"):
+        if graph.graph_id == graph_id or getattr(graph, "id", None) == graph_id:
+          graph_data = graph
+          break
+
+    if not graph_data:
+      raise ValueError(f"Graph not found: {graph_id}")
+
+    # Build GraphInfo from the found graph
+    if isinstance(graph_data, dict):
+      return GraphInfo(
+        graph_id=graph_data.get("graph_id") or graph_data.get("id", graph_id),
+        graph_name=graph_data.get("graph_name") or graph_data.get("name", ""),
+        description=graph_data.get("description"),
+        schema_extensions=graph_data.get("schema_extensions"),
+        tags=graph_data.get("tags"),
+        created_at=graph_data.get("created_at"),
+        status=graph_data.get("status"),
+      )
+    else:
+      return GraphInfo(
+        graph_id=getattr(graph_data, "graph_id", None)
+        or getattr(graph_data, "id", graph_id),
+        graph_name=getattr(graph_data, "graph_name", None)
+        or getattr(graph_data, "name", ""),
+        description=getattr(graph_data, "description", None),
+        schema_extensions=getattr(graph_data, "schema_extensions", None),
+        tags=getattr(graph_data, "tags", None),
+        created_at=getattr(graph_data, "created_at", None),
+        status=getattr(graph_data, "status", None),
+      )
+
+  def delete_graph(self, graph_id: str) -> None:
+    """
+    Delete a graph.
+
+    Note: This method is not yet available as the delete_graph endpoint
+    is not included in the generated SDK. This will be implemented when
+    the endpoint is added to the API specification.
+
+    Args:
+      graph_id: The graph ID to delete
+
+    Raises:
+      NotImplementedError: This feature is not yet available
+    """
+    raise NotImplementedError(
+      "Graph deletion is not yet available. "
+      "The delete_graph endpoint needs to be added to the API specification."
+    )
+
+  def close(self):
+    """Clean up resources (placeholder for consistency)"""
+    pass

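GraphClient calls the create-graph endpoint and, when only an operation_id comes back, polls get_operation_status until it yields a graph_id, a failure, or the timeout. A usage sketch; the base URL and API key are placeholders, and the config dict carries only the keys GraphClient reads (base_url, token, headers):

from robosystems_client.extensions.graph_client import (
  GraphClient,
  GraphMetadata,
  InitialEntityData,
)

config = {
  "base_url": "https://api.example.com",  # placeholder endpoint
  "token": "YOUR_API_KEY",                # sent as the X-API-Key header
  "headers": {},
}

graphs = GraphClient(config)
graph_id = graphs.create_graph_and_wait(
  metadata=GraphMetadata(graph_name="acme-financials", description="Demo graph"),
  initial_entity=InitialEntityData(name="Acme Corp", uri="https://acme.example.com"),
  timeout=120,      # seconds before TimeoutError
  poll_interval=2,  # seconds between status checks
  on_progress=print,
)
info = graphs.get_graph_info(graph_id)
print(info.graph_name, info.status)
graphs.close()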
robosystems_client/extensions/query_client.py
@@ -113,9 +113,30 @@ class QueryClient:
     )

     try:
-      kwargs = {
+      kwargs = {
+        "graph_id": graph_id,
+        "client": client,
+        "body": query_request,
+        "mode": options.mode if options.mode else None,
+        "chunk_size": options.chunk_size if options.chunk_size else 1000,
+        "test_mode": options.test_mode if options.test_mode else False,
+      }
       response = execute_cypher_query(**kwargs)

+      # Check if this is an NDJSON streaming response (parsed will be None for NDJSON)
+      if (
+        hasattr(response, "headers")
+        and (
+          "application/x-ndjson" in response.headers.get("content-type", "")
+          or response.headers.get("x-stream-format") == "ndjson"
+        )
+      ) or (
+        hasattr(response, "parsed")
+        and response.parsed is None
+        and response.status_code == 200
+      ):
+        return self._parse_ndjson_response(response, graph_id)
+
       # Check response type and handle accordingly
       if hasattr(response, "parsed") and response.parsed:
         response_data = response.parsed
@@ -187,6 +208,58 @@ class QueryClient:
     # Unexpected response format
     raise Exception("Unexpected response format from query endpoint")

+  def _parse_ndjson_response(self, response, graph_id: str) -> QueryResult:
+    """Parse NDJSON streaming response and aggregate into QueryResult"""
+    import json
+
+    all_data = []
+    columns = None
+    total_rows = 0
+    execution_time_ms = 0
+
+    # Parse NDJSON line by line
+    content = (
+      response.content.decode("utf-8")
+      if isinstance(response.content, bytes)
+      else response.content
+    )
+
+    for line in content.strip().split("\n"):
+      if not line.strip():
+        continue
+
+      try:
+        chunk = json.loads(line)
+
+        # Extract columns from first chunk
+        if columns is None and "columns" in chunk:
+          columns = chunk["columns"]
+
+        # Aggregate data rows (NDJSON uses "rows", regular JSON uses "data")
+        if "rows" in chunk:
+          all_data.extend(chunk["rows"])
+          total_rows += len(chunk["rows"])
+        elif "data" in chunk:
+          all_data.extend(chunk["data"])
+          total_rows += len(chunk["data"])
+
+        # Track execution time (use max from all chunks)
+        if "execution_time_ms" in chunk:
+          execution_time_ms = max(execution_time_ms, chunk["execution_time_ms"])
+
+      except json.JSONDecodeError as e:
+        raise Exception(f"Failed to parse NDJSON line: {e}")
+
+    # Return aggregated result
+    return QueryResult(
+      data=all_data,
+      columns=columns or [],
+      row_count=total_rows,
+      execution_time_ms=execution_time_ms,
+      graph_id=graph_id,
+      timestamp=datetime.now().isoformat(),
+    )
+
   def _stream_query_results(
     self, operation_id: str, options: QueryOptions
   ) -> Iterator[Any]:

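_parse_ndjson_response folds the streamed chunks into a single QueryResult, taking "columns" from the first chunk that carries them and appending each chunk's "rows" (or "data"). The chunk payloads below are illustrative; a standalone sketch of the same aggregation:

import json

# Two synthetic NDJSON chunks in the shape the parser expects.
ndjson_body = (
  '{"columns": ["name", "uri"], "rows": [["Acme", "https://acme.example.com"]], "execution_time_ms": 12}\n'
  '{"rows": [["Globex", "https://globex.example.com"]], "execution_time_ms": 15}\n'
)

columns, rows, execution_time_ms = None, [], 0
for line in ndjson_body.strip().split("\n"):
  chunk = json.loads(line)
  if columns is None and "columns" in chunk:
    columns = chunk["columns"]
  rows.extend(chunk.get("rows", chunk.get("data", [])))
  execution_time_ms = max(execution_time_ms, chunk.get("execution_time_ms", 0))

print(columns)             # ['name', 'uri']
print(len(rows), rows[0])  # 2 ['Acme', 'https://acme.example.com']
print(execution_time_ms)   # 15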
{robosystems_client-0.2.1.dist-info → robosystems_client-0.2.2.dist-info}/RECORD
@@ -79,7 +79,7 @@ robosystems_client/api/operations/cancel_operation.py,sha256=3IiKfMfSANQS_inSg9a
 robosystems_client/api/operations/get_operation_status.py,sha256=r77igqYIVW9x34tlNFiWrueXGYzfTrNJ3Bsb-qb7urI,8513
 robosystems_client/api/operations/stream_operation_events.py,sha256=1pl-BNZSaNynIKwaT03U8WXEbL_B5KOPSZuEb1vS4CM,14042
 robosystems_client/api/query/__init__.py,sha256=5vd9uJWAjRqa9xzxzYkLD1yoZ12Ld_bAaNB5WX4fbE8,56
-robosystems_client/api/query/execute_cypher_query.py,sha256=
+robosystems_client/api/query/execute_cypher_query.py,sha256=FgXjAjj9_sXFxpNuflQ9mmKVSJzdQr509AlsEdp41hk,18410
 robosystems_client/api/schema/__init__.py,sha256=5vd9uJWAjRqa9xzxzYkLD1yoZ12Ld_bAaNB5WX4fbE8,56
 robosystems_client/api/schema/export_graph_schema.py,sha256=rcx9UzQHTaTQCUEUI2HIqJ5fj_n8L096-8awrt4qfa8,7589
 robosystems_client/api/schema/get_graph_schema.py,sha256=a86EAFzysQdjowI4eJF25MeRVicc3IEQSZi-3Dl16jA,8503
@@ -128,12 +128,13 @@ robosystems_client/api/user_subscriptions/get_user_shared_subscriptions.py,sha25
 robosystems_client/api/user_subscriptions/subscribe_to_shared_repository.py,sha256=GBr_AahYctNKUVg5O-wqju3sdL-DYFZLPtYnjfR9c5c,6606
 robosystems_client/api/user_subscriptions/upgrade_shared_repository_subscription.py,sha256=-kfP0zkXo9-OojY8-EvxK-hFiJBCdR-MV4puqCcHtrA,6742
 robosystems_client/extensions/README.md,sha256=qfHFjdgA_J-zNXziNZE6M1MKJiwVkocBi01w_HhvzEk,16136
-robosystems_client/extensions/__init__.py,sha256=
+robosystems_client/extensions/__init__.py,sha256=BH2VcEDBL8-HcT-nmwmEPUveRxoWVTEnTog6X3oJZAY,4993
 robosystems_client/extensions/auth_integration.py,sha256=9dBuLP-dWeZecaf7hXZcJA-L6eiSCeTHIPFIUMmASkU,6610
 robosystems_client/extensions/dataframe_utils.py,sha256=gK1bgkVqBF0TvWVdGQvqWrt-ur_Rw11j8uNtMoulLWE,12312
-robosystems_client/extensions/extensions.py,sha256=
+robosystems_client/extensions/extensions.py,sha256=ROnCobUek4Dke9dVx2sTzNKhz309NOG40EDSYHtNmWs,6257
+robosystems_client/extensions/graph_client.py,sha256=GwsFwETti6xxrQbqk7C_ckxZAVhMO05DrS-qACacLfk,10012
 robosystems_client/extensions/operation_client.py,sha256=B1qju-wWQrnrnVJixKGgsA_KEInviwJwdlJxzm_i7P0,13359
-robosystems_client/extensions/query_client.py,sha256=
+robosystems_client/extensions/query_client.py,sha256=NVaoTrYzSdR_pzrSwi8AsZ7WaUi7yTAcN9eJ5HFeFa8,16885
 robosystems_client/extensions/sse_client.py,sha256=fWO6EPiLmS3LJ8iph5wup57ldtHSLBkDoiAPfBNNCyk,15048
 robosystems_client/extensions/table_ingest_client.py,sha256=MyCnBAijFlkWY88dY4CJkouY0w5d4D3cxftw_wmSt60,12790
 robosystems_client/extensions/token_utils.py,sha256=qCK_s1vBzRnSYwtgncPZRLJVIw3WXmzqNTWjdEEpdgs,10899
@@ -342,7 +343,7 @@ robosystems_client/models/user_usage_response_graphs.py,sha256=xAH-ZnhaUfWQ_2EpZ
 robosystems_client/models/user_usage_summary_response.py,sha256=4hthwTH7bXyzdYlHoekDYOgDLI-stGRH507Bl2rUjYA,3655
 robosystems_client/models/user_usage_summary_response_usage_vs_limits.py,sha256=XrZnRcy1nD3xtKX4svbww7QfEHrN7_XIfeL9j5ZMbyQ,1298
 robosystems_client/models/validation_error.py,sha256=R77OuQG2nJ3WDFfY--xbEhg6x1D7gAAp_1UdnG8Ka2A,1949
-robosystems_client-0.2.
-robosystems_client-0.2.
-robosystems_client-0.2.
-robosystems_client-0.2.
+robosystems_client-0.2.2.dist-info/METADATA,sha256=qMZLxDrWEKUYi1o1EDIr1zPQVuMIzkw61hBupaUEP-M,3959
+robosystems_client-0.2.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+robosystems_client-0.2.2.dist-info/licenses/LICENSE,sha256=LjFqQPU4eQh7jAQ04SmE9eC0j74HCdXvzbo0hjW4mWo,1063
+robosystems_client-0.2.2.dist-info/RECORD,,

{robosystems_client-0.2.1.dist-info → robosystems_client-0.2.2.dist-info}/WHEEL
File without changes

{robosystems_client-0.2.1.dist-info → robosystems_client-0.2.2.dist-info}/licenses/LICENSE
File without changes