robosystems-client 0.2.1__py3-none-any.whl → 0.2.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of robosystems-client might be problematic; see the registry's advisory page for details.

Files changed (36)
  1. robosystems_client/api/tables/delete_file.py +437 -0
  2. robosystems_client/api/tables/get_file_info.py +397 -0
  3. robosystems_client/api/tables/get_upload_url.py +548 -0
  4. robosystems_client/api/tables/ingest_tables.py +616 -0
  5. robosystems_client/api/tables/list_table_files.py +509 -0
  6. robosystems_client/api/tables/list_tables.py +488 -0
  7. robosystems_client/api/tables/query_tables.py +487 -0
  8. robosystems_client/api/tables/update_file_status.py +539 -0
  9. robosystems_client/extensions/__init__.py +11 -0
  10. robosystems_client/extensions/extensions.py +3 -0
  11. robosystems_client/extensions/graph_client.py +326 -0
  12. robosystems_client/extensions/query_client.py +74 -1
  13. robosystems_client/extensions/table_ingest_client.py +31 -40
  14. robosystems_client/models/__init__.py +13 -17
  15. robosystems_client/models/create_graph_request.py +11 -0
  16. robosystems_client/models/{delete_file_v1_graphs_graph_id_tables_files_file_id_delete_response_delete_file_v1_graphs_graph_id_tables_files_file_id_delete.py → delete_file_response.py} +45 -9
  17. robosystems_client/models/file_info.py +169 -0
  18. robosystems_client/models/file_status_update.py +41 -0
  19. robosystems_client/models/get_file_info_response.py +205 -0
  20. robosystems_client/models/list_table_files_response.py +105 -0
  21. robosystems_client/models/{get_file_info_v1_graphs_graph_id_tables_files_file_id_get_response_get_file_info_v1_graphs_graph_id_tables_files_file_id_get.py → update_file_status_response_updatefilestatus.py} +5 -8
  22. {robosystems_client-0.2.1.dist-info → robosystems_client-0.2.3.dist-info}/METADATA +1 -1
  23. {robosystems_client-0.2.1.dist-info → robosystems_client-0.2.3.dist-info}/RECORD +25 -23
  24. robosystems_client/api/tables/delete_file_v1_graphs_graph_id_tables_files_file_id_delete.py +0 -287
  25. robosystems_client/api/tables/get_file_info_v1_graphs_graph_id_tables_files_file_id_get.py +0 -283
  26. robosystems_client/api/tables/get_upload_url_v1_graphs_graph_id_tables_table_name_files_post.py +0 -260
  27. robosystems_client/api/tables/ingest_tables_v1_graphs_graph_id_tables_ingest_post.py +0 -251
  28. robosystems_client/api/tables/list_table_files_v1_graphs_graph_id_tables_table_name_files_get.py +0 -283
  29. robosystems_client/api/tables/list_tables_v1_graphs_graph_id_tables_get.py +0 -224
  30. robosystems_client/api/tables/query_tables_v1_graphs_graph_id_tables_query_post.py +0 -247
  31. robosystems_client/api/tables/update_file_v1_graphs_graph_id_tables_files_file_id_patch.py +0 -306
  32. robosystems_client/models/file_update_request.py +0 -62
  33. robosystems_client/models/list_table_files_v1_graphs_graph_id_tables_table_name_files_get_response_list_table_files_v1_graphs_graph_id_tables_table_name_files_get.py +0 -47
  34. robosystems_client/models/update_file_v1_graphs_graph_id_tables_files_file_id_patch_response_update_file_v1_graphs_graph_id_tables_files_file_id_patch.py +0 -47
  35. {robosystems_client-0.2.1.dist-info → robosystems_client-0.2.3.dist-info}/WHEEL +0 -0
  36. {robosystems_client-0.2.1.dist-info → robosystems_client-0.2.3.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,326 @@
1
+ """Graph Management Client
2
+
3
+ Provides high-level graph management operations with automatic operation monitoring.
4
+ """
5
+
6
+ from dataclasses import dataclass
7
+ from typing import Dict, Any, Optional, Callable
8
+ import time
9
+ import logging
10
+
11
+ logger = logging.getLogger(__name__)
12
+
13
+
14
@dataclass
class GraphMetadata:
  """Descriptor for a graph to be created.

  Only ``graph_name`` is required; the remaining fields are optional and
  default to ``None`` (the client substitutes empty lists where the API
  requires them).
  """

  # Human-readable name of the graph (required).
  graph_name: str
  # Optional free-text description of the graph.
  description: Optional[str] = None
  # Optional schema extension identifiers to enable on the graph.
  schema_extensions: Optional[list] = None
  # Optional tags attached to the graph.
  tags: Optional[list] = None
22
+
23
+
24
@dataclass
class InitialEntityData:
  """Seed entity supplied when creating a graph.

  ``name`` and ``uri`` are required; ``category``, ``sic``, and
  ``sic_description`` are optional classification fields and are only
  sent to the API when set.
  """

  # Entity display name (required).
  name: str
  # Canonical URI identifying the entity (required).
  uri: str
  # Optional entity category.
  category: Optional[str] = None
  # Optional SIC classification code.
  sic: Optional[str] = None
  # Optional human-readable SIC description.
  sic_description: Optional[str] = None
33
+
34
+
35
@dataclass
class GraphInfo:
  """Snapshot of a graph's metadata as reported by the API.

  ``graph_id`` and ``graph_name`` are always populated; every other field
  defaults to ``None`` when the server response omits it.
  """

  # Unique identifier of the graph (required).
  graph_id: str
  # Human-readable graph name (required).
  graph_name: str
  # Optional free-text description.
  description: Optional[str] = None
  # Optional schema extension identifiers enabled on the graph.
  schema_extensions: Optional[list] = None
  # Optional tags attached to the graph.
  tags: Optional[list] = None
  # Optional creation timestamp as reported by the server.
  created_at: Optional[str] = None
  # Optional lifecycle status string as reported by the server.
  status: Optional[str] = None
46
+
47
+
48
class GraphClient:
  """Client for graph management operations.

  Wraps the generated low-level SDK with higher-level helpers: synchronous
  graph creation with operation polling, graph lookup by ID, and a
  placeholder for deletion. API modules are imported lazily inside each
  method to avoid import cycles with the generated package.
  """

  def __init__(self, config: Dict[str, Any]):
    """Initialize from a plain configuration dict.

    Args:
      config: Must contain "base_url"; may contain "headers" (dict of
        extra HTTP headers) and "token" (API key sent via X-API-Key).
    """
    self.config = config
    # Required key — a missing base_url raises KeyError immediately,
    # surfacing misconfiguration at construction time.
    self.base_url = config["base_url"]
    self.headers = config.get("headers", {})
    self.token = config.get("token")

  def create_graph_and_wait(
    self,
    metadata: GraphMetadata,
    initial_entity: Optional[InitialEntityData] = None,
    create_entity: bool = True,
    timeout: int = 60,
    poll_interval: int = 2,
    on_progress: Optional[Callable[[str], None]] = None,
  ) -> str:
    """
    Create a graph and wait for completion.

    Args:
        metadata: Graph metadata
        initial_entity: Optional initial entity data
        create_entity: Whether to create the entity node and upload initial data.
            Only applies when initial_entity is provided. Set to False to create
            graph without populating entity data (useful for file-based ingestion).
        timeout: Maximum time to wait in seconds
        poll_interval: Time between status checks in seconds
        on_progress: Callback for progress updates

    Returns:
        graph_id when creation completes

    Raises:
        ValueError: If no API token was configured.
        RuntimeError: If the create call fails, the response carries neither
            a graph_id nor an operation_id, or the operation reports failure.
        TimeoutError: If polling exceeds `timeout` seconds.
    """
    # Lazy imports: keeps module import light and avoids circular imports
    # with the generated SDK package.
    from ..client import AuthenticatedClient
    from ..api.graphs.create_graph import sync_detailed as create_graph
    from ..api.operations.get_operation_status import sync_detailed as get_status
    from ..models.create_graph_request import CreateGraphRequest
    from ..models.graph_metadata import GraphMetadata as APIGraphMetadata

    if not self.token:
      raise ValueError("No API key provided. Set X-API-Key in headers.")

    client = AuthenticatedClient(
      base_url=self.base_url,
      token=self.token,
      # Empty prefix: the raw API key is sent, not "Bearer <key>".
      prefix="",
      auth_header_name="X-API-Key",
      headers=self.headers,
    )

    # Build API metadata; None lists are normalized to empty lists.
    api_metadata = APIGraphMetadata(
      graph_name=metadata.graph_name,
      description=metadata.description,
      schema_extensions=metadata.schema_extensions or [],
      tags=metadata.tags or [],
    )

    # Build initial entity if provided. Only truthy optional fields are
    # included, so empty strings are dropped as well as None.
    initial_entity_dict = None
    if initial_entity:
      initial_entity_dict = {
        "name": initial_entity.name,
        "uri": initial_entity.uri,
      }
      if initial_entity.category:
        initial_entity_dict["category"] = initial_entity.category
      if initial_entity.sic:
        initial_entity_dict["sic"] = initial_entity.sic
      if initial_entity.sic_description:
        initial_entity_dict["sic_description"] = initial_entity.sic_description

    # Create graph request.
    # NOTE(review): initial_entity is passed as a plain dict — confirm the
    # generated CreateGraphRequest model accepts a dict here rather than a
    # typed model instance.
    graph_create = CreateGraphRequest(
      metadata=api_metadata,
      initial_entity=initial_entity_dict,
      create_entity=create_entity,
    )

    if on_progress:
      on_progress(f"Creating graph: {metadata.graph_name}")

    # Execute create request
    response = create_graph(client=client, body=graph_create)

    if not response.parsed:
      raise RuntimeError(f"Failed to create graph: {response.status_code}")

    # Extract graph_id or operation_id. The generated client may return
    # either a dict or a model object, so both shapes are handled.
    if isinstance(response.parsed, dict):
      graph_id = response.parsed.get("graph_id")
      operation_id = response.parsed.get("operation_id")
    else:
      graph_id = getattr(response.parsed, "graph_id", None)
      operation_id = getattr(response.parsed, "operation_id", None)

    # If graph_id returned immediately (synchronous creation), we're done.
    if graph_id:
      if on_progress:
        on_progress(f"Graph created: {graph_id}")
      return graph_id

    # Otherwise, poll the async operation until complete.
    if not operation_id:
      raise RuntimeError("No graph_id or operation_id in response")

    if on_progress:
      on_progress(f"Graph creation queued (operation: {operation_id})")

    # NOTE(review): integer division — a timeout smaller than poll_interval
    # yields zero attempts and an immediate TimeoutError below.
    max_attempts = timeout // poll_interval
    for attempt in range(max_attempts):
      # Sleep first: the operation was just queued, so an immediate check
      # would almost always report it still pending.
      time.sleep(poll_interval)

      status_response = get_status(operation_id=operation_id, client=client)

      # Unparseable status responses are skipped, not fatal — try again on
      # the next poll.
      if not status_response.parsed:
        continue

      # Handle both dict and object responses
      status_data = status_response.parsed
      if isinstance(status_data, dict):
        status = status_data.get("status")
      else:
        # Check for additional_properties first (common in generated clients)
        if hasattr(status_data, "additional_properties"):
          status = status_data.additional_properties.get("status")
        else:
          status = getattr(status_data, "status", None)

      if on_progress:
        on_progress(f"Status: {status} (attempt {attempt + 1}/{max_attempts})")

      if status == "completed":
        # Extract graph_id from result, again tolerating all three
        # response shapes (dict / additional_properties / attributes).
        if isinstance(status_data, dict):
          result = status_data.get("result", {})
        elif hasattr(status_data, "additional_properties"):
          result = status_data.additional_properties.get("result", {})
        else:
          result = getattr(status_data, "result", {})

        if isinstance(result, dict):
          graph_id = result.get("graph_id")
        else:
          graph_id = getattr(result, "graph_id", None)

        if graph_id:
          if on_progress:
            on_progress(f"Graph created: {graph_id}")
          return graph_id
        else:
          raise RuntimeError("Operation completed but no graph_id in result")

      elif status == "failed":
        # Extract error message; servers report it under "error" or
        # "message" depending on the endpoint.
        if isinstance(status_data, dict):
          error = (
            status_data.get("error") or status_data.get("message") or "Unknown error"
          )
        elif hasattr(status_data, "additional_properties"):
          props = status_data.additional_properties
          error = props.get("error") or props.get("message") or "Unknown error"
        else:
          error = getattr(status_data, "message", "Unknown error")
        raise RuntimeError(f"Graph creation failed: {error}")

    raise TimeoutError(f"Graph creation timed out after {timeout}s")

  def get_graph_info(self, graph_id: str) -> GraphInfo:
    """
    Get information about a graph.

    Lists all graphs visible to the caller and filters client-side for the
    requested ID (there is no single-graph lookup endpoint in the SDK).

    Args:
        graph_id: The graph ID

    Returns:
        GraphInfo with graph details

    Raises:
        ValueError: If no API token was configured, or the graph is not found.
        RuntimeError: If the list call fails or returns an unexpected shape.
    """
    from ..client import AuthenticatedClient
    from ..api.graphs.get_graphs import sync_detailed as get_graphs

    if not self.token:
      raise ValueError("No API key provided. Set X-API-Key in headers.")

    client = AuthenticatedClient(
      base_url=self.base_url,
      token=self.token,
      prefix="",
      auth_header_name="X-API-Key",
      headers=self.headers,
    )

    # Use get_graphs and filter for the specific graph
    response = get_graphs(client=client)

    if not response.parsed:
      raise RuntimeError(f"Failed to get graphs: {response.status_code}")

    data = response.parsed
    graphs = None

    # Extract graphs list from response (dict, additional_properties, or
    # attribute access, in that order).
    if isinstance(data, dict):
      graphs = data.get("graphs", [])
    elif hasattr(data, "additional_properties"):
      graphs = data.additional_properties.get("graphs", [])
    elif hasattr(data, "graphs"):
      graphs = data.graphs
    else:
      raise RuntimeError("Unexpected response format from get_graphs")

    # Find the specific graph by ID; entries may use "graph_id" or "id".
    graph_data = None
    for graph in graphs:
      if isinstance(graph, dict):
        if graph.get("graph_id") == graph_id or graph.get("id") == graph_id:
          graph_data = graph
          break
      elif hasattr(graph, "graph_id"):
        if graph.graph_id == graph_id or getattr(graph, "id", None) == graph_id:
          graph_data = graph
          break

    if not graph_data:
      raise ValueError(f"Graph not found: {graph_id}")

    # Build GraphInfo from the found graph, falling back to the requested
    # graph_id / empty name when the entry omits those fields.
    if isinstance(graph_data, dict):
      return GraphInfo(
        graph_id=graph_data.get("graph_id") or graph_data.get("id", graph_id),
        graph_name=graph_data.get("graph_name") or graph_data.get("name", ""),
        description=graph_data.get("description"),
        schema_extensions=graph_data.get("schema_extensions"),
        tags=graph_data.get("tags"),
        created_at=graph_data.get("created_at"),
        status=graph_data.get("status"),
      )
    else:
      return GraphInfo(
        graph_id=getattr(graph_data, "graph_id", None)
        or getattr(graph_data, "id", graph_id),
        graph_name=getattr(graph_data, "graph_name", None)
        or getattr(graph_data, "name", ""),
        description=getattr(graph_data, "description", None),
        schema_extensions=getattr(graph_data, "schema_extensions", None),
        tags=getattr(graph_data, "tags", None),
        created_at=getattr(graph_data, "created_at", None),
        status=getattr(graph_data, "status", None),
      )

  def delete_graph(self, graph_id: str) -> None:
    """
    Delete a graph.

    Note: This method is not yet available as the delete_graph endpoint
    is not included in the generated SDK. This will be implemented when
    the endpoint is added to the API specification.

    Args:
        graph_id: The graph ID to delete

    Raises:
        NotImplementedError: This feature is not yet available
    """
    raise NotImplementedError(
      "Graph deletion is not yet available. "
      "The delete_graph endpoint needs to be added to the API specification."
    )

  def close(self) -> None:
    """Clean up resources (placeholder for consistency)"""
    # No persistent connections are held; AuthenticatedClient instances are
    # created per-call inside each method.
    pass
@@ -113,9 +113,30 @@ class QueryClient:
113
113
  )
114
114
 
115
115
  try:
116
- kwargs = {"graph_id": graph_id, "client": client, "body": query_request}
116
+ kwargs = {
117
+ "graph_id": graph_id,
118
+ "client": client,
119
+ "body": query_request,
120
+ "mode": options.mode if options.mode else None,
121
+ "chunk_size": options.chunk_size if options.chunk_size else 1000,
122
+ "test_mode": options.test_mode if options.test_mode else False,
123
+ }
117
124
  response = execute_cypher_query(**kwargs)
118
125
 
126
+ # Check if this is an NDJSON streaming response (parsed will be None for NDJSON)
127
+ if (
128
+ hasattr(response, "headers")
129
+ and (
130
+ "application/x-ndjson" in response.headers.get("content-type", "")
131
+ or response.headers.get("x-stream-format") == "ndjson"
132
+ )
133
+ ) or (
134
+ hasattr(response, "parsed")
135
+ and response.parsed is None
136
+ and response.status_code == 200
137
+ ):
138
+ return self._parse_ndjson_response(response, graph_id)
139
+
119
140
  # Check response type and handle accordingly
120
141
  if hasattr(response, "parsed") and response.parsed:
121
142
  response_data = response.parsed
@@ -187,6 +208,58 @@ class QueryClient:
187
208
  # Unexpected response format
188
209
  raise Exception("Unexpected response format from query endpoint")
189
210
 
211
+ def _parse_ndjson_response(self, response, graph_id: str) -> QueryResult:
212
+ """Parse NDJSON streaming response and aggregate into QueryResult"""
213
+ import json
214
+
215
+ all_data = []
216
+ columns = None
217
+ total_rows = 0
218
+ execution_time_ms = 0
219
+
220
+ # Parse NDJSON line by line
221
+ content = (
222
+ response.content.decode("utf-8")
223
+ if isinstance(response.content, bytes)
224
+ else response.content
225
+ )
226
+
227
+ for line in content.strip().split("\n"):
228
+ if not line.strip():
229
+ continue
230
+
231
+ try:
232
+ chunk = json.loads(line)
233
+
234
+ # Extract columns from first chunk
235
+ if columns is None and "columns" in chunk:
236
+ columns = chunk["columns"]
237
+
238
+ # Aggregate data rows (NDJSON uses "rows", regular JSON uses "data")
239
+ if "rows" in chunk:
240
+ all_data.extend(chunk["rows"])
241
+ total_rows += len(chunk["rows"])
242
+ elif "data" in chunk:
243
+ all_data.extend(chunk["data"])
244
+ total_rows += len(chunk["data"])
245
+
246
+ # Track execution time (use max from all chunks)
247
+ if "execution_time_ms" in chunk:
248
+ execution_time_ms = max(execution_time_ms, chunk["execution_time_ms"])
249
+
250
+ except json.JSONDecodeError as e:
251
+ raise Exception(f"Failed to parse NDJSON line: {e}")
252
+
253
+ # Return aggregated result
254
+ return QueryResult(
255
+ data=all_data,
256
+ columns=columns or [],
257
+ row_count=total_rows,
258
+ execution_time_ms=execution_time_ms,
259
+ graph_id=graph_id,
260
+ timestamp=datetime.now().isoformat(),
261
+ )
262
+
190
263
  def _stream_query_results(
191
264
  self, operation_id: str, options: QueryOptions
192
265
  ) -> Iterator[Any]:
@@ -11,20 +11,20 @@ import json
11
11
  import logging
12
12
  import httpx
13
13
 
14
- from ..api.tables.get_upload_url_v1_graphs_graph_id_tables_table_name_files_post import (
14
+ from ..api.tables.get_upload_url import (
15
15
  sync_detailed as get_upload_url,
16
16
  )
17
- from ..api.tables.update_file_v1_graphs_graph_id_tables_files_file_id_patch import (
18
- sync_detailed as update_file,
17
+ from ..api.tables.update_file_status import (
18
+ sync_detailed as update_file_status,
19
19
  )
20
- from ..api.tables.list_tables_v1_graphs_graph_id_tables_get import (
20
+ from ..api.tables.list_tables import (
21
21
  sync_detailed as list_tables,
22
22
  )
23
- from ..api.tables.ingest_tables_v1_graphs_graph_id_tables_ingest_post import (
23
+ from ..api.tables.ingest_tables import (
24
24
  sync_detailed as ingest_tables,
25
25
  )
26
26
  from ..models.file_upload_request import FileUploadRequest
27
- from ..models.file_update_request import FileUpdateRequest
27
+ from ..models.file_status_update import FileStatusUpdate
28
28
  from ..models.bulk_ingest_request import BulkIngestRequest
29
29
 
30
30
  logger = logging.getLogger(__name__)
@@ -95,7 +95,7 @@ class TableIngestClient:
95
95
  This method handles the complete 3-step upload process:
96
96
  1. Get presigned upload URL
97
97
  2. Upload file to S3
98
- 3. Update file metadata
98
+ 3. Mark file as 'uploaded' (backend validates, calculates size/row count)
99
99
 
100
100
  Args:
101
101
  graph_id: The graph ID
@@ -104,7 +104,7 @@ class TableIngestClient:
104
104
  options: Upload options
105
105
 
106
106
  Returns:
107
- UploadResult with upload details
107
+ UploadResult with upload details (size/row count calculated by backend)
108
108
  """
109
109
  if options is None:
110
110
  options = UploadOptions()
@@ -216,12 +216,10 @@ class TableIngestClient:
216
216
  # BinaryIO or file-like object
217
217
  file_or_buffer.seek(0)
218
218
  file_content = file_or_buffer.read()
219
- file_size = len(file_content)
220
219
  else:
221
220
  # Read from file path
222
221
  with open(file_path, "rb") as f:
223
222
  file_content = f.read()
224
- file_size = len(file_content)
225
223
 
226
224
  s3_response = self._http_client.put(
227
225
  upload_url,
@@ -230,54 +228,47 @@ class TableIngestClient:
230
228
  )
231
229
  s3_response.raise_for_status()
232
230
 
233
- # Step 3: Get row count and update file metadata
231
+ # Step 3: Mark file as uploaded (backend validates and calculates size/row count)
234
232
  if options.on_progress:
235
- options.on_progress(f"Updating file metadata for {file_name}...")
233
+ options.on_progress(f"Marking {file_name} as uploaded...")
236
234
 
237
- try:
238
- import pyarrow.parquet as pq
239
-
240
- if is_buffer:
241
- # Read from buffer for row count
242
- if hasattr(file_or_buffer, "seek"):
243
- file_or_buffer.seek(0)
244
- parquet_table = pq.read_table(file_or_buffer)
245
- else:
246
- # Read from file path
247
- parquet_table = pq.read_table(file_path)
248
-
249
- row_count = parquet_table.num_rows
250
- except ImportError:
251
- logger.warning(
252
- "pyarrow not installed, row count will be estimated from file size"
253
- )
254
- # Rough estimate: ~100 bytes per row for typical data
255
- row_count = file_size // 100
256
-
257
- metadata_update = FileUpdateRequest(
258
- file_size_bytes=file_size, row_count=row_count
259
- )
235
+ status_update = FileStatusUpdate(status="uploaded")
260
236
 
261
237
  kwargs = {
262
238
  "graph_id": graph_id,
263
239
  "file_id": file_id,
264
240
  "client": client,
265
- "body": metadata_update,
241
+ "body": status_update,
266
242
  }
267
243
 
268
- update_response = update_file(**kwargs)
244
+ update_response = update_file_status(**kwargs)
269
245
 
270
246
  if not update_response.parsed:
247
+ logger.error(
248
+ f"No parsed response from update_file_status. Status code: {update_response.status_code}"
249
+ )
271
250
  return UploadResult(
272
251
  file_id=file_id,
273
- file_size=file_size,
274
- row_count=row_count,
252
+ file_size=0,
253
+ row_count=0,
275
254
  table_name=table_name,
276
255
  file_name=file_name,
277
256
  success=False,
278
- error="Failed to update file metadata",
257
+ error="Failed to complete file upload",
279
258
  )
280
259
 
260
+ response_data = update_response.parsed
261
+
262
+ if isinstance(response_data, dict):
263
+ file_size = response_data.get("file_size_bytes", 0)
264
+ row_count = response_data.get("row_count", 0)
265
+ elif hasattr(response_data, "additional_properties"):
266
+ file_size = response_data.additional_properties.get("file_size_bytes", 0)
267
+ row_count = response_data.additional_properties.get("row_count", 0)
268
+ else:
269
+ file_size = getattr(response_data, "file_size_bytes", 0)
270
+ row_count = getattr(response_data, "row_count", 0)
271
+
281
272
  if options.on_progress:
282
273
  options.on_progress(
283
274
  f"✅ Uploaded {file_name} ({file_size:,} bytes, {row_count:,} rows)"
@@ -75,9 +75,7 @@ from .cypher_query_request import CypherQueryRequest
75
75
  from .cypher_query_request_parameters_type_0 import CypherQueryRequestParametersType0
76
76
  from .database_health_response import DatabaseHealthResponse
77
77
  from .database_info_response import DatabaseInfoResponse
78
- from .delete_file_v1_graphs_graph_id_tables_files_file_id_delete_response_delete_file_v1_graphs_graph_id_tables_files_file_id_delete import (
79
- DeleteFileV1GraphsGraphIdTablesFilesFileIdDeleteResponseDeleteFileV1GraphsGraphIdTablesFilesFileIdDelete,
80
- )
78
+ from .delete_file_response import DeleteFileResponse
81
79
  from .delete_subgraph_request import DeleteSubgraphRequest
82
80
  from .delete_subgraph_response import DeleteSubgraphResponse
83
81
  from .detailed_transactions_response import DetailedTransactionsResponse
@@ -93,7 +91,8 @@ from .enhanced_credit_transaction_response_metadata import (
93
91
  from .error_response import ErrorResponse
94
92
  from .exchange_token_request import ExchangeTokenRequest
95
93
  from .exchange_token_request_metadata_type_0 import ExchangeTokenRequestMetadataType0
96
- from .file_update_request import FileUpdateRequest
94
+ from .file_info import FileInfo
95
+ from .file_status_update import FileStatusUpdate
97
96
  from .file_upload_request import FileUploadRequest
98
97
  from .file_upload_response import FileUploadResponse
99
98
  from .forgot_password_request import ForgotPasswordRequest
@@ -115,9 +114,7 @@ from .get_current_auth_user_response_getcurrentauthuser import (
115
114
  from .get_current_graph_bill_response_getcurrentgraphbill import (
116
115
  GetCurrentGraphBillResponseGetcurrentgraphbill,
117
116
  )
118
- from .get_file_info_v1_graphs_graph_id_tables_files_file_id_get_response_get_file_info_v1_graphs_graph_id_tables_files_file_id_get import (
119
- GetFileInfoV1GraphsGraphIdTablesFilesFileIdGetResponseGetFileInfoV1GraphsGraphIdTablesFilesFileIdGet,
120
- )
117
+ from .get_file_info_response import GetFileInfoResponse
121
118
  from .get_graph_billing_history_response_getgraphbillinghistory import (
122
119
  GetGraphBillingHistoryResponseGetgraphbillinghistory,
123
120
  )
@@ -164,9 +161,7 @@ from .link_token_request_options_type_0 import LinkTokenRequestOptionsType0
164
161
  from .link_token_request_provider_type_0 import LinkTokenRequestProviderType0
165
162
  from .list_connections_provider_type_0 import ListConnectionsProviderType0
166
163
  from .list_subgraphs_response import ListSubgraphsResponse
167
- from .list_table_files_v1_graphs_graph_id_tables_table_name_files_get_response_list_table_files_v1_graphs_graph_id_tables_table_name_files_get import (
168
- ListTableFilesV1GraphsGraphIdTablesTableNameFilesGetResponseListTableFilesV1GraphsGraphIdTablesTableNameFilesGet,
169
- )
164
+ from .list_table_files_response import ListTableFilesResponse
170
165
  from .login_request import LoginRequest
171
166
  from .logout_user_response_logoutuser import LogoutUserResponseLogoutuser
172
167
  from .mcp_tool_call import MCPToolCall
@@ -249,8 +244,8 @@ from .table_query_response import TableQueryResponse
249
244
  from .tier_upgrade_request import TierUpgradeRequest
250
245
  from .transaction_summary_response import TransactionSummaryResponse
251
246
  from .update_api_key_request import UpdateAPIKeyRequest
252
- from .update_file_v1_graphs_graph_id_tables_files_file_id_patch_response_update_file_v1_graphs_graph_id_tables_files_file_id_patch import (
253
- UpdateFileV1GraphsGraphIdTablesFilesFileIdPatchResponseUpdateFileV1GraphsGraphIdTablesFilesFileIdPatch,
247
+ from .update_file_status_response_updatefilestatus import (
248
+ UpdateFileStatusResponseUpdatefilestatus,
254
249
  )
255
250
  from .update_password_request import UpdatePasswordRequest
256
251
  from .update_user_request import UpdateUserRequest
@@ -339,7 +334,7 @@ __all__ = (
339
334
  "CypherQueryRequestParametersType0",
340
335
  "DatabaseHealthResponse",
341
336
  "DatabaseInfoResponse",
342
- "DeleteFileV1GraphsGraphIdTablesFilesFileIdDeleteResponseDeleteFileV1GraphsGraphIdTablesFilesFileIdDelete",
337
+ "DeleteFileResponse",
343
338
  "DeleteSubgraphRequest",
344
339
  "DeleteSubgraphResponse",
345
340
  "DetailedTransactionsResponse",
@@ -351,7 +346,8 @@ __all__ = (
351
346
  "ErrorResponse",
352
347
  "ExchangeTokenRequest",
353
348
  "ExchangeTokenRequestMetadataType0",
354
- "FileUpdateRequest",
349
+ "FileInfo",
350
+ "FileStatusUpdate",
355
351
  "FileUploadRequest",
356
352
  "FileUploadResponse",
357
353
  "ForgotPasswordRequest",
@@ -361,7 +357,7 @@ __all__ = (
361
357
  "GetBackupDownloadUrlResponseGetbackupdownloadurl",
362
358
  "GetCurrentAuthUserResponseGetcurrentauthuser",
363
359
  "GetCurrentGraphBillResponseGetcurrentgraphbill",
364
- "GetFileInfoV1GraphsGraphIdTablesFilesFileIdGetResponseGetFileInfoV1GraphsGraphIdTablesFilesFileIdGet",
360
+ "GetFileInfoResponse",
365
361
  "GetGraphBillingHistoryResponseGetgraphbillinghistory",
366
362
  "GetGraphLimitsResponseGetgraphlimits",
367
363
  "GetGraphMonthlyBillResponseGetgraphmonthlybill",
@@ -390,7 +386,7 @@ __all__ = (
390
386
  "LinkTokenRequestProviderType0",
391
387
  "ListConnectionsProviderType0",
392
388
  "ListSubgraphsResponse",
393
- "ListTableFilesV1GraphsGraphIdTablesTableNameFilesGetResponseListTableFilesV1GraphsGraphIdTablesTableNameFilesGet",
389
+ "ListTableFilesResponse",
394
390
  "LoginRequest",
395
391
  "LogoutUserResponseLogoutuser",
396
392
  "MCPToolCall",
@@ -455,7 +451,7 @@ __all__ = (
455
451
  "TierUpgradeRequest",
456
452
  "TransactionSummaryResponse",
457
453
  "UpdateAPIKeyRequest",
458
- "UpdateFileV1GraphsGraphIdTablesFilesFileIdPatchResponseUpdateFileV1GraphsGraphIdTablesFilesFileIdPatch",
454
+ "UpdateFileStatusResponseUpdatefilestatus",
459
455
  "UpdatePasswordRequest",
460
456
  "UpdateUserRequest",
461
457
  "UserAnalyticsResponse",