robosystems-client 0.2.4__py3-none-any.whl → 0.2.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of robosystems-client might be problematic. Click here for more details.

Files changed (31) hide show
  1. robosystems_client/api/agent/auto_select_agent.py +164 -32
  2. robosystems_client/api/backup/create_backup.py +72 -0
  3. robosystems_client/api/backup/get_backup_download_url.py +12 -28
  4. robosystems_client/api/backup/restore_backup.py +92 -0
  5. robosystems_client/api/graph_limits/get_graph_limits.py +12 -14
  6. robosystems_client/api/graphs/create_graph.py +136 -36
  7. robosystems_client/api/graphs/get_available_graph_tiers.py +279 -0
  8. robosystems_client/api/query/execute_cypher_query.py +13 -11
  9. robosystems_client/models/__init__.py +22 -8
  10. robosystems_client/models/agent_response.py +1 -1
  11. robosystems_client/models/auth_response.py +40 -0
  12. robosystems_client/models/backup_download_url_response.py +92 -0
  13. robosystems_client/models/backup_limits.py +76 -0
  14. robosystems_client/models/batch_agent_request.py +1 -1
  15. robosystems_client/models/batch_agent_response.py +2 -2
  16. robosystems_client/models/copy_operation_limits.py +100 -0
  17. robosystems_client/models/create_graph_request.py +16 -17
  18. robosystems_client/models/credit_limits.py +84 -0
  19. robosystems_client/models/custom_schema_definition.py +14 -10
  20. robosystems_client/models/execute_cypher_query_response_200.py +135 -0
  21. robosystems_client/models/{get_graph_limits_response_getgraphlimits.py → execute_cypher_query_response_200_data_item.py} +5 -5
  22. robosystems_client/models/graph_limits_response.py +174 -0
  23. robosystems_client/models/initial_entity_data.py +15 -12
  24. robosystems_client/models/query_limits.py +84 -0
  25. robosystems_client/models/rate_limits.py +76 -0
  26. robosystems_client/models/storage_limits.py +90 -0
  27. {robosystems_client-0.2.4.dist-info → robosystems_client-0.2.6.dist-info}/METADATA +1 -1
  28. {robosystems_client-0.2.4.dist-info → robosystems_client-0.2.6.dist-info}/RECORD +30 -21
  29. robosystems_client/models/get_backup_download_url_response_getbackupdownloadurl.py +0 -44
  30. {robosystems_client-0.2.4.dist-info → robosystems_client-0.2.6.dist-info}/WHEEL +0 -0
  31. {robosystems_client-0.2.4.dist-info → robosystems_client-0.2.6.dist-info}/licenses/LICENSE +0 -0
@@ -6,6 +6,7 @@ import httpx
6
6
  from ... import errors
7
7
  from ...client import AuthenticatedClient, Client
8
8
  from ...models.cypher_query_request import CypherQueryRequest
9
+ from ...models.execute_cypher_query_response_200 import ExecuteCypherQueryResponse200
9
10
  from ...models.http_validation_error import HTTPValidationError
10
11
  from ...models.response_mode import ResponseMode
11
12
  from ...types import UNSET, Response, Unset
@@ -59,7 +60,7 @@ def _get_kwargs(
59
60
 
60
61
  def _parse_response(
61
62
  *, client: Union[AuthenticatedClient, Client], response: httpx.Response
62
- ) -> Optional[Union[Any, HTTPValidationError]]:
63
+ ) -> Optional[Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError]]:
63
64
  if response.status_code == 200:
64
65
  content_type = response.headers.get("content-type", "")
65
66
  if (
@@ -67,7 +68,8 @@ def _parse_response(
67
68
  or response.headers.get("x-stream-format") == "ndjson"
68
69
  ):
69
70
  return None
70
- response_200 = response.json()
71
+ response_200 = ExecuteCypherQueryResponse200.from_dict(response.json())
72
+
71
73
  return response_200
72
74
 
73
75
  if response.status_code == 202:
@@ -111,7 +113,7 @@ def _parse_response(
111
113
 
112
114
  def _build_response(
113
115
  *, client: Union[AuthenticatedClient, Client], response: httpx.Response
114
- ) -> Response[Union[Any, HTTPValidationError]]:
116
+ ) -> Response[Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError]]:
115
117
  return Response(
116
118
  status_code=HTTPStatus(response.status_code),
117
119
  content=response.content,
@@ -128,7 +130,7 @@ def sync_detailed(
128
130
  mode: Union[None, ResponseMode, Unset] = UNSET,
129
131
  chunk_size: Union[None, Unset, int] = UNSET,
130
132
  test_mode: Union[Unset, bool] = False,
131
- ) -> Response[Union[Any, HTTPValidationError]]:
133
+ ) -> Response[Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError]]:
132
134
  r"""Execute Cypher Query (Read-Only)
133
135
 
134
136
  Execute a read-only Cypher query with intelligent response optimization.
@@ -207,7 +209,7 @@ def sync_detailed(
207
209
  httpx.TimeoutException: If the request takes longer than Client.timeout.
208
210
 
209
211
  Returns:
210
- Response[Union[Any, HTTPValidationError]]
212
+ Response[Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError]]
211
213
  """
212
214
 
213
215
  kwargs = _get_kwargs(
@@ -233,7 +235,7 @@ def sync(
233
235
  mode: Union[None, ResponseMode, Unset] = UNSET,
234
236
  chunk_size: Union[None, Unset, int] = UNSET,
235
237
  test_mode: Union[Unset, bool] = False,
236
- ) -> Optional[Union[Any, HTTPValidationError]]:
238
+ ) -> Optional[Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError]]:
237
239
  r"""Execute Cypher Query (Read-Only)
238
240
 
239
241
  Execute a read-only Cypher query with intelligent response optimization.
@@ -312,7 +314,7 @@ def sync(
312
314
  httpx.TimeoutException: If the request takes longer than Client.timeout.
313
315
 
314
316
  Returns:
315
- Union[Any, HTTPValidationError]
317
+ Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError]
316
318
  """
317
319
 
318
320
  return sync_detailed(
@@ -333,7 +335,7 @@ async def asyncio_detailed(
333
335
  mode: Union[None, ResponseMode, Unset] = UNSET,
334
336
  chunk_size: Union[None, Unset, int] = UNSET,
335
337
  test_mode: Union[Unset, bool] = False,
336
- ) -> Response[Union[Any, HTTPValidationError]]:
338
+ ) -> Response[Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError]]:
337
339
  r"""Execute Cypher Query (Read-Only)
338
340
 
339
341
  Execute a read-only Cypher query with intelligent response optimization.
@@ -412,7 +414,7 @@ async def asyncio_detailed(
412
414
  httpx.TimeoutException: If the request takes longer than Client.timeout.
413
415
 
414
416
  Returns:
415
- Response[Union[Any, HTTPValidationError]]
417
+ Response[Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError]]
416
418
  """
417
419
 
418
420
  kwargs = _get_kwargs(
@@ -436,7 +438,7 @@ async def asyncio(
436
438
  mode: Union[None, ResponseMode, Unset] = UNSET,
437
439
  chunk_size: Union[None, Unset, int] = UNSET,
438
440
  test_mode: Union[Unset, bool] = False,
439
- ) -> Optional[Union[Any, HTTPValidationError]]:
441
+ ) -> Optional[Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError]]:
440
442
  r"""Execute Cypher Query (Read-Only)
441
443
 
442
444
  Execute a read-only Cypher query with intelligent response optimization.
@@ -515,7 +517,7 @@ async def asyncio(
515
517
  httpx.TimeoutException: If the request takes longer than Client.timeout.
516
518
 
517
519
  Returns:
518
- Union[Any, HTTPValidationError]
520
+ Union[Any, ExecuteCypherQueryResponse200, HTTPValidationError]
519
521
  """
520
522
 
521
523
  return (
@@ -29,6 +29,8 @@ from .auth_response_user import AuthResponseUser
29
29
  from .available_extension import AvailableExtension
30
30
  from .available_extensions_response import AvailableExtensionsResponse
31
31
  from .backup_create_request import BackupCreateRequest
32
+ from .backup_download_url_response import BackupDownloadUrlResponse
33
+ from .backup_limits import BackupLimits
32
34
  from .backup_list_response import BackupListResponse
33
35
  from .backup_response import BackupResponse
34
36
  from .backup_restore_request import BackupRestoreRequest
@@ -52,6 +54,7 @@ from .connection_provider_info_provider import ConnectionProviderInfoProvider
52
54
  from .connection_response import ConnectionResponse
53
55
  from .connection_response_metadata import ConnectionResponseMetadata
54
56
  from .connection_response_provider import ConnectionResponseProvider
57
+ from .copy_operation_limits import CopyOperationLimits
55
58
  from .create_api_key_request import CreateAPIKeyRequest
56
59
  from .create_api_key_response import CreateAPIKeyResponse
57
60
  from .create_connection_request import CreateConnectionRequest
@@ -59,6 +62,7 @@ from .create_connection_request_provider import CreateConnectionRequestProvider
59
62
  from .create_graph_request import CreateGraphRequest
60
63
  from .create_subgraph_request import CreateSubgraphRequest
61
64
  from .create_subgraph_request_metadata_type_0 import CreateSubgraphRequestMetadataType0
65
+ from .credit_limits import CreditLimits
62
66
  from .credit_summary import CreditSummary
63
67
  from .credit_summary_response import CreditSummaryResponse
64
68
  from .credits_summary_response import CreditsSummaryResponse
@@ -91,6 +95,10 @@ from .enhanced_credit_transaction_response_metadata import (
91
95
  from .error_response import ErrorResponse
92
96
  from .exchange_token_request import ExchangeTokenRequest
93
97
  from .exchange_token_request_metadata_type_0 import ExchangeTokenRequestMetadataType0
98
+ from .execute_cypher_query_response_200 import ExecuteCypherQueryResponse200
99
+ from .execute_cypher_query_response_200_data_item import (
100
+ ExecuteCypherQueryResponse200DataItem,
101
+ )
94
102
  from .file_info import FileInfo
95
103
  from .file_status_update import FileStatusUpdate
96
104
  from .file_upload_request import FileUploadRequest
@@ -105,9 +113,6 @@ from .get_all_credit_summaries_response_getallcreditsummaries import (
105
113
  from .get_all_shared_repository_limits_response_getallsharedrepositorylimits import (
106
114
  GetAllSharedRepositoryLimitsResponseGetallsharedrepositorylimits,
107
115
  )
108
- from .get_backup_download_url_response_getbackupdownloadurl import (
109
- GetBackupDownloadUrlResponseGetbackupdownloadurl,
110
- )
111
116
  from .get_current_auth_user_response_getcurrentauthuser import (
112
117
  GetCurrentAuthUserResponseGetcurrentauthuser,
113
118
  )
@@ -118,9 +123,6 @@ from .get_file_info_response import GetFileInfoResponse
118
123
  from .get_graph_billing_history_response_getgraphbillinghistory import (
119
124
  GetGraphBillingHistoryResponseGetgraphbillinghistory,
120
125
  )
121
- from .get_graph_limits_response_getgraphlimits import (
122
- GetGraphLimitsResponseGetgraphlimits,
123
- )
124
126
  from .get_graph_monthly_bill_response_getgraphmonthlybill import (
125
127
  GetGraphMonthlyBillResponseGetgraphmonthlybill,
126
128
  )
@@ -137,6 +139,7 @@ from .get_storage_usage_response_getstorageusage import (
137
139
  GetStorageUsageResponseGetstorageusage,
138
140
  )
139
141
  from .graph_info import GraphInfo
142
+ from .graph_limits_response import GraphLimitsResponse
140
143
  from .graph_metadata import GraphMetadata
141
144
  from .graph_metrics_response import GraphMetricsResponse
142
145
  from .graph_metrics_response_estimated_size import GraphMetricsResponseEstimatedSize
@@ -183,7 +186,9 @@ from .plaid_connection_config_accounts_type_0_item import (
183
186
  from .plaid_connection_config_institution_type_0 import (
184
187
  PlaidConnectionConfigInstitutionType0,
185
188
  )
189
+ from .query_limits import QueryLimits
186
190
  from .quick_books_connection_config import QuickBooksConnectionConfig
191
+ from .rate_limits import RateLimits
187
192
  from .register_request import RegisterRequest
188
193
  from .repository_credits_response import RepositoryCreditsResponse
189
194
  from .repository_plan import RepositoryPlan
@@ -217,6 +222,7 @@ from .sso_exchange_request import SSOExchangeRequest
217
222
  from .sso_exchange_response import SSOExchangeResponse
218
223
  from .sso_token_response import SSOTokenResponse
219
224
  from .storage_limit_response import StorageLimitResponse
225
+ from .storage_limits import StorageLimits
220
226
  from .subgraph_quota_response import SubgraphQuotaResponse
221
227
  from .subgraph_response import SubgraphResponse
222
228
  from .subgraph_response_metadata_type_0 import SubgraphResponseMetadataType0
@@ -295,6 +301,8 @@ __all__ = (
295
301
  "AvailableExtension",
296
302
  "AvailableExtensionsResponse",
297
303
  "BackupCreateRequest",
304
+ "BackupDownloadUrlResponse",
305
+ "BackupLimits",
298
306
  "BackupListResponse",
299
307
  "BackupResponse",
300
308
  "BackupRestoreRequest",
@@ -314,6 +322,7 @@ __all__ = (
314
322
  "ConnectionResponse",
315
323
  "ConnectionResponseMetadata",
316
324
  "ConnectionResponseProvider",
325
+ "CopyOperationLimits",
317
326
  "CreateAPIKeyRequest",
318
327
  "CreateAPIKeyResponse",
319
328
  "CreateConnectionRequest",
@@ -321,6 +330,7 @@ __all__ = (
321
330
  "CreateGraphRequest",
322
331
  "CreateSubgraphRequest",
323
332
  "CreateSubgraphRequestMetadataType0",
333
+ "CreditLimits",
324
334
  "CreditsSummaryResponse",
325
335
  "CreditsSummaryResponseCreditsByAddonType0Item",
326
336
  "CreditSummary",
@@ -345,6 +355,8 @@ __all__ = (
345
355
  "ErrorResponse",
346
356
  "ExchangeTokenRequest",
347
357
  "ExchangeTokenRequestMetadataType0",
358
+ "ExecuteCypherQueryResponse200",
359
+ "ExecuteCypherQueryResponse200DataItem",
348
360
  "FileInfo",
349
361
  "FileStatusUpdate",
350
362
  "FileUploadRequest",
@@ -353,18 +365,17 @@ __all__ = (
353
365
  "ForgotPasswordResponseForgotpassword",
354
366
  "GetAllCreditSummariesResponseGetallcreditsummaries",
355
367
  "GetAllSharedRepositoryLimitsResponseGetallsharedrepositorylimits",
356
- "GetBackupDownloadUrlResponseGetbackupdownloadurl",
357
368
  "GetCurrentAuthUserResponseGetcurrentauthuser",
358
369
  "GetCurrentGraphBillResponseGetcurrentgraphbill",
359
370
  "GetFileInfoResponse",
360
371
  "GetGraphBillingHistoryResponseGetgraphbillinghistory",
361
- "GetGraphLimitsResponseGetgraphlimits",
362
372
  "GetGraphMonthlyBillResponseGetgraphmonthlybill",
363
373
  "GetGraphUsageDetailsResponseGetgraphusagedetails",
364
374
  "GetOperationStatusResponseGetoperationstatus",
365
375
  "GetSharedRepositoryLimitsResponseGetsharedrepositorylimits",
366
376
  "GetStorageUsageResponseGetstorageusage",
367
377
  "GraphInfo",
378
+ "GraphLimitsResponse",
368
379
  "GraphMetadata",
369
380
  "GraphMetricsResponse",
370
381
  "GraphMetricsResponseEstimatedSize",
@@ -403,7 +414,9 @@ __all__ = (
403
414
  "PlaidConnectionConfig",
404
415
  "PlaidConnectionConfigAccountsType0Item",
405
416
  "PlaidConnectionConfigInstitutionType0",
417
+ "QueryLimits",
406
418
  "QuickBooksConnectionConfig",
419
+ "RateLimits",
407
420
  "RegisterRequest",
408
421
  "RepositoryCreditsResponse",
409
422
  "RepositoryPlan",
@@ -429,6 +442,7 @@ __all__ = (
429
442
  "SSOExchangeResponse",
430
443
  "SSOTokenResponse",
431
444
  "StorageLimitResponse",
445
+ "StorageLimits",
432
446
  "SubgraphQuotaResponse",
433
447
  "SubgraphResponse",
434
448
  "SubgraphResponseMetadataType0",
@@ -30,7 +30,7 @@ class AgentResponse:
30
30
  mode_used (AgentMode): Agent execution modes.
31
31
  metadata (Union['AgentResponseMetadataType0', None, Unset]): Response metadata including routing info
32
32
  tokens_used (Union['AgentResponseTokensUsedType0', None, Unset]): Token usage statistics
33
- confidence_score (Union[None, Unset, float]): Confidence score of the response
33
+ confidence_score (Union[None, Unset, float]): Confidence score of the response (0.0-1.0 scale)
34
34
  operation_id (Union[None, Unset, str]): Operation ID for SSE monitoring
35
35
  is_partial (Union[Unset, bool]): Whether this is a partial response Default: False.
36
36
  error_details (Union['AgentResponseErrorDetailsType0', None, Unset]): Error details if any
@@ -21,11 +21,15 @@ class AuthResponse:
21
21
  user (AuthResponseUser): User information
22
22
  message (str): Success message
23
23
  token (Union[None, Unset, str]): JWT authentication token (optional for cookie-based auth)
24
+ expires_in (Union[None, Unset, int]): Token expiry time in seconds from now
25
+ refresh_threshold (Union[None, Unset, int]): Recommended refresh threshold in seconds before expiry
24
26
  """
25
27
 
26
28
  user: "AuthResponseUser"
27
29
  message: str
28
30
  token: Union[None, Unset, str] = UNSET
31
+ expires_in: Union[None, Unset, int] = UNSET
32
+ refresh_threshold: Union[None, Unset, int] = UNSET
29
33
  additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
30
34
 
31
35
  def to_dict(self) -> dict[str, Any]:
@@ -39,6 +43,18 @@ class AuthResponse:
39
43
  else:
40
44
  token = self.token
41
45
 
46
+ expires_in: Union[None, Unset, int]
47
+ if isinstance(self.expires_in, Unset):
48
+ expires_in = UNSET
49
+ else:
50
+ expires_in = self.expires_in
51
+
52
+ refresh_threshold: Union[None, Unset, int]
53
+ if isinstance(self.refresh_threshold, Unset):
54
+ refresh_threshold = UNSET
55
+ else:
56
+ refresh_threshold = self.refresh_threshold
57
+
42
58
  field_dict: dict[str, Any] = {}
43
59
  field_dict.update(self.additional_properties)
44
60
  field_dict.update(
@@ -49,6 +65,10 @@ class AuthResponse:
49
65
  )
50
66
  if token is not UNSET:
51
67
  field_dict["token"] = token
68
+ if expires_in is not UNSET:
69
+ field_dict["expires_in"] = expires_in
70
+ if refresh_threshold is not UNSET:
71
+ field_dict["refresh_threshold"] = refresh_threshold
52
72
 
53
73
  return field_dict
54
74
 
@@ -70,10 +90,30 @@ class AuthResponse:
70
90
 
71
91
  token = _parse_token(d.pop("token", UNSET))
72
92
 
93
+ def _parse_expires_in(data: object) -> Union[None, Unset, int]:
94
+ if data is None:
95
+ return data
96
+ if isinstance(data, Unset):
97
+ return data
98
+ return cast(Union[None, Unset, int], data)
99
+
100
+ expires_in = _parse_expires_in(d.pop("expires_in", UNSET))
101
+
102
+ def _parse_refresh_threshold(data: object) -> Union[None, Unset, int]:
103
+ if data is None:
104
+ return data
105
+ if isinstance(data, Unset):
106
+ return data
107
+ return cast(Union[None, Unset, int], data)
108
+
109
+ refresh_threshold = _parse_refresh_threshold(d.pop("refresh_threshold", UNSET))
110
+
73
111
  auth_response = cls(
74
112
  user=user,
75
113
  message=message,
76
114
  token=token,
115
+ expires_in=expires_in,
116
+ refresh_threshold=refresh_threshold,
77
117
  )
78
118
 
79
119
  auth_response.additional_properties = d
@@ -0,0 +1,92 @@
1
from collections.abc import Mapping
from typing import Any, TypeVar

from attrs import define as _attrs_define
from attrs import field as _attrs_field

T = TypeVar("T", bound="BackupDownloadUrlResponse")


@_attrs_define
class BackupDownloadUrlResponse:
    """Response model for backup download URL generation.

    Attributes:
        download_url (str): Pre-signed S3 URL for downloading the backup file
        expires_in (int): URL expiration time in seconds from now
        expires_at (float): Unix timestamp when the URL expires
        backup_id (str): Backup identifier
        graph_id (str): Graph database identifier
    """

    download_url: str
    expires_in: int
    expires_at: float
    backup_id: str
    graph_id: str
    # Catch-all for keys the API returns that this model does not declare.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict; declared fields override any extras."""
        # Extras first so the declared fields win on key collisions.
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "download_url": self.download_url,
                "expires_in": self.expires_in,
                "expires_at": self.expires_at,
                "backup_id": self.backup_id,
                "graph_id": self.graph_id,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a dict; unknown keys are kept as extras."""
        remaining = dict(src_dict)
        # Keyword arguments evaluate left-to-right, so keys pop in declared order.
        instance = cls(
            download_url=remaining.pop("download_url"),
            expires_in=remaining.pop("expires_in"),
            expires_at=remaining.pop("expires_at"),
            backup_id=remaining.pop("backup_id"),
            graph_id=remaining.pop("graph_id"),
        )
        instance.additional_properties = remaining
        return instance

    @property
    def additional_keys(self) -> list[str]:
        """Names of the undeclared properties carried by this model."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
@@ -0,0 +1,76 @@
1
from collections.abc import Mapping
from typing import Any, TypeVar

from attrs import define as _attrs_define
from attrs import field as _attrs_field

T = TypeVar("T", bound="BackupLimits")


@_attrs_define
class BackupLimits:
    """Backup operation limits.

    Attributes:
        max_backup_size_gb (float): Maximum backup size in GB
        backup_retention_days (int): Backup retention period in days
        max_backups_per_day (int): Maximum backups per day
    """

    max_backup_size_gb: float
    backup_retention_days: int
    max_backups_per_day: int
    # Catch-all for keys the API returns that this model does not declare.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict; declared fields override any extras."""
        # ** merge: extras first, declared fields last so they take precedence.
        return {
            **self.additional_properties,
            "max_backup_size_gb": self.max_backup_size_gb,
            "backup_retention_days": self.backup_retention_days,
            "max_backups_per_day": self.max_backups_per_day,
        }

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a dict; unknown keys are kept as extras."""
        remaining = dict(src_dict)
        limits = cls(
            max_backup_size_gb=remaining.pop("max_backup_size_gb"),
            backup_retention_days=remaining.pop("backup_retention_days"),
            max_backups_per_day=remaining.pop("max_backups_per_day"),
        )
        limits.additional_properties = remaining
        return limits

    @property
    def additional_keys(self) -> list[str]:
        """Names of the undeclared properties carried by this model."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
@@ -18,7 +18,7 @@ class BatchAgentRequest:
18
18
  """Request for batch processing multiple queries.
19
19
 
20
20
  Attributes:
21
- queries (list['AgentRequest']): List of queries to process
21
+ queries (list['AgentRequest']): List of queries to process (max 10)
22
22
  parallel (Union[Unset, bool]): Process queries in parallel Default: False.
23
23
  """
24
24
 
@@ -16,8 +16,8 @@ class BatchAgentResponse:
16
16
  """Response for batch processing.
17
17
 
18
18
  Attributes:
19
- results (list['AgentResponse']): List of agent responses
20
- total_execution_time (float): Total execution time
19
+ results (list['AgentResponse']): List of agent responses (includes successes and failures)
20
+ total_execution_time (float): Total execution time in seconds
21
21
  parallel_processed (bool): Whether queries were processed in parallel
22
22
  """
23
23
 
@@ -0,0 +1,100 @@
1
from collections.abc import Mapping
from typing import Any, TypeVar, cast

from attrs import define as _attrs_define
from attrs import field as _attrs_field

T = TypeVar("T", bound="CopyOperationLimits")


@_attrs_define
class CopyOperationLimits:
    """Copy/ingestion operation limits.

    Attributes:
        max_file_size_gb (float): Maximum file size in GB
        timeout_seconds (int): Operation timeout in seconds
        concurrent_operations (int): Maximum concurrent operations
        max_files_per_operation (int): Maximum files per operation
        daily_copy_operations (int): Daily operation limit
        supported_formats (list[str]): Supported file formats
    """

    max_file_size_gb: float
    timeout_seconds: int
    concurrent_operations: int
    max_files_per_operation: int
    daily_copy_operations: int
    supported_formats: list[str]
    # Catch-all for keys the API returns that this model does not declare.
    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict; declared fields override any extras."""
        # Extras first so the declared fields win on key collisions.
        serialized: dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "max_file_size_gb": self.max_file_size_gb,
                "timeout_seconds": self.timeout_seconds,
                "concurrent_operations": self.concurrent_operations,
                "max_files_per_operation": self.max_files_per_operation,
                "daily_copy_operations": self.daily_copy_operations,
                "supported_formats": self.supported_formats,
            }
        )
        return serialized

    @classmethod
    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
        """Build an instance from a dict; unknown keys are kept as extras."""
        remaining = dict(src_dict)
        # Keyword arguments evaluate left-to-right, so keys pop in declared order.
        limits = cls(
            max_file_size_gb=remaining.pop("max_file_size_gb"),
            timeout_seconds=remaining.pop("timeout_seconds"),
            concurrent_operations=remaining.pop("concurrent_operations"),
            max_files_per_operation=remaining.pop("max_files_per_operation"),
            daily_copy_operations=remaining.pop("daily_copy_operations"),
            supported_formats=cast(list[str], remaining.pop("supported_formats")),
        )
        limits.additional_properties = remaining
        return limits

    @property
    def additional_keys(self) -> list[str]:
        """Names of the undeclared properties carried by this model."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
@@ -19,23 +19,22 @@ T = TypeVar("T", bound="CreateGraphRequest")
19
19
  class CreateGraphRequest:
20
20
  """Request model for creating a new graph.
21
21
 
22
- Example:
23
- {'initial_entity': {'cik': '0001234567', 'name': 'Acme Consulting LLC', 'uri': 'https://acmeconsulting.com'},
24
- 'instance_tier': 'kuzu-standard', 'metadata': {'description': 'Professional consulting services with full
25
- accounting integration', 'graph_name': 'Acme Consulting LLC', 'schema_extensions': ['roboledger']}, 'tags':
26
- ['consulting', 'professional-services']}
27
-
28
- Attributes:
29
- metadata (GraphMetadata): Metadata for graph creation.
30
- instance_tier (Union[Unset, str]): Instance tier: kuzu-standard, kuzu-large, kuzu-xlarge, neo4j-community-large,
31
- neo4j-enterprise-xlarge Default: 'kuzu-standard'.
32
- custom_schema (Union['CustomSchemaDefinition', None, Unset]): Custom schema definition to apply
33
- initial_entity (Union['InitialEntityData', None, Unset]): Optional initial entity to create in the graph. If
34
- provided, creates a entity-focused graph.
35
- create_entity (Union[Unset, bool]): Whether to create the entity node and upload initial data. Only applies when
36
- initial_entity is provided. Set to False to create graph without populating entity data (useful for file-based
37
- ingestion workflows). Default: True.
38
- tags (Union[Unset, list[str]]): Optional tags for organization
22
+ Use this to create either:
23
+ - **Entity graphs**: Standard graphs with entity schema and optional extensions
24
+ - **Custom graphs**: Generic graphs with fully custom schema definitions
25
+
26
+ Attributes:
27
+ metadata (GraphMetadata): Metadata for graph creation.
28
+ instance_tier (Union[Unset, str]): Instance tier: kuzu-standard, kuzu-large, kuzu-xlarge, neo4j-community-large,
29
+ neo4j-enterprise-xlarge Default: 'kuzu-standard'.
30
+ custom_schema (Union['CustomSchemaDefinition', None, Unset]): Custom schema definition to apply. If provided,
31
+ creates a generic custom graph. If omitted, creates an entity graph using schema_extensions.
32
+ initial_entity (Union['InitialEntityData', None, Unset]): Optional initial entity to create in the graph. If
33
+ provided with entity graph, populates the first entity node.
34
+ create_entity (Union[Unset, bool]): Whether to create the entity node and upload initial data. Only applies when
35
+ initial_entity is provided. Set to False to create graph without populating entity data (useful for file-based
36
+ ingestion workflows). Default: True.
37
+ tags (Union[Unset, list[str]]): Optional tags for organization
39
38
  """
40
39
 
41
40
  metadata: "GraphMetadata"