files_com-1.6.7-py3-none-any.whl → files_com-1.6.113-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- README.md +80 -18
- _VERSION +1 -1
- {files_com-1.6.7.dist-info → files_com-1.6.113.dist-info}/METADATA +81 -19
- {files_com-1.6.7.dist-info → files_com-1.6.113.dist-info}/RECORD +53 -46
- files_sdk/__init__.py +17 -1
- files_sdk/error.py +98 -23
- files_sdk/models/__init__.py +9 -0
- files_sdk/models/api_key.py +10 -0
- files_sdk/models/api_request_log.py +21 -3
- files_sdk/models/as2_partner.py +31 -9
- files_sdk/models/as2_station.py +1 -1
- files_sdk/models/automation.py +65 -5
- files_sdk/models/automation_log.py +20 -3
- files_sdk/models/behavior.py +2 -16
- files_sdk/models/bundle.py +1 -1
- files_sdk/models/bundle_action.py +5 -1
- files_sdk/models/bundle_registration.py +1 -1
- files_sdk/models/child_site_management_policy.py +278 -0
- files_sdk/models/email_log.py +17 -7
- files_sdk/models/exavault_api_request_log.py +20 -3
- files_sdk/models/file.py +8 -0
- files_sdk/models/file_migration_log.py +17 -7
- files_sdk/models/folder.py +11 -1
- files_sdk/models/ftp_action_log.py +20 -3
- files_sdk/models/gpg_key.py +61 -9
- files_sdk/models/history_export.py +4 -4
- files_sdk/models/history_export_result.py +2 -2
- files_sdk/models/holiday_region.py +58 -0
- files_sdk/models/inbox_registration.py +1 -1
- files_sdk/models/invoice_line_item.py +5 -0
- files_sdk/models/outbound_connection_log.py +20 -3
- files_sdk/models/partner.py +296 -0
- files_sdk/models/permission.py +10 -2
- files_sdk/models/public_hosting_request_log.py +27 -8
- files_sdk/models/public_key.py +59 -3
- files_sdk/models/remote_mount_backend.py +438 -0
- files_sdk/models/remote_server.py +19 -91
- files_sdk/models/remote_server_configuration_file.py +1 -0
- files_sdk/models/scim_log.py +88 -0
- files_sdk/models/sftp_action_log.py +20 -3
- files_sdk/models/siem_http_destination.py +98 -19
- files_sdk/models/site.py +37 -31
- files_sdk/models/sso_strategy.py +2 -1
- files_sdk/models/sync.py +574 -0
- files_sdk/models/sync_log.py +19 -8
- files_sdk/models/sync_run.py +123 -0
- files_sdk/models/user.py +105 -12
- files_sdk/models/user_cipher_use.py +24 -1
- files_sdk/models/user_lifecycle_rule.py +94 -39
- files_sdk/models/web_dav_action_log.py +20 -3
- {files_com-1.6.7.dist-info → files_com-1.6.113.dist-info}/WHEEL +0 -0
- {files_com-1.6.7.dist-info → files_com-1.6.113.dist-info}/licenses/LICENSE +0 -0
- {files_com-1.6.7.dist-info → files_com-1.6.113.dist-info}/top_level.txt +0 -0
files_sdk/models/remote_server.py:

```diff
@@ -20,6 +20,7 @@ class RemoteServer:
         "remote_home_path": None, # string - Initial home folder on remote server
         "name": None, # string - Internal name for your reference
         "port": None, # int64 - Port for remote server. Not needed for S3.
+        "buffer_uploads_always": None, # boolean - If true, uploads to this server will be uploaded first to Files.com before being sent to the remote server. This can improve performance in certain access patterns, such as high-latency connections. It will cause data to be temporarily stored in Files.com.
         "max_connections": None, # int64 - Max number of parallel connections. Ignored for S3 connections (we will parallelize these as much as possible).
         "pin_to_site_region": None, # boolean - If true, we will ensure that all communications with this remote server are made through the primary region of the site. This setting can also be overridden by a site-wide setting which will force it to true.
         "pinned_region": None, # string - If set, all communications with this remote server are made through the provided region.
@@ -39,9 +40,6 @@ class RemoteServer:
         "wasabi_bucket": None, # string - Wasabi: Bucket name
         "wasabi_region": None, # string - Wasabi: Region
         "wasabi_access_key": None, # string - Wasabi: Access Key.
-        "rackspace_username": None, # string - Rackspace: username used to login to the Rackspace Cloud Control Panel.
-        "rackspace_region": None, # string - Rackspace: Three letter code for Rackspace region. See https://support.rackspace.com/how-to/about-regions/
-        "rackspace_container": None, # string - Rackspace: The name of the container (top level directory) where files will sync.
         "auth_status": None, # string - Either `in_setup` or `complete`
         "auth_account_name": None, # string - Describes the authorized account
         "one_drive_account_type": None, # string - OneDrive: Either personal or business_other account types
@@ -88,7 +86,6 @@ class RemoteServer:
         "google_cloud_storage_credentials_json": None, # string - Google Cloud Storage: JSON file that contains the private key. To generate see https://cloud.google.com/storage/docs/json_api/v1/how-tos/authorizing#APIKey
         "google_cloud_storage_s3_compatible_secret_key": None, # string - Google Cloud Storage: S3-compatible secret key
         "linode_secret_key": None, # string - Linode: Secret Key
-        "rackspace_api_key": None, # string - Rackspace: API key from the Rackspace Cloud Control Panel
         "s3_compatible_secret_key": None, # string - S3-compatible: Secret Key
         "wasabi_secret_key": None, # string - Wasabi: Secret Key
     }
@@ -215,7 +212,6 @@ class RemoteServer:
     # google_cloud_storage_credentials_json - string - Google Cloud Storage: JSON file that contains the private key. To generate see https://cloud.google.com/storage/docs/json_api/v1/how-tos/authorizing#APIKey
     # google_cloud_storage_s3_compatible_secret_key - string - Google Cloud Storage: S3-compatible secret key
     # linode_secret_key - string - Linode: Secret Key
-    # rackspace_api_key - string - Rackspace: API key from the Rackspace Cloud Control Panel
     # s3_compatible_secret_key - string - S3-compatible: Secret Key
     # wasabi_secret_key - string - Wasabi: Secret Key
     # aws_access_key - string - AWS Access Key.
@@ -228,6 +224,7 @@ class RemoteServer:
     # azure_files_storage_share_name - string - Azure Files: Storage Share name
     # backblaze_b2_bucket - string - Backblaze B2 Cloud Storage: Bucket name
     # backblaze_b2_s3_endpoint - string - Backblaze B2 Cloud Storage: S3 Endpoint
+    # buffer_uploads_always - boolean - If true, uploads to this server will be uploaded first to Files.com before being sent to the remote server. This can improve performance in certain access patterns, such as high-latency connections. It will cause data to be temporarily stored in Files.com.
     # cloudflare_access_key - string - Cloudflare: Access Key.
     # cloudflare_bucket - string - Cloudflare: Bucket name
     # cloudflare_endpoint - string - Cloudflare: endpoint
@@ -250,9 +247,6 @@ class RemoteServer:
     # one_drive_account_type - string - OneDrive: Either personal or business_other account types
     # pin_to_site_region - boolean - If true, we will ensure that all communications with this remote server are made through the primary region of the site. This setting can also be overridden by a site-wide setting which will force it to true.
     # port - int64 - Port for remote server. Not needed for S3.
-    # rackspace_container - string - Rackspace: The name of the container (top level directory) where files will sync.
-    # rackspace_region - string - Rackspace: Three letter code for Rackspace region. See https://support.rackspace.com/how-to/about-regions/
-    # rackspace_username - string - Rackspace: username used to login to the Rackspace Cloud Control Panel.
     # s3_bucket - string - S3 bucket name
     # s3_compatible_access_key - string - S3-compatible: Access Key
     # s3_compatible_bucket - string - S3-compatible: Bucket name
```
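The headline addition in `remote_server.py` is the `buffer_uploads_always` flag, which stages uploads on Files.com before forwarding them to the remote server; the Rackspace Cloud Files fields are removed at the same time. A minimal sketch of setting the new flag at creation time, using the module path shown in this diff and assuming an API key is already configured (hostname, server type, and key values below are placeholders, not part of the diff):

```python
# Sketch only: create a remote server with the new buffer_uploads_always flag.
import files_sdk
from files_sdk.models import remote_server

files_sdk.set_api_key("YOUR_API_KEY")  # placeholder credential

server = remote_server.create(
    {
        "name": "example-sftp",          # internal reference name
        "server_type": "sftp",           # assumed valid server_type value
        "hostname": "sftp.example.com",  # placeholder host
        "port": 22,
        "buffer_uploads_always": True,   # new in 1.6.113: buffer uploads via Files.com first
    }
)
print(server.id, server.buffer_uploads_always)
```

The remaining `remote_server.py` hunks below apply the same changes to the instance-level validation, the `list()` documentation, and the module-level `create()`, `configuration_file()`, and `update()` helpers.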
```diff
@@ -379,12 +373,6 @@ class RemoteServer:
             raise InvalidParameterError(
                 "Bad parameter: linode_secret_key must be an str"
             )
-        if "rackspace_api_key" in params and not isinstance(
-            params["rackspace_api_key"], str
-        ):
-            raise InvalidParameterError(
-                "Bad parameter: rackspace_api_key must be an str"
-            )
         if "s3_compatible_secret_key" in params and not isinstance(
             params["s3_compatible_secret_key"], str
         ):
@@ -558,24 +546,6 @@ class RemoteServer:
             )
         if "port" in params and not isinstance(params["port"], int):
             raise InvalidParameterError("Bad parameter: port must be an int")
-        if "rackspace_container" in params and not isinstance(
-            params["rackspace_container"], str
-        ):
-            raise InvalidParameterError(
-                "Bad parameter: rackspace_container must be an str"
-            )
-        if "rackspace_region" in params and not isinstance(
-            params["rackspace_region"], str
-        ):
-            raise InvalidParameterError(
-                "Bad parameter: rackspace_region must be an str"
-            )
-        if "rackspace_username" in params and not isinstance(
-            params["rackspace_username"], str
-        ):
-            raise InvalidParameterError(
-                "Bad parameter: rackspace_username must be an str"
-            )
         if "s3_bucket" in params and not isinstance(params["s3_bucket"], str):
             raise InvalidParameterError(
                 "Bad parameter: s3_bucket must be an str"
@@ -694,9 +664,9 @@ class RemoteServer:
 # Parameters:
 # cursor - string - Used for pagination. When a list request has more records available, cursors are provided in the response headers `X-Files-Cursor-Next` and `X-Files-Cursor-Prev`. Send one of those cursor value here to resume an existing list from the next available record. Note: many of our SDKs have iterator methods that will automatically handle cursor-based pagination.
 # per_page - int64 - Number of records to show per page. (Max: 10,000, 1,000 or less is recommended).
-# sort_by - object - If set, sort records by the specified field in either `asc` or `desc` direction. Valid fields are `name`, `server_type`, `backblaze_b2_bucket`, `google_cloud_storage_bucket`, `wasabi_bucket`, `s3_bucket`, `
-# filter - object - If set, return records where the specified field is equal to the supplied value. Valid fields are `name`, `server_type`, `backblaze_b2_bucket`, `google_cloud_storage_bucket`, `wasabi_bucket`, `s3_bucket`, `
-# filter_prefix - object - If set, return records where the specified field is prefixed by the supplied value. Valid fields are `name`, `backblaze_b2_bucket`, `google_cloud_storage_bucket`, `wasabi_bucket`, `s3_bucket`, `
+# sort_by - object - If set, sort records by the specified field in either `asc` or `desc` direction. Valid fields are `name`, `server_type`, `backblaze_b2_bucket`, `google_cloud_storage_bucket`, `wasabi_bucket`, `s3_bucket`, `azure_blob_storage_container`, `azure_files_storage_share_name`, `s3_compatible_bucket`, `filebase_bucket`, `cloudflare_bucket` or `linode_bucket`.
+# filter - object - If set, return records where the specified field is equal to the supplied value. Valid fields are `name`, `server_type`, `backblaze_b2_bucket`, `google_cloud_storage_bucket`, `wasabi_bucket`, `s3_bucket`, `azure_blob_storage_container`, `azure_files_storage_share_name`, `s3_compatible_bucket`, `filebase_bucket`, `cloudflare_bucket` or `linode_bucket`. Valid field combinations are `[ server_type, name ]`, `[ backblaze_b2_bucket, name ]`, `[ google_cloud_storage_bucket, name ]`, `[ wasabi_bucket, name ]`, `[ s3_bucket, name ]`, `[ azure_blob_storage_container, name ]`, `[ azure_files_storage_share_name, name ]`, `[ s3_compatible_bucket, name ]`, `[ filebase_bucket, name ]`, `[ cloudflare_bucket, name ]` or `[ linode_bucket, name ]`.
+# filter_prefix - object - If set, return records where the specified field is prefixed by the supplied value. Valid fields are `name`, `backblaze_b2_bucket`, `google_cloud_storage_bucket`, `wasabi_bucket`, `s3_bucket`, `azure_blob_storage_container`, `azure_files_storage_share_name`, `s3_compatible_bucket`, `filebase_bucket`, `cloudflare_bucket` or `linode_bucket`. Valid field combinations are `[ backblaze_b2_bucket, name ]`, `[ google_cloud_storage_bucket, name ]`, `[ wasabi_bucket, name ]`, `[ s3_bucket, name ]`, `[ azure_blob_storage_container, name ]`, `[ azure_files_storage_share_name, name ]`, `[ s3_compatible_bucket, name ]`, `[ filebase_bucket, name ]`, `[ cloudflare_bucket, name ]` or `[ linode_bucket, name ]`.
 def list(params=None, options=None):
     if not isinstance(params, dict):
         params = {}
```
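The `list()` documentation now spells out the full set of sortable, filterable, and prefix-filterable fields instead of the truncated lists shipped in 1.6.7. A hedged sketch of using them, assuming the returned `ListObj` iterates and paginates automatically as the cursor note above suggests, and an API key is configured as in the previous example:

```python
# Sketch only: list remote servers using the documented filter/sort_by fields.
from files_sdk.models import remote_server

for server in remote_server.list(
    {
        "per_page": 100,
        "filter": {"server_type": "s3"},  # documented filterable field
        "sort_by": {"name": "asc"},       # documented sortable field
    }
):
    print(server.name, server.s3_bucket)
```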
```diff
@@ -784,7 +754,6 @@ def find_configuration_file(id, params=None, options=None):
 # google_cloud_storage_credentials_json - string - Google Cloud Storage: JSON file that contains the private key. To generate see https://cloud.google.com/storage/docs/json_api/v1/how-tos/authorizing#APIKey
 # google_cloud_storage_s3_compatible_secret_key - string - Google Cloud Storage: S3-compatible secret key
 # linode_secret_key - string - Linode: Secret Key
-# rackspace_api_key - string - Rackspace: API key from the Rackspace Cloud Control Panel
 # s3_compatible_secret_key - string - S3-compatible: Secret Key
 # wasabi_secret_key - string - Wasabi: Secret Key
 # aws_access_key - string - AWS Access Key.
@@ -797,6 +766,7 @@ def find_configuration_file(id, params=None, options=None):
 # azure_files_storage_share_name - string - Azure Files: Storage Share name
 # backblaze_b2_bucket - string - Backblaze B2 Cloud Storage: Bucket name
 # backblaze_b2_s3_endpoint - string - Backblaze B2 Cloud Storage: S3 Endpoint
+# buffer_uploads_always - boolean - If true, uploads to this server will be uploaded first to Files.com before being sent to the remote server. This can improve performance in certain access patterns, such as high-latency connections. It will cause data to be temporarily stored in Files.com.
 # cloudflare_access_key - string - Cloudflare: Access Key.
 # cloudflare_bucket - string - Cloudflare: Bucket name
 # cloudflare_endpoint - string - Cloudflare: endpoint
@@ -819,9 +789,6 @@ def find_configuration_file(id, params=None, options=None):
 # one_drive_account_type - string - OneDrive: Either personal or business_other account types
 # pin_to_site_region - boolean - If true, we will ensure that all communications with this remote server are made through the primary region of the site. This setting can also be overridden by a site-wide setting which will force it to true.
 # port - int64 - Port for remote server. Not needed for S3.
-# rackspace_container - string - Rackspace: The name of the container (top level directory) where files will sync.
-# rackspace_region - string - Rackspace: Three letter code for Rackspace region. See https://support.rackspace.com/how-to/about-regions/
-# rackspace_username - string - Rackspace: username used to login to the Rackspace Cloud Control Panel.
 # s3_bucket - string - S3 bucket name
 # s3_compatible_access_key - string - S3-compatible: Access Key
 # s3_compatible_bucket - string - S3-compatible: Bucket name
@@ -940,12 +907,6 @@ def create(params=None, options=None):
         raise InvalidParameterError(
             "Bad parameter: linode_secret_key must be an str"
         )
-    if "rackspace_api_key" in params and not isinstance(
-        params["rackspace_api_key"], str
-    ):
-        raise InvalidParameterError(
-            "Bad parameter: rackspace_api_key must be an str"
-        )
     if "s3_compatible_secret_key" in params and not isinstance(
         params["s3_compatible_secret_key"], str
     ):
@@ -1021,6 +982,12 @@ def create(params=None, options=None):
         raise InvalidParameterError(
             "Bad parameter: backblaze_b2_s3_endpoint must be an str"
         )
+    if "buffer_uploads_always" in params and not isinstance(
+        params["buffer_uploads_always"], bool
+    ):
+        raise InvalidParameterError(
+            "Bad parameter: buffer_uploads_always must be an bool"
+        )
     if "cloudflare_access_key" in params and not isinstance(
         params["cloudflare_access_key"], str
     ):
```
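`create()` now validates `buffer_uploads_always` client-side (the following hunks add the same check to `update()`), so a non-boolean value fails before any request is sent. A small sketch, grounded in the validation code above:

```python
# Sketch: the validation added above rejects non-boolean values locally,
# before any API call is made (no API key needed to trigger it).
from files_sdk.models import remote_server
from files_sdk.error import InvalidParameterError

try:
    remote_server.create({"buffer_uploads_always": "yes"})  # wrong type on purpose
except InvalidParameterError as exc:
    print(exc)  # Bad parameter: buffer_uploads_always must be an bool
```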
```diff
@@ -1144,24 +1111,6 @@ def create(params=None, options=None):
         )
     if "port" in params and not isinstance(params["port"], int):
         raise InvalidParameterError("Bad parameter: port must be an int")
-    if "rackspace_container" in params and not isinstance(
-        params["rackspace_container"], str
-    ):
-        raise InvalidParameterError(
-            "Bad parameter: rackspace_container must be an str"
-        )
-    if "rackspace_region" in params and not isinstance(
-        params["rackspace_region"], str
-    ):
-        raise InvalidParameterError(
-            "Bad parameter: rackspace_region must be an str"
-        )
-    if "rackspace_username" in params and not isinstance(
-        params["rackspace_username"], str
-    ):
-        raise InvalidParameterError(
-            "Bad parameter: rackspace_username must be an str"
-        )
     if "s3_bucket" in params and not isinstance(params["s3_bucket"], str):
         raise InvalidParameterError("Bad parameter: s3_bucket must be an str")
     if "s3_compatible_access_key" in params and not isinstance(
@@ -1321,7 +1270,6 @@ def configuration_file(id, params=None, options=None):
 # google_cloud_storage_credentials_json - string - Google Cloud Storage: JSON file that contains the private key. To generate see https://cloud.google.com/storage/docs/json_api/v1/how-tos/authorizing#APIKey
 # google_cloud_storage_s3_compatible_secret_key - string - Google Cloud Storage: S3-compatible secret key
 # linode_secret_key - string - Linode: Secret Key
-# rackspace_api_key - string - Rackspace: API key from the Rackspace Cloud Control Panel
 # s3_compatible_secret_key - string - S3-compatible: Secret Key
 # wasabi_secret_key - string - Wasabi: Secret Key
 # aws_access_key - string - AWS Access Key.
@@ -1334,6 +1282,7 @@ def configuration_file(id, params=None, options=None):
 # azure_files_storage_share_name - string - Azure Files: Storage Share name
 # backblaze_b2_bucket - string - Backblaze B2 Cloud Storage: Bucket name
 # backblaze_b2_s3_endpoint - string - Backblaze B2 Cloud Storage: S3 Endpoint
+# buffer_uploads_always - boolean - If true, uploads to this server will be uploaded first to Files.com before being sent to the remote server. This can improve performance in certain access patterns, such as high-latency connections. It will cause data to be temporarily stored in Files.com.
 # cloudflare_access_key - string - Cloudflare: Access Key.
 # cloudflare_bucket - string - Cloudflare: Bucket name
 # cloudflare_endpoint - string - Cloudflare: endpoint
@@ -1356,9 +1305,6 @@ def configuration_file(id, params=None, options=None):
 # one_drive_account_type - string - OneDrive: Either personal or business_other account types
 # pin_to_site_region - boolean - If true, we will ensure that all communications with this remote server are made through the primary region of the site. This setting can also be overridden by a site-wide setting which will force it to true.
 # port - int64 - Port for remote server. Not needed for S3.
-# rackspace_container - string - Rackspace: The name of the container (top level directory) where files will sync.
-# rackspace_region - string - Rackspace: Three letter code for Rackspace region. See https://support.rackspace.com/how-to/about-regions/
-# rackspace_username - string - Rackspace: username used to login to the Rackspace Cloud Control Panel.
 # s3_bucket - string - S3 bucket name
 # s3_compatible_access_key - string - S3-compatible: Access Key
 # s3_compatible_bucket - string - S3-compatible: Bucket name
@@ -1480,12 +1426,6 @@ def update(id, params=None, options=None):
         raise InvalidParameterError(
             "Bad parameter: linode_secret_key must be an str"
         )
-    if "rackspace_api_key" in params and not isinstance(
-        params["rackspace_api_key"], str
-    ):
-        raise InvalidParameterError(
-            "Bad parameter: rackspace_api_key must be an str"
-        )
     if "s3_compatible_secret_key" in params and not isinstance(
         params["s3_compatible_secret_key"], str
     ):
@@ -1561,6 +1501,12 @@ def update(id, params=None, options=None):
         raise InvalidParameterError(
             "Bad parameter: backblaze_b2_s3_endpoint must be an str"
         )
+    if "buffer_uploads_always" in params and not isinstance(
+        params["buffer_uploads_always"], bool
+    ):
+        raise InvalidParameterError(
+            "Bad parameter: buffer_uploads_always must be an bool"
+        )
     if "cloudflare_access_key" in params and not isinstance(
         params["cloudflare_access_key"], str
     ):
@@ -1684,24 +1630,6 @@ def update(id, params=None, options=None):
         )
     if "port" in params and not isinstance(params["port"], int):
         raise InvalidParameterError("Bad parameter: port must be an int")
-    if "rackspace_container" in params and not isinstance(
-        params["rackspace_container"], str
-    ):
-        raise InvalidParameterError(
-            "Bad parameter: rackspace_container must be an str"
-        )
-    if "rackspace_region" in params and not isinstance(
-        params["rackspace_region"], str
-    ):
-        raise InvalidParameterError(
-            "Bad parameter: rackspace_region must be an str"
-        )
-    if "rackspace_username" in params and not isinstance(
-        params["rackspace_username"], str
-    ):
-        raise InvalidParameterError(
-            "Bad parameter: rackspace_username must be an str"
-        )
     if "s3_bucket" in params and not isinstance(params["s3_bucket"], str):
         raise InvalidParameterError("Bad parameter: s3_bucket must be an str")
     if "s3_compatible_access_key" in params and not isinstance(
```
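With these hunks, all four Rackspace Cloud Files parameters (`rackspace_username`, `rackspace_region`, `rackspace_container`, `rackspace_api_key`) are gone from the attribute list, the parameter documentation, and the client-side validation in `create()` and `update()`. Callers that still pass them will no longer fail fast in the SDK. A hedged sketch of a caller-side guard; the helper below is illustrative, not part of the SDK:

```python
# Sketch only: drop the Rackspace keys removed in 1.6.113 before calling the SDK.
REMOVED_RACKSPACE_KEYS = {
    "rackspace_username",
    "rackspace_region",
    "rackspace_container",
    "rackspace_api_key",
}


def strip_removed_params(params: dict) -> dict:
    """Return params without the Rackspace keys dropped in 1.6.113, warning if any were present."""
    leftover = REMOVED_RACKSPACE_KEYS & params.keys()
    if leftover:
        print(f"Dropping parameters no longer supported: {sorted(leftover)}")
    return {k: v for k, v in params.items() if k not in REMOVED_RACKSPACE_KEYS}


# Example: clean legacy parameters before passing them to remote_server.update(id, params).
legacy_params = {"name": "old-rackspace-mount", "rackspace_region": "DFW"}
clean_params = strip_removed_params(legacy_params)
```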
files_sdk/models/remote_server_configuration_file.py:

```diff
@@ -36,6 +36,7 @@ class RemoteServerConfigurationFile:
         # * 10 requests/minute: '10-M'
         # * 1000 requests/hour: '1000-H'
         # * 2000 requests/day: '2000-D'
+        "auto_update_policy": None, # string - Auto update policy ['manual_trigger', 'critical_only', 'always', 'never'] (default critical_only)
         "api_token": None, # string - Files Agent API Token
         "port": None, # int64 - Incoming port for files agent connections
         "hostname": None, # string
```
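`RemoteServerConfigurationFile`, used for the Files.com Agent, gains an `auto_update_policy` field. A sketch of reading it, assuming `remote_server.find_configuration_file()` returns the configuration-file object as the module layout in this diff suggests (the id below is a placeholder):

```python
# Sketch only: inspect the agent's auto update policy for an existing remote server.
from files_sdk.models import remote_server

config = remote_server.find_configuration_file(12345)  # placeholder remote server id
print(config.auto_update_policy)  # 'manual_trigger', 'critical_only', 'always', or 'never'
```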
files_sdk/models/scim_log.py (new file):

```diff
@@ -0,0 +1,88 @@
+import builtins # noqa: F401
+from files_sdk.api import Api # noqa: F401
+from files_sdk.list_obj import ListObj
+from files_sdk.error import ( # noqa: F401
+    InvalidParameterError,
+    MissingParameterError,
+    NotImplementedError,
+)
+
+
+class ScimLog:
+    default_attributes = {
+        "id": None, # int64 - The unique ID of this SCIM request.
+        "created_at": None, # string - The date and time when this SCIM request occurred.
+        "request_path": None, # string - The path portion of the URL requested.
+        "request_method": None, # string - The HTTP method used for this request.
+        "http_response_code": None, # string - The HTTP response code returned for this request.
+        "user_agent": None, # string - The User-Agent header sent with the request.
+        "request_json": None, # string - The JSON payload sent with the request.
+        "response_json": None, # string - The JSON payload returned in the response.
+    }
+
+    def __init__(self, attributes=None, options=None):
+        if not isinstance(attributes, dict):
+            attributes = {}
+        if not isinstance(options, dict):
+            options = {}
+        self.set_attributes(attributes)
+        self.options = options
+
+    def set_attributes(self, attributes):
+        for attribute, default_value in ScimLog.default_attributes.items():
+            setattr(self, attribute, attributes.get(attribute, default_value))
+
+    def get_attributes(self):
+        return {
+            k: getattr(self, k, None)
+            for k in ScimLog.default_attributes
+            if getattr(self, k, None) is not None
+        }
+
+
+# Parameters:
+# cursor - string - Used for pagination. When a list request has more records available, cursors are provided in the response headers `X-Files-Cursor-Next` and `X-Files-Cursor-Prev`. Send one of those cursor value here to resume an existing list from the next available record. Note: many of our SDKs have iterator methods that will automatically handle cursor-based pagination.
+# per_page - int64 - Number of records to show per page. (Max: 10,000, 1,000 or less is recommended).
+# sort_by - object - If set, sort records by the specified field in either `asc` or `desc` direction. Valid fields are `created_at`.
+def list(params=None, options=None):
+    if not isinstance(params, dict):
+        params = {}
+    if not isinstance(options, dict):
+        options = {}
+    if "cursor" in params and not isinstance(params["cursor"], str):
+        raise InvalidParameterError("Bad parameter: cursor must be an str")
+    if "per_page" in params and not isinstance(params["per_page"], int):
+        raise InvalidParameterError("Bad parameter: per_page must be an int")
+    if "sort_by" in params and not isinstance(params["sort_by"], dict):
+        raise InvalidParameterError("Bad parameter: sort_by must be an dict")
+    return ListObj(ScimLog, "GET", "/scim_logs", params, options)
+
+
+def all(params=None, options=None):
+    list(params, options)
+
+
+# Parameters:
+# id (required) - int64 - Scim Log ID.
+def find(id, params=None, options=None):
+    if not isinstance(params, dict):
+        params = {}
+    if not isinstance(options, dict):
+        options = {}
+    params["id"] = id
+    if "id" in params and not isinstance(params["id"], int):
+        raise InvalidParameterError("Bad parameter: id must be an int")
+    if "id" not in params:
+        raise MissingParameterError("Parameter missing: id")
+    response, options = Api.send_request(
+        "GET", "/scim_logs/{id}".format(id=params["id"]), params, options
+    )
+    return ScimLog(response.data, options)
+
+
+def get(id, params=None, options=None):
+    find(id, params, options)
+
+
+def new(*args, **kwargs):
+    return ScimLog(*args, **kwargs)
```
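`scim_log.py` is a new read-only model exposing the `/scim_logs` endpoint for auditing SCIM provisioning traffic. A sketch of listing recent SCIM requests, assuming an API key is configured and that the returned `ListObj` iterates records as it does for the SDK's other list endpoints:

```python
# Sketch only: page through the new SCIM request log.
import files_sdk
from files_sdk.models import scim_log

files_sdk.set_api_key("YOUR_API_KEY")  # placeholder credential

for entry in scim_log.list({"per_page": 100, "sort_by": {"created_at": "desc"}}):
    print(entry.created_at, entry.request_method, entry.request_path, entry.http_response_code)
```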