files-com 1.6.32__py3-none-any.whl → 1.6.137__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- README.md +81 -18
- _VERSION +1 -1
- {files_com-1.6.32.dist-info → files_com-1.6.137.dist-info}/METADATA +82 -19
- {files_com-1.6.32.dist-info → files_com-1.6.137.dist-info}/RECORD +49 -46
- files_sdk/__init__.py +9 -1
- files_sdk/error.py +113 -23
- files_sdk/models/__init__.py +5 -0
- files_sdk/models/api_key.py +10 -0
- files_sdk/models/api_request_log.py +6 -6
- files_sdk/models/automation.py +48 -5
- files_sdk/models/automation_log.py +6 -6
- files_sdk/models/behavior.py +2 -2
- files_sdk/models/bundle_action.py +5 -1
- files_sdk/models/bundle_registration.py +1 -1
- files_sdk/models/child_site_management_policy.py +278 -0
- files_sdk/models/email_log.py +5 -12
- files_sdk/models/exavault_api_request_log.py +6 -6
- files_sdk/models/file.py +8 -0
- files_sdk/models/file_migration_log.py +5 -12
- files_sdk/models/folder.py +11 -1
- files_sdk/models/ftp_action_log.py +6 -6
- files_sdk/models/gpg_key.py +29 -9
- files_sdk/models/history_export.py +4 -4
- files_sdk/models/history_export_result.py +2 -2
- files_sdk/models/inbox_registration.py +1 -1
- files_sdk/models/invoice_line_item.py +5 -0
- files_sdk/models/outbound_connection_log.py +6 -6
- files_sdk/models/partner.py +296 -0
- files_sdk/models/permission.py +10 -2
- files_sdk/models/public_hosting_request_log.py +6 -6
- files_sdk/models/public_key.py +7 -3
- files_sdk/models/remote_mount_backend.py +1 -0
- files_sdk/models/remote_server.py +65 -91
- files_sdk/models/remote_server_configuration_file.py +1 -0
- files_sdk/models/scim_log.py +88 -0
- files_sdk/models/sftp_action_log.py +6 -6
- files_sdk/models/siem_http_destination.py +98 -19
- files_sdk/models/site.py +37 -12
- files_sdk/models/sso_strategy.py +2 -1
- files_sdk/models/sync.py +74 -10
- files_sdk/models/sync_log.py +7 -13
- files_sdk/models/sync_run.py +31 -17
- files_sdk/models/user.py +79 -2
- files_sdk/models/user_cipher_use.py +24 -1
- files_sdk/models/user_lifecycle_rule.py +81 -40
- files_sdk/models/web_dav_action_log.py +6 -6
- {files_com-1.6.32.dist-info → files_com-1.6.137.dist-info}/WHEEL +0 -0
- {files_com-1.6.32.dist-info → files_com-1.6.137.dist-info}/licenses/LICENSE +0 -0
- {files_com-1.6.32.dist-info → files_com-1.6.137.dist-info}/top_level.txt +0 -0
|
@@ -19,7 +19,9 @@ class RemoteServer:
|
|
|
19
19
|
"hostname": None, # string - Hostname or IP address
|
|
20
20
|
"remote_home_path": None, # string - Initial home folder on remote server
|
|
21
21
|
"name": None, # string - Internal name for your reference
|
|
22
|
+
"description": None, # string - Internal description for your reference
|
|
22
23
|
"port": None, # int64 - Port for remote server. Not needed for S3.
|
|
24
|
+
"buffer_uploads": None, # string - If set to always, uploads to this server will be uploaded first to Files.com before being sent to the remote server. This can improve performance in certain access patterns, such as high-latency connections. It will cause data to be temporarily stored in Files.com. If set to auto, we will perform this optimization if we believe it to be a benefit in a given situation.
|
|
23
25
|
"max_connections": None, # int64 - Max number of parallel connections. Ignored for S3 connections (we will parallelize these as much as possible).
|
|
24
26
|
"pin_to_site_region": None, # boolean - If true, we will ensure that all communications with this remote server are made through the primary region of the site. This setting can also be overridden by a site-wide setting which will force it to true.
|
|
25
27
|
"pinned_region": None, # string - If set, all communications with this remote server are made through the provided region.
|
|
@@ -39,9 +41,6 @@ class RemoteServer:
|
|
|
39
41
|
"wasabi_bucket": None, # string - Wasabi: Bucket name
|
|
40
42
|
"wasabi_region": None, # string - Wasabi: Region
|
|
41
43
|
"wasabi_access_key": None, # string - Wasabi: Access Key.
|
|
42
|
-
"rackspace_username": None, # string - Rackspace: username used to login to the Rackspace Cloud Control Panel.
|
|
43
|
-
"rackspace_region": None, # string - Rackspace: Three letter code for Rackspace region. See https://support.rackspace.com/how-to/about-regions/
|
|
44
|
-
"rackspace_container": None, # string - Rackspace: The name of the container (top level directory) where files will sync.
|
|
45
44
|
"auth_status": None, # string - Either `in_setup` or `complete`
|
|
46
45
|
"auth_account_name": None, # string - Describes the authorized account
|
|
47
46
|
"one_drive_account_type": None, # string - OneDrive: Either personal or business_other account types
|
|
@@ -61,6 +60,7 @@ class RemoteServer:
|
|
|
61
60
|
"files_agent_root": None, # string - Agent local root path
|
|
62
61
|
"files_agent_api_token": None, # string - Files Agent API Token
|
|
63
62
|
"files_agent_version": None, # string - Files Agent version
|
|
63
|
+
"outbound_agent_id": None, # int64 - Route traffic to outbound on a files-agent
|
|
64
64
|
"filebase_bucket": None, # string - Filebase: Bucket name
|
|
65
65
|
"filebase_access_key": None, # string - Filebase: Access Key.
|
|
66
66
|
"cloudflare_bucket": None, # string - Cloudflare: Bucket name
|
|
@@ -88,7 +88,6 @@ class RemoteServer:
|
|
|
88
88
|
"google_cloud_storage_credentials_json": None, # string - Google Cloud Storage: JSON file that contains the private key. To generate see https://cloud.google.com/storage/docs/json_api/v1/how-tos/authorizing#APIKey
|
|
89
89
|
"google_cloud_storage_s3_compatible_secret_key": None, # string - Google Cloud Storage: S3-compatible secret key
|
|
90
90
|
"linode_secret_key": None, # string - Linode: Secret Key
|
|
91
|
-
"rackspace_api_key": None, # string - Rackspace: API key from the Rackspace Cloud Control Panel
|
|
92
91
|
"s3_compatible_secret_key": None, # string - S3-compatible: Secret Key
|
|
93
92
|
"wasabi_secret_key": None, # string - Wasabi: Secret Key
|
|
94
93
|
}
|
|
@@ -215,7 +214,6 @@ class RemoteServer:
|
|
|
215
214
|
# google_cloud_storage_credentials_json - string - Google Cloud Storage: JSON file that contains the private key. To generate see https://cloud.google.com/storage/docs/json_api/v1/how-tos/authorizing#APIKey
|
|
216
215
|
# google_cloud_storage_s3_compatible_secret_key - string - Google Cloud Storage: S3-compatible secret key
|
|
217
216
|
# linode_secret_key - string - Linode: Secret Key
|
|
218
|
-
# rackspace_api_key - string - Rackspace: API key from the Rackspace Cloud Control Panel
|
|
219
217
|
# s3_compatible_secret_key - string - S3-compatible: Secret Key
|
|
220
218
|
# wasabi_secret_key - string - Wasabi: Secret Key
|
|
221
219
|
# aws_access_key - string - AWS Access Key.
|
|
@@ -228,9 +226,11 @@ class RemoteServer:
|
|
|
228
226
|
# azure_files_storage_share_name - string - Azure Files: Storage Share name
|
|
229
227
|
# backblaze_b2_bucket - string - Backblaze B2 Cloud Storage: Bucket name
|
|
230
228
|
# backblaze_b2_s3_endpoint - string - Backblaze B2 Cloud Storage: S3 Endpoint
|
|
229
|
+
# buffer_uploads - string - If set to always, uploads to this server will be uploaded first to Files.com before being sent to the remote server. This can improve performance in certain access patterns, such as high-latency connections. It will cause data to be temporarily stored in Files.com. If set to auto, we will perform this optimization if we believe it to be a benefit in a given situation.
|
|
231
230
|
# cloudflare_access_key - string - Cloudflare: Access Key.
|
|
232
231
|
# cloudflare_bucket - string - Cloudflare: Bucket name
|
|
233
232
|
# cloudflare_endpoint - string - Cloudflare: endpoint
|
|
233
|
+
# description - string - Internal description for your reference
|
|
234
234
|
# dropbox_teams - boolean - Dropbox: If true, list Team folders in root?
|
|
235
235
|
# enable_dedicated_ips - boolean - `true` if remote server only accepts connections from dedicated IPs
|
|
236
236
|
# filebase_access_key - string - Filebase: Access Key.
|
|
@@ -238,6 +238,7 @@ class RemoteServer:
|
|
|
238
238
|
# files_agent_permission_set - string - Local permissions for files agent. read_only, write_only, or read_write
|
|
239
239
|
# files_agent_root - string - Agent local root path
|
|
240
240
|
# files_agent_version - string - Files Agent version
|
|
241
|
+
# outbound_agent_id - int64 - Route traffic to outbound on a files-agent
|
|
241
242
|
# google_cloud_storage_bucket - string - Google Cloud Storage: Bucket Name
|
|
242
243
|
# google_cloud_storage_project_id - string - Google Cloud Storage: Project ID
|
|
243
244
|
# google_cloud_storage_s3_compatible_access_key - string - Google Cloud Storage: S3-compatible Access Key.
|
|
@@ -250,9 +251,6 @@ class RemoteServer:
|
|
|
250
251
|
# one_drive_account_type - string - OneDrive: Either personal or business_other account types
|
|
251
252
|
# pin_to_site_region - boolean - If true, we will ensure that all communications with this remote server are made through the primary region of the site. This setting can also be overridden by a site-wide setting which will force it to true.
|
|
252
253
|
# port - int64 - Port for remote server. Not needed for S3.
|
|
253
|
-
# rackspace_container - string - Rackspace: The name of the container (top level directory) where files will sync.
|
|
254
|
-
# rackspace_region - string - Rackspace: Three letter code for Rackspace region. See https://support.rackspace.com/how-to/about-regions/
|
|
255
|
-
# rackspace_username - string - Rackspace: username used to login to the Rackspace Cloud Control Panel.
|
|
256
254
|
# s3_bucket - string - S3 bucket name
|
|
257
255
|
# s3_compatible_access_key - string - S3-compatible: Access Key
|
|
258
256
|
# s3_compatible_bucket - string - S3-compatible: Bucket name
|
|
@@ -379,12 +377,6 @@ class RemoteServer:
|
|
|
379
377
|
raise InvalidParameterError(
|
|
380
378
|
"Bad parameter: linode_secret_key must be an str"
|
|
381
379
|
)
|
|
382
|
-
if "rackspace_api_key" in params and not isinstance(
|
|
383
|
-
params["rackspace_api_key"], str
|
|
384
|
-
):
|
|
385
|
-
raise InvalidParameterError(
|
|
386
|
-
"Bad parameter: rackspace_api_key must be an str"
|
|
387
|
-
)
|
|
388
380
|
if "s3_compatible_secret_key" in params and not isinstance(
|
|
389
381
|
params["s3_compatible_secret_key"], str
|
|
390
382
|
):
|
|
@@ -451,6 +443,12 @@ class RemoteServer:
|
|
|
451
443
|
raise InvalidParameterError(
|
|
452
444
|
"Bad parameter: backblaze_b2_s3_endpoint must be an str"
|
|
453
445
|
)
|
|
446
|
+
if "buffer_uploads" in params and not isinstance(
|
|
447
|
+
params["buffer_uploads"], str
|
|
448
|
+
):
|
|
449
|
+
raise InvalidParameterError(
|
|
450
|
+
"Bad parameter: buffer_uploads must be an str"
|
|
451
|
+
)
|
|
454
452
|
if "cloudflare_access_key" in params and not isinstance(
|
|
455
453
|
params["cloudflare_access_key"], str
|
|
456
454
|
):
|
|
@@ -469,6 +467,12 @@ class RemoteServer:
|
|
|
469
467
|
raise InvalidParameterError(
|
|
470
468
|
"Bad parameter: cloudflare_endpoint must be an str"
|
|
471
469
|
)
|
|
470
|
+
if "description" in params and not isinstance(
|
|
471
|
+
params["description"], str
|
|
472
|
+
):
|
|
473
|
+
raise InvalidParameterError(
|
|
474
|
+
"Bad parameter: description must be an str"
|
|
475
|
+
)
|
|
472
476
|
if "filebase_access_key" in params and not isinstance(
|
|
473
477
|
params["filebase_access_key"], str
|
|
474
478
|
):
|
|
@@ -499,6 +503,12 @@ class RemoteServer:
|
|
|
499
503
|
raise InvalidParameterError(
|
|
500
504
|
"Bad parameter: files_agent_version must be an str"
|
|
501
505
|
)
|
|
506
|
+
if "outbound_agent_id" in params and not isinstance(
|
|
507
|
+
params["outbound_agent_id"], int
|
|
508
|
+
):
|
|
509
|
+
raise InvalidParameterError(
|
|
510
|
+
"Bad parameter: outbound_agent_id must be an int"
|
|
511
|
+
)
|
|
502
512
|
if "google_cloud_storage_bucket" in params and not isinstance(
|
|
503
513
|
params["google_cloud_storage_bucket"], str
|
|
504
514
|
):
|
|
@@ -558,24 +568,6 @@ class RemoteServer:
|
|
|
558
568
|
)
|
|
559
569
|
if "port" in params and not isinstance(params["port"], int):
|
|
560
570
|
raise InvalidParameterError("Bad parameter: port must be an int")
|
|
561
|
-
if "rackspace_container" in params and not isinstance(
|
|
562
|
-
params["rackspace_container"], str
|
|
563
|
-
):
|
|
564
|
-
raise InvalidParameterError(
|
|
565
|
-
"Bad parameter: rackspace_container must be an str"
|
|
566
|
-
)
|
|
567
|
-
if "rackspace_region" in params and not isinstance(
|
|
568
|
-
params["rackspace_region"], str
|
|
569
|
-
):
|
|
570
|
-
raise InvalidParameterError(
|
|
571
|
-
"Bad parameter: rackspace_region must be an str"
|
|
572
|
-
)
|
|
573
|
-
if "rackspace_username" in params and not isinstance(
|
|
574
|
-
params["rackspace_username"], str
|
|
575
|
-
):
|
|
576
|
-
raise InvalidParameterError(
|
|
577
|
-
"Bad parameter: rackspace_username must be an str"
|
|
578
|
-
)
|
|
579
571
|
if "s3_bucket" in params and not isinstance(params["s3_bucket"], str):
|
|
580
572
|
raise InvalidParameterError(
|
|
581
573
|
"Bad parameter: s3_bucket must be an str"
|
|
@@ -694,9 +686,9 @@ class RemoteServer:
|
|
|
694
686
|
# Parameters:
|
|
695
687
|
# cursor - string - Used for pagination. When a list request has more records available, cursors are provided in the response headers `X-Files-Cursor-Next` and `X-Files-Cursor-Prev`. Send one of those cursor value here to resume an existing list from the next available record. Note: many of our SDKs have iterator methods that will automatically handle cursor-based pagination.
|
|
696
688
|
# per_page - int64 - Number of records to show per page. (Max: 10,000, 1,000 or less is recommended).
|
|
697
|
-
# sort_by - object - If set, sort records by the specified field in either `asc` or `desc` direction. Valid fields are `name`, `server_type`, `backblaze_b2_bucket`, `google_cloud_storage_bucket`, `wasabi_bucket`, `s3_bucket`, `
|
|
698
|
-
# filter - object - If set, return records where the specified field is equal to the supplied value. Valid fields are `name`, `server_type`, `backblaze_b2_bucket`, `google_cloud_storage_bucket`, `wasabi_bucket`, `s3_bucket`, `
|
|
699
|
-
# filter_prefix - object - If set, return records where the specified field is prefixed by the supplied value. Valid fields are `name`, `backblaze_b2_bucket`, `google_cloud_storage_bucket`, `wasabi_bucket`, `s3_bucket`, `
|
|
689
|
+
# sort_by - object - If set, sort records by the specified field in either `asc` or `desc` direction. Valid fields are `name`, `server_type`, `backblaze_b2_bucket`, `google_cloud_storage_bucket`, `wasabi_bucket`, `s3_bucket`, `azure_blob_storage_container`, `azure_files_storage_share_name`, `s3_compatible_bucket`, `filebase_bucket`, `cloudflare_bucket` or `linode_bucket`.
|
|
690
|
+
# filter - object - If set, return records where the specified field is equal to the supplied value. Valid fields are `name`, `server_type`, `backblaze_b2_bucket`, `google_cloud_storage_bucket`, `wasabi_bucket`, `s3_bucket`, `azure_blob_storage_container`, `azure_files_storage_share_name`, `s3_compatible_bucket`, `filebase_bucket`, `cloudflare_bucket` or `linode_bucket`. Valid field combinations are `[ server_type, name ]`, `[ backblaze_b2_bucket, name ]`, `[ google_cloud_storage_bucket, name ]`, `[ wasabi_bucket, name ]`, `[ s3_bucket, name ]`, `[ azure_blob_storage_container, name ]`, `[ azure_files_storage_share_name, name ]`, `[ s3_compatible_bucket, name ]`, `[ filebase_bucket, name ]`, `[ cloudflare_bucket, name ]` or `[ linode_bucket, name ]`.
|
|
691
|
+
# filter_prefix - object - If set, return records where the specified field is prefixed by the supplied value. Valid fields are `name`, `backblaze_b2_bucket`, `google_cloud_storage_bucket`, `wasabi_bucket`, `s3_bucket`, `azure_blob_storage_container`, `azure_files_storage_share_name`, `s3_compatible_bucket`, `filebase_bucket`, `cloudflare_bucket` or `linode_bucket`. Valid field combinations are `[ backblaze_b2_bucket, name ]`, `[ google_cloud_storage_bucket, name ]`, `[ wasabi_bucket, name ]`, `[ s3_bucket, name ]`, `[ azure_blob_storage_container, name ]`, `[ azure_files_storage_share_name, name ]`, `[ s3_compatible_bucket, name ]`, `[ filebase_bucket, name ]`, `[ cloudflare_bucket, name ]` or `[ linode_bucket, name ]`.
|
|
700
692
|
def list(params=None, options=None):
|
|
701
693
|
if not isinstance(params, dict):
|
|
702
694
|
params = {}
|
|
@@ -784,7 +776,6 @@ def find_configuration_file(id, params=None, options=None):
|
|
|
784
776
|
# google_cloud_storage_credentials_json - string - Google Cloud Storage: JSON file that contains the private key. To generate see https://cloud.google.com/storage/docs/json_api/v1/how-tos/authorizing#APIKey
|
|
785
777
|
# google_cloud_storage_s3_compatible_secret_key - string - Google Cloud Storage: S3-compatible secret key
|
|
786
778
|
# linode_secret_key - string - Linode: Secret Key
|
|
787
|
-
# rackspace_api_key - string - Rackspace: API key from the Rackspace Cloud Control Panel
|
|
788
779
|
# s3_compatible_secret_key - string - S3-compatible: Secret Key
|
|
789
780
|
# wasabi_secret_key - string - Wasabi: Secret Key
|
|
790
781
|
# aws_access_key - string - AWS Access Key.
|
|
@@ -797,9 +788,11 @@ def find_configuration_file(id, params=None, options=None):
|
|
|
797
788
|
# azure_files_storage_share_name - string - Azure Files: Storage Share name
|
|
798
789
|
# backblaze_b2_bucket - string - Backblaze B2 Cloud Storage: Bucket name
|
|
799
790
|
# backblaze_b2_s3_endpoint - string - Backblaze B2 Cloud Storage: S3 Endpoint
|
|
791
|
+
# buffer_uploads - string - If set to always, uploads to this server will be uploaded first to Files.com before being sent to the remote server. This can improve performance in certain access patterns, such as high-latency connections. It will cause data to be temporarily stored in Files.com. If set to auto, we will perform this optimization if we believe it to be a benefit in a given situation.
|
|
800
792
|
# cloudflare_access_key - string - Cloudflare: Access Key.
|
|
801
793
|
# cloudflare_bucket - string - Cloudflare: Bucket name
|
|
802
794
|
# cloudflare_endpoint - string - Cloudflare: endpoint
|
|
795
|
+
# description - string - Internal description for your reference
|
|
803
796
|
# dropbox_teams - boolean - Dropbox: If true, list Team folders in root?
|
|
804
797
|
# enable_dedicated_ips - boolean - `true` if remote server only accepts connections from dedicated IPs
|
|
805
798
|
# filebase_access_key - string - Filebase: Access Key.
|
|
@@ -807,6 +800,7 @@ def find_configuration_file(id, params=None, options=None):
|
|
|
807
800
|
# files_agent_permission_set - string - Local permissions for files agent. read_only, write_only, or read_write
|
|
808
801
|
# files_agent_root - string - Agent local root path
|
|
809
802
|
# files_agent_version - string - Files Agent version
|
|
803
|
+
# outbound_agent_id - int64 - Route traffic to outbound on a files-agent
|
|
810
804
|
# google_cloud_storage_bucket - string - Google Cloud Storage: Bucket Name
|
|
811
805
|
# google_cloud_storage_project_id - string - Google Cloud Storage: Project ID
|
|
812
806
|
# google_cloud_storage_s3_compatible_access_key - string - Google Cloud Storage: S3-compatible Access Key.
|
|
@@ -819,9 +813,6 @@ def find_configuration_file(id, params=None, options=None):
|
|
|
819
813
|
# one_drive_account_type - string - OneDrive: Either personal or business_other account types
|
|
820
814
|
# pin_to_site_region - boolean - If true, we will ensure that all communications with this remote server are made through the primary region of the site. This setting can also be overridden by a site-wide setting which will force it to true.
|
|
821
815
|
# port - int64 - Port for remote server. Not needed for S3.
|
|
822
|
-
# rackspace_container - string - Rackspace: The name of the container (top level directory) where files will sync.
|
|
823
|
-
# rackspace_region - string - Rackspace: Three letter code for Rackspace region. See https://support.rackspace.com/how-to/about-regions/
|
|
824
|
-
# rackspace_username - string - Rackspace: username used to login to the Rackspace Cloud Control Panel.
|
|
825
816
|
# s3_bucket - string - S3 bucket name
|
|
826
817
|
# s3_compatible_access_key - string - S3-compatible: Access Key
|
|
827
818
|
# s3_compatible_bucket - string - S3-compatible: Bucket name
|
|
@@ -940,12 +931,6 @@ def create(params=None, options=None):
|
|
|
940
931
|
raise InvalidParameterError(
|
|
941
932
|
"Bad parameter: linode_secret_key must be an str"
|
|
942
933
|
)
|
|
943
|
-
if "rackspace_api_key" in params and not isinstance(
|
|
944
|
-
params["rackspace_api_key"], str
|
|
945
|
-
):
|
|
946
|
-
raise InvalidParameterError(
|
|
947
|
-
"Bad parameter: rackspace_api_key must be an str"
|
|
948
|
-
)
|
|
949
934
|
if "s3_compatible_secret_key" in params and not isinstance(
|
|
950
935
|
params["s3_compatible_secret_key"], str
|
|
951
936
|
):
|
|
@@ -1021,6 +1006,12 @@ def create(params=None, options=None):
|
|
|
1021
1006
|
raise InvalidParameterError(
|
|
1022
1007
|
"Bad parameter: backblaze_b2_s3_endpoint must be an str"
|
|
1023
1008
|
)
|
|
1009
|
+
if "buffer_uploads" in params and not isinstance(
|
|
1010
|
+
params["buffer_uploads"], str
|
|
1011
|
+
):
|
|
1012
|
+
raise InvalidParameterError(
|
|
1013
|
+
"Bad parameter: buffer_uploads must be an str"
|
|
1014
|
+
)
|
|
1024
1015
|
if "cloudflare_access_key" in params and not isinstance(
|
|
1025
1016
|
params["cloudflare_access_key"], str
|
|
1026
1017
|
):
|
|
@@ -1039,6 +1030,10 @@ def create(params=None, options=None):
|
|
|
1039
1030
|
raise InvalidParameterError(
|
|
1040
1031
|
"Bad parameter: cloudflare_endpoint must be an str"
|
|
1041
1032
|
)
|
|
1033
|
+
if "description" in params and not isinstance(params["description"], str):
|
|
1034
|
+
raise InvalidParameterError(
|
|
1035
|
+
"Bad parameter: description must be an str"
|
|
1036
|
+
)
|
|
1042
1037
|
if "dropbox_teams" in params and not isinstance(
|
|
1043
1038
|
params["dropbox_teams"], bool
|
|
1044
1039
|
):
|
|
@@ -1081,6 +1076,12 @@ def create(params=None, options=None):
|
|
|
1081
1076
|
raise InvalidParameterError(
|
|
1082
1077
|
"Bad parameter: files_agent_version must be an str"
|
|
1083
1078
|
)
|
|
1079
|
+
if "outbound_agent_id" in params and not isinstance(
|
|
1080
|
+
params["outbound_agent_id"], int
|
|
1081
|
+
):
|
|
1082
|
+
raise InvalidParameterError(
|
|
1083
|
+
"Bad parameter: outbound_agent_id must be an int"
|
|
1084
|
+
)
|
|
1084
1085
|
if "google_cloud_storage_bucket" in params and not isinstance(
|
|
1085
1086
|
params["google_cloud_storage_bucket"], str
|
|
1086
1087
|
):
|
|
@@ -1144,24 +1145,6 @@ def create(params=None, options=None):
|
|
|
1144
1145
|
)
|
|
1145
1146
|
if "port" in params and not isinstance(params["port"], int):
|
|
1146
1147
|
raise InvalidParameterError("Bad parameter: port must be an int")
|
|
1147
|
-
if "rackspace_container" in params and not isinstance(
|
|
1148
|
-
params["rackspace_container"], str
|
|
1149
|
-
):
|
|
1150
|
-
raise InvalidParameterError(
|
|
1151
|
-
"Bad parameter: rackspace_container must be an str"
|
|
1152
|
-
)
|
|
1153
|
-
if "rackspace_region" in params and not isinstance(
|
|
1154
|
-
params["rackspace_region"], str
|
|
1155
|
-
):
|
|
1156
|
-
raise InvalidParameterError(
|
|
1157
|
-
"Bad parameter: rackspace_region must be an str"
|
|
1158
|
-
)
|
|
1159
|
-
if "rackspace_username" in params and not isinstance(
|
|
1160
|
-
params["rackspace_username"], str
|
|
1161
|
-
):
|
|
1162
|
-
raise InvalidParameterError(
|
|
1163
|
-
"Bad parameter: rackspace_username must be an str"
|
|
1164
|
-
)
|
|
1165
1148
|
if "s3_bucket" in params and not isinstance(params["s3_bucket"], str):
|
|
1166
1149
|
raise InvalidParameterError("Bad parameter: s3_bucket must be an str")
|
|
1167
1150
|
if "s3_compatible_access_key" in params and not isinstance(
|
|
@@ -1321,7 +1304,6 @@ def configuration_file(id, params=None, options=None):
|
|
|
1321
1304
|
# google_cloud_storage_credentials_json - string - Google Cloud Storage: JSON file that contains the private key. To generate see https://cloud.google.com/storage/docs/json_api/v1/how-tos/authorizing#APIKey
|
|
1322
1305
|
# google_cloud_storage_s3_compatible_secret_key - string - Google Cloud Storage: S3-compatible secret key
|
|
1323
1306
|
# linode_secret_key - string - Linode: Secret Key
|
|
1324
|
-
# rackspace_api_key - string - Rackspace: API key from the Rackspace Cloud Control Panel
|
|
1325
1307
|
# s3_compatible_secret_key - string - S3-compatible: Secret Key
|
|
1326
1308
|
# wasabi_secret_key - string - Wasabi: Secret Key
|
|
1327
1309
|
# aws_access_key - string - AWS Access Key.
|
|
@@ -1334,9 +1316,11 @@ def configuration_file(id, params=None, options=None):
|
|
|
1334
1316
|
# azure_files_storage_share_name - string - Azure Files: Storage Share name
|
|
1335
1317
|
# backblaze_b2_bucket - string - Backblaze B2 Cloud Storage: Bucket name
|
|
1336
1318
|
# backblaze_b2_s3_endpoint - string - Backblaze B2 Cloud Storage: S3 Endpoint
|
|
1319
|
+
# buffer_uploads - string - If set to always, uploads to this server will be uploaded first to Files.com before being sent to the remote server. This can improve performance in certain access patterns, such as high-latency connections. It will cause data to be temporarily stored in Files.com. If set to auto, we will perform this optimization if we believe it to be a benefit in a given situation.
|
|
1337
1320
|
# cloudflare_access_key - string - Cloudflare: Access Key.
|
|
1338
1321
|
# cloudflare_bucket - string - Cloudflare: Bucket name
|
|
1339
1322
|
# cloudflare_endpoint - string - Cloudflare: endpoint
|
|
1323
|
+
# description - string - Internal description for your reference
|
|
1340
1324
|
# dropbox_teams - boolean - Dropbox: If true, list Team folders in root?
|
|
1341
1325
|
# enable_dedicated_ips - boolean - `true` if remote server only accepts connections from dedicated IPs
|
|
1342
1326
|
# filebase_access_key - string - Filebase: Access Key.
|
|
@@ -1344,6 +1328,7 @@ def configuration_file(id, params=None, options=None):
|
|
|
1344
1328
|
# files_agent_permission_set - string - Local permissions for files agent. read_only, write_only, or read_write
|
|
1345
1329
|
# files_agent_root - string - Agent local root path
|
|
1346
1330
|
# files_agent_version - string - Files Agent version
|
|
1331
|
+
# outbound_agent_id - int64 - Route traffic to outbound on a files-agent
|
|
1347
1332
|
# google_cloud_storage_bucket - string - Google Cloud Storage: Bucket Name
|
|
1348
1333
|
# google_cloud_storage_project_id - string - Google Cloud Storage: Project ID
|
|
1349
1334
|
# google_cloud_storage_s3_compatible_access_key - string - Google Cloud Storage: S3-compatible Access Key.
|
|
@@ -1356,9 +1341,6 @@ def configuration_file(id, params=None, options=None):
|
|
|
1356
1341
|
# one_drive_account_type - string - OneDrive: Either personal or business_other account types
|
|
1357
1342
|
# pin_to_site_region - boolean - If true, we will ensure that all communications with this remote server are made through the primary region of the site. This setting can also be overridden by a site-wide setting which will force it to true.
|
|
1358
1343
|
# port - int64 - Port for remote server. Not needed for S3.
|
|
1359
|
-
# rackspace_container - string - Rackspace: The name of the container (top level directory) where files will sync.
|
|
1360
|
-
# rackspace_region - string - Rackspace: Three letter code for Rackspace region. See https://support.rackspace.com/how-to/about-regions/
|
|
1361
|
-
# rackspace_username - string - Rackspace: username used to login to the Rackspace Cloud Control Panel.
|
|
1362
1344
|
# s3_bucket - string - S3 bucket name
|
|
1363
1345
|
# s3_compatible_access_key - string - S3-compatible: Access Key
|
|
1364
1346
|
# s3_compatible_bucket - string - S3-compatible: Bucket name
|
|
@@ -1480,12 +1462,6 @@ def update(id, params=None, options=None):
|
|
|
1480
1462
|
raise InvalidParameterError(
|
|
1481
1463
|
"Bad parameter: linode_secret_key must be an str"
|
|
1482
1464
|
)
|
|
1483
|
-
if "rackspace_api_key" in params and not isinstance(
|
|
1484
|
-
params["rackspace_api_key"], str
|
|
1485
|
-
):
|
|
1486
|
-
raise InvalidParameterError(
|
|
1487
|
-
"Bad parameter: rackspace_api_key must be an str"
|
|
1488
|
-
)
|
|
1489
1465
|
if "s3_compatible_secret_key" in params and not isinstance(
|
|
1490
1466
|
params["s3_compatible_secret_key"], str
|
|
1491
1467
|
):
|
|
@@ -1561,6 +1537,12 @@ def update(id, params=None, options=None):
|
|
|
1561
1537
|
raise InvalidParameterError(
|
|
1562
1538
|
"Bad parameter: backblaze_b2_s3_endpoint must be an str"
|
|
1563
1539
|
)
|
|
1540
|
+
if "buffer_uploads" in params and not isinstance(
|
|
1541
|
+
params["buffer_uploads"], str
|
|
1542
|
+
):
|
|
1543
|
+
raise InvalidParameterError(
|
|
1544
|
+
"Bad parameter: buffer_uploads must be an str"
|
|
1545
|
+
)
|
|
1564
1546
|
if "cloudflare_access_key" in params and not isinstance(
|
|
1565
1547
|
params["cloudflare_access_key"], str
|
|
1566
1548
|
):
|
|
@@ -1579,6 +1561,10 @@ def update(id, params=None, options=None):
|
|
|
1579
1561
|
raise InvalidParameterError(
|
|
1580
1562
|
"Bad parameter: cloudflare_endpoint must be an str"
|
|
1581
1563
|
)
|
|
1564
|
+
if "description" in params and not isinstance(params["description"], str):
|
|
1565
|
+
raise InvalidParameterError(
|
|
1566
|
+
"Bad parameter: description must be an str"
|
|
1567
|
+
)
|
|
1582
1568
|
if "dropbox_teams" in params and not isinstance(
|
|
1583
1569
|
params["dropbox_teams"], bool
|
|
1584
1570
|
):
|
|
@@ -1621,6 +1607,12 @@ def update(id, params=None, options=None):
|
|
|
1621
1607
|
raise InvalidParameterError(
|
|
1622
1608
|
"Bad parameter: files_agent_version must be an str"
|
|
1623
1609
|
)
|
|
1610
|
+
if "outbound_agent_id" in params and not isinstance(
|
|
1611
|
+
params["outbound_agent_id"], int
|
|
1612
|
+
):
|
|
1613
|
+
raise InvalidParameterError(
|
|
1614
|
+
"Bad parameter: outbound_agent_id must be an int"
|
|
1615
|
+
)
|
|
1624
1616
|
if "google_cloud_storage_bucket" in params and not isinstance(
|
|
1625
1617
|
params["google_cloud_storage_bucket"], str
|
|
1626
1618
|
):
|
|
@@ -1684,24 +1676,6 @@ def update(id, params=None, options=None):
|
|
|
1684
1676
|
)
|
|
1685
1677
|
if "port" in params and not isinstance(params["port"], int):
|
|
1686
1678
|
raise InvalidParameterError("Bad parameter: port must be an int")
|
|
1687
|
-
if "rackspace_container" in params and not isinstance(
|
|
1688
|
-
params["rackspace_container"], str
|
|
1689
|
-
):
|
|
1690
|
-
raise InvalidParameterError(
|
|
1691
|
-
"Bad parameter: rackspace_container must be an str"
|
|
1692
|
-
)
|
|
1693
|
-
if "rackspace_region" in params and not isinstance(
|
|
1694
|
-
params["rackspace_region"], str
|
|
1695
|
-
):
|
|
1696
|
-
raise InvalidParameterError(
|
|
1697
|
-
"Bad parameter: rackspace_region must be an str"
|
|
1698
|
-
)
|
|
1699
|
-
if "rackspace_username" in params and not isinstance(
|
|
1700
|
-
params["rackspace_username"], str
|
|
1701
|
-
):
|
|
1702
|
-
raise InvalidParameterError(
|
|
1703
|
-
"Bad parameter: rackspace_username must be an str"
|
|
1704
|
-
)
|
|
1705
1679
|
if "s3_bucket" in params and not isinstance(params["s3_bucket"], str):
|
|
1706
1680
|
raise InvalidParameterError("Bad parameter: s3_bucket must be an str")
|
|
1707
1681
|
if "s3_compatible_access_key" in params and not isinstance(
|
|
@@ -36,6 +36,7 @@ class RemoteServerConfigurationFile:
|
|
|
36
36
|
# * 10 requests/minute: '10-M'
|
|
37
37
|
# * 1000 requests/hour: '1000-H'
|
|
38
38
|
# * 2000 requests/day: '2000-D'
|
|
39
|
+
"auto_update_policy": None, # string - Auto update policy ['manual_trigger', 'critical_only', 'always', 'never'] (default critical_only)
|
|
39
40
|
"api_token": None, # string - Files Agent API Token
|
|
40
41
|
"port": None, # int64 - Incoming port for files agent connections
|
|
41
42
|
"hostname": None, # string
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
import builtins # noqa: F401
|
|
2
|
+
from files_sdk.api import Api # noqa: F401
|
|
3
|
+
from files_sdk.list_obj import ListObj
|
|
4
|
+
from files_sdk.error import ( # noqa: F401
|
|
5
|
+
InvalidParameterError,
|
|
6
|
+
MissingParameterError,
|
|
7
|
+
NotImplementedError,
|
|
8
|
+
)
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class ScimLog:
    """Read-only model for one SCIM request log entry returned by the API."""

    # Known attribute names mapped to their defaults; the trailing comments
    # record the API-declared type of each field.
    default_attributes = {
        "id": None,  # int64 - The unique ID of this SCIM request.
        "created_at": None,  # string - The date and time when this SCIM request occurred.
        "request_path": None,  # string - The path portion of the URL requested.
        "request_method": None,  # string - The HTTP method used for this request.
        "http_response_code": None,  # string - The HTTP response code returned for this request.
        "user_agent": None,  # string - The User-Agent header sent with the request.
        "request_json": None,  # string - The JSON payload sent with the request.
        "response_json": None,  # string - The JSON payload returned in the response.
    }

    def __init__(self, attributes=None, options=None):
        """Build a ScimLog from an attribute dict; non-dict inputs act as empty."""
        attributes = attributes if isinstance(attributes, dict) else {}
        options = options if isinstance(options, dict) else {}
        self.set_attributes(attributes)
        self.options = options

    def set_attributes(self, attributes):
        """Copy every known attribute onto the instance, defaulting to None."""
        for name, fallback in ScimLog.default_attributes.items():
            setattr(self, name, attributes.get(name, fallback))

    def get_attributes(self):
        """Return a dict of the attributes that are currently set (non-None)."""
        populated = {}
        for name in ScimLog.default_attributes:
            value = getattr(self, name, None)
            if value is not None:
                populated[name] = value
        return populated
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
# Parameters:
#   cursor - string - Used for pagination. When a list request has more records available, cursors are provided in the response headers `X-Files-Cursor-Next` and `X-Files-Cursor-Prev`. Send one of those cursor value here to resume an existing list from the next available record. Note: many of our SDKs have iterator methods that will automatically handle cursor-based pagination.
#   per_page - int64 - Number of records to show per page. (Max: 10,000, 1,000 or less is recommended).
#   sort_by - object - If set, sort records by the specified field in either `asc` or `desc` direction. Valid fields are `created_at`.
def list(params=None, options=None):
    """List SCIM request logs; returns a lazily-paginated ListObj."""
    params = params if isinstance(params, dict) else {}
    options = options if isinstance(options, dict) else {}
    # Type-check the optional parameters before building the request.
    checks = (
        ("cursor", str, "str"),
        ("per_page", int, "int"),
        ("sort_by", dict, "dict"),
    )
    for key, expected_type, type_label in checks:
        if key in params and not isinstance(params[key], expected_type):
            raise InvalidParameterError(
                "Bad parameter: {key} must be an {label}".format(
                    key=key, label=type_label
                )
            )
    return ListObj(ScimLog, "GET", "/scim_logs", params, options)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def all(params=None, options=None):
    """Alias for ``list``.

    Bug fix: the original body called ``list(params, options)`` but discarded
    its result, so ``all()`` always returned None instead of the ListObj that
    ``list()`` produces. Returning the value is backward-compatible.
    """
    return list(params, options)
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
# Parameters:
#   id (required) - int64 - Scim Log ID.
def find(id, params=None, options=None):
    """Fetch a single SCIM log entry via GET /scim_logs/{id}."""
    if not isinstance(params, dict):
        params = {}
    if not isinstance(options, dict):
        options = {}
    params["id"] = id
    # "id" was just assigned above, so in practice only the type check can fire;
    # both guards are kept to match the generated-code contract exactly.
    if "id" in params and not isinstance(params["id"], int):
        raise InvalidParameterError("Bad parameter: id must be an int")
    if "id" not in params:
        raise MissingParameterError("Parameter missing: id")
    path = "/scim_logs/{id}".format(id=params["id"])
    response, options = Api.send_request("GET", path, params, options)
    return ScimLog(response.data, options)
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def get(id, params=None, options=None):
    """Alias for ``find``.

    Bug fix: the original body called ``find(id, params, options)`` but
    discarded its result, so ``get()`` always returned None instead of the
    ScimLog that ``find()`` produces. Returning the value is
    backward-compatible.
    """
    return find(id, params, options)
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def new(*args, **kwargs):
    """Factory helper mirroring the ``new`` function on other model modules."""
    return ScimLog(*args, **kwargs)
|