pulumi-gcp 8.28.0a1745598508-py3-none-any.whl → 8.29.0a1746076904-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- pulumi_gcp/__init__.py +88 -0
- pulumi_gcp/accesscontextmanager/_inputs.py +399 -0
- pulumi_gcp/accesscontextmanager/gcp_user_access_binding.py +54 -0
- pulumi_gcp/accesscontextmanager/outputs.py +369 -0
- pulumi_gcp/alloydb/_inputs.py +47 -1
- pulumi_gcp/alloydb/outputs.py +61 -3
- pulumi_gcp/apigee/keystores_aliases_self_signed_cert.py +8 -2
- pulumi_gcp/applicationintegration/client.py +34 -12
- pulumi_gcp/artifactregistry/outputs.py +2 -2
- pulumi_gcp/certificateauthority/_inputs.py +32 -0
- pulumi_gcp/certificateauthority/ca_pool.py +2 -0
- pulumi_gcp/certificateauthority/outputs.py +22 -0
- pulumi_gcp/clouddeploy/_inputs.py +0 -6
- pulumi_gcp/clouddeploy/delivery_pipeline.py +84 -77
- pulumi_gcp/clouddeploy/outputs.py +0 -4
- pulumi_gcp/cloudrunv2/_inputs.py +3 -3
- pulumi_gcp/cloudrunv2/outputs.py +4 -4
- pulumi_gcp/colab/runtime_template.py +3 -3
- pulumi_gcp/compute/__init__.py +7 -0
- pulumi_gcp/compute/_inputs.py +809 -1
- pulumi_gcp/compute/backend_service.py +89 -0
- pulumi_gcp/compute/firewall_policy_with_rules.py +2 -2
- pulumi_gcp/compute/get_backend_service.py +12 -1
- pulumi_gcp/compute/get_storage_pool_iam_policy.py +183 -0
- pulumi_gcp/compute/get_storage_pool_types.py +268 -0
- pulumi_gcp/compute/outputs.py +743 -3
- pulumi_gcp/compute/resource_policy_attachment.py +476 -0
- pulumi_gcp/compute/storage_pool.py +1045 -0
- pulumi_gcp/compute/storage_pool_iam_binding.py +1088 -0
- pulumi_gcp/compute/storage_pool_iam_member.py +1088 -0
- pulumi_gcp/compute/storage_pool_iam_policy.py +907 -0
- pulumi_gcp/config/__init__.pyi +2 -0
- pulumi_gcp/config/vars.py +4 -0
- pulumi_gcp/container/_inputs.py +63 -3
- pulumi_gcp/container/outputs.py +70 -6
- pulumi_gcp/dataproc/get_metastore_service.py +12 -1
- pulumi_gcp/dataproc/metastore_service.py +61 -0
- pulumi_gcp/datastream/connection_profile.py +40 -0
- pulumi_gcp/discoveryengine/_inputs.py +38 -0
- pulumi_gcp/discoveryengine/chat_engine.py +6 -4
- pulumi_gcp/discoveryengine/outputs.py +26 -0
- pulumi_gcp/firebaserules/_inputs.py +6 -6
- pulumi_gcp/firebaserules/outputs.py +4 -4
- pulumi_gcp/gkebackup/__init__.py +2 -0
- pulumi_gcp/gkebackup/backup_channel.py +737 -0
- pulumi_gcp/gkebackup/restore_channel.py +737 -0
- pulumi_gcp/gkehub/_inputs.py +208 -0
- pulumi_gcp/gkehub/outputs.py +171 -1
- pulumi_gcp/healthcare/pipeline_job.py +2 -2
- pulumi_gcp/iap/__init__.py +4 -0
- pulumi_gcp/iap/_inputs.py +130 -0
- pulumi_gcp/iap/get_web_cloud_run_service_iam_policy.py +183 -0
- pulumi_gcp/iap/outputs.py +76 -0
- pulumi_gcp/iap/web_cloud_run_service_iam_binding.py +1089 -0
- pulumi_gcp/iap/web_cloud_run_service_iam_member.py +1089 -0
- pulumi_gcp/iap/web_cloud_run_service_iam_policy.py +908 -0
- pulumi_gcp/kms/key_ring_import_job.py +7 -7
- pulumi_gcp/managedkafka/connect_cluster.py +8 -4
- pulumi_gcp/managedkafka/connector.py +8 -4
- pulumi_gcp/monitoring/uptime_check_config.py +49 -0
- pulumi_gcp/networkconnectivity/internal_range.py +82 -0
- pulumi_gcp/projects/_inputs.py +0 -6
- pulumi_gcp/projects/api_key.py +28 -0
- pulumi_gcp/projects/outputs.py +0 -4
- pulumi_gcp/provider.py +20 -0
- pulumi_gcp/pulumi-plugin.json +1 -1
- pulumi_gcp/redis/get_instance.py +12 -1
- pulumi_gcp/redis/instance.py +61 -0
- pulumi_gcp/sql/_inputs.py +86 -6
- pulumi_gcp/sql/outputs.py +156 -12
- pulumi_gcp/storage/__init__.py +1 -0
- pulumi_gcp/storage/_inputs.py +457 -0
- pulumi_gcp/storage/batch_operations_job.py +776 -0
- pulumi_gcp/storage/outputs.py +403 -0
- {pulumi_gcp-8.28.0a1745598508.dist-info → pulumi_gcp-8.29.0a1746076904.dist-info}/METADATA +1 -1
- {pulumi_gcp-8.28.0a1745598508.dist-info → pulumi_gcp-8.29.0a1746076904.dist-info}/RECORD +78 -64
- {pulumi_gcp-8.28.0a1745598508.dist-info → pulumi_gcp-8.29.0a1746076904.dist-info}/WHEEL +1 -1
- {pulumi_gcp-8.28.0a1745598508.dist-info → pulumi_gcp-8.29.0a1746076904.dist-info}/top_level.txt +0 -0
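
The largest single addition in this release is the new `gcp.storage.BatchOperationsJob` resource; the hunk below is the generated `pulumi_gcp/storage/batch_operations_job.py` module (+776 lines). As a quick orientation before the generated code, here is a minimal sketch of declaring such a job, condensed from the example embedded in the resource's own docstring further down; the bucket name, object prefix, and metadata key/value are the placeholders used by that example.

```python
import pulumi_gcp as gcp

# Bucket whose objects the batch job will transform (name is a placeholder).
bucket = gcp.storage.Bucket("bucket",
    name="tf-sample-bucket",
    location="us-central1",
    force_destroy=True)

# Batch job that stamps custom metadata on every object under the "bkt" prefix.
job = gcp.storage.BatchOperationsJob("tf-job",
    job_id="tf-job",
    bucket_list={
        "buckets": {
            "bucket": bucket.name,
            "prefix_list": {"included_object_prefixes": ["bkt"]},
        },
    },
    put_metadata={"custom_metadata": {"key": "value"}},
    delete_protection=False)
```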
@@ -0,0 +1,776 @@
+# coding=utf-8
+# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
+# *** Do not edit by hand unless you're certain you know what you are doing! ***
+
+import builtins
+import copy
+import warnings
+import sys
+import pulumi
+import pulumi.runtime
+from typing import Any, Mapping, Optional, Sequence, Union, overload
+if sys.version_info >= (3, 11):
+    from typing import NotRequired, TypedDict, TypeAlias
+else:
+    from typing_extensions import NotRequired, TypedDict, TypeAlias
+from .. import _utilities
+from . import outputs
+from ._inputs import *
+
+__all__ = ['BatchOperationsJobArgs', 'BatchOperationsJob']
+
+@pulumi.input_type
+class BatchOperationsJobArgs:
+    def __init__(__self__, *,
+                 bucket_list: Optional[pulumi.Input['BatchOperationsJobBucketListArgs']] = None,
+                 delete_object: Optional[pulumi.Input['BatchOperationsJobDeleteObjectArgs']] = None,
+                 delete_protection: Optional[pulumi.Input[builtins.bool]] = None,
+                 job_id: Optional[pulumi.Input[builtins.str]] = None,
+                 project: Optional[pulumi.Input[builtins.str]] = None,
+                 put_metadata: Optional[pulumi.Input['BatchOperationsJobPutMetadataArgs']] = None,
+                 put_object_hold: Optional[pulumi.Input['BatchOperationsJobPutObjectHoldArgs']] = None,
+                 rewrite_object: Optional[pulumi.Input['BatchOperationsJobRewriteObjectArgs']] = None):
+        """
+        The set of arguments for constructing a BatchOperationsJob resource.
+        :param pulumi.Input['BatchOperationsJobBucketListArgs'] bucket_list: List of buckets and their objects to be transformed. Currently, only one bucket configuration is supported. If multiple buckets are specified, an error will be returned
+               Structure is documented below.
+        :param pulumi.Input['BatchOperationsJobDeleteObjectArgs'] delete_object: allows batch operations to delete objects in bucket
+               Structure is documented below.
+        :param pulumi.Input[builtins.bool] delete_protection: If set to `true`, the storage batch operation job will not be deleted and new job will be created.
+        :param pulumi.Input[builtins.str] job_id: The ID of the job.
+        :param pulumi.Input[builtins.str] project: The ID of the project in which the resource belongs.
+               If it is not provided, the provider project is used.
+        :param pulumi.Input['BatchOperationsJobPutMetadataArgs'] put_metadata: allows batch operations to update metadata for objects in bucket
+               Structure is documented below.
+        :param pulumi.Input['BatchOperationsJobPutObjectHoldArgs'] put_object_hold: allows to update temporary hold or eventBased hold for objects in bucket.
+               Structure is documented below.
+        :param pulumi.Input['BatchOperationsJobRewriteObjectArgs'] rewrite_object: allows to update encryption key for objects in bucket.
+               Structure is documented below.
+        """
+        if bucket_list is not None:
+            pulumi.set(__self__, "bucket_list", bucket_list)
+        if delete_object is not None:
+            pulumi.set(__self__, "delete_object", delete_object)
+        if delete_protection is not None:
+            pulumi.set(__self__, "delete_protection", delete_protection)
+        if job_id is not None:
+            pulumi.set(__self__, "job_id", job_id)
+        if project is not None:
+            pulumi.set(__self__, "project", project)
+        if put_metadata is not None:
+            pulumi.set(__self__, "put_metadata", put_metadata)
+        if put_object_hold is not None:
+            pulumi.set(__self__, "put_object_hold", put_object_hold)
+        if rewrite_object is not None:
+            pulumi.set(__self__, "rewrite_object", rewrite_object)
+
+    @property
+    @pulumi.getter(name="bucketList")
+    def bucket_list(self) -> Optional[pulumi.Input['BatchOperationsJobBucketListArgs']]:
+        """
+        List of buckets and their objects to be transformed. Currently, only one bucket configuration is supported. If multiple buckets are specified, an error will be returned
+        Structure is documented below.
+        """
+        return pulumi.get(self, "bucket_list")
+
+    @bucket_list.setter
+    def bucket_list(self, value: Optional[pulumi.Input['BatchOperationsJobBucketListArgs']]):
+        pulumi.set(self, "bucket_list", value)
+
+    @property
+    @pulumi.getter(name="deleteObject")
+    def delete_object(self) -> Optional[pulumi.Input['BatchOperationsJobDeleteObjectArgs']]:
+        """
+        allows batch operations to delete objects in bucket
+        Structure is documented below.
+        """
+        return pulumi.get(self, "delete_object")
+
+    @delete_object.setter
+    def delete_object(self, value: Optional[pulumi.Input['BatchOperationsJobDeleteObjectArgs']]):
+        pulumi.set(self, "delete_object", value)
+
+    @property
+    @pulumi.getter(name="deleteProtection")
+    def delete_protection(self) -> Optional[pulumi.Input[builtins.bool]]:
+        """
+        If set to `true`, the storage batch operation job will not be deleted and new job will be created.
+        """
+        return pulumi.get(self, "delete_protection")
+
+    @delete_protection.setter
+    def delete_protection(self, value: Optional[pulumi.Input[builtins.bool]]):
+        pulumi.set(self, "delete_protection", value)
+
+    @property
+    @pulumi.getter(name="jobId")
+    def job_id(self) -> Optional[pulumi.Input[builtins.str]]:
+        """
+        The ID of the job.
+        """
+        return pulumi.get(self, "job_id")
+
+    @job_id.setter
+    def job_id(self, value: Optional[pulumi.Input[builtins.str]]):
+        pulumi.set(self, "job_id", value)
+
+    @property
+    @pulumi.getter
+    def project(self) -> Optional[pulumi.Input[builtins.str]]:
+        """
+        The ID of the project in which the resource belongs.
+        If it is not provided, the provider project is used.
+        """
+        return pulumi.get(self, "project")
+
+    @project.setter
+    def project(self, value: Optional[pulumi.Input[builtins.str]]):
+        pulumi.set(self, "project", value)
+
+    @property
+    @pulumi.getter(name="putMetadata")
+    def put_metadata(self) -> Optional[pulumi.Input['BatchOperationsJobPutMetadataArgs']]:
+        """
+        allows batch operations to update metadata for objects in bucket
+        Structure is documented below.
+        """
+        return pulumi.get(self, "put_metadata")
+
+    @put_metadata.setter
+    def put_metadata(self, value: Optional[pulumi.Input['BatchOperationsJobPutMetadataArgs']]):
+        pulumi.set(self, "put_metadata", value)
+
+    @property
+    @pulumi.getter(name="putObjectHold")
+    def put_object_hold(self) -> Optional[pulumi.Input['BatchOperationsJobPutObjectHoldArgs']]:
+        """
+        allows to update temporary hold or eventBased hold for objects in bucket.
+        Structure is documented below.
+        """
+        return pulumi.get(self, "put_object_hold")
+
+    @put_object_hold.setter
+    def put_object_hold(self, value: Optional[pulumi.Input['BatchOperationsJobPutObjectHoldArgs']]):
+        pulumi.set(self, "put_object_hold", value)
+
+    @property
+    @pulumi.getter(name="rewriteObject")
+    def rewrite_object(self) -> Optional[pulumi.Input['BatchOperationsJobRewriteObjectArgs']]:
+        """
+        allows to update encryption key for objects in bucket.
+        Structure is documented below.
+        """
+        return pulumi.get(self, "rewrite_object")
+
+    @rewrite_object.setter
+    def rewrite_object(self, value: Optional[pulumi.Input['BatchOperationsJobRewriteObjectArgs']]):
+        pulumi.set(self, "rewrite_object", value)
+
+
+@pulumi.input_type
+class _BatchOperationsJobState:
+    def __init__(__self__, *,
+                 bucket_list: Optional[pulumi.Input['BatchOperationsJobBucketListArgs']] = None,
+                 complete_time: Optional[pulumi.Input[builtins.str]] = None,
+                 create_time: Optional[pulumi.Input[builtins.str]] = None,
+                 delete_object: Optional[pulumi.Input['BatchOperationsJobDeleteObjectArgs']] = None,
+                 delete_protection: Optional[pulumi.Input[builtins.bool]] = None,
+                 job_id: Optional[pulumi.Input[builtins.str]] = None,
+                 project: Optional[pulumi.Input[builtins.str]] = None,
+                 put_metadata: Optional[pulumi.Input['BatchOperationsJobPutMetadataArgs']] = None,
+                 put_object_hold: Optional[pulumi.Input['BatchOperationsJobPutObjectHoldArgs']] = None,
+                 rewrite_object: Optional[pulumi.Input['BatchOperationsJobRewriteObjectArgs']] = None,
+                 schedule_time: Optional[pulumi.Input[builtins.str]] = None,
+                 state: Optional[pulumi.Input[builtins.str]] = None,
+                 update_time: Optional[pulumi.Input[builtins.str]] = None):
+        """
+        Input properties used for looking up and filtering BatchOperationsJob resources.
+        :param pulumi.Input['BatchOperationsJobBucketListArgs'] bucket_list: List of buckets and their objects to be transformed. Currently, only one bucket configuration is supported. If multiple buckets are specified, an error will be returned
+               Structure is documented below.
+        :param pulumi.Input[builtins.str] complete_time: The time that the job was completed.
+        :param pulumi.Input[builtins.str] create_time: The timestamp at which this storage batch operation was created.
+        :param pulumi.Input['BatchOperationsJobDeleteObjectArgs'] delete_object: allows batch operations to delete objects in bucket
+               Structure is documented below.
+        :param pulumi.Input[builtins.bool] delete_protection: If set to `true`, the storage batch operation job will not be deleted and new job will be created.
+        :param pulumi.Input[builtins.str] job_id: The ID of the job.
+        :param pulumi.Input[builtins.str] project: The ID of the project in which the resource belongs.
+               If it is not provided, the provider project is used.
+        :param pulumi.Input['BatchOperationsJobPutMetadataArgs'] put_metadata: allows batch operations to update metadata for objects in bucket
+               Structure is documented below.
+        :param pulumi.Input['BatchOperationsJobPutObjectHoldArgs'] put_object_hold: allows to update temporary hold or eventBased hold for objects in bucket.
+               Structure is documented below.
+        :param pulumi.Input['BatchOperationsJobRewriteObjectArgs'] rewrite_object: allows to update encryption key for objects in bucket.
+               Structure is documented below.
+        :param pulumi.Input[builtins.str] schedule_time: The time that the job was scheduled.
+        :param pulumi.Input[builtins.str] state: State of the job.
+        :param pulumi.Input[builtins.str] update_time: The timestamp at which this storage batch operation was most recently updated.
+        """
+        if bucket_list is not None:
+            pulumi.set(__self__, "bucket_list", bucket_list)
+        if complete_time is not None:
+            pulumi.set(__self__, "complete_time", complete_time)
+        if create_time is not None:
+            pulumi.set(__self__, "create_time", create_time)
+        if delete_object is not None:
+            pulumi.set(__self__, "delete_object", delete_object)
+        if delete_protection is not None:
+            pulumi.set(__self__, "delete_protection", delete_protection)
+        if job_id is not None:
+            pulumi.set(__self__, "job_id", job_id)
+        if project is not None:
+            pulumi.set(__self__, "project", project)
+        if put_metadata is not None:
+            pulumi.set(__self__, "put_metadata", put_metadata)
+        if put_object_hold is not None:
+            pulumi.set(__self__, "put_object_hold", put_object_hold)
+        if rewrite_object is not None:
+            pulumi.set(__self__, "rewrite_object", rewrite_object)
+        if schedule_time is not None:
+            pulumi.set(__self__, "schedule_time", schedule_time)
+        if state is not None:
+            pulumi.set(__self__, "state", state)
+        if update_time is not None:
+            pulumi.set(__self__, "update_time", update_time)
+
+    @property
+    @pulumi.getter(name="bucketList")
+    def bucket_list(self) -> Optional[pulumi.Input['BatchOperationsJobBucketListArgs']]:
+        """
+        List of buckets and their objects to be transformed. Currently, only one bucket configuration is supported. If multiple buckets are specified, an error will be returned
+        Structure is documented below.
+        """
+        return pulumi.get(self, "bucket_list")
+
+    @bucket_list.setter
+    def bucket_list(self, value: Optional[pulumi.Input['BatchOperationsJobBucketListArgs']]):
+        pulumi.set(self, "bucket_list", value)
+
+    @property
+    @pulumi.getter(name="completeTime")
+    def complete_time(self) -> Optional[pulumi.Input[builtins.str]]:
+        """
+        The time that the job was completed.
+        """
+        return pulumi.get(self, "complete_time")
+
+    @complete_time.setter
+    def complete_time(self, value: Optional[pulumi.Input[builtins.str]]):
+        pulumi.set(self, "complete_time", value)
+
+    @property
+    @pulumi.getter(name="createTime")
+    def create_time(self) -> Optional[pulumi.Input[builtins.str]]:
+        """
+        The timestamp at which this storage batch operation was created.
+        """
+        return pulumi.get(self, "create_time")
+
+    @create_time.setter
+    def create_time(self, value: Optional[pulumi.Input[builtins.str]]):
+        pulumi.set(self, "create_time", value)
+
+    @property
+    @pulumi.getter(name="deleteObject")
+    def delete_object(self) -> Optional[pulumi.Input['BatchOperationsJobDeleteObjectArgs']]:
+        """
+        allows batch operations to delete objects in bucket
+        Structure is documented below.
+        """
+        return pulumi.get(self, "delete_object")
+
+    @delete_object.setter
+    def delete_object(self, value: Optional[pulumi.Input['BatchOperationsJobDeleteObjectArgs']]):
+        pulumi.set(self, "delete_object", value)
+
+    @property
+    @pulumi.getter(name="deleteProtection")
+    def delete_protection(self) -> Optional[pulumi.Input[builtins.bool]]:
+        """
+        If set to `true`, the storage batch operation job will not be deleted and new job will be created.
+        """
+        return pulumi.get(self, "delete_protection")
+
+    @delete_protection.setter
+    def delete_protection(self, value: Optional[pulumi.Input[builtins.bool]]):
+        pulumi.set(self, "delete_protection", value)
+
+    @property
+    @pulumi.getter(name="jobId")
+    def job_id(self) -> Optional[pulumi.Input[builtins.str]]:
+        """
+        The ID of the job.
+        """
+        return pulumi.get(self, "job_id")
+
+    @job_id.setter
+    def job_id(self, value: Optional[pulumi.Input[builtins.str]]):
+        pulumi.set(self, "job_id", value)
+
+    @property
+    @pulumi.getter
+    def project(self) -> Optional[pulumi.Input[builtins.str]]:
+        """
+        The ID of the project in which the resource belongs.
+        If it is not provided, the provider project is used.
+        """
+        return pulumi.get(self, "project")
+
+    @project.setter
+    def project(self, value: Optional[pulumi.Input[builtins.str]]):
+        pulumi.set(self, "project", value)
+
+    @property
+    @pulumi.getter(name="putMetadata")
+    def put_metadata(self) -> Optional[pulumi.Input['BatchOperationsJobPutMetadataArgs']]:
+        """
+        allows batch operations to update metadata for objects in bucket
+        Structure is documented below.
+        """
+        return pulumi.get(self, "put_metadata")
+
+    @put_metadata.setter
+    def put_metadata(self, value: Optional[pulumi.Input['BatchOperationsJobPutMetadataArgs']]):
+        pulumi.set(self, "put_metadata", value)
+
+    @property
+    @pulumi.getter(name="putObjectHold")
+    def put_object_hold(self) -> Optional[pulumi.Input['BatchOperationsJobPutObjectHoldArgs']]:
+        """
+        allows to update temporary hold or eventBased hold for objects in bucket.
+        Structure is documented below.
+        """
+        return pulumi.get(self, "put_object_hold")
+
+    @put_object_hold.setter
+    def put_object_hold(self, value: Optional[pulumi.Input['BatchOperationsJobPutObjectHoldArgs']]):
+        pulumi.set(self, "put_object_hold", value)
+
+    @property
+    @pulumi.getter(name="rewriteObject")
+    def rewrite_object(self) -> Optional[pulumi.Input['BatchOperationsJobRewriteObjectArgs']]:
+        """
+        allows to update encryption key for objects in bucket.
+        Structure is documented below.
+        """
+        return pulumi.get(self, "rewrite_object")
+
+    @rewrite_object.setter
+    def rewrite_object(self, value: Optional[pulumi.Input['BatchOperationsJobRewriteObjectArgs']]):
+        pulumi.set(self, "rewrite_object", value)
+
+    @property
+    @pulumi.getter(name="scheduleTime")
+    def schedule_time(self) -> Optional[pulumi.Input[builtins.str]]:
+        """
+        The time that the job was scheduled.
+        """
+        return pulumi.get(self, "schedule_time")
+
+    @schedule_time.setter
+    def schedule_time(self, value: Optional[pulumi.Input[builtins.str]]):
+        pulumi.set(self, "schedule_time", value)
+
+    @property
+    @pulumi.getter
+    def state(self) -> Optional[pulumi.Input[builtins.str]]:
+        """
+        State of the job.
+        """
+        return pulumi.get(self, "state")
+
+    @state.setter
+    def state(self, value: Optional[pulumi.Input[builtins.str]]):
+        pulumi.set(self, "state", value)
+
+    @property
+    @pulumi.getter(name="updateTime")
+    def update_time(self) -> Optional[pulumi.Input[builtins.str]]:
+        """
+        The timestamp at which this storage batch operation was most recently updated.
+        """
+        return pulumi.get(self, "update_time")
+
+    @update_time.setter
+    def update_time(self, value: Optional[pulumi.Input[builtins.str]]):
+        pulumi.set(self, "update_time", value)
+
+
+class BatchOperationsJob(pulumi.CustomResource):
+    @overload
+    def __init__(__self__,
+                 resource_name: str,
+                 opts: Optional[pulumi.ResourceOptions] = None,
+                 bucket_list: Optional[pulumi.Input[Union['BatchOperationsJobBucketListArgs', 'BatchOperationsJobBucketListArgsDict']]] = None,
+                 delete_object: Optional[pulumi.Input[Union['BatchOperationsJobDeleteObjectArgs', 'BatchOperationsJobDeleteObjectArgsDict']]] = None,
+                 delete_protection: Optional[pulumi.Input[builtins.bool]] = None,
+                 job_id: Optional[pulumi.Input[builtins.str]] = None,
+                 project: Optional[pulumi.Input[builtins.str]] = None,
+                 put_metadata: Optional[pulumi.Input[Union['BatchOperationsJobPutMetadataArgs', 'BatchOperationsJobPutMetadataArgsDict']]] = None,
+                 put_object_hold: Optional[pulumi.Input[Union['BatchOperationsJobPutObjectHoldArgs', 'BatchOperationsJobPutObjectHoldArgsDict']]] = None,
+                 rewrite_object: Optional[pulumi.Input[Union['BatchOperationsJobRewriteObjectArgs', 'BatchOperationsJobRewriteObjectArgsDict']]] = None,
+                 __props__=None):
+        """
+        Storage Batch Operations (SBO) is a Cloud Storage management feature that offers a
+        seamless experience to perform single batch operations on millions of GCS objects in a
+        serverless manner.
+
+        ## Example Usage
+
+        ### Storage Batch Operations
+
+        ```python
+        import pulumi
+        import pulumi_gcp as gcp
+
+        bucket = gcp.storage.Bucket("bucket",
+            name="tf-sample-bucket",
+            location="us-central1",
+            force_destroy=True)
+        tf_job = gcp.storage.BatchOperationsJob("tf-job",
+            job_id="tf-job",
+            bucket_list={
+                "buckets": {
+                    "bucket": bucket.name,
+                    "prefix_list": {
+                        "included_object_prefixes": ["bkt"],
+                    },
+                },
+            },
+            put_metadata={
+                "custom_metadata": {
+                    "key": "value",
+                },
+            },
+            delete_protection=False)
+        ```
+
+        ## Import
+
+        Job can be imported using any of these accepted formats:
+
+        * `projects/{{project}}/locations/global/jobs/{{job_id}}`
+
+        * `{{project}}/{{job_id}}`
+
+        * `{{job_id}}`
+
+        When using the `pulumi import` command, Job can be imported using one of the formats above. For example:
+
+        ```sh
+        $ pulumi import gcp:storage/batchOperationsJob:BatchOperationsJob default projects/{{project}}/locations/global/jobs/{{job_id}}
+        ```
+
+        ```sh
+        $ pulumi import gcp:storage/batchOperationsJob:BatchOperationsJob default {{project}}/{{job_id}}
+        ```
+
+        ```sh
+        $ pulumi import gcp:storage/batchOperationsJob:BatchOperationsJob default {{job_id}}
+        ```
+
+        :param str resource_name: The name of the resource.
+        :param pulumi.ResourceOptions opts: Options for the resource.
+        :param pulumi.Input[Union['BatchOperationsJobBucketListArgs', 'BatchOperationsJobBucketListArgsDict']] bucket_list: List of buckets and their objects to be transformed. Currently, only one bucket configuration is supported. If multiple buckets are specified, an error will be returned
+               Structure is documented below.
+        :param pulumi.Input[Union['BatchOperationsJobDeleteObjectArgs', 'BatchOperationsJobDeleteObjectArgsDict']] delete_object: allows batch operations to delete objects in bucket
+               Structure is documented below.
+        :param pulumi.Input[builtins.bool] delete_protection: If set to `true`, the storage batch operation job will not be deleted and new job will be created.
+        :param pulumi.Input[builtins.str] job_id: The ID of the job.
+        :param pulumi.Input[builtins.str] project: The ID of the project in which the resource belongs.
+               If it is not provided, the provider project is used.
+        :param pulumi.Input[Union['BatchOperationsJobPutMetadataArgs', 'BatchOperationsJobPutMetadataArgsDict']] put_metadata: allows batch operations to update metadata for objects in bucket
+               Structure is documented below.
+        :param pulumi.Input[Union['BatchOperationsJobPutObjectHoldArgs', 'BatchOperationsJobPutObjectHoldArgsDict']] put_object_hold: allows to update temporary hold or eventBased hold for objects in bucket.
+               Structure is documented below.
+        :param pulumi.Input[Union['BatchOperationsJobRewriteObjectArgs', 'BatchOperationsJobRewriteObjectArgsDict']] rewrite_object: allows to update encryption key for objects in bucket.
+               Structure is documented below.
+        """
+        ...
+    @overload
+    def __init__(__self__,
+                 resource_name: str,
+                 args: Optional[BatchOperationsJobArgs] = None,
+                 opts: Optional[pulumi.ResourceOptions] = None):
+        """
+        Storage Batch Operations (SBO) is a Cloud Storage management feature that offers a
+        seamless experience to perform single batch operations on millions of GCS objects in a
+        serverless manner.
+
+        ## Example Usage
+
+        ### Storage Batch Operations
+
+        ```python
+        import pulumi
+        import pulumi_gcp as gcp
+
+        bucket = gcp.storage.Bucket("bucket",
+            name="tf-sample-bucket",
+            location="us-central1",
+            force_destroy=True)
+        tf_job = gcp.storage.BatchOperationsJob("tf-job",
+            job_id="tf-job",
+            bucket_list={
+                "buckets": {
+                    "bucket": bucket.name,
+                    "prefix_list": {
+                        "included_object_prefixes": ["bkt"],
+                    },
+                },
+            },
+            put_metadata={
+                "custom_metadata": {
+                    "key": "value",
+                },
+            },
+            delete_protection=False)
+        ```
+
+        ## Import
+
+        Job can be imported using any of these accepted formats:
+
+        * `projects/{{project}}/locations/global/jobs/{{job_id}}`
+
+        * `{{project}}/{{job_id}}`
+
+        * `{{job_id}}`
+
+        When using the `pulumi import` command, Job can be imported using one of the formats above. For example:
+
+        ```sh
+        $ pulumi import gcp:storage/batchOperationsJob:BatchOperationsJob default projects/{{project}}/locations/global/jobs/{{job_id}}
+        ```
+
+        ```sh
+        $ pulumi import gcp:storage/batchOperationsJob:BatchOperationsJob default {{project}}/{{job_id}}
+        ```
+
+        ```sh
+        $ pulumi import gcp:storage/batchOperationsJob:BatchOperationsJob default {{job_id}}
+        ```
+
+        :param str resource_name: The name of the resource.
+        :param BatchOperationsJobArgs args: The arguments to use to populate this resource's properties.
+        :param pulumi.ResourceOptions opts: Options for the resource.
+        """
+        ...
+    def __init__(__self__, resource_name: str, *args, **kwargs):
+        resource_args, opts = _utilities.get_resource_args_opts(BatchOperationsJobArgs, pulumi.ResourceOptions, *args, **kwargs)
+        if resource_args is not None:
+            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
+        else:
+            __self__._internal_init(resource_name, *args, **kwargs)
+
+    def _internal_init(__self__,
+                 resource_name: str,
+                 opts: Optional[pulumi.ResourceOptions] = None,
+                 bucket_list: Optional[pulumi.Input[Union['BatchOperationsJobBucketListArgs', 'BatchOperationsJobBucketListArgsDict']]] = None,
+                 delete_object: Optional[pulumi.Input[Union['BatchOperationsJobDeleteObjectArgs', 'BatchOperationsJobDeleteObjectArgsDict']]] = None,
+                 delete_protection: Optional[pulumi.Input[builtins.bool]] = None,
+                 job_id: Optional[pulumi.Input[builtins.str]] = None,
+                 project: Optional[pulumi.Input[builtins.str]] = None,
+                 put_metadata: Optional[pulumi.Input[Union['BatchOperationsJobPutMetadataArgs', 'BatchOperationsJobPutMetadataArgsDict']]] = None,
+                 put_object_hold: Optional[pulumi.Input[Union['BatchOperationsJobPutObjectHoldArgs', 'BatchOperationsJobPutObjectHoldArgsDict']]] = None,
+                 rewrite_object: Optional[pulumi.Input[Union['BatchOperationsJobRewriteObjectArgs', 'BatchOperationsJobRewriteObjectArgsDict']]] = None,
+                 __props__=None):
+        opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
+        if not isinstance(opts, pulumi.ResourceOptions):
+            raise TypeError('Expected resource options to be a ResourceOptions instance')
+        if opts.id is None:
+            if __props__ is not None:
+                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
+            __props__ = BatchOperationsJobArgs.__new__(BatchOperationsJobArgs)
+
+            __props__.__dict__["bucket_list"] = bucket_list
+            __props__.__dict__["delete_object"] = delete_object
+            __props__.__dict__["delete_protection"] = delete_protection
+            __props__.__dict__["job_id"] = job_id
+            __props__.__dict__["project"] = project
+            __props__.__dict__["put_metadata"] = put_metadata
+            __props__.__dict__["put_object_hold"] = put_object_hold
+            __props__.__dict__["rewrite_object"] = rewrite_object
+            __props__.__dict__["complete_time"] = None
+            __props__.__dict__["create_time"] = None
+            __props__.__dict__["schedule_time"] = None
+            __props__.__dict__["state"] = None
+            __props__.__dict__["update_time"] = None
+        super(BatchOperationsJob, __self__).__init__(
+            'gcp:storage/batchOperationsJob:BatchOperationsJob',
+            resource_name,
+            __props__,
+            opts)
+
+    @staticmethod
+    def get(resource_name: str,
+            id: pulumi.Input[str],
+            opts: Optional[pulumi.ResourceOptions] = None,
+            bucket_list: Optional[pulumi.Input[Union['BatchOperationsJobBucketListArgs', 'BatchOperationsJobBucketListArgsDict']]] = None,
+            complete_time: Optional[pulumi.Input[builtins.str]] = None,
+            create_time: Optional[pulumi.Input[builtins.str]] = None,
+            delete_object: Optional[pulumi.Input[Union['BatchOperationsJobDeleteObjectArgs', 'BatchOperationsJobDeleteObjectArgsDict']]] = None,
+            delete_protection: Optional[pulumi.Input[builtins.bool]] = None,
+            job_id: Optional[pulumi.Input[builtins.str]] = None,
+            project: Optional[pulumi.Input[builtins.str]] = None,
+            put_metadata: Optional[pulumi.Input[Union['BatchOperationsJobPutMetadataArgs', 'BatchOperationsJobPutMetadataArgsDict']]] = None,
+            put_object_hold: Optional[pulumi.Input[Union['BatchOperationsJobPutObjectHoldArgs', 'BatchOperationsJobPutObjectHoldArgsDict']]] = None,
+            rewrite_object: Optional[pulumi.Input[Union['BatchOperationsJobRewriteObjectArgs', 'BatchOperationsJobRewriteObjectArgsDict']]] = None,
+            schedule_time: Optional[pulumi.Input[builtins.str]] = None,
+            state: Optional[pulumi.Input[builtins.str]] = None,
+            update_time: Optional[pulumi.Input[builtins.str]] = None) -> 'BatchOperationsJob':
+        """
+        Get an existing BatchOperationsJob resource's state with the given name, id, and optional extra
+        properties used to qualify the lookup.
+
+        :param str resource_name: The unique name of the resulting resource.
+        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
+        :param pulumi.ResourceOptions opts: Options for the resource.
+        :param pulumi.Input[Union['BatchOperationsJobBucketListArgs', 'BatchOperationsJobBucketListArgsDict']] bucket_list: List of buckets and their objects to be transformed. Currently, only one bucket configuration is supported. If multiple buckets are specified, an error will be returned
+               Structure is documented below.
+        :param pulumi.Input[builtins.str] complete_time: The time that the job was completed.
+        :param pulumi.Input[builtins.str] create_time: The timestamp at which this storage batch operation was created.
+        :param pulumi.Input[Union['BatchOperationsJobDeleteObjectArgs', 'BatchOperationsJobDeleteObjectArgsDict']] delete_object: allows batch operations to delete objects in bucket
+               Structure is documented below.
+        :param pulumi.Input[builtins.bool] delete_protection: If set to `true`, the storage batch operation job will not be deleted and new job will be created.
+        :param pulumi.Input[builtins.str] job_id: The ID of the job.
+        :param pulumi.Input[builtins.str] project: The ID of the project in which the resource belongs.
+               If it is not provided, the provider project is used.
+        :param pulumi.Input[Union['BatchOperationsJobPutMetadataArgs', 'BatchOperationsJobPutMetadataArgsDict']] put_metadata: allows batch operations to update metadata for objects in bucket
+               Structure is documented below.
+        :param pulumi.Input[Union['BatchOperationsJobPutObjectHoldArgs', 'BatchOperationsJobPutObjectHoldArgsDict']] put_object_hold: allows to update temporary hold or eventBased hold for objects in bucket.
+               Structure is documented below.
+        :param pulumi.Input[Union['BatchOperationsJobRewriteObjectArgs', 'BatchOperationsJobRewriteObjectArgsDict']] rewrite_object: allows to update encryption key for objects in bucket.
+               Structure is documented below.
+        :param pulumi.Input[builtins.str] schedule_time: The time that the job was scheduled.
+        :param pulumi.Input[builtins.str] state: State of the job.
+        :param pulumi.Input[builtins.str] update_time: The timestamp at which this storage batch operation was most recently updated.
+        """
+        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
+
+        __props__ = _BatchOperationsJobState.__new__(_BatchOperationsJobState)
+
+        __props__.__dict__["bucket_list"] = bucket_list
+        __props__.__dict__["complete_time"] = complete_time
+        __props__.__dict__["create_time"] = create_time
+        __props__.__dict__["delete_object"] = delete_object
+        __props__.__dict__["delete_protection"] = delete_protection
+        __props__.__dict__["job_id"] = job_id
+        __props__.__dict__["project"] = project
+        __props__.__dict__["put_metadata"] = put_metadata
+        __props__.__dict__["put_object_hold"] = put_object_hold
+        __props__.__dict__["rewrite_object"] = rewrite_object
+        __props__.__dict__["schedule_time"] = schedule_time
+        __props__.__dict__["state"] = state
+        __props__.__dict__["update_time"] = update_time
+        return BatchOperationsJob(resource_name, opts=opts, __props__=__props__)
+
+    @property
+    @pulumi.getter(name="bucketList")
+    def bucket_list(self) -> pulumi.Output[Optional['outputs.BatchOperationsJobBucketList']]:
+        """
+        List of buckets and their objects to be transformed. Currently, only one bucket configuration is supported. If multiple buckets are specified, an error will be returned
+        Structure is documented below.
+        """
+        return pulumi.get(self, "bucket_list")
+
+    @property
+    @pulumi.getter(name="completeTime")
+    def complete_time(self) -> pulumi.Output[builtins.str]:
+        """
+        The time that the job was completed.
+        """
+        return pulumi.get(self, "complete_time")
+
+    @property
+    @pulumi.getter(name="createTime")
+    def create_time(self) -> pulumi.Output[builtins.str]:
+        """
+        The timestamp at which this storage batch operation was created.
+        """
+        return pulumi.get(self, "create_time")
+
+    @property
+    @pulumi.getter(name="deleteObject")
+    def delete_object(self) -> pulumi.Output[Optional['outputs.BatchOperationsJobDeleteObject']]:
+        """
+        allows batch operations to delete objects in bucket
+        Structure is documented below.
+        """
+        return pulumi.get(self, "delete_object")
+
+    @property
+    @pulumi.getter(name="deleteProtection")
+    def delete_protection(self) -> pulumi.Output[Optional[builtins.bool]]:
+        """
+        If set to `true`, the storage batch operation job will not be deleted and new job will be created.
+        """
+        return pulumi.get(self, "delete_protection")
+
+    @property
+    @pulumi.getter(name="jobId")
+    def job_id(self) -> pulumi.Output[Optional[builtins.str]]:
+        """
+        The ID of the job.
+        """
+        return pulumi.get(self, "job_id")
+
+    @property
+    @pulumi.getter
+    def project(self) -> pulumi.Output[builtins.str]:
+        """
+        The ID of the project in which the resource belongs.
+        If it is not provided, the provider project is used.
+        """
+        return pulumi.get(self, "project")
+
+    @property
+    @pulumi.getter(name="putMetadata")
+    def put_metadata(self) -> pulumi.Output[Optional['outputs.BatchOperationsJobPutMetadata']]:
+        """
+        allows batch operations to update metadata for objects in bucket
+        Structure is documented below.
+        """
+        return pulumi.get(self, "put_metadata")
+
+    @property
+    @pulumi.getter(name="putObjectHold")
+    def put_object_hold(self) -> pulumi.Output[Optional['outputs.BatchOperationsJobPutObjectHold']]:
+        """
+        allows to update temporary hold or eventBased hold for objects in bucket.
+        Structure is documented below.
+        """
+        return pulumi.get(self, "put_object_hold")
+
+    @property
+    @pulumi.getter(name="rewriteObject")
+    def rewrite_object(self) -> pulumi.Output[Optional['outputs.BatchOperationsJobRewriteObject']]:
+        """
+        allows to update encryption key for objects in bucket.
+        Structure is documented below.
+        """
+        return pulumi.get(self, "rewrite_object")
+
+    @property
+    @pulumi.getter(name="scheduleTime")
+    def schedule_time(self) -> pulumi.Output[builtins.str]:
+        """
+        The time that the job was scheduled.
+        """
+        return pulumi.get(self, "schedule_time")
+
+    @property
+    @pulumi.getter
+    def state(self) -> pulumi.Output[builtins.str]:
+        """
+        State of the job.
+        """
+        return pulumi.get(self, "state")
+
+    @property
+    @pulumi.getter(name="updateTime")
+    def update_time(self) -> pulumi.Output[builtins.str]:
+        """
+        The timestamp at which this storage batch operation was most recently updated.
+        """
+        return pulumi.get(self, "update_time")
+
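
For jobs created outside of Pulumi, the generated `get` classmethod above (or `pulumi import` with one of the ID formats listed in the docstring) reads an existing job's state into a stack. A minimal sketch, assuming a job named `tf-job` already exists in project `my-project` (both are placeholders):

```python
import pulumi
import pulumi_gcp as gcp

# Look up an existing Storage Batch Operations job by its full resource ID,
# using the same format as the first import ID shown in the generated docstring.
existing = gcp.storage.BatchOperationsJob.get(
    "existing-job",
    id="projects/my-project/locations/global/jobs/tf-job")

# Export a couple of the read-only fields exposed by the resource.
pulumi.export("job_state", existing.state)
pulumi.export("job_created", existing.create_time)
```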