pulumi-gcp 7.34.0a1722421695__py3-none-any.whl → 7.35.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
This version of pulumi-gcp has been flagged as a potentially problematic release; see the registry listing for details.
- pulumi_gcp/__init__.py +40 -0
- pulumi_gcp/accesscontextmanager/_inputs.py +12 -3
- pulumi_gcp/accesscontextmanager/access_policy.py +18 -18
- pulumi_gcp/accesscontextmanager/outputs.py +8 -2
- pulumi_gcp/alloydb/_inputs.py +174 -0
- pulumi_gcp/alloydb/instance.py +54 -0
- pulumi_gcp/alloydb/outputs.py +133 -0
- pulumi_gcp/apigee/__init__.py +2 -0
- pulumi_gcp/apigee/environment_keyvaluemaps.py +370 -0
- pulumi_gcp/apigee/environment_keyvaluemaps_entries.py +440 -0
- pulumi_gcp/apigee/instance.py +2 -2
- pulumi_gcp/apigee/nat_address.py +2 -2
- pulumi_gcp/apigee/organization.py +4 -4
- pulumi_gcp/apphub/service_project_attachment.py +11 -11
- pulumi_gcp/bigquery/_inputs.py +36 -0
- pulumi_gcp/bigquery/app_profile.py +54 -0
- pulumi_gcp/bigquery/data_transfer_config.py +52 -0
- pulumi_gcp/bigquery/outputs.py +38 -0
- pulumi_gcp/bigquery/reservation.py +34 -4
- pulumi_gcp/bigquery/table.py +65 -21
- pulumi_gcp/bigtable/table.py +27 -26
- pulumi_gcp/certificateauthority/authority.py +4 -4
- pulumi_gcp/cloudfunctions/function.py +47 -0
- pulumi_gcp/cloudfunctions/get_function.py +11 -1
- pulumi_gcp/cloudfunctionsv2/function.py +2 -2
- pulumi_gcp/cloudrun/_inputs.py +24 -21
- pulumi_gcp/cloudrun/outputs.py +20 -24
- pulumi_gcp/cloudrunv2/_inputs.py +3 -0
- pulumi_gcp/cloudrunv2/outputs.py +4 -0
- pulumi_gcp/compute/__init__.py +2 -0
- pulumi_gcp/compute/_inputs.py +2358 -353
- pulumi_gcp/compute/backend_service.py +6 -0
- pulumi_gcp/compute/disk.py +75 -0
- pulumi_gcp/compute/get_disk.py +11 -1
- pulumi_gcp/compute/get_hc_vpn_gateway.py +11 -1
- pulumi_gcp/compute/get_instance_template.py +2 -2
- pulumi_gcp/compute/get_region_instance_template.py +2 -2
- pulumi_gcp/compute/get_snapshot.py +2 -2
- pulumi_gcp/compute/ha_vpn_gateway.py +68 -7
- pulumi_gcp/compute/outputs.py +1490 -31
- pulumi_gcp/compute/public_advertised_prefix.py +30 -2
- pulumi_gcp/compute/resize_request.py +782 -0
- pulumi_gcp/compute/router_peer.py +437 -0
- pulumi_gcp/compute/router_route_policy.py +616 -0
- pulumi_gcp/compute/service_attachment.py +7 -14
- pulumi_gcp/container/_inputs.py +218 -21
- pulumi_gcp/container/node_pool.py +0 -14
- pulumi_gcp/container/outputs.py +228 -14
- pulumi_gcp/databasemigrationservice/private_connection.py +10 -6
- pulumi_gcp/dataloss/_inputs.py +707 -21
- pulumi_gcp/dataloss/outputs.py +588 -14
- pulumi_gcp/datastore/data_store_index.py +24 -12
- pulumi_gcp/datastream/_inputs.py +83 -3
- pulumi_gcp/datastream/outputs.py +51 -3
- pulumi_gcp/datastream/stream.py +170 -0
- pulumi_gcp/firebase/database_instance.py +8 -8
- pulumi_gcp/firebase/hosting_site.py +8 -8
- pulumi_gcp/firebase/project.py +10 -2
- pulumi_gcp/firestore/index.py +10 -10
- pulumi_gcp/gkeonprem/_inputs.py +78 -78
- pulumi_gcp/gkeonprem/outputs.py +52 -52
- pulumi_gcp/iap/client.py +4 -4
- pulumi_gcp/integrationconnectors/_inputs.py +30 -30
- pulumi_gcp/integrationconnectors/outputs.py +20 -20
- pulumi_gcp/kms/key_handle.py +7 -7
- pulumi_gcp/migrationcenter/_inputs.py +21 -129
- pulumi_gcp/migrationcenter/outputs.py +14 -86
- pulumi_gcp/netapp/volume.py +1 -1
- pulumi_gcp/networkconnectivity/_inputs.py +3 -6
- pulumi_gcp/networkconnectivity/hub.py +129 -49
- pulumi_gcp/networkconnectivity/outputs.py +2 -4
- pulumi_gcp/networkconnectivity/spoke.py +159 -104
- pulumi_gcp/networksecurity/tls_inspection_policy.py +2 -2
- pulumi_gcp/organizations/project.py +16 -7
- pulumi_gcp/orgpolicy/policy.py +4 -4
- pulumi_gcp/projects/get_project_service.py +11 -1
- pulumi_gcp/projects/service.py +68 -0
- pulumi_gcp/projects/service_identity.py +30 -2
- pulumi_gcp/pubsub/subscription.py +6 -6
- pulumi_gcp/pulumi-plugin.json +1 -1
- pulumi_gcp/securesourcemanager/instance.py +528 -4
- pulumi_gcp/securitycenter/__init__.py +1 -0
- pulumi_gcp/securitycenter/v2_organization_mute_config.py +673 -0
- pulumi_gcp/sql/_inputs.py +35 -15
- pulumi_gcp/sql/database_instance.py +2 -2
- pulumi_gcp/sql/outputs.py +50 -14
- pulumi_gcp/vertex/ai_feature_online_store_featureview.py +4 -4
- pulumi_gcp/vmwareengine/get_private_cloud.py +21 -1
- pulumi_gcp/vmwareengine/private_cloud.py +121 -2
- pulumi_gcp/workbench/_inputs.py +77 -0
- pulumi_gcp/workbench/instance.py +18 -4
- pulumi_gcp/workbench/outputs.py +67 -1
- {pulumi_gcp-7.34.0a1722421695.dist-info → pulumi_gcp-7.35.0.dist-info}/METADATA +1 -1
- {pulumi_gcp-7.34.0a1722421695.dist-info → pulumi_gcp-7.35.0.dist-info}/RECORD +96 -91
- {pulumi_gcp-7.34.0a1722421695.dist-info → pulumi_gcp-7.35.0.dist-info}/WHEEL +0 -0
- {pulumi_gcp-7.34.0a1722421695.dist-info → pulumi_gcp-7.35.0.dist-info}/top_level.txt +0 -0
pulumi_gcp/datastore/data_store_index.py
CHANGED
@@ -216,7 +216,9 @@ class DataStoreIndex(pulumi.CustomResource):
                 properties: Optional[pulumi.Input[Sequence[pulumi.Input[Union['DataStoreIndexPropertyArgs', 'DataStoreIndexPropertyArgsDict']]]]] = None,
                 __props__=None):
        """
-
+        > **Warning:** `datastore_index` is deprecated and will be removed in a future major release. Use `firestore_index` instead; this resource is deprecated because it only supports the (default) database. `firestore_index` supports both Firestore in Datastore Mode and Firestore Native indexes and supports both named and the (default) database.
+
+        Describes a composite index for Firestore in Datastore Mode.

        To get more information about Index, see:

@@ -224,11 +226,15 @@ class DataStoreIndex(pulumi.CustomResource):
        * How-to Guides
            * [Official Documentation](https://cloud.google.com/datastore/docs/concepts/indexes)

-        > **Warning:**
-
-
-
-
+        > **Warning:** `datastore_index` is deprecated and will be removed in a future major release.
+        Use `firestore_index` instead; this resource is deprecated because it only supports the (default) database.
+        This resource creates a Datastore Index on a project that has already
+        enabled a Datastore-compatible database. If you haven't already created it, you may
+        create a `firestore.Database` resource with `location_id` set
+        to your chosen location, and `type` set to `"DATASTORE_MODE"`.
+        If you wish to use App Engine, you may instead create a `appengine.Application` resource with
+        `database_type` set to `"CLOUD_DATASTORE_COMPATIBILITY"`.
+        Your Datastore location will be the same as the App Engine location specified.

        ## Example Usage

@@ -305,7 +311,9 @@ class DataStoreIndex(pulumi.CustomResource):
                 args: DataStoreIndexArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
-
+        > **Warning:** `datastore_index` is deprecated and will be removed in a future major release. Use `firestore_index` instead; this resource is deprecated because it only supports the (default) database. `firestore_index` supports both Firestore in Datastore Mode and Firestore Native indexes and supports both named and the (default) database.
+
+        Describes a composite index for Firestore in Datastore Mode.

        To get more information about Index, see:

@@ -313,11 +321,15 @@ class DataStoreIndex(pulumi.CustomResource):
        * How-to Guides
            * [Official Documentation](https://cloud.google.com/datastore/docs/concepts/indexes)

-        > **Warning:**
-
-
-
-
+        > **Warning:** `datastore_index` is deprecated and will be removed in a future major release.
+        Use `firestore_index` instead; this resource is deprecated because it only supports the (default) database.
+        This resource creates a Datastore Index on a project that has already
+        enabled a Datastore-compatible database. If you haven't already created it, you may
+        create a `firestore.Database` resource with `location_id` set
+        to your chosen location, and `type` set to `"DATASTORE_MODE"`.
+        If you wish to use App Engine, you may instead create a `appengine.Application` resource with
+        `database_type` set to `"CLOUD_DATASTORE_COMPATIBILITY"`.
+        Your Datastore location will be the same as the App Engine location specified.

        ## Example Usage

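The new warning points users at `firestore_index` together with a Firestore database running in Datastore Mode. As a rough sketch of what that replacement could look like in Pulumi Python (the project ID, location, kind, and field names below are illustrative, and the `api_scope`/`query_scope` values follow the provider's Datastore Mode conventions rather than anything stated in this diff):

```python
import pulumi_gcp as gcp

# A Firestore database in Datastore Mode, as the deprecation note suggests.
# "my-project" and "nam5" are placeholders.
database = gcp.firestore.Database("database",
    project="my-project",
    name="(default)",
    location_id="nam5",
    type="DATASTORE_MODE")

# A composite index on that database, standing in for a datastore.DataStoreIndex
# that listed the same kind and properties.
index = gcp.firestore.Index("index",
    project="my-project",
    database=database.name,
    collection="my-kind",
    api_scope="DATASTORE_MODE_API",
    query_scope="COLLECTION_RECURSIVE",
    fields=[
        {"field_path": "property_a", "order": "ASCENDING"},
        {"field_path": "property_b", "order": "DESCENDING"},
    ])
```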
pulumi_gcp/datastream/_inputs.py
CHANGED
@@ -77,6 +77,10 @@ __all__ = [
     'StreamDestinationConfigArgsDict',
     'StreamDestinationConfigBigqueryDestinationConfigArgs',
     'StreamDestinationConfigBigqueryDestinationConfigArgsDict',
+    'StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgs',
+    'StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgsDict',
+    'StreamDestinationConfigBigqueryDestinationConfigMergeArgs',
+    'StreamDestinationConfigBigqueryDestinationConfigMergeArgsDict',
     'StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs',
     'StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgsDict',
     'StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs',
@@ -2571,7 +2575,7 @@ if not MYPY:
        """
        bigquery_destination_config: NotRequired[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigArgsDict']]
        """
-        A configuration for how data should be loaded to
+        A configuration for how data should be loaded to Google BigQuery.
        Structure is documented below.
        """
        gcs_destination_config: NotRequired[pulumi.Input['StreamDestinationConfigGcsDestinationConfigArgsDict']]
@@ -2590,7 +2594,7 @@ class StreamDestinationConfigArgs:
                 gcs_destination_config: Optional[pulumi.Input['StreamDestinationConfigGcsDestinationConfigArgs']] = None):
        """
        :param pulumi.Input[str] destination_connection_profile: Destination connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
-        :param pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigArgs'] bigquery_destination_config: A configuration for how data should be loaded to
+        :param pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigArgs'] bigquery_destination_config: A configuration for how data should be loaded to Google BigQuery.
               Structure is documented below.
        :param pulumi.Input['StreamDestinationConfigGcsDestinationConfigArgs'] gcs_destination_config: A configuration for how data should be loaded to Cloud Storage.
               Structure is documented below.
@@ -2617,7 +2621,7 @@ class StreamDestinationConfigArgs:
    @pulumi.getter(name="bigqueryDestinationConfig")
    def bigquery_destination_config(self) -> Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigArgs']]:
        """
-        A configuration for how data should be loaded to
+        A configuration for how data should be loaded to Google BigQuery.
        Structure is documented below.
        """
        return pulumi.get(self, "bigquery_destination_config")
@@ -2642,6 +2646,12 @@ class StreamDestinationConfigArgs:

if not MYPY:
    class StreamDestinationConfigBigqueryDestinationConfigArgsDict(TypedDict):
+        append_only: NotRequired[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgsDict']]
+        """
+        AppendOnly mode defines that the stream of changes (INSERT, UPDATE-INSERT, UPDATE-DELETE and DELETE
+        events) to a source table will be written to the destination Google BigQuery table, retaining the
+        historical state of the data.
+        """
        data_freshness: NotRequired[pulumi.Input[str]]
        """
        The guaranteed data freshness (in seconds) when querying tables created by the stream.
@@ -2649,6 +2659,12 @@ if not MYPY:
        will not be impacted. Lower values mean that queries will return fresher data, but may result in higher cost.
        A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
        """
+        merge: NotRequired[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigMergeArgsDict']]
+        """
+        Merge mode defines that all changes to a table will be merged at the destination Google BigQuery
+        table. This is the default write mode. When selected, BigQuery reflects the way the data is stored
+        in the source database. With Merge mode, no historical record of the change events is kept.
+        """
        single_target_dataset: NotRequired[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgsDict']]
        """
        A single target dataset to which all data will be streamed.
@@ -2665,26 +2681,52 @@ elif False:
@pulumi.input_type
class StreamDestinationConfigBigqueryDestinationConfigArgs:
    def __init__(__self__, *,
+                 append_only: Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgs']] = None,
                 data_freshness: Optional[pulumi.Input[str]] = None,
+                 merge: Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigMergeArgs']] = None,
                 single_target_dataset: Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs']] = None,
                 source_hierarchy_datasets: Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs']] = None):
        """
+        :param pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgs'] append_only: AppendOnly mode defines that the stream of changes (INSERT, UPDATE-INSERT, UPDATE-DELETE and DELETE
+               events) to a source table will be written to the destination Google BigQuery table, retaining the
+               historical state of the data.
        :param pulumi.Input[str] data_freshness: The guaranteed data freshness (in seconds) when querying tables created by the stream.
               Editing this field will only affect new tables created in the future, but existing tables
               will not be impacted. Lower values mean that queries will return fresher data, but may result in higher cost.
               A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
+        :param pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigMergeArgs'] merge: Merge mode defines that all changes to a table will be merged at the destination Google BigQuery
+               table. This is the default write mode. When selected, BigQuery reflects the way the data is stored
+               in the source database. With Merge mode, no historical record of the change events is kept.
        :param pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs'] single_target_dataset: A single target dataset to which all data will be streamed.
               Structure is documented below.
        :param pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs'] source_hierarchy_datasets: Destination datasets are created so that hierarchy of the destination data objects matches the source hierarchy.
               Structure is documented below.
        """
+        if append_only is not None:
+            pulumi.set(__self__, "append_only", append_only)
        if data_freshness is not None:
            pulumi.set(__self__, "data_freshness", data_freshness)
+        if merge is not None:
+            pulumi.set(__self__, "merge", merge)
        if single_target_dataset is not None:
            pulumi.set(__self__, "single_target_dataset", single_target_dataset)
        if source_hierarchy_datasets is not None:
            pulumi.set(__self__, "source_hierarchy_datasets", source_hierarchy_datasets)

+    @property
+    @pulumi.getter(name="appendOnly")
+    def append_only(self) -> Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgs']]:
+        """
+        AppendOnly mode defines that the stream of changes (INSERT, UPDATE-INSERT, UPDATE-DELETE and DELETE
+        events) to a source table will be written to the destination Google BigQuery table, retaining the
+        historical state of the data.
+        """
+        return pulumi.get(self, "append_only")
+
+    @append_only.setter
+    def append_only(self, value: Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgs']]):
+        pulumi.set(self, "append_only", value)
+
    @property
    @pulumi.getter(name="dataFreshness")
    def data_freshness(self) -> Optional[pulumi.Input[str]]:
@@ -2700,6 +2742,20 @@ class StreamDestinationConfigBigqueryDestinationConfigArgs:
    def data_freshness(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "data_freshness", value)

+    @property
+    @pulumi.getter
+    def merge(self) -> Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigMergeArgs']]:
+        """
+        Merge mode defines that all changes to a table will be merged at the destination Google BigQuery
+        table. This is the default write mode. When selected, BigQuery reflects the way the data is stored
+        in the source database. With Merge mode, no historical record of the change events is kept.
+        """
+        return pulumi.get(self, "merge")
+
+    @merge.setter
+    def merge(self, value: Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigMergeArgs']]):
+        pulumi.set(self, "merge", value)
+
    @property
    @pulumi.getter(name="singleTargetDataset")
    def single_target_dataset(self) -> Optional[pulumi.Input['StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs']]:
@@ -2727,6 +2783,30 @@ class StreamDestinationConfigBigqueryDestinationConfigArgs:
        pulumi.set(self, "source_hierarchy_datasets", value)


+if not MYPY:
+    class StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgsDict(TypedDict):
+        pass
+elif False:
+    StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgsDict: TypeAlias = Mapping[str, Any]
+
+@pulumi.input_type
+class StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgs:
+    def __init__(__self__):
+        pass
+
+
+if not MYPY:
+    class StreamDestinationConfigBigqueryDestinationConfigMergeArgsDict(TypedDict):
+        pass
+elif False:
+    StreamDestinationConfigBigqueryDestinationConfigMergeArgsDict: TypeAlias = Mapping[str, Any]
+
+@pulumi.input_type
+class StreamDestinationConfigBigqueryDestinationConfigMergeArgs:
+    def __init__(__self__):
+        pass
+
+
if not MYPY:
    class StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgsDict(TypedDict):
        dataset_id: pulumi.Input[str]
pulumi_gcp/datastream/outputs.py
CHANGED
@@ -47,6 +47,8 @@ __all__ = [
    'StreamBackfillNone',
    'StreamDestinationConfig',
    'StreamDestinationConfigBigqueryDestinationConfig',
+    'StreamDestinationConfigBigqueryDestinationConfigAppendOnly',
+    'StreamDestinationConfigBigqueryDestinationConfigMerge',
    'StreamDestinationConfigBigqueryDestinationConfigSingleTargetDataset',
    'StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasets',
    'StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplate',
@@ -1923,7 +1925,7 @@ class StreamDestinationConfig(dict):
                 gcs_destination_config: Optional['outputs.StreamDestinationConfigGcsDestinationConfig'] = None):
        """
        :param str destination_connection_profile: Destination connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
-        :param 'StreamDestinationConfigBigqueryDestinationConfigArgs' bigquery_destination_config: A configuration for how data should be loaded to
+        :param 'StreamDestinationConfigBigqueryDestinationConfigArgs' bigquery_destination_config: A configuration for how data should be loaded to Google BigQuery.
               Structure is documented below.
        :param 'StreamDestinationConfigGcsDestinationConfigArgs' gcs_destination_config: A configuration for how data should be loaded to Cloud Storage.
               Structure is documented below.
@@ -1946,7 +1948,7 @@ class StreamDestinationConfig(dict):
    @pulumi.getter(name="bigqueryDestinationConfig")
    def bigquery_destination_config(self) -> Optional['outputs.StreamDestinationConfigBigqueryDestinationConfig']:
        """
-        A configuration for how data should be loaded to
+        A configuration for how data should be loaded to Google BigQuery.
        Structure is documented below.
        """
        return pulumi.get(self, "bigquery_destination_config")
@@ -1966,7 +1968,9 @@ class StreamDestinationConfigBigqueryDestinationConfig(dict):
    @staticmethod
    def __key_warning(key: str):
        suggest = None
-        if key == "dataFreshness":
+        if key == "appendOnly":
+            suggest = "append_only"
+        elif key == "dataFreshness":
            suggest = "data_freshness"
        elif key == "singleTargetDataset":
            suggest = "single_target_dataset"
@@ -1985,26 +1989,48 @@ class StreamDestinationConfigBigqueryDestinationConfig(dict):
        return super().get(key, default)

    def __init__(__self__, *,
+                 append_only: Optional['outputs.StreamDestinationConfigBigqueryDestinationConfigAppendOnly'] = None,
                 data_freshness: Optional[str] = None,
+                 merge: Optional['outputs.StreamDestinationConfigBigqueryDestinationConfigMerge'] = None,
                 single_target_dataset: Optional['outputs.StreamDestinationConfigBigqueryDestinationConfigSingleTargetDataset'] = None,
                 source_hierarchy_datasets: Optional['outputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasets'] = None):
        """
+        :param 'StreamDestinationConfigBigqueryDestinationConfigAppendOnlyArgs' append_only: AppendOnly mode defines that the stream of changes (INSERT, UPDATE-INSERT, UPDATE-DELETE and DELETE
+               events) to a source table will be written to the destination Google BigQuery table, retaining the
+               historical state of the data.
        :param str data_freshness: The guaranteed data freshness (in seconds) when querying tables created by the stream.
               Editing this field will only affect new tables created in the future, but existing tables
               will not be impacted. Lower values mean that queries will return fresher data, but may result in higher cost.
               A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
+        :param 'StreamDestinationConfigBigqueryDestinationConfigMergeArgs' merge: Merge mode defines that all changes to a table will be merged at the destination Google BigQuery
+               table. This is the default write mode. When selected, BigQuery reflects the way the data is stored
+               in the source database. With Merge mode, no historical record of the change events is kept.
        :param 'StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs' single_target_dataset: A single target dataset to which all data will be streamed.
               Structure is documented below.
        :param 'StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs' source_hierarchy_datasets: Destination datasets are created so that hierarchy of the destination data objects matches the source hierarchy.
               Structure is documented below.
        """
+        if append_only is not None:
+            pulumi.set(__self__, "append_only", append_only)
        if data_freshness is not None:
            pulumi.set(__self__, "data_freshness", data_freshness)
+        if merge is not None:
+            pulumi.set(__self__, "merge", merge)
        if single_target_dataset is not None:
            pulumi.set(__self__, "single_target_dataset", single_target_dataset)
        if source_hierarchy_datasets is not None:
            pulumi.set(__self__, "source_hierarchy_datasets", source_hierarchy_datasets)

+    @property
+    @pulumi.getter(name="appendOnly")
+    def append_only(self) -> Optional['outputs.StreamDestinationConfigBigqueryDestinationConfigAppendOnly']:
+        """
+        AppendOnly mode defines that the stream of changes (INSERT, UPDATE-INSERT, UPDATE-DELETE and DELETE
+        events) to a source table will be written to the destination Google BigQuery table, retaining the
+        historical state of the data.
+        """
+        return pulumi.get(self, "append_only")
+
    @property
    @pulumi.getter(name="dataFreshness")
    def data_freshness(self) -> Optional[str]:
@@ -2016,6 +2042,16 @@ class StreamDestinationConfigBigqueryDestinationConfig(dict):
        """
        return pulumi.get(self, "data_freshness")

+    @property
+    @pulumi.getter
+    def merge(self) -> Optional['outputs.StreamDestinationConfigBigqueryDestinationConfigMerge']:
+        """
+        Merge mode defines that all changes to a table will be merged at the destination Google BigQuery
+        table. This is the default write mode. When selected, BigQuery reflects the way the data is stored
+        in the source database. With Merge mode, no historical record of the change events is kept.
+        """
+        return pulumi.get(self, "merge")
+
    @property
    @pulumi.getter(name="singleTargetDataset")
    def single_target_dataset(self) -> Optional['outputs.StreamDestinationConfigBigqueryDestinationConfigSingleTargetDataset']:
@@ -2035,6 +2071,18 @@ class StreamDestinationConfigBigqueryDestinationConfig(dict):
        return pulumi.get(self, "source_hierarchy_datasets")


+@pulumi.output_type
+class StreamDestinationConfigBigqueryDestinationConfigAppendOnly(dict):
+    def __init__(__self__):
+        pass
+
+
+@pulumi.output_type
+class StreamDestinationConfigBigqueryDestinationConfigMerge(dict):
+    def __init__(__self__):
+        pass
+
+
@pulumi.output_type
class StreamDestinationConfigBigqueryDestinationConfigSingleTargetDataset(dict):
    @staticmethod
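Because the same fields are mirrored in the output types above, a program can inspect which write mode an existing stream resolved to. A small sketch, assuming `default` is a `gcp.datastream.Stream` declared elsewhere in the program:

```python
import pulumi

# `default` stands in for a gcp.datastream.Stream resource defined elsewhere.
write_mode = default.destination_config.apply(
    lambda cfg: "append_only"
    if cfg.bigquery_destination_config and cfg.bigquery_destination_config.append_only is not None
    else "merge")
pulumi.export("bigqueryWriteMode", write_mode)
```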
pulumi_gcp/datastream/stream.py
CHANGED
@@ -1117,6 +1117,91 @@ class Stream(pulumi.CustomResource):
            backfill_none={},
            opts = pulumi.ResourceOptions(depends_on=[bigquery_key_user]))
        ```
+        ### Datastream Stream Bigquery Append Only
+
+        ```python
+        import pulumi
+        import pulumi_gcp as gcp
+        import pulumi_random as random
+
+        project = gcp.organizations.get_project()
+        instance = gcp.sql.DatabaseInstance("instance",
+            name="my-instance",
+            database_version="MYSQL_8_0",
+            region="us-central1",
+            settings={
+                "tier": "db-f1-micro",
+                "backup_configuration": {
+                    "enabled": True,
+                    "binary_log_enabled": True,
+                },
+                "ip_configuration": {
+                    "authorized_networks": [
+                        {
+                            "value": "34.71.242.81",
+                        },
+                        {
+                            "value": "34.72.28.29",
+                        },
+                        {
+                            "value": "34.67.6.157",
+                        },
+                        {
+                            "value": "34.67.234.134",
+                        },
+                        {
+                            "value": "34.72.239.218",
+                        },
+                    ],
+                },
+            },
+            deletion_protection=True)
+        db = gcp.sql.Database("db",
+            instance=instance.name,
+            name="db")
+        pwd = random.RandomPassword("pwd",
+            length=16,
+            special=False)
+        user = gcp.sql.User("user",
+            name="user",
+            instance=instance.name,
+            host="%",
+            password=pwd.result)
+        source_connection_profile = gcp.datastream.ConnectionProfile("source_connection_profile",
+            display_name="Source connection profile",
+            location="us-central1",
+            connection_profile_id="source-profile",
+            mysql_profile={
+                "hostname": instance.public_ip_address,
+                "username": user.name,
+                "password": user.password,
+            })
+        destination_connection_profile = gcp.datastream.ConnectionProfile("destination_connection_profile",
+            display_name="Connection profile",
+            location="us-central1",
+            connection_profile_id="destination-profile",
+            bigquery_profile={})
+        default = gcp.datastream.Stream("default",
+            stream_id="my-stream",
+            location="us-central1",
+            display_name="my stream",
+            source_config={
+                "source_connection_profile": source_connection_profile.id,
+                "mysql_source_config": {},
+            },
+            destination_config={
+                "destination_connection_profile": destination_connection_profile.id,
+                "bigquery_destination_config": {
+                    "source_hierarchy_datasets": {
+                        "dataset_template": {
+                            "location": "us-central1",
+                        },
+                    },
+                    "append_only": {},
+                },
+            },
+            backfill_none={})
+        ```

        ## Import

@@ -1778,6 +1863,91 @@ class Stream(pulumi.CustomResource):
            backfill_none={},
            opts = pulumi.ResourceOptions(depends_on=[bigquery_key_user]))
        ```
+        ### Datastream Stream Bigquery Append Only
+
+        ```python
+        import pulumi
+        import pulumi_gcp as gcp
+        import pulumi_random as random
+
+        project = gcp.organizations.get_project()
+        instance = gcp.sql.DatabaseInstance("instance",
+            name="my-instance",
+            database_version="MYSQL_8_0",
+            region="us-central1",
+            settings={
+                "tier": "db-f1-micro",
+                "backup_configuration": {
+                    "enabled": True,
+                    "binary_log_enabled": True,
+                },
+                "ip_configuration": {
+                    "authorized_networks": [
+                        {
+                            "value": "34.71.242.81",
+                        },
+                        {
+                            "value": "34.72.28.29",
+                        },
+                        {
+                            "value": "34.67.6.157",
+                        },
+                        {
+                            "value": "34.67.234.134",
+                        },
+                        {
+                            "value": "34.72.239.218",
+                        },
+                    ],
+                },
+            },
+            deletion_protection=True)
+        db = gcp.sql.Database("db",
+            instance=instance.name,
+            name="db")
+        pwd = random.RandomPassword("pwd",
+            length=16,
+            special=False)
+        user = gcp.sql.User("user",
+            name="user",
+            instance=instance.name,
+            host="%",
+            password=pwd.result)
+        source_connection_profile = gcp.datastream.ConnectionProfile("source_connection_profile",
+            display_name="Source connection profile",
+            location="us-central1",
+            connection_profile_id="source-profile",
+            mysql_profile={
+                "hostname": instance.public_ip_address,
+                "username": user.name,
+                "password": user.password,
+            })
+        destination_connection_profile = gcp.datastream.ConnectionProfile("destination_connection_profile",
+            display_name="Connection profile",
+            location="us-central1",
+            connection_profile_id="destination-profile",
+            bigquery_profile={})
+        default = gcp.datastream.Stream("default",
+            stream_id="my-stream",
+            location="us-central1",
+            display_name="my stream",
+            source_config={
+                "source_connection_profile": source_connection_profile.id,
+                "mysql_source_config": {},
+            },
+            destination_config={
+                "destination_connection_profile": destination_connection_profile.id,
+                "bigquery_destination_config": {
+                    "source_hierarchy_datasets": {
+                        "dataset_template": {
+                            "location": "us-central1",
+                        },
+                    },
+                    "append_only": {},
+                },
+            },
+            backfill_none={})
+        ```

        ## Import

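The example added above opts into append-only mode with `"append_only": {}`. To pin the documented default explicitly instead, the same block can carry an empty `merge` marker; a sketch of only the changed portion, keeping the rest of the example as-is:

```python
# Drop-in replacement for the bigquery_destination_config block in the example
# above: explicit merge mode (the documented default) instead of append_only.
bigquery_destination_config = {
    "source_hierarchy_datasets": {
        "dataset_template": {
            "location": "us-central1",
        },
    },
    "merge": {},
}
```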
pulumi_gcp/firebase/database_instance.py
CHANGED
@@ -145,8 +145,8 @@ class _DatabaseInstanceState:


               - - -
-        :param pulumi.Input[str] name: The fully-qualified resource name of the Firebase Realtime Database, in
-               format: projects/PROJECT_NUMBER/locations/REGION_IDENTIFIER/instances/INSTANCE_ID
+        :param pulumi.Input[str] name: The fully-qualified resource name of the Firebase Realtime Database, in
+               the format: projects/PROJECT_NUMBER/locations/REGION_IDENTIFIER/instances/INSTANCE_ID
               PROJECT_NUMBER: The Firebase project's [`ProjectNumber`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_number)
               Learn more about using project identifiers in Google's [AIP 2510 standard](https://google.aip.dev/cloud/2510).
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
@@ -223,8 +223,8 @@ class _DatabaseInstanceState:
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
-        The fully-qualified resource name of the Firebase Realtime Database, in
-        format: projects/PROJECT_NUMBER/locations/REGION_IDENTIFIER/instances/INSTANCE_ID
+        The fully-qualified resource name of the Firebase Realtime Database, in
+        the format: projects/PROJECT_NUMBER/locations/REGION_IDENTIFIER/instances/INSTANCE_ID
        PROJECT_NUMBER: The Firebase project's [`ProjectNumber`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_number)
        Learn more about using project identifiers in Google's [AIP 2510 standard](https://google.aip.dev/cloud/2510).
        """
@@ -564,8 +564,8 @@ class DatabaseInstance(pulumi.CustomResource):


               - - -
-        :param pulumi.Input[str] name: The fully-qualified resource name of the Firebase Realtime Database, in
-               format: projects/PROJECT_NUMBER/locations/REGION_IDENTIFIER/instances/INSTANCE_ID
+        :param pulumi.Input[str] name: The fully-qualified resource name of the Firebase Realtime Database, in
+               the format: projects/PROJECT_NUMBER/locations/REGION_IDENTIFIER/instances/INSTANCE_ID
               PROJECT_NUMBER: The Firebase project's [`ProjectNumber`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_number)
               Learn more about using project identifiers in Google's [AIP 2510 standard](https://google.aip.dev/cloud/2510).
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
@@ -627,8 +627,8 @@ class DatabaseInstance(pulumi.CustomResource):
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
-        The fully-qualified resource name of the Firebase Realtime Database, in
-        format: projects/PROJECT_NUMBER/locations/REGION_IDENTIFIER/instances/INSTANCE_ID
+        The fully-qualified resource name of the Firebase Realtime Database, in
+        the format: projects/PROJECT_NUMBER/locations/REGION_IDENTIFIER/instances/INSTANCE_ID
        PROJECT_NUMBER: The Firebase project's [`ProjectNumber`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_number)
        Learn more about using project identifiers in Google's [AIP 2510 standard](https://google.aip.dev/cloud/2510).
        """
pulumi_gcp/firebase/hosting_site.py
CHANGED
@@ -93,8 +93,8 @@ class _HostingSiteState:
        :param pulumi.Input[str] app_id: Optional. The [ID of a Web App](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects.webApps#WebApp.FIELDS.app_id)
               associated with the Hosting site.
        :param pulumi.Input[str] default_url: The default URL for the site in the form of https://{name}.web.app
-        :param pulumi.Input[str] name: Output only. The fully-qualified resource name of the Hosting site, in
-               format: projects/PROJECT_IDENTIFIER/sites/SITE_ID PROJECT_IDENTIFIER: the
+        :param pulumi.Input[str] name: Output only. The fully-qualified resource name of the Hosting site, in
+               the format: projects/PROJECT_IDENTIFIER/sites/SITE_ID PROJECT_IDENTIFIER: the
               Firebase project's
               [`ProjectNumber`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_number) ***(recommended)*** or its
               [`ProjectId`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_id).
@@ -146,8 +146,8 @@ class _HostingSiteState:
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
-        Output only. The fully-qualified resource name of the Hosting site, in
-        format: projects/PROJECT_IDENTIFIER/sites/SITE_ID PROJECT_IDENTIFIER: the
+        Output only. The fully-qualified resource name of the Hosting site, in
+        the format: projects/PROJECT_IDENTIFIER/sites/SITE_ID PROJECT_IDENTIFIER: the
        Firebase project's
        [`ProjectNumber`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_number) ***(recommended)*** or its
        [`ProjectId`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_id).
@@ -386,8 +386,8 @@ class HostingSite(pulumi.CustomResource):
        :param pulumi.Input[str] app_id: Optional. The [ID of a Web App](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects.webApps#WebApp.FIELDS.app_id)
               associated with the Hosting site.
        :param pulumi.Input[str] default_url: The default URL for the site in the form of https://{name}.web.app
-        :param pulumi.Input[str] name: Output only. The fully-qualified resource name of the Hosting site, in
-               format: projects/PROJECT_IDENTIFIER/sites/SITE_ID PROJECT_IDENTIFIER: the
+        :param pulumi.Input[str] name: Output only. The fully-qualified resource name of the Hosting site, in
+               the format: projects/PROJECT_IDENTIFIER/sites/SITE_ID PROJECT_IDENTIFIER: the
               Firebase project's
               [`ProjectNumber`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_number) ***(recommended)*** or its
               [`ProjectId`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_id).
@@ -431,8 +431,8 @@ class HostingSite(pulumi.CustomResource):
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
-        Output only. The fully-qualified resource name of the Hosting site, in
-        format: projects/PROJECT_IDENTIFIER/sites/SITE_ID PROJECT_IDENTIFIER: the
+        Output only. The fully-qualified resource name of the Hosting site, in
+        the format: projects/PROJECT_IDENTIFIER/sites/SITE_ID PROJECT_IDENTIFIER: the
        Firebase project's
        [`ProjectNumber`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_number) ***(recommended)*** or its
        [`ProjectId`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_id).