pulumi-vsphere 4.10.3a1723624830-py3-none-any.whl → 4.11.0a1-py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Potentially problematic release: this version of pulumi-vsphere might be problematic.
- pulumi_vsphere/_inputs.py +6 -12
- pulumi_vsphere/_utilities.py +4 -40
- pulumi_vsphere/compute_cluster.py +20 -20
- pulumi_vsphere/compute_cluster_vm_affinity_rule.py +18 -14
- pulumi_vsphere/content_library.py +10 -10
- pulumi_vsphere/datacenter.py +28 -7
- pulumi_vsphere/datastore_cluster.py +14 -0
- pulumi_vsphere/distributed_port_group.py +12 -61
- pulumi_vsphere/distributed_virtual_switch.py +43 -22
- pulumi_vsphere/entity_permissions.py +38 -59
- pulumi_vsphere/folder.py +21 -0
- pulumi_vsphere/get_compute_cluster_host_group.py +16 -18
- pulumi_vsphere/get_content_library.py +6 -10
- pulumi_vsphere/get_content_library_item.py +8 -12
- pulumi_vsphere/get_datastore.py +9 -9
- pulumi_vsphere/get_datastore_stats.py +32 -34
- pulumi_vsphere/get_dynamic.py +12 -14
- pulumi_vsphere/get_guest_os_customization.py +43 -8
- pulumi_vsphere/get_host_base_images.py +6 -6
- pulumi_vsphere/get_host_pci_device.py +2 -4
- pulumi_vsphere/get_host_thumbprint.py +12 -12
- pulumi_vsphere/get_host_vgpu_profile.py +2 -4
- pulumi_vsphere/get_license.py +1 -2
- pulumi_vsphere/get_network.py +14 -14
- pulumi_vsphere/get_resource_pool.py +8 -12
- pulumi_vsphere/get_role.py +4 -4
- pulumi_vsphere/get_virtual_machine.py +35 -60
- pulumi_vsphere/guest_os_customization.py +31 -31
- pulumi_vsphere/host_port_group.py +2 -2
- pulumi_vsphere/nas_datastore.py +7 -7
- pulumi_vsphere/offline_software_depot.py +2 -2
- pulumi_vsphere/outputs.py +40 -48
- pulumi_vsphere/provider.py +6 -2
- pulumi_vsphere/pulumi-plugin.json +1 -1
- pulumi_vsphere/resource_pool.py +2 -2
- pulumi_vsphere/supervisor.py +30 -134
- pulumi_vsphere/virtual_disk.py +30 -38
- pulumi_vsphere/virtual_machine.py +32 -32
- pulumi_vsphere/virtual_machine_class.py +0 -2
- pulumi_vsphere/virtual_machine_snapshot.py +2 -2
- pulumi_vsphere/vm_storage_policy.py +67 -67
- pulumi_vsphere/vnic.py +93 -89
- {pulumi_vsphere-4.10.3a1723624830.dist-info → pulumi_vsphere-4.11.0a1.dist-info}/METADATA +1 -1
- pulumi_vsphere-4.11.0a1.dist-info/RECORD +86 -0
- {pulumi_vsphere-4.10.3a1723624830.dist-info → pulumi_vsphere-4.11.0a1.dist-info}/WHEEL +1 -1
- pulumi_vsphere-4.10.3a1723624830.dist-info/RECORD +0 -86
- {pulumi_vsphere-4.10.3a1723624830.dist-info → pulumi_vsphere-4.11.0a1.dist-info}/top_level.txt +0 -0
@@ -131,7 +131,7 @@ class VmStoragePolicy(pulumi.CustomResource):
 opts: Optional[pulumi.ResourceOptions] = None,
 description: Optional[pulumi.Input[str]] = None,
 name: Optional[pulumi.Input[str]] = None,
-tag_rules: Optional[pulumi.Input[Sequence[pulumi.Input[
+tag_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VmStoragePolicyTagRuleArgs']]]]] = None,
 __props__=None):
 """
 The `VmStoragePolicy` resource can be used to create and manage storage

@@ -189,45 +189,45 @@ class VmStoragePolicy(pulumi.CustomResource):
     name="prod_platinum_replicated",
     description="prod_platinum_replicated",
     tag_rules=[
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        vsphere.VmStoragePolicyTagRuleArgs(
+            tag_category=environment["name"],
+            tags=[production["name"]],
+            include_datastores_with_tags=True,
+        ),
+        vsphere.VmStoragePolicyTagRuleArgs(
+            tag_category=service_level["name"],
+            tags=[platinum["name"]],
+            include_datastores_with_tags=True,
+        ),
+        vsphere.VmStoragePolicyTagRuleArgs(
+            tag_category=replication["name"],
+            tags=[replicated["name"]],
+            include_datastores_with_tags=True,
+        ),
     ])
 dev_silver_nonreplicated = vsphere.VmStoragePolicy("dev_silver_nonreplicated",
     name="dev_silver_nonreplicated",
     description="dev_silver_nonreplicated",
     tag_rules=[
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        vsphere.VmStoragePolicyTagRuleArgs(
+            tag_category=environment["name"],
+            tags=[development["name"]],
+            include_datastores_with_tags=True,
+        ),
+        vsphere.VmStoragePolicyTagRuleArgs(
+            tag_category=service_level["name"],
+            tags=[silver["name"]],
+            include_datastores_with_tags=True,
+        ),
+        vsphere.VmStoragePolicyTagRuleArgs(
+            tag_category=replication["name"],
+            tags=[non_replicated["name"]],
+            include_datastores_with_tags=True,
+        ),
     ])
 ```

-
+Lasttly, when creating a virtual machine resource, a storage policy can be specificed to direct virtual machine placement to a datastore which matches the policy's `tags_rules`.

 ```python
 import pulumi

@@ -243,7 +243,7 @@ class VmStoragePolicy(pulumi.CustomResource):
 :param pulumi.ResourceOptions opts: Options for the resource.
 :param pulumi.Input[str] description: Description of the storage policy.
 :param pulumi.Input[str] name: The name of the storage policy.
-:param pulumi.Input[Sequence[pulumi.Input[
+:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VmStoragePolicyTagRuleArgs']]]] tag_rules: List of tag rules. The tag category and tags to be associated to this storage policy.
 """
 ...
 @overload

@@ -307,45 +307,45 @@ class VmStoragePolicy(pulumi.CustomResource):
     name="prod_platinum_replicated",
     description="prod_platinum_replicated",
     tag_rules=[
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        vsphere.VmStoragePolicyTagRuleArgs(
+            tag_category=environment["name"],
+            tags=[production["name"]],
+            include_datastores_with_tags=True,
+        ),
+        vsphere.VmStoragePolicyTagRuleArgs(
+            tag_category=service_level["name"],
+            tags=[platinum["name"]],
+            include_datastores_with_tags=True,
+        ),
+        vsphere.VmStoragePolicyTagRuleArgs(
+            tag_category=replication["name"],
+            tags=[replicated["name"]],
+            include_datastores_with_tags=True,
+        ),
     ])
 dev_silver_nonreplicated = vsphere.VmStoragePolicy("dev_silver_nonreplicated",
     name="dev_silver_nonreplicated",
     description="dev_silver_nonreplicated",
     tag_rules=[
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        vsphere.VmStoragePolicyTagRuleArgs(
+            tag_category=environment["name"],
+            tags=[development["name"]],
+            include_datastores_with_tags=True,
+        ),
+        vsphere.VmStoragePolicyTagRuleArgs(
+            tag_category=service_level["name"],
+            tags=[silver["name"]],
+            include_datastores_with_tags=True,
+        ),
+        vsphere.VmStoragePolicyTagRuleArgs(
+            tag_category=replication["name"],
+            tags=[non_replicated["name"]],
+            include_datastores_with_tags=True,
+        ),
     ])
 ```

-
+Lasttly, when creating a virtual machine resource, a storage policy can be specificed to direct virtual machine placement to a datastore which matches the policy's `tags_rules`.

 ```python
 import pulumi

@@ -374,7 +374,7 @@ class VmStoragePolicy(pulumi.CustomResource):
 opts: Optional[pulumi.ResourceOptions] = None,
 description: Optional[pulumi.Input[str]] = None,
 name: Optional[pulumi.Input[str]] = None,
-tag_rules: Optional[pulumi.Input[Sequence[pulumi.Input[
+tag_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VmStoragePolicyTagRuleArgs']]]]] = None,
 __props__=None):
 opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
 if not isinstance(opts, pulumi.ResourceOptions):

@@ -401,7 +401,7 @@ class VmStoragePolicy(pulumi.CustomResource):
 opts: Optional[pulumi.ResourceOptions] = None,
 description: Optional[pulumi.Input[str]] = None,
 name: Optional[pulumi.Input[str]] = None,
-tag_rules: Optional[pulumi.Input[Sequence[pulumi.Input[
+tag_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VmStoragePolicyTagRuleArgs']]]]] = None) -> 'VmStoragePolicy':
 """
 Get an existing VmStoragePolicy resource's state with the given name, id, and optional extra
 properties used to qualify the lookup.

@@ -411,7 +411,7 @@ class VmStoragePolicy(pulumi.CustomResource):
 :param pulumi.ResourceOptions opts: Options for the resource.
 :param pulumi.Input[str] description: Description of the storage policy.
 :param pulumi.Input[str] name: The name of the storage policy.
-:param pulumi.Input[Sequence[pulumi.Input[
+:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VmStoragePolicyTagRuleArgs']]]] tag_rules: List of tag rules. The tag category and tags to be associated to this storage policy.
 """
 opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
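The hunks above touch only the `tag_rules` annotation and its docstring text; the resource is still constructed exactly as in the bundled examples. For reference, a minimal sketch of a Pulumi program consuming the annotated argument (the tag category `environment` and tag `production` are hypothetical and must already exist in vSphere):

```python
import pulumi
import pulumi_vsphere as vsphere

# Hypothetical tag category and tag names; they must already exist in vSphere.
policy = vsphere.VmStoragePolicy(
    "prod-replicated",
    name="prod-replicated",
    description="Place VMs on datastores tagged environment/production",
    tag_rules=[
        # The pulumi.InputType[...] annotation accepts the generated Args
        # class (shown here) or an equivalent plain dict.
        vsphere.VmStoragePolicyTagRuleArgs(
            tag_category="environment",          # tag category name
            tags=["production"],                 # tag names within that category
            include_datastores_with_tags=True,
        ),
    ],
)

pulumi.export("policy_id", policy.id)
```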
pulumi_vsphere/vnic.py CHANGED

@@ -30,7 +30,7 @@ class VnicArgs:
 The set of arguments for constructing a Vnic resource.
 :param pulumi.Input[str] host: ESX host the interface belongs to
 :param pulumi.Input[str] distributed_port_group: Key of the distributed portgroup the nic will connect to.
-:param pulumi.Input[str] distributed_switch_port: UUID of the
+:param pulumi.Input[str] distributed_switch_port: UUID of the DVSwitch the nic will be attached to. Do not set if you set portgroup.
 :param pulumi.Input['VnicIpv4Args'] ipv4: IPv4 settings. Either this or `ipv6` needs to be set. See IPv4 options below.
 :param pulumi.Input['VnicIpv6Args'] ipv6: IPv6 settings. Either this or `ipv6` needs to be set. See IPv6 options below.
 :param pulumi.Input[str] mac: MAC address of the interface.

@@ -87,7 +87,7 @@ class VnicArgs:
 @pulumi.getter(name="distributedSwitchPort")
 def distributed_switch_port(self) -> Optional[pulumi.Input[str]]:
 """
-UUID of the
+UUID of the DVSwitch the nic will be attached to. Do not set if you set portgroup.
 """
 return pulumi.get(self, "distributed_switch_port")

@@ -196,7 +196,7 @@ class _VnicState:
 """
 Input properties used for looking up and filtering Vnic resources.
 :param pulumi.Input[str] distributed_port_group: Key of the distributed portgroup the nic will connect to.
-:param pulumi.Input[str] distributed_switch_port: UUID of the
+:param pulumi.Input[str] distributed_switch_port: UUID of the DVSwitch the nic will be attached to. Do not set if you set portgroup.
 :param pulumi.Input[str] host: ESX host the interface belongs to
 :param pulumi.Input['VnicIpv4Args'] ipv4: IPv4 settings. Either this or `ipv6` needs to be set. See IPv4 options below.
 :param pulumi.Input['VnicIpv6Args'] ipv6: IPv6 settings. Either this or `ipv6` needs to be set. See IPv6 options below.

@@ -243,7 +243,7 @@ class _VnicState:
 @pulumi.getter(name="distributedSwitchPort")
 def distributed_switch_port(self) -> Optional[pulumi.Input[str]]:
 """
-UUID of the
+UUID of the DVSwitch the nic will be attached to. Do not set if you set portgroup.
 """
 return pulumi.get(self, "distributed_switch_port")

@@ -356,8 +356,8 @@ class Vnic(pulumi.CustomResource):
 distributed_port_group: Optional[pulumi.Input[str]] = None,
 distributed_switch_port: Optional[pulumi.Input[str]] = None,
 host: Optional[pulumi.Input[str]] = None,
-ipv4: Optional[pulumi.Input[
-ipv6: Optional[pulumi.Input[
+ipv4: Optional[pulumi.Input[pulumi.InputType['VnicIpv4Args']]] = None,
+ipv6: Optional[pulumi.Input[pulumi.InputType['VnicIpv6Args']]] = None,
 mac: Optional[pulumi.Input[str]] = None,
 mtu: Optional[pulumi.Input[int]] = None,
 netstack: Optional[pulumi.Input[str]] = None,

@@ -369,33 +369,35 @@ class Vnic(pulumi.CustomResource):

 ## Example Usage

+### S
+
 ### Create a vnic attached to a distributed virtual switch using the vmotion TCP/IP stack

 ```python
 import pulumi
 import pulumi_vsphere as vsphere

-
-
-datacenter_id=
-
-name="
-datacenter_id=
-hosts=[
-
-
-
-
-name="pg
+dc = vsphere.get_datacenter(name="mydc")
+h1 = vsphere.get_host(name="esxi1.host.test",
+    datacenter_id=dc.id)
+d1 = vsphere.DistributedVirtualSwitch("d1",
+    name="dc_DVPG0",
+    datacenter_id=dc.id,
+    hosts=[vsphere.DistributedVirtualSwitchHostArgs(
+        host_system_id=h1.id,
+        devices=["vnic3"],
+    )])
+p1 = vsphere.DistributedPortGroup("p1",
+    name="test-pg",
     vlan_id=1234,
-distributed_virtual_switch_uuid=
-
-host=
-distributed_switch_port=
-distributed_port_group=
-ipv4=
-
-
+    distributed_virtual_switch_uuid=d1.id)
+v1 = vsphere.Vnic("v1",
+    host=h1.id,
+    distributed_switch_port=d1.id,
+    distributed_port_group=p1.id,
+    ipv4=vsphere.VnicIpv4Args(
+        dhcp=True,
+    ),
     netstack="vmotion")
 ```

@@ -405,28 +407,28 @@ class Vnic(pulumi.CustomResource):
 import pulumi
 import pulumi_vsphere as vsphere

-
-
-datacenter_id=
-
-name="
-host_system_id=
+dc = vsphere.get_datacenter(name="mydc")
+h1 = vsphere.get_host(name="esxi1.host.test",
+    datacenter_id=dc.id)
+hvs1 = vsphere.HostVirtualSwitch("hvs1",
+    name="dc_HPG0",
+    host_system_id=h1.id,
     network_adapters=[
         "vmnic3",
         "vmnic4",
     ],
     active_nics=["vmnic3"],
     standby_nics=["vmnic4"])
-
-name="pg
-virtual_switch_name=
-host_system_id=
-
-host=
-portgroup=
-ipv4=
-
-
+p1 = vsphere.HostPortGroup("p1",
+    name="my-pg",
+    virtual_switch_name=hvs1.name,
+    host_system_id=h1.id)
+v1 = vsphere.Vnic("v1",
+    host=h1.id,
+    portgroup=p1.name,
+    ipv4=vsphere.VnicIpv4Args(
+        dhcp=True,
+    ),
     services=[
         "vsan",
         "management",

@@ -445,10 +447,10 @@ class Vnic(pulumi.CustomResource):
 :param str resource_name: The name of the resource.
 :param pulumi.ResourceOptions opts: Options for the resource.
 :param pulumi.Input[str] distributed_port_group: Key of the distributed portgroup the nic will connect to.
-:param pulumi.Input[str] distributed_switch_port: UUID of the
+:param pulumi.Input[str] distributed_switch_port: UUID of the DVSwitch the nic will be attached to. Do not set if you set portgroup.
 :param pulumi.Input[str] host: ESX host the interface belongs to
-:param pulumi.Input[
-:param pulumi.Input[
+:param pulumi.Input[pulumi.InputType['VnicIpv4Args']] ipv4: IPv4 settings. Either this or `ipv6` needs to be set. See IPv4 options below.
+:param pulumi.Input[pulumi.InputType['VnicIpv6Args']] ipv6: IPv6 settings. Either this or `ipv6` needs to be set. See IPv6 options below.
 :param pulumi.Input[str] mac: MAC address of the interface.
 :param pulumi.Input[int] mtu: MTU of the interface.
 :param pulumi.Input[str] netstack: TCP/IP stack setting for this interface. Possible values are `defaultTcpipStack``, 'vmotion', 'vSphereProvisioning'. Changing this will force the creation of a new interface since it's not possible to change the stack once it gets created. (Default:`defaultTcpipStack`)

@@ -466,33 +468,35 @@ class Vnic(pulumi.CustomResource):

 ## Example Usage

+### S
+
 ### Create a vnic attached to a distributed virtual switch using the vmotion TCP/IP stack

 ```python
 import pulumi
 import pulumi_vsphere as vsphere

-
-
-datacenter_id=
-
-name="
-datacenter_id=
-hosts=[
-
-
-
-
-name="pg
+dc = vsphere.get_datacenter(name="mydc")
+h1 = vsphere.get_host(name="esxi1.host.test",
+    datacenter_id=dc.id)
+d1 = vsphere.DistributedVirtualSwitch("d1",
+    name="dc_DVPG0",
+    datacenter_id=dc.id,
+    hosts=[vsphere.DistributedVirtualSwitchHostArgs(
+        host_system_id=h1.id,
+        devices=["vnic3"],
+    )])
+p1 = vsphere.DistributedPortGroup("p1",
+    name="test-pg",
     vlan_id=1234,
-distributed_virtual_switch_uuid=
-
-host=
-distributed_switch_port=
-distributed_port_group=
-ipv4=
-
-
+    distributed_virtual_switch_uuid=d1.id)
+v1 = vsphere.Vnic("v1",
+    host=h1.id,
+    distributed_switch_port=d1.id,
+    distributed_port_group=p1.id,
+    ipv4=vsphere.VnicIpv4Args(
+        dhcp=True,
+    ),
     netstack="vmotion")
 ```

@@ -502,28 +506,28 @@ class Vnic(pulumi.CustomResource):
 import pulumi
 import pulumi_vsphere as vsphere

-
-
-datacenter_id=
-
-name="
-host_system_id=
+dc = vsphere.get_datacenter(name="mydc")
+h1 = vsphere.get_host(name="esxi1.host.test",
+    datacenter_id=dc.id)
+hvs1 = vsphere.HostVirtualSwitch("hvs1",
+    name="dc_HPG0",
+    host_system_id=h1.id,
     network_adapters=[
         "vmnic3",
         "vmnic4",
     ],
     active_nics=["vmnic3"],
     standby_nics=["vmnic4"])
-
-name="pg
-virtual_switch_name=
-host_system_id=
-
-host=
-portgroup=
-ipv4=
-
-
+p1 = vsphere.HostPortGroup("p1",
+    name="my-pg",
+    virtual_switch_name=hvs1.name,
+    host_system_id=h1.id)
+v1 = vsphere.Vnic("v1",
+    host=h1.id,
+    portgroup=p1.name,
+    ipv4=vsphere.VnicIpv4Args(
+        dhcp=True,
+    ),
     services=[
         "vsan",
         "management",

@@ -557,8 +561,8 @@ class Vnic(pulumi.CustomResource):
 distributed_port_group: Optional[pulumi.Input[str]] = None,
 distributed_switch_port: Optional[pulumi.Input[str]] = None,
 host: Optional[pulumi.Input[str]] = None,
-ipv4: Optional[pulumi.Input[
-ipv6: Optional[pulumi.Input[
+ipv4: Optional[pulumi.Input[pulumi.InputType['VnicIpv4Args']]] = None,
+ipv6: Optional[pulumi.Input[pulumi.InputType['VnicIpv6Args']]] = None,
 mac: Optional[pulumi.Input[str]] = None,
 mtu: Optional[pulumi.Input[int]] = None,
 netstack: Optional[pulumi.Input[str]] = None,

@@ -598,8 +602,8 @@ class Vnic(pulumi.CustomResource):
 distributed_port_group: Optional[pulumi.Input[str]] = None,
 distributed_switch_port: Optional[pulumi.Input[str]] = None,
 host: Optional[pulumi.Input[str]] = None,
-ipv4: Optional[pulumi.Input[
-ipv6: Optional[pulumi.Input[
+ipv4: Optional[pulumi.Input[pulumi.InputType['VnicIpv4Args']]] = None,
+ipv6: Optional[pulumi.Input[pulumi.InputType['VnicIpv6Args']]] = None,
 mac: Optional[pulumi.Input[str]] = None,
 mtu: Optional[pulumi.Input[int]] = None,
 netstack: Optional[pulumi.Input[str]] = None,

@@ -613,10 +617,10 @@ class Vnic(pulumi.CustomResource):
 :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
 :param pulumi.ResourceOptions opts: Options for the resource.
 :param pulumi.Input[str] distributed_port_group: Key of the distributed portgroup the nic will connect to.
-:param pulumi.Input[str] distributed_switch_port: UUID of the
+:param pulumi.Input[str] distributed_switch_port: UUID of the DVSwitch the nic will be attached to. Do not set if you set portgroup.
 :param pulumi.Input[str] host: ESX host the interface belongs to
-:param pulumi.Input[
-:param pulumi.Input[
+:param pulumi.Input[pulumi.InputType['VnicIpv4Args']] ipv4: IPv4 settings. Either this or `ipv6` needs to be set. See IPv4 options below.
+:param pulumi.Input[pulumi.InputType['VnicIpv6Args']] ipv6: IPv6 settings. Either this or `ipv6` needs to be set. See IPv6 options below.
 :param pulumi.Input[str] mac: MAC address of the interface.
 :param pulumi.Input[int] mtu: MTU of the interface.
 :param pulumi.Input[str] netstack: TCP/IP stack setting for this interface. Possible values are `defaultTcpipStack``, 'vmotion', 'vSphereProvisioning'. Changing this will force the creation of a new interface since it's not possible to change the stack once it gets created. (Default:`defaultTcpipStack`)

@@ -651,7 +655,7 @@ class Vnic(pulumi.CustomResource):
 @pulumi.getter(name="distributedSwitchPort")
 def distributed_switch_port(self) -> pulumi.Output[Optional[str]]:
 """
-UUID of the
+UUID of the DVSwitch the nic will be attached to. Do not set if you set portgroup.
 """
 return pulumi.get(self, "distributed_switch_port")
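The vnic.py changes are likewise confined to annotations and docstrings: `ipv4`/`ipv6` carry explicit `pulumi.InputType['VnicIpv4Args']`/`pulumi.InputType['VnicIpv6Args']` types, and the `distributed_switch_port` documentation now spells out that it takes the DVSwitch UUID and must not be combined with `portgroup`. As a rough illustration (not taken from the package docs), `pulumi.InputType[...]` in current Pulumi SDKs is a union of the generated Args class and a plain mapping, so either form below should satisfy the new signature; the host ID and port group name are hypothetical:

```python
import pulumi_vsphere as vsphere

# Hypothetical identifiers; substitute a real ESXi host ID and port group name.
esxi_host_id = "host-123"

# Form 1: the generated Args class, as in the bundled examples.
v1 = vsphere.Vnic("v1",
    host=esxi_host_id,
    portgroup="my-pg",  # mutually exclusive with distributed_switch_port
    ipv4=vsphere.VnicIpv4Args(dhcp=True))

# Form 2: a plain dict, which pulumi.InputType[...] also admits.
v2 = vsphere.Vnic("v2",
    host=esxi_host_id,
    portgroup="my-pg",
    ipv4={"dhcp": True})
```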
pulumi_vsphere-4.11.0a1.dist-info/RECORD ADDED

@@ -0,0 +1,86 @@
+pulumi_vsphere/__init__.py,sha256=7zI4b3F2NEW8hz7TIVEWb1MZI_HqHV4GUM2o9lJKez0,10798
+pulumi_vsphere/_inputs.py,sha256=ixkfGk_Yv3BJXWUEX2vtyKxe-1k_g501JLA0LCRjsAE,140246
+pulumi_vsphere/_utilities.py,sha256=b6gJn0IIeM1t6Q7EVjqw3yhuGyP-uENQhtL5yp7aHR8,9248
+pulumi_vsphere/compute_cluster.py,sha256=FLa9KrvlULmst9Cip4LkGhKmxnM_6-x2bwHkjvxMRPs,226411
+pulumi_vsphere/compute_cluster_host_group.py,sha256=QUCBKwC2RaJI6ZuC_qBDZJl5gey-rs3iVsbPrFFK-1g,13437
+pulumi_vsphere/compute_cluster_vm_affinity_rule.py,sha256=UuLsBkI_YfSdpuGVMurBxC_if3nbjKGZwryeQNkIRGw,25317
+pulumi_vsphere/compute_cluster_vm_anti_affinity_rule.py,sha256=-5K0E6HRhIWGBKAcMZsqXNFQLWIAhJs07_PKVi-3PBg,17554
+pulumi_vsphere/compute_cluster_vm_dependency_rule.py,sha256=_q1lrkr1pWzl10dGA1PdRSOXibmYvLKBLTdZG0O0YNQ,21613
+pulumi_vsphere/compute_cluster_vm_group.py,sha256=l3gk0qAc9UiX6DpkCZl30L6qmTigu4pGo5O1YBJoatU,13459
+pulumi_vsphere/compute_cluster_vm_host_rule.py,sha256=Am9PFFU8PghrdOTRbqJzeFS6nLxgX5Pob-SKqCEaY3k,24422
+pulumi_vsphere/content_library.py,sha256=-3mst76bjX3vq4izMl8pIlZR2B9RjEG3A0VA7jpOfRc,15087
+pulumi_vsphere/content_library_item.py,sha256=qjHPcnsxpNiFOzCPxQXngZXtR1R1RrsqAhdJgpqgZUc,15640
+pulumi_vsphere/custom_attribute.py,sha256=wTuv97G4uXpJ9kv_TaKZc1OYj1e_68c9dAsFc9Ds5nQ,9031
+pulumi_vsphere/datacenter.py,sha256=VheWVrfjRFZMQjI2M_8dNKG93ixCwuKj1ecGyqNTok0,19614
+pulumi_vsphere/datastore_cluster.py,sha256=76rYqp4f7WjBTYtn_1xpb5TE2taxUBSAkGk8z_hrZ4w,82939
+pulumi_vsphere/datastore_cluster_vm_anti_affinity_rule.py,sha256=8eKwKS9ZYaZICVxjNrNLDDBqBKyQqOJmC_nt0jlqYJY,16842
+pulumi_vsphere/distributed_port_group.py,sha256=4Lm22mCkYArB_SdSW7sS-_n0QVc-jPQaTburJgPVNic,132133
+pulumi_vsphere/distributed_virtual_switch.py,sha256=Hu1MuRC9eZhVYOfGJgnSssDosYsynbUXeUZMxND_c5Q,249390
+pulumi_vsphere/dpm_host_override.py,sha256=i3eG4PkI3Hi2KodP6OFAGC6QLczmNDRcSIt9MbK7U1Q,15371
+pulumi_vsphere/drs_vm_override.py,sha256=qGlUe9gq9StYW4OHDSZKA03j0ru3Ruvb9SEaVcpHo1c,16657
+pulumi_vsphere/entity_permissions.py,sha256=uPVnBk47c6csM0kQLwJC3Niml9uCTq23KGOfhNyNrrk,12429
+pulumi_vsphere/file.py,sha256=uJeBCHJ2IXJO6Dj5vYeZvkYaHNNoWhkQ6PzmWVwkRIE,23397
+pulumi_vsphere/folder.py,sha256=YP47W3BA_1Z8prCz2NTwMQlyxIHesu5o4FpT3XIkozY,25623
+pulumi_vsphere/get_compute_cluster.py,sha256=nBgxOLhF9RV5vGGwgCxeNCyVNKr_tQi44VnaBhBbX1M,5997
+pulumi_vsphere/get_compute_cluster_host_group.py,sha256=J-pjwUhXIc4f0S9swi-UaNszb0c8x4m7Ja5wsgLr4gw,6174
+pulumi_vsphere/get_content_library.py,sha256=r1v63MViQ-uXD92M81VCWv-6dzpmB82AHjDytMR6x6I,3218
+pulumi_vsphere/get_content_library_item.py,sha256=-yztx7VLc3cyG8nwjZ4iA4QbwvVGgL6K81YHOrYHJ4U,4496
+pulumi_vsphere/get_custom_attribute.py,sha256=skM0hjQmPyYCQdEgXjbofu9Ulez4ylw9jQKb9g3Zz2I,4354
+pulumi_vsphere/get_datacenter.py,sha256=sfXiHyHNnm9S-bUpcbaGw1ei7ZMNJ1-s_lV6Z7B0UH4,4083
+pulumi_vsphere/get_datastore.py,sha256=vAkqhKmMU5DEVVgFnaGMb7GL8FTX6aDcg2CIiaOfkO8,5979
+pulumi_vsphere/get_datastore_cluster.py,sha256=4nkwUsNLdqWIoxYhiFzkFoYjr4zyOf-5UToK_0vbELg,5157
+pulumi_vsphere/get_datastore_stats.py,sha256=2WAAe8o9BQL6Uq3jsPUd4MOx2quy_Cw-_okOBqIGKI0,7625
+pulumi_vsphere/get_distributed_virtual_switch.py,sha256=icwEpCtIYmPBXvT_3M_dHV0fjwPzbR7cQWQtN-75F0c,6922
+pulumi_vsphere/get_dynamic.py,sha256=-s--cho8svTCbBr9C2jvqPuhyV39CVpWWyGgfwwDx2c,5974
+pulumi_vsphere/get_folder.py,sha256=rkdRDdJGuZpbcuA1PIVYr90vSTZIk26L-FCSql1H7rE,3999
+pulumi_vsphere/get_guest_os_customization.py,sha256=IWLONiIdg-z06VbYAVL-fBBoQjRiVw7NQVa4DNf32ck,6370
+pulumi_vsphere/get_host.py,sha256=aZ-2kism0hoSyeCGALJr759ILDrk2ohDskmexXRwSb8,5200
+pulumi_vsphere/get_host_base_images.py,sha256=Z3sBapAoI6XeUs6XxWEAwtfb-8ka_uEUjmtbRy0jucw,2934
+pulumi_vsphere/get_host_pci_device.py,sha256=LCow2auxsoSB7p45FnQ4u0WDGUJTjyXrxcvQ2ZpSoqE,7612
+pulumi_vsphere/get_host_thumbprint.py,sha256=Xj4plQOyPh5QXBFZFQFhR8VULbuozkWISNDuCdCizhM,4963
+pulumi_vsphere/get_host_vgpu_profile.py,sha256=afbe_YuQVS676bCz96W0d-3VrPqYY0Cxa0j5dFo8xJ8,6348
+pulumi_vsphere/get_license.py,sha256=NPtNrjcdrDFODRrd6gFnFo9IgVlGOocwrUW24r71lI4,5258
+pulumi_vsphere/get_network.py,sha256=bm8QyrAhHs32jZnfwpqMOiSV8Pmyr03DqTMEOMGo-NI,7133
+pulumi_vsphere/get_ovf_vm_template.py,sha256=1C3sBBXBAzpxaZBiv4knADbzSNN7XaV3ItNLwRcFwb4,25791
+pulumi_vsphere/get_policy.py,sha256=ntQk22TpTCHRg4X5aYx4-oTfxyofEqfywYmFHmCF-II,3425
+pulumi_vsphere/get_resource_pool.py,sha256=AYqjQdLIVB_kIcqRx8l14Aey3KebJE_ybTjAdVAAsjU,7257
+pulumi_vsphere/get_role.py,sha256=XpjnMAWOVCp9-L8HUkEi5q7KKnQVtbl4jbLG2_XHWPM,5211
+pulumi_vsphere/get_tag.py,sha256=NJIo7J9NsSr0SxCScu7QrNn41dWVc5vXAcIQN5VmAj8,4816
+pulumi_vsphere/get_tag_category.py,sha256=R-kLcRAuATkFp8PPKt01OGp8lpmceKnRf9lEn_ZJzUk,5034
+pulumi_vsphere/get_vapp_container.py,sha256=1OUABHMT3qFbSQRqFBCQNltmuo_OF983GJR09vnJUsY,4473
+pulumi_vsphere/get_virtual_machine.py,sha256=tvaOuo8cvtOTXyGxMKaQ8-tKxd5-gBVfQ0cWpfM2LXs,58531
+pulumi_vsphere/get_vmfs_disks.py,sha256=KotPjKN7ncgdWzl6qSU1DuuxIxu8BoQ5CLDH1JAgShg,6717
+pulumi_vsphere/guest_os_customization.py,sha256=yfWCEDEdo2MIACZwJo7DhbUyLEp8dhYm_b0d7QPwGKs,17018
+pulumi_vsphere/ha_vm_override.py,sha256=Z2p_A1fowtS4_UGq-SHP_dfw2ruZuYVMTo5DizkeOEQ,52698
+pulumi_vsphere/host.py,sha256=iLAODc_Ez09iYoOZ_opB3xfINe17ZohHnN_ZTq98owc,45815
+pulumi_vsphere/host_port_group.py,sha256=jtSm7mmPU0SWP54n-dr_5fgEHz6Y3fHb5YNl_ArRuKE,56940
+pulumi_vsphere/host_virtual_switch.py,sha256=WTZtqM4GDzLIcmviM_fQ47SHNXQrEHVqV0q3wEB0sFA,57317
+pulumi_vsphere/license.py,sha256=4IhaSHnAjOFSlSKu4AYGcbraULzzTRASs29wB7nw7aU,12022
+pulumi_vsphere/nas_datastore.py,sha256=X5PeUipclE4MG0uHS_SlmpaHBrz4jtaLhVEfOybjYQY,48202
+pulumi_vsphere/offline_software_depot.py,sha256=GKoYb1AzKVXA34GNOIvrcslSHN7IVySWAkDZ-APVkLc,7319
+pulumi_vsphere/outputs.py,sha256=pHQWKY3l15rEvlhXYBFL8IRAztYGlPwFJqXnK_R8XJg,153273
+pulumi_vsphere/provider.py,sha256=ljEs3n-BiTMtFb7qqPn6YDZh3M-L5Oq2LAbX1QfV274,20511
+pulumi_vsphere/pulumi-plugin.json,sha256=NFN02Cgqt3zP6lw3HH3uZxeX8D4Ufg_TeytUR8x0fOo,75
+pulumi_vsphere/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pulumi_vsphere/resource_pool.py,sha256=WMj87tZbw5JBb-wAcvncdnVY8FvmDHpy0go7Go5fNvg,54815
+pulumi_vsphere/role.py,sha256=kO9DfdLWHq5bfwD0EBAsZuWQxrcX7amJBVUrfREHf3c,8528
+pulumi_vsphere/storage_drs_vm_override.py,sha256=f1DoLBiMk4_JQdPaemv5RW6xPksw5p4w-AFLZsC6Jr8,20828
+pulumi_vsphere/supervisor.py,sha256=HeS61exRz8wkjxoapwlkDjvhpR-zY28BWID10NRm4fM,42959
+pulumi_vsphere/tag.py,sha256=gF7uOgj1Xm_k2cV7YIiwBGakQvhi-jlsNYKDm7JDFWw,9943
+pulumi_vsphere/tag_category.py,sha256=FzqdUOMwmAo7W6Xuw3-CByCbBSmsvkCGXiLRaMUVVQ4,15120
+pulumi_vsphere/vapp_container.py,sha256=1On1t7PnLEqnQvNIt1XIh9VFu7LaSegyRyDaJf8lQmQ,48255
+pulumi_vsphere/vapp_entity.py,sha256=JePz7OF__e1x7aFp_9I257S61HnDT7gdHsWK-u17nlM,29555
+pulumi_vsphere/virtual_disk.py,sha256=wFycNUG7WdYogDgWDRc5zP1u9n2afPtQOUiX_987ito,28633
+pulumi_vsphere/virtual_machine.py,sha256=it5F-ML0tRVEsXti0MNLi6eqoUvSYlNVot1tt23IA2U,214932
+pulumi_vsphere/virtual_machine_class.py,sha256=PuDyxZffdDfR3nOouNiXEBkIf-NPiXx0to2z0q_WM8g,17161
+pulumi_vsphere/virtual_machine_snapshot.py,sha256=znCP8B7dt2lvsEa7nTzzqACttXjQNKWNjxCZGjThYNs,23926
+pulumi_vsphere/vm_storage_policy.py,sha256=IgoanyDCih8ALdG-wdewI9KvrYfSiew1Z7Xhb3SSqwA,20198
+pulumi_vsphere/vmfs_datastore.py,sha256=gNhmetE0wMDKMEM_ImATFpnTJhqMEPd0q8eOgqpedr0,34596
+pulumi_vsphere/vnic.py,sha256=uaj0DqvKOflKM4Sb_W6F3TCp1uikL8WhGxWCu4907aw,30503
+pulumi_vsphere/config/__init__.py,sha256=cfY0smRZD3fDVc93ZIAxEl_IM2pynmXB52n3Ahzi030,285
+pulumi_vsphere/config/__init__.pyi,sha256=ZO6ktIIpO1bKQNe2__l8JqDti_ZKgnRvHTcXcRWzb0M,1351
+pulumi_vsphere/config/vars.py,sha256=fcurb1Hwqp3evWnRD4s2t--MUjqR9R11nIm04F1UMW0,3210
+pulumi_vsphere-4.11.0a1.dist-info/METADATA,sha256=BzxJkAE7jURFnsgp7VKvkTaw_m5bLT-rAToLB_KE8xg,4949
+pulumi_vsphere-4.11.0a1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+pulumi_vsphere-4.11.0a1.dist-info/top_level.txt,sha256=00BIE8zaYtdsw0_tBfXR8E5sTs3lRnwlcZ6lUdu4loI,15
+pulumi_vsphere-4.11.0a1.dist-info/RECORD,,
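Each RECORD entry has the form `path,sha256=<urlsafe-base64 digest without padding>,<size in bytes>`, so the listing above can be spot-checked against a downloaded copy of the wheel. A minimal sketch (the local wheel filename is an assumption, not part of this diff):

```python
import base64
import hashlib
import zipfile

def check_record(wheel_path: str) -> None:
    """Verify every RECORD entry (hash and size) inside a wheel archive."""
    with zipfile.ZipFile(wheel_path) as whl:
        record_name = next(n for n in whl.namelist() if n.endswith(".dist-info/RECORD"))
        for line in whl.read(record_name).decode().splitlines():
            if not line:
                continue
            path, hash_spec, size = line.rsplit(",", 2)
            if not hash_spec:  # the RECORD file itself is listed without a hash
                continue
            algo, _, expected = hash_spec.partition("=")
            data = whl.read(path)
            digest = base64.urlsafe_b64encode(hashlib.new(algo, data).digest())
            assert digest.rstrip(b"=").decode() == expected, f"hash mismatch: {path}"
            assert len(data) == int(size), f"size mismatch: {path}"

# Assumed local filename for the new wheel.
check_record("pulumi_vsphere-4.11.0a1-py3-none-any.whl")
```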