vagrant-conoha 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (109) hide show
  1. checksums.yaml +7 -0
  2. data/.gitignore +22 -0
  3. data/.rubocop.yml +35 -0
  4. data/CHANGELOG.md +3 -0
  5. data/Gemfile +19 -0
  6. data/LICENSE +23 -0
  7. data/Rakefile +25 -0
  8. data/Vagrantfile +71 -0
  9. data/dummy.box +0 -0
  10. data/example_box/README.md +13 -0
  11. data/example_box/metadata.json +3 -0
  12. data/functional_tests/Vagrantfile +58 -0
  13. data/functional_tests/keys/vagrant-openstack +27 -0
  14. data/functional_tests/keys/vagrant-openstack.pub +1 -0
  15. data/functional_tests/run_tests.sh +142 -0
  16. data/lib/vagrant-conoha.rb +29 -0
  17. data/lib/vagrant-conoha/action.rb +227 -0
  18. data/lib/vagrant-conoha/action/abstract_action.rb +22 -0
  19. data/lib/vagrant-conoha/action/connect_openstack.rb +60 -0
  20. data/lib/vagrant-conoha/action/create_server.rb +154 -0
  21. data/lib/vagrant-conoha/action/create_stack.rb +68 -0
  22. data/lib/vagrant-conoha/action/delete_server.rb +53 -0
  23. data/lib/vagrant-conoha/action/delete_stack.rb +73 -0
  24. data/lib/vagrant-conoha/action/message.rb +19 -0
  25. data/lib/vagrant-conoha/action/provision.rb +60 -0
  26. data/lib/vagrant-conoha/action/read_ssh_info.rb +72 -0
  27. data/lib/vagrant-conoha/action/read_state.rb +43 -0
  28. data/lib/vagrant-conoha/action/resume.rb +24 -0
  29. data/lib/vagrant-conoha/action/start_server.rb +24 -0
  30. data/lib/vagrant-conoha/action/stop_server.rb +25 -0
  31. data/lib/vagrant-conoha/action/suspend.rb +24 -0
  32. data/lib/vagrant-conoha/action/sync_folders.rb +129 -0
  33. data/lib/vagrant-conoha/action/wait_accessible.rb +61 -0
  34. data/lib/vagrant-conoha/action/wait_active.rb +33 -0
  35. data/lib/vagrant-conoha/action/wait_stop.rb +33 -0
  36. data/lib/vagrant-conoha/catalog/openstack_catalog.rb +67 -0
  37. data/lib/vagrant-conoha/client/cinder.rb +39 -0
  38. data/lib/vagrant-conoha/client/domain.rb +159 -0
  39. data/lib/vagrant-conoha/client/glance.rb +65 -0
  40. data/lib/vagrant-conoha/client/heat.rb +49 -0
  41. data/lib/vagrant-conoha/client/http_utils.rb +116 -0
  42. data/lib/vagrant-conoha/client/keystone.rb +77 -0
  43. data/lib/vagrant-conoha/client/neutron.rb +48 -0
  44. data/lib/vagrant-conoha/client/nova.rb +212 -0
  45. data/lib/vagrant-conoha/client/openstack.rb +59 -0
  46. data/lib/vagrant-conoha/client/request_logger.rb +23 -0
  47. data/lib/vagrant-conoha/client/rest_utils.rb +25 -0
  48. data/lib/vagrant-conoha/command/abstract_command.rb +51 -0
  49. data/lib/vagrant-conoha/command/flavor_list.rb +24 -0
  50. data/lib/vagrant-conoha/command/image_list.rb +29 -0
  51. data/lib/vagrant-conoha/command/main.rb +51 -0
  52. data/lib/vagrant-conoha/command/network_list.rb +25 -0
  53. data/lib/vagrant-conoha/command/openstack_command.rb +16 -0
  54. data/lib/vagrant-conoha/command/reset.rb +20 -0
  55. data/lib/vagrant-conoha/command/subnet_list.rb +22 -0
  56. data/lib/vagrant-conoha/command/utils.rb +22 -0
  57. data/lib/vagrant-conoha/command/volume_list.rb +25 -0
  58. data/lib/vagrant-conoha/config.rb +390 -0
  59. data/lib/vagrant-conoha/config/http.rb +39 -0
  60. data/lib/vagrant-conoha/config_resolver.rb +285 -0
  61. data/lib/vagrant-conoha/errors.rb +187 -0
  62. data/lib/vagrant-conoha/logging.rb +39 -0
  63. data/lib/vagrant-conoha/plugin.rb +48 -0
  64. data/lib/vagrant-conoha/provider.rb +50 -0
  65. data/lib/vagrant-conoha/utils.rb +26 -0
  66. data/lib/vagrant-conoha/version.rb +15 -0
  67. data/lib/vagrant-conoha/version_checker.rb +76 -0
  68. data/locales/en.yml +393 -0
  69. data/spec/vagrant-conoha/action/connect_openstack_spec.rb +695 -0
  70. data/spec/vagrant-conoha/action/create_server_spec.rb +225 -0
  71. data/spec/vagrant-conoha/action/create_stack_spec.rb +99 -0
  72. data/spec/vagrant-conoha/action/delete_server_spec.rb +89 -0
  73. data/spec/vagrant-conoha/action/delete_stack_spec.rb +63 -0
  74. data/spec/vagrant-conoha/action/message_spec.rb +33 -0
  75. data/spec/vagrant-conoha/action/provision_spec.rb +104 -0
  76. data/spec/vagrant-conoha/action/read_ssh_info_spec.rb +190 -0
  77. data/spec/vagrant-conoha/action/read_state_spec.rb +81 -0
  78. data/spec/vagrant-conoha/action/resume_server_spec.rb +49 -0
  79. data/spec/vagrant-conoha/action/start_server_spec.rb +49 -0
  80. data/spec/vagrant-conoha/action/stop_server_spec.rb +49 -0
  81. data/spec/vagrant-conoha/action/suspend_server_spec.rb +49 -0
  82. data/spec/vagrant-conoha/action/sync_folders_spec.rb +155 -0
  83. data/spec/vagrant-conoha/action/wait_accessible_spec.rb +67 -0
  84. data/spec/vagrant-conoha/action/wait_active_spec.rb +53 -0
  85. data/spec/vagrant-conoha/action/wait_stop_spec.rb +53 -0
  86. data/spec/vagrant-conoha/action_spec.rb +120 -0
  87. data/spec/vagrant-conoha/client/cinder_spec.rb +127 -0
  88. data/spec/vagrant-conoha/client/glance_spec.rb +143 -0
  89. data/spec/vagrant-conoha/client/heat_spec.rb +128 -0
  90. data/spec/vagrant-conoha/client/keystone_spec.rb +150 -0
  91. data/spec/vagrant-conoha/client/neutron_spec.rb +171 -0
  92. data/spec/vagrant-conoha/client/nova_spec.rb +757 -0
  93. data/spec/vagrant-conoha/client/utils_spec.rb +176 -0
  94. data/spec/vagrant-conoha/command/flavor_list_spec.rb +43 -0
  95. data/spec/vagrant-conoha/command/image_list_spec.rb +95 -0
  96. data/spec/vagrant-conoha/command/network_list_spec.rb +65 -0
  97. data/spec/vagrant-conoha/command/reset_spec.rb +24 -0
  98. data/spec/vagrant-conoha/command/subnet_list_spec.rb +45 -0
  99. data/spec/vagrant-conoha/command/volume_list_spec.rb +40 -0
  100. data/spec/vagrant-conoha/config_resolver_spec.rb +860 -0
  101. data/spec/vagrant-conoha/config_spec.rb +373 -0
  102. data/spec/vagrant-conoha/e2e_spec.rb.save +27 -0
  103. data/spec/vagrant-conoha/provider_spec.rb +13 -0
  104. data/spec/vagrant-conoha/spec_helper.rb +37 -0
  105. data/spec/vagrant-conoha/utils_spec.rb +129 -0
  106. data/spec/vagrant-conoha/version_checker_spec.rb +39 -0
  107. data/stackrc +25 -0
  108. data/vagrant-conoha.gemspec +32 -0
  109. metadata +343 -0
@@ -0,0 +1,39 @@
1
module VagrantPlugins
  module ConoHa
    # Sets up Log4r logging so that this plugin's logs are emitted at
    # the same level as Vagrant core logs.
    module Logging
      # Configure the 'vagrant_openstack' logger from the environment.
      # The level is taken from VAGRANT_LOG, falling back to
      # VAGRANT_OPENSTACK_LOG. Nothing is configured when neither
      # variable resolves to a valid Log4r integer level.
      def self.init
        level = resolve_level
        return unless level

        stdout = Log4r::Outputter.stdout
        stdout.formatter = Log4r::PatternFormatter.new(pattern: '%d | %5l | %m', date_pattern: '%Y-%m-%d %H:%M')
        logger = Log4r::Logger.new('vagrant_openstack')
        logger.outputters = stdout
        logger.level = level
      end

      # Look up the Log4r level constant named by the environment.
      # Returns nil when the variable is unset or names no constant
      # (NameError also covers NoMethodError on nil#upcase), or when
      # the constant is not an Integer — constants such as "true"
      # resolve to booleans, which Log4r does not accept as levels.
      def self.resolve_level
        level = begin
          Log4r.const_get(ENV['VAGRANT_LOG'].upcase)
        rescue NameError
          begin
            Log4r.const_get(ENV['VAGRANT_OPENSTACK_LOG'].upcase)
          rescue NameError
            nil
          end
        end
        level.is_a?(Integer) ? level : nil
      end
    end
  end
end
@@ -0,0 +1,48 @@
1
begin
  require 'vagrant'
rescue LoadError
  raise 'The ConoHa provider must be run within Vagrant.'
end

require 'vagrant-conoha/version_checker'

# Sanity check: refuse to load into Vagrant versions this plugin
# was never designed for.
fail 'ConoHa provider is only compatible with Vagrant 1.4+' if Vagrant::VERSION < '1.4.0'

module VagrantPlugins
  module ConoHa
    # Plugin registration: wires up the ConoHa provider, its
    # configuration class and the `vagrant openstack` command.
    class Plugin < Vagrant.plugin('2')
      name 'ConoHa'
      description <<-DESC
      This plugin enables Vagrant to manage machines in ConoHa.
      DESC

      config(:conoha, :provider) do
        require_relative 'config'
        Config
      end

      provider(:conoha, box_optional: true) do
        ConoHa.init_i18n
        ConoHa.init_logging
        VagrantPlugins::ConoHa.check_version

        # Load the actual provider implementation lazily.
        require_relative 'provider'
        Provider
      end

      command('openstack') do
        ConoHa.init_i18n
        ConoHa.init_logging
        VagrantPlugins::ConoHa.check_version

        require_relative 'command/main'
        Command::Main
      end
    end
  end
end
@@ -0,0 +1,50 @@
1
require 'vagrant'

require 'vagrant-conoha/action'

module VagrantPlugins
  module ConoHa
    # Vagrant provider implementation backed by the ConoHa/OpenStack API.
    class Provider < Vagrant.plugin('2', :provider)
      def initialize(machine)
        @machine = machine
      end

      # Return the middleware stack for the given action name, or nil
      # when the Action class defines no matching `action_<name>`
      # method (meaning the action is not supported).
      def action(name)
        method_name = "action_#{name}"
        Action.respond_to?(method_name) ? Action.send(method_name) : nil
      end

      # SSH connection details, produced by the custom "read_ssh_info"
      # action which stores them under :machine_ssh_info.
      def ssh_info
        @machine.action('read_ssh_info')[:machine_ssh_info]
      end

      # Current machine state, produced by the custom "read_state"
      # action (stored under :machine_state_id) and paired with
      # localized short/long descriptions.
      def state
        state_id = @machine.action('read_state')[:machine_state_id]

        short = I18n.t("vagrant_openstack.states.short_#{state_id}")
        long = I18n.t("vagrant_openstack.states.long_#{state_id}")

        Vagrant::MachineState.new(state_id, short, long)
      end

      def to_s
        'Vagrant ConoHa provider'
      end
    end
  end
end
@@ -0,0 +1,26 @@
1
module VagrantPlugins
  module ConoHa
    # Helper routines shared across provider actions.
    class Utils
      def initialize
        @logger = Log4r::Logger.new('vagrant_openstack::action::config_resolver')
      end

      # Resolve the IP address used to reach the server behind
      # env[:machine].
      #
      # A fixed IPv4 address from any attached network wins. Otherwise
      # fall back to the first address of the single attached network,
      # or — with several networks — of the first configured network.
      # Raises Errors::UnableToResolveIP when nothing can be resolved.
      def get_ip_address(env)
        nova = env[:openstack_client].nova
        address_map = nova.get_server_details(env, env[:machine].id)['addresses']

        address_map.each_value do |entries|
          entries.each do |entry|
            fixed_v4 = entry['OS-EXT-IPS:type'] == 'fixed' && entry['version'] == 4
            return entry['addr'] if fixed_v4
          end
        end

        fail Errors::UnableToResolveIP if address_map.size == 0
        candidates =
          if address_map.size == 1
            address_map.first[1]
          else
            address_map[env[:machine].provider_config.networks[0]]
          end
        fail Errors::UnableToResolveIP if candidates.size == 0
        candidates[0]['addr']
      end
    end
  end
end
@@ -0,0 +1,15 @@
1
module VagrantPlugins
  module ConoHa
    # Current plugin version. Stable releases must respect
    # VERSION_PATTERN below.
    VERSION = '0.1.0'

    # Naming convention for stable versions: 'x.y.z' where x, y and z
    # are integers in the range [0, 999].
    VERSION_PATTERN = /^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$/
  end
end
@@ -0,0 +1,76 @@
1
require 'colorize'
require 'singleton'
require 'timeout'
require 'vagrant-conoha/version'
4
+
5
+ module VagrantPlugins
6
+ module ConoHa
7
+ class VersionChecker
8
+ include Singleton
9
+
10
+ #
11
+ # :latest, :outdated or :unstable
12
+ #
13
+ # A version is considered unstable if it does not
14
+ # respect the pattern or if it is greater than the
15
+ # latest from rubygem
16
+ #
17
+ attr_accessor :status
18
+
19
+ def initialize
20
+ @status = nil
21
+ end
22
+
23
+ #
24
+ # Check the latest version from rubygem and set the status
25
+ #
26
+ def check
27
+ return @status unless @status.nil?
28
+ latest = Gem.latest_spec_for('vagrant-conoha').version.version
29
+ current = VagrantPlugins::ConoHa::VERSION
30
+
31
+ unless current =~ VERSION_PATTERN
32
+ @status = :unstable
33
+ print I18n.t('vagrant_openstack.version_unstable')
34
+ return
35
+ end
36
+
37
+ if latest.eql? current
38
+ @status = :latest
39
+ return
40
+ end
41
+
42
+ v_latest = latest.split('.').map(&:to_i)
43
+ v_current = current.split('.').map(&:to_i)
44
+
45
+ i_latest = v_latest[2] + v_latest[1] * 1000 + v_latest[0] * 1_000_000
46
+ i_current = v_current[2] + v_current[1] * 1000 + v_current[0] * 1_000_000
47
+
48
+ if i_current > i_latest
49
+ @status = :unstable
50
+ print I18n.t('vagrant_openstack.version_unstable')
51
+ return
52
+ end
53
+
54
+ @status = :outdated
55
+ print I18n.t('vagrant_openstack.version_outdated', latest: latest, current: current)
56
+ end
57
+
58
+ private
59
+
60
+ def print(message)
61
+ puts message.yellow
62
+ puts ''
63
+ end
64
+ end
65
+
66
+ # rubocop:disable Lint/HandleExceptions
67
+ def self.check_version
68
+ timeout(3, Errors::Timeout) do
69
+ VersionChecker.instance.check
70
+ end
71
+ rescue
72
+ # Do nothing whatever the failure cause
73
+ end
74
+ # rubocop:enable Lint/HandleExceptions
75
+ end
76
+ end
data/locales/en.yml ADDED
@@ -0,0 +1,393 @@
1
+ en:
2
+ vagrant_openstack:
3
+ global_error: |-
4
+ An unknown error happened in Vagrant ConoHa plugin
5
+
6
+ To easily debug what happened, we recommend to set the environment
7
+ variable VAGRANT_OPENSTACK_LOG to debug
8
+
9
+ $ export VAGRANT_OPENSTACK_LOG=debug
10
+
11
+ If doing this does not help fixing your issue, there may be a bug
12
+ in the provider. Please submit an issue on Github at
13
+ https://github.com/hironobu-s/vagrant-conoha
14
+ with the stacktrace and the logs.
15
+
16
+ We are looking for feedback, so feel free to ask questions or
17
+ describe features you would like to see in this provider.
18
+ version_outdated: |-
19
+ You're not using the latest version of the 'vagrant-conoha' plugin.
20
+ The latest version is %{latest}, yours is %{current}. You should update to the latest
21
+ version running the command :
22
+
23
+ $ vagrant plugin update vagrant-conoha
24
+ version_unstable: |-
25
+ You're not running a stable version of the 'vagrant-conoha' plugin.
26
+ Unless you are either developing on vagrant or have deliberately installed an
27
+ unstable version, you should uninstall it and install the latest stable version
28
+ running commands :
29
+
30
+ $ vagrant plugin uninstall vagrant-conoha
31
+ $ vagrant plugin install vagrant-conoha
32
+ already_created: |-
33
+ The server is already created.
34
+ create_stack: |-
35
+ Creating Heat Stack.
36
+ delete_stack: |-
37
+ Deleting Heat Stack.
38
+ already_suspended: |-
39
+ The server is already suspended.
40
+ ongoing_task: |-
41
+ The server is currently running a task and cannot execute your request.
42
+ not_suspended: |-
43
+ The server is not currently "suspended" and cannot be resumed.
44
+ deleting_server: |-
45
+ Deleting server...
46
+ finding_flavor: |-
47
+ Finding flavor for server...
48
+ finding_image: |-
49
+ Finding image for server...
50
+ finding_networks: |-
51
+ Finding network(s) for server...
52
+ finding_volumes: |-
53
+ Finding volume(s) to attach on server...
54
+ launching_server: |-
55
+ Launching a server with the following settings...
56
+ not_created: |-
57
+ The server hasn't been created yet. Run `vagrant up` first.
58
+ ready: |-
59
+ The server is ready!
60
+ stopping_server: |-
61
+ Stopping server...
62
+ starting_server: |-
63
+ Starting server...
64
+ timeout: |-
65
+ Timeout!
66
+ trying_authentication: |-
67
+ Trying authentication...
68
+ rsync_folder: |-
69
+ Rsyncing folder: %{hostpath} => %{guestpath}
70
+ using_floating_ip: |-
71
+ Using floating IP %{floating_ip}
72
+ waiting_for_build: |-
73
+ Waiting for the server to be built...
74
+ waiting_for_ssh: |-
75
+ Waiting for SSH to become available...
76
+ waiting_stop: |-
77
+ Waiting for the server to stop...
78
+ waiting_start: |-
79
+ Waiting for the server to start...
80
+ waiting_deleted: |-
81
+ Waiting for the server to be deleted...
82
+ waiting_for_stack: |-
83
+ Waiting for Heat Stack to be created...
84
+ waiting_for_stack_deleted: |-
85
+ Waiting for Heat Stack to be deleted...
86
+ warn_networks: |-
87
+ Warning! This provider doesn't support any of the Vagrant
88
+ high-level network configurations (`config.vm.network`). They
89
+ will be silently ignored.
90
+ warn_network_identifier_is_assumed_to_be_an_id: |-
91
+ As Neutron endpoint is not available, the identifier '%{network}' is assumed to be an id (not a name).
92
+ warn_volume_identifier_is_assumed_to_be_an_id: |-
93
+ As Cinder endpoint is not available, the identifier '%{volume}' is assumed to be an id (not a name).
94
+ ssh_disabled_provisioning: |-
95
+ Provisioning will not be performed because provider config ssh_disabled is set to true
96
+ ssh_disabled_sync_folders: |-
97
+ Folders will not be synced because provider config ssh_disabled is set to true
98
+ disabled_sync_folders: |-
99
+ Sync folders are disabled in the provider configuration
100
+
101
+ config:
102
+ password_required: |-
103
+ A password is required.
104
+ username_required: |-
105
+ A username is required.
106
+ tenant_name_required: |-
107
+ A tenant name is required.
108
+ invalid_uri: |-
109
+ The value for %{key} is not a valid URI: %{uri}
110
+ invalid_stack: |-
111
+ One of the stacks in the stacks provider configuration is invalid
112
+ The configuration option should have the following format:
113
+ os.stacks = [{
114
+ name: 'mystack',
115
+ template: '/path/to/heat_template.yml',
116
+ }]
117
+ invalid_endpoint_type: |-
118
+ endpoint_type must be publicURL, adminURL or internalURL (if not provided, default is publicURL)
119
+ metadata_must_be_hash: |-
120
+ Metadata must be a hash.
121
+ keypair_name_required: |-
122
+ Warning! You have specified ssh.private_key_path in your Vagrant configuration.
123
+ but neither keypair_name nor public_key_path is present. This provider
124
+ will automatically generate a new keypair and your configuration option
125
+ ssh.private_key_path will be overridden
126
+ private_key_missing: |-
127
+ config.ssh.private_key_path is required when either keypair_name or
128
+ public_key_path is set in Vagrantfile
129
+ ssh_username_deprecated: |-
130
+ ssh_username provider config is deprecated for vagrant-conoha.
131
+ If you are using it, it will continue to work but we recommend to switch to the
132
+ standard vagrant configuration option `config.ssh.username` instead
133
+ ssh_username_required: |-
134
+ vagrant standard configuration option `ssh.username` is required
135
+ invalid_value_for_parameter: |-
136
+ Invalid value '%{value}' for parameter '%{parameter}'
137
+
138
+ errors:
139
+ default: |-
140
+ %{message}
141
+ timeout: |-
142
+ Timeout occurred
143
+ authentication_required: |-
144
+ Authentication token is missing or no longer valid.
145
+ floating_ip_already_assigned: |-
146
+ Floating IP %{floating_ip} already assigned to another server
147
+ floating_ip_not_available: |-
148
+ Floating IP %{floating_ip} not available for this tenant
149
+ authentication_failed: |-
150
+ Authentication failed.
151
+ bad_authentication_endpoint: |-
152
+ Bad authentication endpoint.
153
+ create_bad_state: |-
154
+ While creating the server, it transitioned to an unexpected
155
+ state: '%{state}', instead of properly booting. Run `vagrant status`
156
+ to find out what can be done about this state, or `vagrant destroy`
157
+ if you want to start over.
158
+ no_matching_api_version: |-
159
+ No matching version found for %{api_name} API
160
+
161
+ %{version_list}
162
+ You must specify the desired %{api_name} API url by setting
163
+ the provider's property '%{url_property}'.
164
+ no_matching_flavor: |-
165
+ No matching flavor was found! Please check your flavor setting
166
+ to make sure you have a valid flavor chosen.
167
+ no_matching_image: |-
168
+ No matching image was found! Please check your image setting to
169
+ make sure you have a valid image chosen.
170
+ conflict_boot_volume: |-
171
+ When booting from an existing volume it is not authorized to specify in your Vagrantfile either 'image' or 'size' or 'delete_on_destroy'.
172
+ When booting from a newly creating volume it is not authorized to specify in your Vagrantfile either 'id' or 'name'.
173
+ sync_method_error: |-
174
+ Value '%{sync_method_value}' is not allowed for 'sync_method' configuration parameter. Valid values are 'rsync' and 'none'
175
+ rsync_error: |-
176
+ There was an error when attempting to rsync a shared folder.
177
+ Please inspect the error message below for more info.
178
+
179
+ Host path: %{hostpath}
180
+ Guest path: %{guestpath}
181
+ Error: %{stderr}
182
+ ssh_unavailble: |-
183
+ SSH server unavailable on instance %{host}. You should maybe increase the timeout value which currently is %{timeout} second(s).
184
+ no_arg_required_for_command: |-
185
+ Command '%{cmd}' does not require any arguments.
186
+ unrecognized_arg_for_command: |-
187
+ Argument '%{arg}' unrecognized for command '%{cmd}'.
188
+ unable_to_resolve_floating_ip: |-
189
+ Vagrant was unable to resolve a floating ip to communicate with your OpenStack instance. Please specify in your Vagrantfile either `floating_ip` or `floating_ip_pool`.
190
+ unable_to_resolve_ip: |-
191
+ Vagrant was unable to resolve a valid ip to ssh on your OpenStack instance.
192
+ unable_to_resolve_ssh_key: |-
193
+ Vagrant was unable to resolve a valid ssh key to connect to your OpenStack instance. Please specify in your Vagrantfile either `public_key_path` or `keypair_name`.
194
+ invalid_network_format: |-
195
+ Network '%{network}' is not valid.
196
+ unresolved_network: |-
197
+ No matching network with id or name '%{network}'
198
+ unresolved_network_id: |-
199
+ No matching network with id '%{id}'
200
+ unresolved_network_name: |-
201
+ No matching network with name '%{name}'
202
+ conflict_network_name_id: |-
203
+ One (and only one) of 'id' or 'name' must be specified in network definition : %{network}
204
+ multiple_network_name: |-
205
+ More than one network exists with name '%{name}'. In this case you can't use name in network definition. Please, use id instead.
206
+ invalid_volume_format: |-
207
+ Volume '%{volume}' is not valid.
208
+ unresolved_volume: |-
209
+ No matching volume with id or name '%{volume}'
210
+ unresolved_volume_id: |-
211
+ No matching volume with id '%{id}'
212
+ unresolved_volume_name: |-
213
+ No matching volume with name '%{name}'
214
+ conflict_volume_name_id: |-
215
+ One (and only one) of 'id' or 'name' must be specified in volume definition : %{volume}
216
+ multiple_volume_name: |-
217
+ More than one volume exists with name '%{name}'. In this case you can't use name in volume definition. Please, use id instead.
218
+ missing_boot_option: |-
219
+ Either 'image' or 'volume_boot' configuration must be provided
220
+ conflict_boot_option: |-
221
+ Only one of 'image' and 'volume_boot' configuration must be provided
222
+ ssh_username_missing: |-
223
+ Vagrant was unable to resolve which ssh username to use to connect to the machine. Please provide config parameter `ssh.username`
224
+ instance_not_found: |-
225
+ Vagrant was unable to find the OpenStack instance used for your vagrant machine.
226
+ This can happen when the instance has been deleted via OpenStack APIs or OpenStack
227
+ Dashboard instead of using vagrant commands.
228
+ We recommend using the command `vagrant openstack reset` to reset
229
+ vagrant to a clear state
230
+ stack_not_found: |-
231
+ Heat stack not found
232
+ nerwork_service_unavailable: |-
233
+ Neutron service endpoint is not available, thus there is no way to retrieve
234
+ network id from its name. You have to provide only ids in your Vagrantfile.
235
+ volume_service_unavailable: |-
236
+ Cinder service endpoint is not available, thus there is no way to retrieve
237
+ volume id from its name. You have to provide only ids in your Vagrantfile.
238
+ server_status_error: |-
239
+ Server '%{server}' is in error status.
240
+ stack_status_error: |-
241
+ Heat Stack '%{stack}' is in error status.
242
+ missing_nova_endpoint: |-
243
+ Nova endpoint must either be present in your keystone service catalog or be
244
+ specified using the provider's attribute 'openstack_compute_url'
245
+
246
+ states:
247
+ short_active: |-
248
+ active
249
+ long_active: |-
250
+ The server is up and running. Run `vagrant ssh` to access it.
251
+ short_build: |-
252
+ building
253
+ long_build: |-
254
+ The server is currently being built. You must wait for this to
255
+ complete before you can access it. You can delete the server, however,
256
+ by running `vagrant destroy`.
257
+ short_error: |-
258
+ error
259
+ long_error: |-
260
+ The server is in an erroneous state. Contact your OpenStack administrator
261
+ or destroy the machine with `vagrant destroy`.
262
+ short_hard_reboot: |-
263
+ hard reboot
264
+ long_hard_reboot: |-
265
+ The server is hard rebooting. This is equivalent to pulling the power plug
266
+ on a physical server, plugging it back in, and rebooting it.
267
+ short_password: |-
268
+ password reset
269
+ long_password: |-
270
+ The password is being reset on the server.
271
+ short_reboot: |-
272
+ reboot
273
+ long_reboot: |-
274
+ The server is in a soft reboot state. A reboot command was passed to the operating system.
275
+ short_rebuild: |-
276
+ rebuild
277
+ long_rebuild: |-
278
+ The server is currently being rebuilt from an image.
279
+ short_rescue: |-
280
+ rescue
281
+ long_rescue: |-
282
+ The server is in rescue mode.
283
+ short_resize: |-
284
+ resize
285
+ long_resize: |-
286
+ Server is performing the differential copy of data that changed during
287
+ its initial copy. Server is down for this stage.
288
+ short_revert_resize: |-
289
+ revert resize
290
+ long_revert_resize: |-
291
+ The resize or migration of a server failed for some reason. The destination
292
+ server is being cleaned up and the original source server is restarting.
293
+ short_shutoff: |-
294
+ shutoff
295
+ long_shutoff: |-
296
+ The virtual machine (VM) was powered down by the user, but not through the
297
+ OpenStack Compute API. For example, the user issued a shutdown -h command
298
+ from within the server instance. If the OpenStack Compute manager detects
299
+ that the VM was powered down, it transitions the server instance to the
300
+ SHUTOFF status. If you use the OpenStack Compute API to restart the instance,
301
+ the instance might be deleted first, depending on the value in the
302
+ shutdown_terminate database field on the Instance model.
303
+ short_suspended: |-
304
+ suspended
305
+ long_suspended: |-
306
+ The server is suspended, either by request or necessity. This status appears
307
+ for only the following hypervisors: XenServer/XCP, KVM, and ESXi.
308
+ short_unknown: |-
309
+ unknown
310
+ long_unknown: |-
311
+ The state of the server is unknown. Contact your cloud provider.
312
+ short_verify_resize: |-
313
+ verify resize
314
+ long_verifiy_resize: |-
315
+ System is awaiting confirmation that the server is operational after a move or resize.
316
+ short_not_created: |-
317
+ not created
318
+ long_not_created: |-
319
+ The server is not created. Run `vagrant up` to create it.
320
+ short_suspending: |-
321
+ suspending
322
+ long_suspending: |-
323
+ The server is active and currently suspending, either by request or necessity.
324
+ short_resuming: |-
325
+ resuming
326
+ long_resuming: |-
327
+ The server is suspended and currently resuming by request.
328
+ short_spawning: |-
329
+ spawning
330
+ long_spawning: |-
331
+ The server is not created yet but is currently spawning by request.
332
+ short_scheduling: |-
333
+ scheduling
334
+ long_scheduling: |-
335
+ The server is building and currently scheduling by necessity.
336
+ short_deleting: |-
337
+ deleting
338
+ long_deleting: |-
339
+ The server is created and currently deleting by request.
340
+ short_rebooting: |-
341
+ rebooting
342
+ long_rebooting: |-
343
+ The server is currently rebooting, either by request or necessity.
344
+ short_reboot_pending: |-
345
+ reboot_pending
346
+ long_reboot_pending: |-
347
+ The server reboot is pending by necessity.
348
+ short_reboot_started: |-
349
+ reboot_started
350
+ long_reboot_started: |-
351
+ The server is starting to reboot, either by request or necessity.
352
+ short_rebooting_hard: |-
353
+ rebooting_hard
354
+ long_rebooting_hard: |-
355
+ The server is currently hard rebooting by request.
356
+ short_reboot_pending_hard: |-
357
+ reboot_pending_hard
358
+ long_reboot_pending_hard: |-
359
+ The server hard reboot is pending by necessity.
360
+ short_reboot_started_hard: |-
361
+ reboot_started_hard
362
+ long_reboot_started_hard: |-
363
+ The server is starting to hard reboot by request.
364
+
365
+ client:
366
+ looking_for_available_endpoints: |-
367
+ Looking for available endpoints...
368
+ multiple_endpoint: |-
369
+ %{size} endpoints are available for service '%{type}' but only the first one will be used
370
+ authentication: |-
371
+ Authentication on project %{project} with user %{user}
372
+
373
+ command:
374
+ main_synopsis: |-
375
+ this provider specific commands
376
+ main_usage : |-
377
+ Usage: vagrant openstack command
378
+ available_subcommands: |-
379
+ Available subcommands:
380
+ image_list_synopsis : |-
381
+ List available images
382
+ flavor_list_synopsis : |-
383
+ List available flavors
384
+ network_list_synopsis : |-
385
+ List private networks in project
386
+ subnet_list_synopsis : |-
387
+ List subnets for available networks
388
+ flaotingip_list_synopsis : |-
389
+ List floating IP and floating IP pools
390
+ volume_list_synopsis : |-
391
+ List existing volumes
392
+ reset : |-
393
+ Reset provider to a clear state