fog-hyperv 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +10 -0
- data/.travis.yml +11 -0
- data/Gemfile +4 -0
- data/README.md +53 -0
- data/Rakefile +10 -0
- data/fog-hyperv.gemspec +28 -0
- data/lib/fog/bin/hyperv.rb +28 -0
- data/lib/fog/collection.rb +114 -0
- data/lib/fog/hyperv.rb +123 -0
- data/lib/fog/hyperv/compute.rb +327 -0
- data/lib/fog/hyperv/fog_extensions/enum.rb +51 -0
- data/lib/fog/hyperv/models/compute/bios.rb +52 -0
- data/lib/fog/hyperv/models/compute/cluster.rb +37 -0
- data/lib/fog/hyperv/models/compute/clusters.rb +11 -0
- data/lib/fog/hyperv/models/compute/com_port.rb +22 -0
- data/lib/fog/hyperv/models/compute/dvd_drive.rb +62 -0
- data/lib/fog/hyperv/models/compute/dvd_drives.rb +11 -0
- data/lib/fog/hyperv/models/compute/firmware.rb +52 -0
- data/lib/fog/hyperv/models/compute/floppy_drive.rb +48 -0
- data/lib/fog/hyperv/models/compute/floppy_drives.rb +11 -0
- data/lib/fog/hyperv/models/compute/hard_drive.rb +110 -0
- data/lib/fog/hyperv/models/compute/hard_drives.rb +11 -0
- data/lib/fog/hyperv/models/compute/host.rb +20 -0
- data/lib/fog/hyperv/models/compute/hosts.rb +15 -0
- data/lib/fog/hyperv/models/compute/network_adapter.rb +142 -0
- data/lib/fog/hyperv/models/compute/network_adapters.rb +19 -0
- data/lib/fog/hyperv/models/compute/server.rb +192 -0
- data/lib/fog/hyperv/models/compute/servers.rb +30 -0
- data/lib/fog/hyperv/models/compute/switch.rb +58 -0
- data/lib/fog/hyperv/models/compute/switches.rb +15 -0
- data/lib/fog/hyperv/models/compute/vhd.rb +100 -0
- data/lib/fog/hyperv/models/compute/vhds.rb +16 -0
- data/lib/fog/hyperv/requests/compute/add_vm_hard_disk_drive.rb +12 -0
- data/lib/fog/hyperv/requests/compute/add_vm_network_adapter.rb +12 -0
- data/lib/fog/hyperv/requests/compute/connect_vm_network_adapter.rb +12 -0
- data/lib/fog/hyperv/requests/compute/disconnect_vm_network_adapter.rb +12 -0
- data/lib/fog/hyperv/requests/compute/get_cluster.rb +11 -0
- data/lib/fog/hyperv/requests/compute/get_cluster_node.rb +19 -0
- data/lib/fog/hyperv/requests/compute/get_vhd.rb +34 -0
- data/lib/fog/hyperv/requests/compute/get_vm.rb +20 -0
- data/lib/fog/hyperv/requests/compute/get_vm_bios.rb +21 -0
- data/lib/fog/hyperv/requests/compute/get_vm_dvd_drive.rb +20 -0
- data/lib/fog/hyperv/requests/compute/get_vm_firmware.rb +21 -0
- data/lib/fog/hyperv/requests/compute/get_vm_floppy_disk_drive.rb +20 -0
- data/lib/fog/hyperv/requests/compute/get_vm_group.rb +23 -0
- data/lib/fog/hyperv/requests/compute/get_vm_hard_disk_drive.rb +20 -0
- data/lib/fog/hyperv/requests/compute/get_vm_host.rb +12 -0
- data/lib/fog/hyperv/requests/compute/get_vm_host_cluster.rb +25 -0
- data/lib/fog/hyperv/requests/compute/get_vm_network_adapter.rb +27 -0
- data/lib/fog/hyperv/requests/compute/get_vm_switch.rb +27 -0
- data/lib/fog/hyperv/requests/compute/mock_files/get_cluster.json +1 -0
- data/lib/fog/hyperv/requests/compute/mock_files/get_cluster_node.json +1 -0
- data/lib/fog/hyperv/requests/compute/mock_files/get_vhd.json +1 -0
- data/lib/fog/hyperv/requests/compute/mock_files/get_vm.json +1 -0
- data/lib/fog/hyperv/requests/compute/mock_files/get_vm_bios.json +1 -0
- data/lib/fog/hyperv/requests/compute/mock_files/get_vm_dvd_drive.json +1 -0
- data/lib/fog/hyperv/requests/compute/mock_files/get_vm_firmware.json +1 -0
- data/lib/fog/hyperv/requests/compute/mock_files/get_vm_floppy_disk_drive.json +1 -0
- data/lib/fog/hyperv/requests/compute/mock_files/get_vm_hard_disk_drive.json +1 -0
- data/lib/fog/hyperv/requests/compute/mock_files/get_vm_host.json +1 -0
- data/lib/fog/hyperv/requests/compute/mock_files/get_vm_network_adapter.json +1 -0
- data/lib/fog/hyperv/requests/compute/mock_files/get_vm_switch.json +1 -0
- data/lib/fog/hyperv/requests/compute/new_vhd.rb +12 -0
- data/lib/fog/hyperv/requests/compute/new_vm.rb +15 -0
- data/lib/fog/hyperv/requests/compute/new_vm_switch.rb +13 -0
- data/lib/fog/hyperv/requests/compute/remove_item.rb +13 -0
- data/lib/fog/hyperv/requests/compute/remove_vm.rb +15 -0
- data/lib/fog/hyperv/requests/compute/remove_vm_hard_disk_drive.rb +12 -0
- data/lib/fog/hyperv/requests/compute/remove_vm_network_adapter.rb +12 -0
- data/lib/fog/hyperv/requests/compute/restart_vm.rb +15 -0
- data/lib/fog/hyperv/requests/compute/set_vm.rb +12 -0
- data/lib/fog/hyperv/requests/compute/set_vm_bios.rb +13 -0
- data/lib/fog/hyperv/requests/compute/set_vm_dvd_drive.rb +12 -0
- data/lib/fog/hyperv/requests/compute/set_vm_firmware.rb +13 -0
- data/lib/fog/hyperv/requests/compute/set_vm_hard_disk_drive.rb +12 -0
- data/lib/fog/hyperv/requests/compute/set_vm_network_adapter.rb +12 -0
- data/lib/fog/hyperv/requests/compute/set_vm_switch.rb +13 -0
- data/lib/fog/hyperv/requests/compute/start_vm.rb +15 -0
- data/lib/fog/hyperv/requests/compute/stop_vm.rb +15 -0
- data/lib/fog/hyperv/version.rb +5 -0
- data/lib/fog/model.rb +64 -0
- data/test/fog/hyperv_test.rb +7 -0
- data/test/test_helper.rb +4 -0
- metadata +199 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA1:
+  metadata.gz: 4f4fc7f6bb6e4beb182866dc8c274d15c25dbbdd
+  data.tar.gz: da491628457575effecb17779afa392f0d02276b
+SHA512:
+  metadata.gz: 9bcc6fa50ef5d9d3ab0eef2fdb56937090186df326016c93e36d6a606defb257d52f3e66df9ded5512eddcebd996cf118fd84fb50437ea084dde3ec91b1270c7
+  data.tar.gz: 003df173d2ab645056e6eeac5feea8b6cc987fabd1b80d65867684f395f84f36799e645da1b76c8286c7065510cdaae6b5c6aba99c7d45721544e72656deceeb
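
These are the digests of the two archives inside the published `.gem` file. A minimal sketch of checking them locally, assuming the gem has been unpacked (for example with `tar xf fog-hyperv-0.0.1.gem`) so that `metadata.gz` and `data.tar.gz` sit next to this `checksums.yaml`; the file locations are assumptions, not part of the gem:

```ruby
# Hypothetical verification script for the checksums above.
require 'digest'
require 'yaml'

sums = YAML.safe_load(File.read('checksums.yaml'))
%w[metadata.gz data.tar.gz].each do |entry|
  actual = Digest::SHA512.hexdigest(File.binread(entry))
  puts "#{entry}: #{actual == sums['SHA512'][entry] ? 'OK' : 'MISMATCH'}"
end
```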
data/.gitignore
ADDED
data/.travis.yml
ADDED
data/Gemfile
ADDED
data/README.md
ADDED
@@ -0,0 +1,53 @@
+# Fog Hyper-V
+
+[](https://travis-ci.org/ace13/fog-hyperv)
+
+Manage your Hyper-V instance with the help of the Fog cloud service abstractions.
+
+## Installation
+
+Add this line to your application's Gemfile:
+
+```ruby
+gem 'fog-hyperv'
+```
+
+And then execute:
+
+    $ bundle
+
+Or install it yourself as:
+
+    $ gem install fog-hyperv
+
+## Usage
+
+To remotely manage your Hyper-V instance;
+
+```ruby
+require 'fog/hyperv'
+
+compute = Fog::Compute.new(
+  provider: :hyperv,
+  hyperv_host: 'hyperv.example.com',
+  hyperv_username: 'domain\\user',
+  hyperv_password: 'password'
+)
+
+compute.servers.all
+#=> [<Fog::Compute::Hyperv::Server
+#=>   id='',
+#=>   name='example',
+#=>   computer_name='HYPERV',
+#=>   dynamic_memory_enabled=false,
+#=>   ...
+```
+
+## Development
+
+After checking out the repo, run `bundle install` to install dependencies. Then, run `rake test` to run the tests. You can also run `bundle exec irb` for an interactive prompt that will allow you to experiment.
+
+## Contributing
+
+Bug reports and pull requests are welcome on GitHub at https://github.com/ace13/fog-hyperv.
+
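
Continuing the README's usage example above, a hedged sketch of fetching and starting a single VM. The filter key and the `#start` helper are assumptions based on common fog idioms and on the `servers` collection and `start_vm` request declared later in this diff; they are not documented in the README itself:

```ruby
# Illustrative only; `compute` is the connection built in the README example.
server = compute.servers.get(name: 'example') # Collection#get takes a filter hash (see lib/fog/collection.rb)
server&.start                                 # assumed model helper wrapping the start_vm request
```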
data/Rakefile
ADDED
data/fog-hyperv.gemspec
ADDED
@@ -0,0 +1,28 @@
+# coding: utf-8
+lib = File.expand_path('../lib', __FILE__)
+$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+require 'fog/hyperv/version'
+
+Gem::Specification.new do |spec|
+  spec.name = 'fog-hyperv'
+  spec.version = Fog::Hyperv::VERSION
+  spec.authors = ['Alexander Olofsson']
+  spec.email = ['alexander.olofsson@liu.se']
+
+  spec.summary = 'Module for the `fog` gem to support Microsoft Hyper-V.'
+  spec.description = 'This library wraps Hyper-V in the `fog` concepts.'
+  spec.homepage = 'https://github.com/ace13/fog-hyperv'
+  spec.license = 'MIT'
+
+  spec.files = `git ls-files -z`.split("\x0")
+  spec.test_files = spec.files.grep(%r{^test\/})
+
+  spec.require_paths = ['lib']
+
+  spec.add_runtime_dependency 'fog-core', '~> 1.44'
+  spec.add_runtime_dependency 'fog-json', '~> 1.0'
+  spec.add_runtime_dependency 'winrm', '~> 2.2'
+
+  spec.add_development_dependency 'rake', '~> 10.0'
+  spec.add_development_dependency 'minitest', '~> 5.0'
+end
data/lib/fog/bin/hyperv.rb
ADDED
@@ -0,0 +1,28 @@
+class Hyperv < Fog::Bin
+  class << self
+    def class_for(key)
+      case key
+      when :compute
+        Fog::Compute::Hyperv
+      else
+        raise ArgumentError, "Unsupported #{self} service: #{key}"
+      end
+    end
+
+    def [](service)
+      @@connections ||= Hash.new do |h, k|
+        h[k] = case key
+               when :compute
+                 Fog::Compute.new(provider: 'Hyperv')
+               else
+                 raise ArgumentError, "Unrecognized service: #{key.inspect}"
+               end
+      end
+      @@connections[service]
+    end
+
+    def services
+      Fog::Hyperv.services
+    end
+  end
+end
data/lib/fog/collection.rb
ADDED
@@ -0,0 +1,114 @@
+require 'fog/core/collection'
+
+module Fog
+  module Hyperv
+    class Collection < Fog::Collection
+      def self.get_method(method = nil)
+        @get_method ||= method
+      end
+
+      def self.requires?
+        @requires ||= []
+      end
+
+      def search_attributes
+        attributes.dup.merge(
+          _return_fields: model.attributes - model.lazy_attributes,
+          _json_depth: 1
+        )
+      end
+
+      def all(filters = {})
+        requires(*self.class.requires?)
+        data = service.send(method, search_attributes.merge(filters))
+        data = [] unless data
+
+        load [data].flatten
+      end
+
+      def get(filters = {})
+        data = self.all(filters).first
+        data if data
+      rescue Fog::Hyperv::Errors::PSError => err
+        raise Fog::Errors::NotFound, err if err.message =~ /Hyper-V was unable to find|^No .* is found|/
+        raise err
+      end
+
+      def new(options = {})
+        requires(*self.class.requires?)
+        super(search_attributes.merge(options))
+      end
+
+      def create(attributes = {})
+        object = new(attributes)
+        object.save
+        object
+      end
+
+      private
+
+      def method
+        self.class.get_method
+      end
+    end
+
+    class ComputerCollection < Fog::Hyperv::Collection
+      def self.requires_computer
+        requires? << :computer
+      end
+
+      attr_accessor :computer
+
+      def search_attributes
+        attrs = super
+        attrs[:computer_name] ||= computer.name if computer
+        attrs
+      end
+    end
+
+    class VMCollection < Fog::Hyperv::ComputerCollection
+      def self.match_on(attr = nil)
+        @match_on ||= attr
+      end
+
+      def self.requires_vm
+        requires? << :vm
+      end
+
+      attr_accessor :vm
+
+      def search_attributes
+        attrs = super
+        if vm
+          attrs[:computer_name] ||= vm.computer_name
+          attrs[match] = vm.send(match)
+        end
+        attrs
+      end
+
+      def create(attributes = {})
+        object = new(attributes)
+        # Ensure both ID and Name are populated, regardless of `match_on`
+        object.vm_id ||= vm.id if vm && object.respond_to?(:vm_id)
+        object.vm_name ||= vm.name if vm && object.respond_to?(:vm_name)
+        object.save
+        object
+      end
+
+      def inspect
+        # To avoid recursing on VM
+        to_s
+      end
+
+      private
+
+      def logger
+        service.logger
+      end
+
+      def match
+        self.class.match_on || :vm_name
+      end
+    end
+  end
+end
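
To make the class-level DSL above concrete, here is a hypothetical collection written against it. The class name and settings are invented for illustration; the gem's real collections (servers, vhds, dvd_drives, ...) live in files that are not expanded in this diff.

```ruby
# Hypothetical subclass showing how the DSL methods are meant to be called.
module Fog
  module Compute
    class Hyperv
      class ExampleDrives < Fog::Hyperv::VMCollection
        model Fog::Compute::Hyperv::HardDrive # fog-core: which model #all should load

        get_method :get_vm_hard_disk_drive    # service request used by #all
        requires_vm                           # #all/#new raise unless self.vm is set
        match_on :vm_name                     # attribute copied from the owning VM into the filters
      end
    end
  end
end
```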
data/lib/fog/hyperv.rb
ADDED
@@ -0,0 +1,123 @@
+require 'fog/core'
+
+module Fog
+  module Attributes
+    autoload :Enum, File.expand_path('../hyperv/fog_extensions/enum.rb', __FILE__)
+  end
+
+  module Compute
+    autoload :Hyperv, File.expand_path('../hyperv/compute', __FILE__)
+  end
+
+  module Hyperv
+    extend Fog::Provider
+
+    module Errors
+      class ServiceError < Fog::Errors::Error; end
+      class VersionError < ServiceError
+        attr_reader :version, :required_version, :function
+
+        def initialize(required_version, version, function)
+          @function = function
+          @required_version = required_version
+          @version = version
+
+          super "#{function} requires at least Hyper-V v#{required_version}, you have v#{version}"
+        end
+      end
+
+      class PSError < ServiceError
+        attr_reader :stdout, :stderr, :exitcode, :info, :message
+
+        def initialize(output, info)
+          @stdout = output.stdout
+          @stderr = output.stderr
+          @exitcode = output.exitcode
+          @info = info
+          @message = @stderr.split("\n").first
+          super @message
+        end
+
+        def to_s
+          ret = [super]
+          ret << info unless info.nil? || info.empty?
+          ret.join "\n"
+        end
+      end
+    end
+
+    autoload :Collection, File.expand_path('../collection', __FILE__)
+    autoload :Model, File.expand_path('../model', __FILE__)
+    autoload :ModelExtends, File.expand_path('../model', __FILE__)
+    autoload :ModelIncludes, File.expand_path('../model', __FILE__)
+    autoload :VMCollection, File.expand_path('../collection', __FILE__)
+
+    service(:compute, 'Compute')
+
+    def self.shell_quoted(data, always = false)
+      case data
+      when String
+        if !data.start_with?('$') && (data =~ /(^$)|\s/ || always)
+          data.gsub(/`/, '``')
+              .gsub(/\0/, '`0')
+              .gsub(/\n/, '`n')
+              .gsub(/\r/, '`r')
+              .inspect
+              .gsub(/\\\\/, '\\')
+        else
+          data
+        end
+      when Array
+        '@(' + data.map { |e| shell_quoted(e, true) }.join(', ') + ')'
+      when FalseClass
+        '$false'
+      when TrueClass
+        '$true'
+      else
+        shell_quoted data.to_s
+      end
+    end
+
+    def self.camelize(data)
+      case data
+      when Array
+        data.collect { |d| camelize(d) }
+      when Hash
+        data.each_with_object({}) do |(k, v), hash|
+          value = v
+          value = camelize(v) if v.is_a?(Hash) || (v.is_a?(Array) && v.all? { |h| h.is_a?(Hash) })
+          hash[camelize(k)] = value
+        end
+      when Symbol
+        camelize(data.to_s).to_sym
+      when String
+        data.split('_').collect(&:capitalize).join
+      else
+        data
+      end
+    end
+
+    def self.uncamelize(data)
+      case data
+      when Array
+        data.collect { |d| uncamelize(d) }
+      when Hash
+        data.each_with_object({}) do |(k, v), hash|
+          value = v
+          value = uncamelize(v) if v.is_a?(Hash) || (v.is_a?(Array) && v.all? { |h| h.is_a?(Hash) })
+          hash[uncamelize(k)] = value
+        end
+      when Symbol
+        uncamelize(data.to_s).to_sym
+      when String
+        data.to_s
+            .gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2')
+            .gsub(/([a-z\d])([A-Z])/, '\1_\2')
+            .tr('-', '_')
+            .downcase.to_sym
+      else
+        data
+      end
+    end
+  end
+end
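
Based on the helper definitions just shown, this is roughly how the conversion functions behave (illustrative IRB-style results, not captured output):

```ruby
Fog::Hyperv.camelize(:computer_name)           #=> :ComputerName
Fog::Hyperv.camelize(generation: 2)            #=> {:Generation=>2}
Fog::Hyperv.uncamelize('DynamicMemoryEnabled') #=> :dynamic_memory_enabled
Fog::Hyperv.shell_quoted(true)                 #=> "$true"
Fog::Hyperv.shell_quoted(%w[a b])              #=> '@("a", "b")'
```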
data/lib/fog/hyperv/compute.rb
ADDED
@@ -0,0 +1,327 @@
+module Fog
+  module Compute
+    class Hyperv < Fog::Service
+      STATUS_ENUM_VALUES = [
+        :Unknown,             # 0
+        :Other,               # 1
+        :Ok,                  # 2
+        :Degraded,            # 3
+        :Stressed,            # 4
+        :PredictiveFailure,   # 5
+        :Error,               # 6
+        :NonRecoverableError, # 7
+        :Starting,            # 8
+        :Stopping,            # 9
+        :Stopped,             # 10
+        :InService,           # 11
+        :NoContact,           # 12
+        :LostCommunication,   # 13
+        :Aborted,             # 14
+        :Dormant,             # 15
+        :SupportingEntity,    # 16
+        :Completed,           # 17
+        :PowerMode            # 18
+        # :ProtocolVersion    # 32775
+      ].freeze
+
+      requires :hyperv_username, :hyperv_password
+      recognizes :hyperv_endpoint, :hyperv_host,
+                 :hyperv_transport,
+                 :hyperv_debug
+      secrets :hyperv_password, :connection
+
+      model_path 'fog/hyperv/models/compute'
+      model :bios
+      model :cluster
+      collection :clusters
+      model :com_port
+      model :dvd_drive
+      collection :dvd_drives
+      model :firmware
+      model :floppy_drive
+      collection :floppy_drives
+      model :hard_drive
+      collection :hard_drives
+      model :host
+      collection :hosts
+      model :network_adapter
+      collection :network_adapters
+      model :server
+      collection :servers
+      model :switch
+      collection :switches
+      model :vhd
+      collection :vhds
+
+      request_path 'fog/hyperv/requests/compute'
+      request :add_vm_hard_disk_drive
+      request :add_vm_network_adapter
+      request :connect_vm_network_adapter
+      request :disconnect_vm_network_adapter
+      request :get_cluster
+      request :get_cluster_node
+      request :get_vhd
+      request :get_vm
+      request :get_vm_bios
+      request :get_vm_dvd_drive
+      request :get_vm_firmware
+      request :get_vm_floppy_disk_drive
+      request :get_vm_group
+      request :get_vm_hard_disk_drive
+      request :get_vm_host
+      request :get_vm_host_cluster
+      request :get_vm_network_adapter
+      request :get_vm_switch
+      request :new_vhd
+      request :new_vm
+      request :new_vm_switch
+      request :remove_item
+      request :remove_vm
+      request :remove_vm_hard_disk_drive
+      request :remove_vm_network_adapter
+      request :restart_vm
+      request :set_vm
+      request :set_vm_bios
+      request :set_vm_dvd_drive
+      request :set_vm_hard_disk_drive
+      request :set_vm_firmware
+      request :set_vm_network_adapter
+      request :set_vm_switch
+      request :start_vm
+      request :stop_vm
+
+      class Shared
+        def version
+          '0.0'
+        end
+
+        protected
+
+        def requires(opts, *args)
+          missing = args - opts.keys
+          return if missing.none?
+
+          method = caller[0][/`.*'/][1..-2]
+          if missing.length == 1
+            raise(ArgumentError, "#{missing.first} is required for #{method}")
+          elsif missing.any?
+            raise(ArgumentError, "#{missing[0...-1].join(', ')}, and #{missing[-1]} are required for #{method}")
+          end
+        end
+
+        def requires_one(opts, *args)
+          missing = args - opts.keys
+          return if missing.length < args.length
+
+          method = caller[0][/`.*'/][1..-2]
+          raise(ArgumentError, "#{missing[0...-1].join(', ')}, or #{missing[-1]} are required for #{method}")
+        end
+
+        def requires_version(required_version)
+          method = caller[0][/`.*'/][1..-2].split('_')
+          method = method[0].capitalize + "-" + Fog::Hyperv.camelize(method[1..-1].join('_'))
+
+          raise Fog::Hyperv::Errors::VersionError.new(required_version, version, method) \
+            unless Gem::Version.new(version) >= Gem::Version.new(required_version)
+        end
+
+      end
+
+      class Real < Shared
+        attr_reader :logger
+
+        def initialize(options = {})
+          # require 'ostruct'
+          require 'fog/json'
+          require 'logging'
+
+          @hyperv_endpoint  = options[:hyperv_endpoint]
+          @hyperv_endpoint  = "http://#{options[:hyperv_host]}:5985/wsman" if !@hyperv_endpoint && options[:hyperv_host]
+          @hyperv_username  = options[:hyperv_username]
+          @hyperv_password  = options[:hyperv_password]
+          @hyperv_transport = options[:hyperv_transport] || :negotiate
+          @logger = Logging.logger['hyper-v']
+          if options[:hyperv_debug]
+            logger.level = :debug
+            logger.add_appenders Logging.appenders.stdout
+          end
+
+          connect
+        end
+
+        def local?
+          false # @hyperv_endpoint.nil?
+        end
+
+        def valid?
+          if local?
+            run_shell('Get-VMHost', _return_fields: :name) && true
+          else
+            run_wql('SELECT Name FROM Msvm_ComputerSystem WHERE Caption = "Hosting Computer System"') && true
+          end
+        rescue Fog::Hyperv::Errors::ServiceError
+          false
+        end
+
+        def version
+          @version ||= run_wql('SELECT Version FROM Win32_OperatingSystem', _namespace: 'root/cimv2/*')[:xml_fragment].first[:version] rescue \
+            run_shell("$VMMS = if ([environment]::Is64BitProcess) { \"$($env:SystemRoot)\\System32\\vmms.exe\" } else { \"$($env:SystemRoot)\\Sysnative\\vmms.exe\" }\n(Get-Item $VMMS).VersionInfo.ProductVersion", _skip_json: true).stdout.strip
+        end
+
+        private
+
+        def hash_to_optmap(options = {})
+          args = options.reject { |k, v| v.nil? || v.is_a?(FalseClass) || k.to_s.start_with?('_') }.map do |k, v|
+            "'#{k}'=#{Fog::Hyperv.shell_quoted(v, true)}"
+          end
+          "@{#{args.join ';'}}"
+        end
+
+        def run_shell_with_vm(command, vm_options, options = {})
+          # $VM = Get-VM @vm_options
+          # $Result = <command> @options
+          # $Result | select <return_fields> | ConvertTo-Json
+        end
+
+        def run_wql(query, options = {})
+          skip_camelize = options.delete :_skip_camelize
+          namespace = options.delete(:_namespace) || 'root/virtualization/v2/*'
+
+          options = Fog::Hyperv.camelize(options) unless skip_camelize
+          args = options.reject { |k, v| v.nil? || v.is_a?(FalseClass) || k.to_s.start_with?('_') }.map do |k, v|
+            "#{k} = #{(v.is_a?(String) || v.to_s =~ /\s/) && v.inspect || v}"
+          end
+
+          query = "#{query}#{" WHERE #{args.join ' AND '}" unless args.none?}"
+          data = \
+            if local?
+              # TODO
+            else
+              logger.debug "WQL; #{namespace} >>> #{query}"
+              @connection.run_wql(query, namespace)
+            end
+
+          logger.debug "WQL; <<< #{data}"
+          data
+        end
+
+        def run_shell(command, options = {})
+          return_fields = options.delete :_return_fields
+          return_fields = "| select #{Fog::Hyperv.camelize([return_fields].flatten).join ','}" if return_fields
+          suffix = options.delete :_suffix
+          json_depth = options.delete :_json_depth
+          skip_json = options.delete :_skip_json
+          skip_camelize = options.delete :_skip_camelize
+          skip_uncamelize = options.delete :_skip_uncamelize
+          options = Fog::Hyperv.camelize(options) unless skip_camelize
+
+          # commandline = "$Args = #{hash_to_optmap options}\n$Ret = #{command} @Args#{"\n$Ret #{return_fields} | ConvertTo-Json -Compress #{"-Depth #{json_depth}" if json_depth}" unless skip_json}"
+          # puts " > #{commandline.split("\n").join "\n > "}" if @hyperv_debug
+          args = options.reject { |k, v| v.nil? || v.is_a?(FalseClass) || k.to_s.start_with?('_') || (v.is_a?(String) && v.empty?) }.map do |k, v|
+            "-#{k} #{Fog::Hyperv.shell_quoted v unless v.is_a?(TrueClass)}"
+          end
+          command_args = "#{command} #{args.join ' ' unless args.empty?}"
+          commandline = "#{command_args} #{suffix} #{return_fields} #{"| ConvertTo-Json -Compress #{"-Depth #{json_depth}" if json_depth}" unless skip_json}"
+          logger.debug "PS; >>> #{commandline}"
+
+          out = nil # OpenStruct.new stdout: '',
+                    #                stderr: '',
+                    #                exitcode: -1
+
+          if local?
+            commanddata = [
+              'powershell',
+              '-NoLogo',
+              '-NoProfile',
+              '-NonInteractive',
+              commandline
+            ]
+            begin
+              out.stdout, out.stderr, out.exitcode = Open3.capture3(*commanddata)
+              out.exitcode = out.exitcode.exitstatus
+            rescue StandardError => ex
+              out.stderr = ex.inspect
+              out.exitcode = -1
+            end
+          else
+            @connection.shell(:powershell) do |shell|
+              out = shell.run(commandline)
+            end
+          end
+
+          # TODO: Map error codes in some manner?
+          raise Fog::Hyperv::Errors::ServiceError, "Failed to execute #{commandline}" unless out
+          raise Fog::Hyperv::Errors::PSError.new(out, "When executing #{command_args}") unless out.exitcode.zero?
+
+          logger.debug "PS; <<< OUT=[#{out.stdout.inspect}] ERR=[#{out.stderr.inspect}] EXIT=[#{out.exitcode}]"
+
+          if skip_json
+            out
+          else
+            return nil if out.stdout.empty?
+            json = Fog::JSON.decode(out.stdout)
+            json = Fog::Hyperv.uncamelize(json) unless skip_uncamelize
+            json
+          end
+        end
+
+        def connect
+          # return require 'open3' if local?
+
+          require 'winrm'
+          @connection = WinRM::Connection.new(
+            endpoint: @hyperv_endpoint,
+            user: @hyperv_username,
+            password: @hyperv_password,
+            transport: @hyperv_transport
+          )
+          Logging.logger['WinRM::HTTP::HttpNegotiate'].level = :error
+          @connection.logger.level = :error
+        end
+      end
+
+      class Mock < Shared
+        def initialize(_options = {})
+          require 'fog/json'
+        end
+
+        def method_missing(method, *args)
+          if requests.find { |_, k| k == method }
+            handle_mock_response((args.first || {}).merge(_method: method))
+          else
+            super
+          end
+        end
+
+        def respond_to_missing?(method, include_private = false)
+          requests.find { |_, k| k == method } || super
+        end
+
+        def self.method_defined?(method)
+          Fog::Compute::Hyperv.requests.find { |_, k| k == method } || super
+        end
+
+        private
+
+        def handle_mock_response(args = {})
+          method = args.delete :_method
+          method ||= caller[0][/`.*'/][1..-2]
+          method ||= caller[1][/`.*'/][1..-2]
+
+          path = File.join File.dirname(__FILE__), 'requests', 'compute', 'mock_files', "#{method}.json"
+          Fog::Mock.not_implemented unless File.exist? path
+          raise Fog::Errors::MockNotImplemented, 'Not implementing skipping of json' if args[:_skip_json]
+          raise Fog::Errors::MockNotImplemented, 'Not implementing skipping of uncamelize' if args[:_skip_uncamelize]
+
+          ret = Fog::JSON.decode(open(path).read)
+          ret = Fog::Hyperv.uncamelize(ret)
+
+          ret = ret.map do |obj|
+            obj.select { |k, _| args[:_return_fields].include? k }
+          end if args[:_return_fields]
+          ret
+        end
+      end
+    end
+  end
+end
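
As a rough illustration of what `Real#run_shell` sends over WinRM: for a call such as `run_shell('Get-VM', name: 'example', _return_fields: [:id, :name], _json_depth: 1)` (the `Get-VM` cmdlet name is an assumption here; the request files that pick the cmdlets are not expanded in this diff), the composed pipeline is approximately:

```ruby
# Sketch, not captured output: option keys are camelized, non-boolean values pass
# through Fog::Hyperv.shell_quoted, and the result is JSON-encoded on the remote side.
commandline = 'Get-VM -Name example | select Id,Name | ConvertTo-Json -Compress -Depth 1'
# The JSON on stdout is decoded with Fog::JSON.decode and keys are snake_cased via
# Fog::Hyperv.uncamelize, so callers get hashes like { id: '...', name: 'example' }.
```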